| "FURB152", # math-constant | "FURB152", # math-constant | ||||
| "UP007", # non-pep604-annotation | "UP007", # non-pep604-annotation | ||||
| "UP032", # f-string | "UP032", # f-string | ||||
| "UP045", # non-pep604-annotation-optional | |||||
| "B005", # strip-with-multi-characters | "B005", # strip-with-multi-characters | ||||
| "B006", # mutable-argument-default | "B006", # mutable-argument-default | ||||
| "B007", # unused-loop-control-variable | "B007", # unused-loop-control-variable | ||||
| "B026", # star-arg-unpacking-after-keyword-arg | "B026", # star-arg-unpacking-after-keyword-arg | ||||
| "B903", # class-as-data-structure | |||||
| "B904", # raise-without-from-inside-except | "B904", # raise-without-from-inside-except | ||||
| "B905", # zip-without-explicit-strict | "B905", # zip-without-explicit-strict | ||||
| "N806", # non-lowercase-variable-in-function | "N806", # non-lowercase-variable-in-function |
  )
  CONSOLE_WEB_URL: str = Field(
- description="Base URL for the console web interface," "used for frontend references and CORS configuration",
+ description="Base URL for the console web interface,used for frontend references and CORS configuration",
  default="",
  )
  """
  HOSTED_FETCH_APP_TEMPLATES_MODE: str = Field(
- description="Mode for fetching app templates: remote, db, or builtin" " default to remote,",
+ description="Mode for fetching app templates: remote, db, or builtin default to remote,",
  default="remote",
  )
  app = App.query.filter(App.id == args["app_id"]).first()
  if not app:
- raise NotFound(f'App \'{args["app_id"]}\' is not found')
+ raise NotFound(f"App '{args['app_id']}' is not found")
  site = app.site
  if not site:
  )
  except LLMBadRequestError:
  raise ProviderNotInitializeError(
- "No Embedding Model available. Please configure a valid provider " "in the Settings -> Model Provider."
+ "No Embedding Model available. Please configure a valid provider in the Settings -> Model Provider."
  )
  except ProviderTokenNotInitError as ex:
  raise ProviderNotInitializeError(ex.description)
  )
  except InvokeAuthorizationError:
  raise ProviderNotInitializeError(
- "No Embedding Model available. Please configure a valid provider "
- "in the Settings -> Model Provider."
+ "No Embedding Model available. Please configure a valid provider in the Settings -> Model Provider."
  )
  except ProviderTokenNotInitError as ex:
  raise ProviderNotInitializeError(ex.description)
  return response.model_dump(), 200
  except LLMBadRequestError:
  raise ProviderNotInitializeError(
- "No Embedding Model available. Please configure a valid provider "
- "in the Settings -> Model Provider."
+ "No Embedding Model available. Please configure a valid provider in the Settings -> Model Provider."
  )
  except ProviderTokenNotInitError as ex:
  raise ProviderNotInitializeError(ex.description)
  )
  except LLMBadRequestError:
  raise ProviderNotInitializeError(
- "No Embedding Model available. Please configure a valid provider "
- "in the Settings -> Model Provider."
+ "No Embedding Model available. Please configure a valid provider in the Settings -> Model Provider."
  )
  except ProviderTokenNotInitError as ex:
  raise ProviderNotInitializeError(ex.description)
  )
  except LLMBadRequestError:
  raise ProviderNotInitializeError(
- "No Embedding Model available. Please configure a valid provider "
- "in the Settings -> Model Provider."
+ "No Embedding Model available. Please configure a valid provider in the Settings -> Model Provider."
  )
  except ProviderTokenNotInitError as ex:
  raise ProviderNotInitializeError(ex.description)
  )
  except LLMBadRequestError:
  raise ProviderNotInitializeError(
- "No Embedding Model available. Please configure a valid provider "
- "in the Settings -> Model Provider."
+ "No Embedding Model available. Please configure a valid provider in the Settings -> Model Provider."
  )
  except ProviderTokenNotInitError as ex:
  raise ProviderNotInitializeError(ex.description)
  )
  except LLMBadRequestError:
  raise ProviderNotInitializeError(
- "No Embedding Model available. Please configure a valid provider "
- "in the Settings -> Model Provider."
+ "No Embedding Model available. Please configure a valid provider in the Settings -> Model Provider."
  )
  except ProviderTokenNotInitError as ex:
  raise ProviderNotInitializeError(ex.description)
  )
  except LLMBadRequestError:
  raise ProviderNotInitializeError(
- "No Embedding Model available. Please configure a valid provider "
- "in the Settings -> Model Provider."
+ "No Embedding Model available. Please configure a valid provider in the Settings -> Model Provider."
  )
  except ProviderTokenNotInitError as ex:
  raise ProviderNotInitializeError(ex.description)
  )
  except LLMBadRequestError:
  raise ProviderNotInitializeError(
- "No Embedding Model available. Please configure a valid provider "
- "in the Settings -> Model Provider."
+ "No Embedding Model available. Please configure a valid provider in the Settings -> Model Provider."
  )
  except ProviderTokenNotInitError as ex:
  raise ProviderNotInitializeError(ex.description)
  )
  except LLMBadRequestError:
  raise ProviderNotInitializeError(
- "No Embedding Model available. Please configure a valid provider "
- "in the Settings -> Model Provider."
+ "No Embedding Model available. Please configure a valid provider in the Settings -> Model Provider."
  )
  except ProviderTokenNotInitError as ex:
  raise ProviderNotInitializeError(ex.description)
  else:
  if isinstance(data, DeclarativeMeta) or hasattr(data, "_sa_instance_state"):
  raise TypeError(
- "Critical Error: Passing SQLAlchemy Model instances "
- "that cause thread safety issues is not allowed."
+ "Critical Error: Passing SQLAlchemy Model instances that cause thread safety issues is not allowed."
  )
  # get extension
  if "." in message_file.url:
- extension = f'.{message_file.url.split(".")[-1]}'
+ extension = f".{message_file.url.split('.')[-1]}"
  if len(extension) > 10:
  extension = ".bin"
  else:
  if not api_based_extension:
  raise ValueError(
- "[External data tool] API query failed, variable: {}, "
- "error: api_based_extension_id is invalid".format(self.variable)
+ "[External data tool] API query failed, variable: {}, error: api_based_extension_id is invalid".format(
+ self.variable
+ )
  )
  # decrypt api_key
  def markdown(self) -> str:
  url = self.generate_url()
  if self.type == FileType.IMAGE:
- text = f'![{self.filename or ""}]({url})'
+ text = f"![{self.filename or ''}]({url})"
  else:
  text = f"[{self.filename or url}]({url})"
  ai_model_entity = self._get_ai_model_entity(base_model_name=base_model_name, model=model)
  if not ai_model_entity:
- raise CredentialsValidateFailedError(f'Base Model Name {credentials["base_model_name"]} is invalid')
+ raise CredentialsValidateFailedError(f"Base Model Name {credentials['base_model_name']} is invalid")
  try:
  client = AzureOpenAI(**self._to_credential_kwargs(credentials))
  raise CredentialsValidateFailedError("Base Model Name is required")
  if not self._get_ai_model_entity(credentials["base_model_name"], model):
- raise CredentialsValidateFailedError(f'Base Model Name {credentials["base_model_name"]} is invalid')
+ raise CredentialsValidateFailedError(f"Base Model Name {credentials['base_model_name']} is invalid")
  try:
  credentials_kwargs = self._to_credential_kwargs(credentials)
  @staticmethod
  def _check_endpoint_url_model_repository_name(credentials: dict, model_name: str):
  try:
- url = f'{HUGGINGFACE_ENDPOINT_API}{credentials["huggingface_namespace"]}'
+ url = f"{HUGGINGFACE_ENDPOINT_API}{credentials['huggingface_namespace']}"
  headers = {
- "Authorization": f'Bearer {credentials["huggingfacehub_api_token"]}',
+ "Authorization": f"Bearer {credentials['huggingfacehub_api_token']}",
  "Content-Type": "application/json",
  }
  for index, response in enumerate(responses):
  if response.status_code not in {200, HTTPStatus.OK}:
  raise ServiceUnavailableError(
- f"Failed to invoke model {model}, status code: {response.status_code}, "
- f"message: {response.message}"
+ f"Failed to invoke model {model}, status code: {response.status_code}, message: {response.message}"
  )
  resp_finish_reason = response.output.choices[0].finish_reason
| elif credentials["completion_type"] == "completion": | elif credentials["completion_type"] == "completion": | ||||
| completion_type = LLMMode.COMPLETION.value | completion_type = LLMMode.COMPLETION.value | ||||
| else: | else: | ||||
| raise ValueError(f'completion_type {credentials["completion_type"]} is not supported') | |||||
| raise ValueError(f"completion_type {credentials['completion_type']} is not supported") | |||||
| entity = AIModelEntity( | entity = AIModelEntity( | ||||
| model=model, | model=model, |
  resp = response.json()
  if "error" in resp:
  if resp["error"] == "invalid_client":
- raise InvalidAPIKeyError(f'Invalid API key or secret key: {resp["error_description"]}')
+ raise InvalidAPIKeyError(f"Invalid API key or secret key: {resp['error_description']}")
  elif resp["error"] == "unknown_error":
- raise InternalServerError(f'Internal server error: {resp["error_description"]}')
+ raise InternalServerError(f"Internal server error: {resp['error_description']}")
  elif resp["error"] == "invalid_request":
- raise BadRequestError(f'Bad request: {resp["error_description"]}')
+ raise BadRequestError(f"Bad request: {resp['error_description']}")
  elif resp["error"] == "rate_limit_exceeded":
- raise RateLimitReachedError(f'Rate limit reached: {resp["error_description"]}')
+ raise RateLimitReachedError(f"Rate limit reached: {resp['error_description']}")
  else:
- raise Exception(f'Unknown error: {resp["error_description"]}')
+ raise Exception(f"Unknown error: {resp['error_description']}")
  return resp["access_token"]
  elif credentials["completion_type"] == "completion":
  completion_type = LLMMode.COMPLETION.value
  else:
- raise ValueError(f'completion_type {credentials["completion_type"]} is not supported')
+ raise ValueError(f"completion_type {credentials['completion_type']} is not supported")
  else:
  extra_args = XinferenceHelper.get_xinference_extra_parameter(
  server_url=credentials["server_url"],
  api_key = credentials.get("api_key") or "abc"
  client = OpenAI(
- base_url=f'{credentials["server_url"]}/v1',
+ base_url=f"{credentials['server_url']}/v1",
  api_key=api_key,
  max_retries=int(credentials.get("max_retries") or DEFAULT_MAX_RETRIES),
  timeout=int(credentials.get("invoke_timeout") or DEFAULT_INVOKE_TIMEOUT),
| "markdown": data.get("markdown"), | "markdown": data.get("markdown"), | ||||
| } | } | ||||
| else: | else: | ||||
| raise Exception(f'Failed to scrape URL. Error: {response_data["error"]}') | |||||
| raise Exception(f"Failed to scrape URL. Error: {response_data['error']}") | |||||
| elif response.status_code in {402, 409, 500}: | elif response.status_code in {402, 409, 500}: | ||||
| error_message = response.json().get("error", "Unknown error occurred") | error_message = response.json().get("error", "Unknown error occurred") |
  if not data_source_binding:
  raise Exception(
- f"No notion data source binding found for tenant {tenant_id} "
- f"and notion workspace {notion_workspace_id}"
+ f"No notion data source binding found for tenant {tenant_id} and notion workspace {notion_workspace_id}"
  )
  return cast(str, data_source_binding.access_token)
  response = response.json()
  if response.get("code") != 0:
- raise Exception(f'Failed to create task: {response.get("msg")}')
+ raise Exception(f"Failed to create task: {response.get('msg')}")
  return response.get("data", {}).get("id")
  elif model == "wenxin":
  response = response.json()
  if response.get("code") != 0:
- raise Exception(f'Failed to generate content: {response.get("msg")}')
+ raise Exception(f"Failed to generate content: {response.get('msg')}")
  return response.get("data", "")
  response = response.json()
  if response.get("code") != 0:
- raise Exception(f'Failed to generate ppt: {response.get("msg")}')
+ raise Exception(f"Failed to generate ppt: {response.get('msg')}")
  id = response.get("data", {}).get("id")
  cover_url = response.get("data", {}).get("cover_url")
  response = response.json()
  if response.get("code") != 0:
- raise Exception(f'Failed to generate ppt: {response.get("msg")}')
+ raise Exception(f"Failed to generate ppt: {response.get('msg')}")
  export_code = response.get("data")
  if not export_code:
  response = response.json()
  if response.get("code") != 0:
- raise Exception(f'Failed to generate ppt: {response.get("msg")}')
+ raise Exception(f"Failed to generate ppt: {response.get('msg')}")
  if response.get("msg") == "导出中":
  current_iteration += 1
  raise Exception(f"Failed to connect to aippt: {response.text}")
  response = response.json()
  if response.get("code") != 0:
- raise Exception(f'Failed to connect to aippt: {response.get("msg")}')
+ raise Exception(f"Failed to connect to aippt: {response.get('msg')}")
  token = response.get("data", {}).get("token")
  expire = response.get("data", {}).get("time_expire")
  if cls._style_cache[key]["expire"] < now:
  del cls._style_cache[key]
- key = f'{credentials["aippt_access_key"]}#@#{user_id}'
+ key = f"{credentials['aippt_access_key']}#@#{user_id}"
  if key in cls._style_cache:
  return cls._style_cache[key]["colors"], cls._style_cache[key]["styles"]
  response = response.json()
  if response.get("code") != 0:
- raise Exception(f'Failed to connect to aippt: {response.get("msg")}')
+ raise Exception(f"Failed to connect to aippt: {response.get('msg')}")
  colors = [
  {
- "id": f'id-{item.get("id")}',
+ "id": f"id-{item.get('id')}",
  "name": item.get("name"),
  "en_name": item.get("en_name", item.get("name")),
  }
  ]
  styles = [
  {
- "id": f'id-{item.get("id")}',
+ "id": f"id-{item.get('id')}",
  "name": item.get("title"),
  }
  for item in response.get("data", {}).get("suit_style") or []
  response = response.json()
  if response.get("code") != 0:
- raise Exception(f'Failed to connect to aippt: {response.get("msg")}')
+ raise Exception(f"Failed to connect to aippt: {response.get('msg')}")
  if len(response.get("data", {}).get("list") or []) > 0:
  return response.get("data", {}).get("list")[0].get("id")
  if async_mode:
  return self.create_text_message(
- f"Video generation started.\nInvocation ARN: {invocation_arn}\n"
- f"Video will be available at: {video_uri}"
+ f"Video generation started.\nInvocation ARN: {invocation_arn}\nVideo will be available at: {video_uri}"
  )
  return self._wait_for_completion(bedrock, s3_client, invocation_arn)
| if "trans_result" in result: | if "trans_result" in result: | ||||
| result_text = result["trans_result"][0]["dst"] | result_text = result["trans_result"][0]["dst"] | ||||
| else: | else: | ||||
| result_text = f'{result["error_code"]}: {result["error_msg"]}' | |||||
| result_text = f"{result['error_code']}: {result['error_msg']}" | |||||
| return self.create_text_message(str(result_text)) | return self.create_text_message(str(result_text)) | ||||
| except requests.RequestException as e: | except requests.RequestException as e: |
| result_text = "" | result_text = "" | ||||
| if result["error_code"] != 0: | if result["error_code"] != 0: | ||||
| result_text = f'{result["error_code"]}: {result["error_msg"]}' | |||||
| result_text = f"{result['error_code']}: {result['error_msg']}" | |||||
| else: | else: | ||||
| result_text = result["data"]["src"] | result_text = result["data"]["src"] | ||||
| result_text = self.mapping_result(description_language, result_text) | result_text = self.mapping_result(description_language, result_text) |
| if "trans_result" in result: | if "trans_result" in result: | ||||
| result_text = result["trans_result"][0]["dst"] | result_text = result["trans_result"][0]["dst"] | ||||
| else: | else: | ||||
| result_text = f'{result["error_code"]}: {result["error_msg"]}' | |||||
| result_text = f"{result['error_code']}: {result['error_msg']}" | |||||
| return self.create_text_message(str(result_text)) | return self.create_text_message(str(result_text)) | ||||
| except requests.RequestException as e: | except requests.RequestException as e: |
| headers = {"Ocp-Apim-Subscription-Key": subscription_key, "Accept-Language": accept_language} | headers = {"Ocp-Apim-Subscription-Key": subscription_key, "Accept-Language": accept_language} | ||||
| query = quote(query) | query = quote(query) | ||||
| server_url = f'{server_url}?q={query}&mkt={market_code}&count={limit}&responseFilter={",".join(filters)}' | |||||
| server_url = f"{server_url}?q={query}&mkt={market_code}&count={limit}&responseFilter={','.join(filters)}" | |||||
| response = get(server_url, headers=headers) | response = get(server_url, headers=headers) | ||||
| if response.status_code != 200: | if response.status_code != 200: | ||||
| results = [] | results = [] | ||||
| if search_results: | if search_results: | ||||
| for result in search_results: | for result in search_results: | ||||
| url = f': {result["url"]}' if "url" in result else "" | |||||
| results.append(self.create_text_message(text=f'{result["name"]}{url}')) | |||||
| url = f": {result['url']}" if "url" in result else "" | |||||
| results.append(self.create_text_message(text=f"{result['name']}{url}")) | |||||
| if entities: | if entities: | ||||
| for entity in entities: | for entity in entities: | ||||
| url = f': {entity["url"]}' if "url" in entity else "" | |||||
| results.append(self.create_text_message(text=f'{entity.get("name", "")}{url}')) | |||||
| url = f": {entity['url']}" if "url" in entity else "" | |||||
| results.append(self.create_text_message(text=f"{entity.get('name', '')}{url}")) | |||||
| if news: | if news: | ||||
| for news_item in news: | for news_item in news: | ||||
| url = f': {news_item["url"]}' if "url" in news_item else "" | |||||
| results.append(self.create_text_message(text=f'{news_item.get("name", "")}{url}')) | |||||
| url = f": {news_item['url']}" if "url" in news_item else "" | |||||
| results.append(self.create_text_message(text=f"{news_item.get('name', '')}{url}")) | |||||
| if related_searches: | if related_searches: | ||||
| for related in related_searches: | for related in related_searches: | ||||
| url = f': {related["displayText"]}' if "displayText" in related else "" | |||||
| results.append(self.create_text_message(text=f'{related.get("displayText", "")}{url}')) | |||||
| url = f": {related['displayText']}" if "displayText" in related else "" | |||||
| results.append(self.create_text_message(text=f"{related.get('displayText', '')}{url}")) | |||||
| return results | return results | ||||
| elif result_type == "json": | elif result_type == "json": | ||||
| text = "" | text = "" | ||||
| if search_results: | if search_results: | ||||
| for i, result in enumerate(search_results): | for i, result in enumerate(search_results): | ||||
| text += f'{i + 1}: {result.get("name", "")} - {result.get("snippet", "")}\n' | |||||
| text += f"{i + 1}: {result.get('name', '')} - {result.get('snippet', '')}\n" | |||||
| if computation and "expression" in computation and "value" in computation: | if computation and "expression" in computation and "value" in computation: | ||||
| text += "\nComputation:\n" | text += "\nComputation:\n" | ||||
| text += f'{computation["expression"]} = {computation["value"]}\n' | |||||
| text += f"{computation['expression']} = {computation['value']}\n" | |||||
| if entities: | if entities: | ||||
| text += "\nEntities:\n" | text += "\nEntities:\n" | ||||
| for entity in entities: | for entity in entities: | ||||
| url = f'- {entity["url"]}' if "url" in entity else "" | |||||
| text += f'{entity.get("name", "")}{url}\n' | |||||
| url = f"- {entity['url']}" if "url" in entity else "" | |||||
| text += f"{entity.get('name', '')}{url}\n" | |||||
| if news: | if news: | ||||
| text += "\nNews:\n" | text += "\nNews:\n" | ||||
| for news_item in news: | for news_item in news: | ||||
| url = f'- {news_item["url"]}' if "url" in news_item else "" | |||||
| text += f'{news_item.get("name", "")}{url}\n' | |||||
| url = f"- {news_item['url']}" if "url" in news_item else "" | |||||
| text += f"{news_item.get('name', '')}{url}\n" | |||||
| if related_searches: | if related_searches: | ||||
| text += "\n\nRelated Searches:\n" | text += "\n\nRelated Searches:\n" | ||||
| for related in related_searches: | for related in related_searches: | ||||
| url = f'- {related["webSearchUrl"]}' if "webSearchUrl" in related else "" | |||||
| text += f'{related.get("displayText", "")}{url}\n' | |||||
| url = f"- {related['webSearchUrl']}" if "webSearchUrl" in related else "" | |||||
| text += f"{related.get('displayText', '')}{url}\n" | |||||
| return self.create_text_message(text=self.summary(user_id=user_id, content=text)) | return self.create_text_message(text=self.summary(user_id=user_id, content=text)) | ||||
| if status["status"] == "done": | if status["status"] == "done": | ||||
| return status | return status | ||||
| elif status["status"] == "error" or status["status"] == "rejected": | elif status["status"] == "error" or status["status"] == "rejected": | ||||
| raise HTTPError(f'Talks {id} failed: {status["status"]} {status.get("error", {}).get("description")}') | |||||
| raise HTTPError(f"Talks {id} failed: {status['status']} {status.get('error', {}).get('description')}") | |||||
| time.sleep(poll_interval) | time.sleep(poll_interval) |
  if response is None:
  raise HTTPError("Failed to initiate crawl after multiple retries")
  elif response.get("success") == False:
- raise HTTPError(f'Failed to crawl: {response.get("error")}')
+ raise HTTPError(f"Failed to crawl: {response.get('error')}")
  job_id: str = response["id"]
  if wait:
  return self._monitor_job_status(job_id=job_id, poll_interval=poll_interval)
  if status["status"] == "completed":
  return status
  elif status["status"] == "failed":
- raise HTTPError(f'Job {job_id} failed: {status["error"]}')
+ raise HTTPError(f"Job {job_id} failed: {status['error']}")
  time.sleep(poll_interval)
  CityCode = City_data["districts"][0]["adcode"]
  weatherInfo_response = s.request(
  method="GET",
- url="{url}/weather/weatherInfo?city={citycode}&extensions=all&key={apikey}&output=json"
- "".format(url=api_domain, citycode=CityCode, apikey=self.runtime.credentials.get("api_key")),
+ url="{url}/weather/weatherInfo?city={citycode}&extensions=all&key={apikey}&output=json".format(
+ url=api_domain, citycode=CityCode, apikey=self.runtime.credentials.get("api_key")
+ ),
  )
  weatherInfo_data = weatherInfo_response.json()
  if weatherInfo_response.status_code == 200 and weatherInfo_data.get("info") == "OK":
| result["rows"].append(self.get_row_field_value(row, schema)) | result["rows"].append(self.get_row_field_value(row, schema)) | ||||
| return self.create_text_message(json.dumps(result, ensure_ascii=False)) | return self.create_text_message(json.dumps(result, ensure_ascii=False)) | ||||
| else: | else: | ||||
| result_text = f"Found {result['total']} rows in worksheet \"{worksheet_name}\"." | |||||
| result_text = f'Found {result["total"]} rows in worksheet "{worksheet_name}".' | |||||
| if result["total"] > 0: | if result["total"] > 0: | ||||
| result_text += ( | result_text += ( | ||||
| f" The following are {min(limit, result['total'])}" | f" The following are {min(limit, result['total'])}" |
| """ | """ | ||||
| This method is responsible for generating the authorization headers. | This method is responsible for generating the authorization headers. | ||||
| """ | """ | ||||
| return {"Authorization": f'Bearer {credentials.get("api_key", "")}'} | |||||
| return {"Authorization": f"Bearer {credentials.get('api_key', '')}"} |
  tool_parameters={
  "model": "chinook",
  "db_type": "SQLite",
- "url": f'{self._get_protocol_and_main_domain(credentials["base_url"])}/Chinook.sqlite',
+ "url": f"{self._get_protocol_and_main_domain(credentials['base_url'])}/Chinook.sqlite",
  "query": "What are the top 10 customers by sales?",
  },
  )
| if "api_key_header_prefix" in credentials: | if "api_key_header_prefix" in credentials: | ||||
| api_key_header_prefix = credentials["api_key_header_prefix"] | api_key_header_prefix = credentials["api_key_header_prefix"] | ||||
| if api_key_header_prefix == "basic" and credentials["api_key_value"]: | if api_key_header_prefix == "basic" and credentials["api_key_value"]: | ||||
| credentials["api_key_value"] = f'Basic {credentials["api_key_value"]}' | |||||
| credentials["api_key_value"] = f"Basic {credentials['api_key_value']}" | |||||
| elif api_key_header_prefix == "bearer" and credentials["api_key_value"]: | elif api_key_header_prefix == "bearer" and credentials["api_key_value"]: | ||||
| credentials["api_key_value"] = f'Bearer {credentials["api_key_value"]}' | |||||
| credentials["api_key_value"] = f"Bearer {credentials['api_key_value']}" | |||||
| elif api_key_header_prefix == "custom": | elif api_key_header_prefix == "custom": | ||||
| pass | pass | ||||
| user_id=user_id, tenant_id=tenant_id, conversation_id=conversation_id, file_url=message.message | user_id=user_id, tenant_id=tenant_id, conversation_id=conversation_id, file_url=message.message | ||||
| ) | ) | ||||
| url = f'/files/tools/{file.id}{guess_extension(file.mimetype) or ".png"}' | |||||
| url = f"/files/tools/{file.id}{guess_extension(file.mimetype) or '.png'}" | |||||
| result.append( | result.append( | ||||
| ToolInvokeMessage( | ToolInvokeMessage( | ||||
| @classmethod | @classmethod | ||||
| def get_tool_file_url(cls, tool_file_id: str, extension: Optional[str]) -> str: | def get_tool_file_url(cls, tool_file_id: str, extension: Optional[str]) -> str: | ||||
| return f'/files/tools/{tool_file_id}{extension or ".bin"}' | |||||
| return f"/files/tools/{tool_file_id}{extension or '.bin'}" |
| if not path: | if not path: | ||||
| path = str(uuid.uuid4()) | path = str(uuid.uuid4()) | ||||
| interface["operation"]["operationId"] = f'{path}_{interface["method"]}' | |||||
| interface["operation"]["operationId"] = f"{path}_{interface['method']}" | |||||
| bundles.append( | bundles.append( | ||||
| ApiToolBundle( | ApiToolBundle( |
| ) | ) | ||||
| if executor_response.size > threshold_size: | if executor_response.size > threshold_size: | ||||
| raise ResponseSizeError( | raise ResponseSizeError( | ||||
| f'{"File" if executor_response.is_file else "Text"} size is too large,' | |||||
| f' max size is {threshold_size / 1024 / 1024:.2f} MB,' | |||||
| f' but current size is {executor_response.readable_size}.' | |||||
| f"{'File' if executor_response.is_file else 'Text'} size is too large," | |||||
| f" max size is {threshold_size / 1024 / 1024:.2f} MB," | |||||
| f" but current size is {executor_response.readable_size}." | |||||
| ) | ) | ||||
| return executor_response | return executor_response | ||||
| if self.auth.config and self.auth.config.header: | if self.auth.config and self.auth.config.header: | ||||
| authorization_header = self.auth.config.header | authorization_header = self.auth.config.header | ||||
| if k.lower() == authorization_header.lower(): | if k.lower() == authorization_header.lower(): | ||||
| raw += f'{k}: {"*" * len(v)}\r\n' | |||||
| raw += f"{k}: {'*' * len(v)}\r\n" | |||||
| continue | continue | ||||
| raw += f"{k}: {v}\r\n" | raw += f"{k}: {v}\r\n" | ||||
| tool_runtime=tool_runtime, | tool_runtime=tool_runtime, | ||||
| provider_name=tool_entity.provider_name, | provider_name=tool_entity.provider_name, | ||||
| provider_type=tool_entity.provider_type, | provider_type=tool_entity.provider_type, | ||||
| identity_id=f'WORKFLOW.{app.id}.{node_data.get("id")}', | |||||
| identity_id=f"WORKFLOW.{app.id}.{node_data.get('id')}", | |||||
| ) | ) | ||||
| manager.delete_tool_parameters_cache() | manager.delete_tool_parameters_cache() | ||||
| except: | except: |
  [[package]]
  name = "ruff"
- version = "0.8.6"
+ version = "0.9.2"
  description = "An extremely fast Python linter and code formatter, written in Rust."
  optional = false
  python-versions = ">=3.7"
  files = [
- {file = "ruff-0.8.6-py3-none-linux_armv6l.whl", hash = "sha256:defed167955d42c68b407e8f2e6f56ba52520e790aba4ca707a9c88619e580e3"},
- {file = "ruff-0.8.6-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:54799ca3d67ae5e0b7a7ac234baa657a9c1784b48ec954a094da7c206e0365b1"},
- {file = "ruff-0.8.6-py3-none-macosx_11_0_arm64.whl", hash = "sha256:e88b8f6d901477c41559ba540beeb5a671e14cd29ebd5683903572f4b40a9807"},
- {file = "ruff-0.8.6-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0509e8da430228236a18a677fcdb0c1f102dd26d5520f71f79b094963322ed25"},
- {file = "ruff-0.8.6-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:91a7ddb221779871cf226100e677b5ea38c2d54e9e2c8ed847450ebbdf99b32d"},
- {file = "ruff-0.8.6-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:248b1fb3f739d01d528cc50b35ee9c4812aa58cc5935998e776bf8ed5b251e75"},
- {file = "ruff-0.8.6-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:bc3c083c50390cf69e7e1b5a5a7303898966be973664ec0c4a4acea82c1d4315"},
- {file = "ruff-0.8.6-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:52d587092ab8df308635762386f45f4638badb0866355b2b86760f6d3c076188"},
- {file = "ruff-0.8.6-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:61323159cf21bc3897674e5adb27cd9e7700bab6b84de40d7be28c3d46dc67cf"},
- {file = "ruff-0.8.6-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7ae4478b1471fc0c44ed52a6fb787e641a2ac58b1c1f91763bafbc2faddc5117"},
- {file = "ruff-0.8.6-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:0c000a471d519b3e6cfc9c6680025d923b4ca140ce3e4612d1a2ef58e11f11fe"},
- {file = "ruff-0.8.6-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:9257aa841e9e8d9b727423086f0fa9a86b6b420fbf4bf9e1465d1250ce8e4d8d"},
- {file = "ruff-0.8.6-py3-none-musllinux_1_2_i686.whl", hash = "sha256:45a56f61b24682f6f6709636949ae8cc82ae229d8d773b4c76c09ec83964a95a"},
- {file = "ruff-0.8.6-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:496dd38a53aa173481a7d8866bcd6451bd934d06976a2505028a50583e001b76"},
- {file = "ruff-0.8.6-py3-none-win32.whl", hash = "sha256:e169ea1b9eae61c99b257dc83b9ee6c76f89042752cb2d83486a7d6e48e8f764"},
- {file = "ruff-0.8.6-py3-none-win_amd64.whl", hash = "sha256:f1d70bef3d16fdc897ee290d7d20da3cbe4e26349f62e8a0274e7a3f4ce7a905"},
- {file = "ruff-0.8.6-py3-none-win_arm64.whl", hash = "sha256:7d7fc2377a04b6e04ffe588caad613d0c460eb2ecba4c0ccbbfe2bc973cbc162"},
- {file = "ruff-0.8.6.tar.gz", hash = "sha256:dcad24b81b62650b0eb8814f576fc65cfee8674772a6e24c9b747911801eeaa5"},
+ {file = "ruff-0.9.2-py3-none-linux_armv6l.whl", hash = "sha256:80605a039ba1454d002b32139e4970becf84b5fee3a3c3bf1c2af6f61a784347"},
+ {file = "ruff-0.9.2-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:b9aab82bb20afd5f596527045c01e6ae25a718ff1784cb92947bff1f83068b00"},
+ {file = "ruff-0.9.2-py3-none-macosx_11_0_arm64.whl", hash = "sha256:fbd337bac1cfa96be615f6efcd4bc4d077edbc127ef30e2b8ba2a27e18c054d4"},
+ {file = "ruff-0.9.2-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:82b35259b0cbf8daa22a498018e300b9bb0174c2bbb7bcba593935158a78054d"},
+ {file = "ruff-0.9.2-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8b6a9701d1e371bf41dca22015c3f89769da7576884d2add7317ec1ec8cb9c3c"},
+ {file = "ruff-0.9.2-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9cc53e68b3c5ae41e8faf83a3b89f4a5d7b2cb666dff4b366bb86ed2a85b481f"},
+ {file = "ruff-0.9.2-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:8efd9da7a1ee314b910da155ca7e8953094a7c10d0c0a39bfde3fcfd2a015684"},
+ {file = "ruff-0.9.2-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3292c5a22ea9a5f9a185e2d131dc7f98f8534a32fb6d2ee7b9944569239c648d"},
+ {file = "ruff-0.9.2-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1a605fdcf6e8b2d39f9436d343d1f0ff70c365a1e681546de0104bef81ce88df"},
+ {file = "ruff-0.9.2-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c547f7f256aa366834829a08375c297fa63386cbe5f1459efaf174086b564247"},
+ {file = "ruff-0.9.2-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:d18bba3d3353ed916e882521bc3e0af403949dbada344c20c16ea78f47af965e"},
+ {file = "ruff-0.9.2-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:b338edc4610142355ccf6b87bd356729b62bf1bc152a2fad5b0c7dc04af77bfe"},
+ {file = "ruff-0.9.2-py3-none-musllinux_1_2_i686.whl", hash = "sha256:492a5e44ad9b22a0ea98cf72e40305cbdaf27fac0d927f8bc9e1df316dcc96eb"},
+ {file = "ruff-0.9.2-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:af1e9e9fe7b1f767264d26b1075ac4ad831c7db976911fa362d09b2d0356426a"},
+ {file = "ruff-0.9.2-py3-none-win32.whl", hash = "sha256:71cbe22e178c5da20e1514e1e01029c73dc09288a8028a5d3446e6bba87a5145"},
+ {file = "ruff-0.9.2-py3-none-win_amd64.whl", hash = "sha256:c5e1d6abc798419cf46eed03f54f2e0c3adb1ad4b801119dedf23fcaf69b55b5"},
+ {file = "ruff-0.9.2-py3-none-win_arm64.whl", hash = "sha256:a1b63fa24149918f8b37cef2ee6fff81f24f0d74b6f0bdc37bc3e1f2143e41c6"},
+ {file = "ruff-0.9.2.tar.gz", hash = "sha256:b5eceb334d55fae5f316f783437392642ae18e16dcf4f1858d55d3c2a0f8f5d0"},
  ]
  [[package]]
  [metadata]
  lock-version = "2.0"
  python-versions = ">=3.11,<3.13"
- content-hash = "3bb0ce64c87712cf105c75105a0ca75c0523d6b27001ff6a623bb2a0d1343003"
+ content-hash = "3ac10f0687162281a0cd083a52cba5508b086dd42d63dd68175209e88b249142"
  optional = true
  [tool.poetry.group.lint.dependencies]
  dotenv-linter = "~0.5.0"
- ruff = "~0.8.1"
+ ruff = "~0.9.2"
  )
  except LLMBadRequestError:
  raise ValueError(
- "No Embedding Model available. Please configure a valid provider "
- "in the Settings -> Model Provider."
+ "No Embedding Model available. Please configure a valid provider in the Settings -> Model Provider."
  )
  except ProviderTokenNotInitError as ex:
  raise ValueError(f"The dataset in unavailable, due to: {ex.description}")
  if custom_parameters:
  for parameter in custom_parameters:
  if parameter.get("required", False) and not process_parameter.get(parameter.get("name")):
- raise ValueError(f'{parameter.get("name")} is required')
+ raise ValueError(f"{parameter.get('name')} is required")
  @staticmethod
  def process_external_api(
  dependency_names = list(dependencies.keys())
  all_dependency_names.extend(dependency_names)
  expected_all_dependency_names = set(all_dependency_names)
- assert sorted(expected_all_dependency_names) == sorted(
- all_dependency_names
- ), "Duplicated dependencies crossing groups are found"
+ assert sorted(expected_all_dependency_names) == sorted(all_dependency_names), (
+ "Duplicated dependencies crossing groups are found"
+ )
| print("Actual document ID:", hits_by_vector[0].metadata["document_id"] if hits_by_vector else "No hits") | print("Actual document ID:", hits_by_vector[0].metadata["document_id"] if hits_by_vector else "No hits") | ||||
| assert len(hits_by_vector) > 0, f"Expected at least one hit, got {len(hits_by_vector)}" | assert len(hits_by_vector) > 0, f"Expected at least one hit, got {len(hits_by_vector)}" | ||||
| assert ( | |||||
| hits_by_vector[0].metadata["document_id"] == self.example_doc_id | |||||
| ), f"Expected document ID {self.example_doc_id}, got {hits_by_vector[0].metadata['document_id']}" | |||||
| assert hits_by_vector[0].metadata["document_id"] == self.example_doc_id, ( | |||||
| f"Expected document ID {self.example_doc_id}, got {hits_by_vector[0].metadata['document_id']}" | |||||
| ) | |||||
| def test_get_ids_by_metadata_field(self): | def test_get_ids_by_metadata_field(self): | ||||
| mock_response = {"hits": {"total": {"value": 1}, "hits": [{"_id": "mock_id"}]}} | mock_response = {"hits": {"total": {"value": 1}, "hits": [{"_id": "mock_id"}]}} |
  # Verify the result
  assert len(prompt_messages) == len(scenario.expected_messages), f"Scenario failed: {scenario.description}"
- assert (
- prompt_messages == scenario.expected_messages
- ), f"Message content mismatch in scenario: {scenario.description}"
+ assert prompt_messages == scenario.expected_messages, (
+ f"Message content mismatch in scenario: {scenario.description}"
+ )
  def test_handle_list_messages_basic(llm_node):
  prompt_template = PromptTemplateEntity(
  prompt_type=PromptTemplateEntity.PromptType.ADVANCED,
  advanced_completion_prompt_template=AdvancedCompletionPromptTemplateEntity(
- prompt="You are a helpful assistant named {{name}}.\n\nContext:\n{{#context#}}\n\n"
- "Human: hi\nAssistant: ",
+ prompt="You are a helpful assistant named {{name}}.\n\nContext:\n{{#context#}}\n\nHuman: hi\nAssistant: ",
  role_prefix=AdvancedCompletionPromptTemplateEntity.RolePrefixEntity(user="Human", assistant="Assistant"),
  ),
  )