Просмотр исходного кода

chore: apply ruff's pyflakes linter rules (#2420)

tags/0.5.5
Bowen Liang 1 год назад
Родитель
Commit
14a19a3da9
Аккаунт пользователя с таким Email не найден
34 измененных файлов: 91 добавлений и 86 удалений
  1. 2
    2
      api/controllers/console/app/app.py
  2. 4
    4
      api/controllers/console/datasets/datasets.py
  3. 8
    8
      api/controllers/console/datasets/datasets_document.py
  4. 6
    6
      api/controllers/console/datasets/datasets_segments.py
  5. 2
    2
      api/controllers/console/datasets/hit_testing.py
  6. 1
    1
      api/controllers/console/explore/parameter.py
  7. 1
    1
      api/controllers/files/image_preview.py
  8. 1
    1
      api/controllers/files/tool_files.py
  9. 1
    1
      api/controllers/service_api/app/app.py
  10. 6
    6
      api/controllers/service_api/dataset/segment.py
  11. 1
    1
      api/controllers/web/app.py
  12. 1
    1
      api/core/app_runner/assistant_app_runner.py
  13. 1
    1
      api/core/app_runner/basic_app_runner.py
  14. 1
    1
      api/core/features/assistant_base_runner.py
  15. 2
    2
      api/core/features/assistant_cot_runner.py
  16. 1
    1
      api/core/features/assistant_fc_runner.py
  17. 1
    1
      api/core/index/vector_index/vector_index.py
  18. 6
    6
      api/core/model_manager.py
  19. 4
    4
      api/core/model_runtime/callbacks/logging_callback.py
  20. 1
    1
      api/core/model_runtime/model_providers/baichuan/llm/llm.py
  21. 3
    3
      api/core/model_runtime/model_providers/openai_api_compatible/llm/llm.py
  22. 2
    2
      api/core/model_runtime/model_providers/openai_api_compatible/text_embedding/text_embedding.py
  23. 8
    8
      api/core/model_runtime/model_providers/wenxin/llm/ernie_bot.py
  24. 1
    1
      api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/_client.py
  25. 2
    2
      api/core/prompt/output_parser/rule_config_generator.py
  26. 4
    4
      api/core/tools/model/tool_model_manager.py
  27. 1
    1
      api/core/tools/provider/builtin/yahoo/tools/analytics.py
  28. 1
    1
      api/core/tools/provider/builtin/yahoo/tools/news.py
  29. 1
    1
      api/core/tools/provider/builtin/yahoo/tools/ticker.py
  30. 1
    1
      api/core/tools/tool/tool.py
  31. 1
    1
      api/libs/helper.py
  32. 7
    2
      api/pyproject.toml
  33. 4
    4
      api/services/dataset_service.py
  34. 4
    4
      api/services/tools_manage_service.py

+ 2
- 2
api/controllers/console/app/app.py Просмотреть файл



if not model_instance: if not model_instance:
raise ProviderNotInitializeError( raise ProviderNotInitializeError(
f"No Default System Reasoning Model available. Please configure "
f"in the Settings -> Model Provider.")
"No Default System Reasoning Model available. Please configure "
"in the Settings -> Model Provider.")
else: else:
model_config_dict["model"]["provider"] = model_instance.provider model_config_dict["model"]["provider"] = model_instance.provider
model_config_dict["model"]["name"] = model_instance.model model_config_dict["model"]["name"] = model_instance.model

+ 4
- 4
api/controllers/console/datasets/datasets.py Просмотреть файл

args['indexing_technique']) args['indexing_technique'])
except LLMBadRequestError: except LLMBadRequestError:
raise ProviderNotInitializeError( raise ProviderNotInitializeError(
f"No Embedding Model available. Please configure a valid provider "
f"in the Settings -> Model Provider.")
"No Embedding Model available. Please configure a valid provider "
"in the Settings -> Model Provider.")
except ProviderTokenNotInitError as ex: except ProviderTokenNotInitError as ex:
raise ProviderNotInitializeError(ex.description) raise ProviderNotInitializeError(ex.description)
elif args['info_list']['data_source_type'] == 'notion_import': elif args['info_list']['data_source_type'] == 'notion_import':
args['indexing_technique']) args['indexing_technique'])
except LLMBadRequestError: except LLMBadRequestError:
raise ProviderNotInitializeError( raise ProviderNotInitializeError(
f"No Embedding Model available. Please configure a valid provider "
f"in the Settings -> Model Provider.")
"No Embedding Model available. Please configure a valid provider "
"in the Settings -> Model Provider.")
except ProviderTokenNotInitError as ex: except ProviderTokenNotInitError as ex:
raise ProviderNotInitializeError(ex.description) raise ProviderNotInitializeError(ex.description)
else: else:

+ 8
- 8
api/controllers/console/datasets/datasets_document.py Просмотреть файл

) )
except InvokeAuthorizationError: except InvokeAuthorizationError:
raise ProviderNotInitializeError( raise ProviderNotInitializeError(
f"No Embedding Model available. Please configure a valid provider "
f"in the Settings -> Model Provider.")
"No Embedding Model available. Please configure a valid provider "
"in the Settings -> Model Provider.")
except ProviderTokenNotInitError as ex: except ProviderTokenNotInitError as ex:
raise ProviderNotInitializeError(ex.description) raise ProviderNotInitializeError(ex.description)


'English', dataset_id) 'English', dataset_id)
except LLMBadRequestError: except LLMBadRequestError:
raise ProviderNotInitializeError( raise ProviderNotInitializeError(
f"No Embedding Model available. Please configure a valid provider "
f"in the Settings -> Model Provider.")
"No Embedding Model available. Please configure a valid provider "
"in the Settings -> Model Provider.")
except ProviderTokenNotInitError as ex: except ProviderTokenNotInitError as ex:
raise ProviderNotInitializeError(ex.description) raise ProviderNotInitializeError(ex.description)


'English', dataset_id) 'English', dataset_id)
except LLMBadRequestError: except LLMBadRequestError:
raise ProviderNotInitializeError( raise ProviderNotInitializeError(
f"No Embedding Model available. Please configure a valid provider "
f"in the Settings -> Model Provider.")
"No Embedding Model available. Please configure a valid provider "
"in the Settings -> Model Provider.")
except ProviderTokenNotInitError as ex: except ProviderTokenNotInitError as ex:
raise ProviderNotInitializeError(ex.description) raise ProviderNotInitializeError(ex.description)
elif dataset.data_source_type == 'notion_import': elif dataset.data_source_type == 'notion_import':
None, 'English', dataset_id) None, 'English', dataset_id)
except LLMBadRequestError: except LLMBadRequestError:
raise ProviderNotInitializeError( raise ProviderNotInitializeError(
f"No Embedding Model available. Please configure a valid provider "
f"in the Settings -> Model Provider.")
"No Embedding Model available. Please configure a valid provider "
"in the Settings -> Model Provider.")
except ProviderTokenNotInitError as ex: except ProviderTokenNotInitError as ex:
raise ProviderNotInitializeError(ex.description) raise ProviderNotInitializeError(ex.description)
else: else:

+ 6
- 6
api/controllers/console/datasets/datasets_segments.py Просмотреть файл

) )
except LLMBadRequestError: except LLMBadRequestError:
raise ProviderNotInitializeError( raise ProviderNotInitializeError(
f"No Embedding Model available. Please configure a valid provider "
f"in the Settings -> Model Provider.")
"No Embedding Model available. Please configure a valid provider "
"in the Settings -> Model Provider.")
except ProviderTokenNotInitError as ex: except ProviderTokenNotInitError as ex:
raise ProviderNotInitializeError(ex.description) raise ProviderNotInitializeError(ex.description)


) )
except LLMBadRequestError: except LLMBadRequestError:
raise ProviderNotInitializeError( raise ProviderNotInitializeError(
f"No Embedding Model available. Please configure a valid provider "
f"in the Settings -> Model Provider.")
"No Embedding Model available. Please configure a valid provider "
"in the Settings -> Model Provider.")
except ProviderTokenNotInitError as ex: except ProviderTokenNotInitError as ex:
raise ProviderNotInitializeError(ex.description) raise ProviderNotInitializeError(ex.description)
try: try:
) )
except LLMBadRequestError: except LLMBadRequestError:
raise ProviderNotInitializeError( raise ProviderNotInitializeError(
f"No Embedding Model available. Please configure a valid provider "
f"in the Settings -> Model Provider.")
"No Embedding Model available. Please configure a valid provider "
"in the Settings -> Model Provider.")
except ProviderTokenNotInitError as ex: except ProviderTokenNotInitError as ex:
raise ProviderNotInitializeError(ex.description) raise ProviderNotInitializeError(ex.description)
# check segment # check segment

+ 2
- 2
api/controllers/console/datasets/hit_testing.py Просмотреть файл

raise ProviderModelCurrentlyNotSupportError() raise ProviderModelCurrentlyNotSupportError()
except LLMBadRequestError: except LLMBadRequestError:
raise ProviderNotInitializeError( raise ProviderNotInitializeError(
f"No Embedding Model or Reranking Model available. Please configure a valid provider "
f"in the Settings -> Model Provider.")
"No Embedding Model or Reranking Model available. Please configure a valid provider "
"in the Settings -> Model Provider.")
except InvokeError as e: except InvokeError as e:
raise CompletionRequestError(e.description) raise CompletionRequestError(e.description)
except ValueError as e: except ValueError as e:

+ 1
- 1
api/controllers/console/explore/parameter.py Просмотреть файл

# get all tools # get all tools
tools = agent_config.get('tools', []) tools = agent_config.get('tools', [])
url_prefix = (current_app.config.get("CONSOLE_API_URL") url_prefix = (current_app.config.get("CONSOLE_API_URL")
+ f"/console/api/workspaces/current/tool-provider/builtin/")
+ "/console/api/workspaces/current/tool-provider/builtin/")
for tool in tools: for tool in tools:
keys = list(tool.keys()) keys = list(tool.keys())
if len(keys) >= 4: if len(keys) >= 4:

+ 1
- 1
api/controllers/files/image_preview.py Просмотреть файл

webapp_logo_file_id = custom_config.get('replace_webapp_logo') if custom_config is not None else None webapp_logo_file_id = custom_config.get('replace_webapp_logo') if custom_config is not None else None


if not webapp_logo_file_id: if not webapp_logo_file_id:
raise NotFound(f'webapp logo is not found')
raise NotFound('webapp logo is not found')


try: try:
generator, mimetype = FileService.get_public_image_preview( generator, mimetype = FileService.get_public_image_preview(

+ 1
- 1
api/controllers/files/tool_files.py Просмотреть файл

) )


if not result: if not result:
raise NotFound(f'file is not found')
raise NotFound('file is not found')
generator, mimetype = result generator, mimetype = result
except Exception: except Exception:

+ 1
- 1
api/controllers/service_api/app/app.py Просмотреть файл

# get all tools # get all tools
tools = agent_config.get('tools', []) tools = agent_config.get('tools', [])
url_prefix = (current_app.config.get("CONSOLE_API_URL") url_prefix = (current_app.config.get("CONSOLE_API_URL")
+ f"/console/api/workspaces/current/tool-provider/builtin/")
+ "/console/api/workspaces/current/tool-provider/builtin/")
for tool in tools: for tool in tools:
keys = list(tool.keys()) keys = list(tool.keys())
if len(keys) >= 4: if len(keys) >= 4:

+ 6
- 6
api/controllers/service_api/dataset/segment.py Просмотреть файл

) )
except LLMBadRequestError: except LLMBadRequestError:
raise ProviderNotInitializeError( raise ProviderNotInitializeError(
f"No Embedding Model available. Please configure a valid provider "
f"in the Settings -> Model Provider.")
"No Embedding Model available. Please configure a valid provider "
"in the Settings -> Model Provider.")
except ProviderTokenNotInitError as ex: except ProviderTokenNotInitError as ex:
raise ProviderNotInitializeError(ex.description) raise ProviderNotInitializeError(ex.description)
# validate args # validate args
) )
except LLMBadRequestError: except LLMBadRequestError:
raise ProviderNotInitializeError( raise ProviderNotInitializeError(
f"No Embedding Model available. Please configure a valid provider "
f"in the Settings -> Model Provider.")
"No Embedding Model available. Please configure a valid provider "
"in the Settings -> Model Provider.")
except ProviderTokenNotInitError as ex: except ProviderTokenNotInitError as ex:
raise ProviderNotInitializeError(ex.description) raise ProviderNotInitializeError(ex.description)


) )
except LLMBadRequestError: except LLMBadRequestError:
raise ProviderNotInitializeError( raise ProviderNotInitializeError(
f"No Embedding Model available. Please configure a valid provider "
f"in the Settings -> Model Provider.")
"No Embedding Model available. Please configure a valid provider "
"in the Settings -> Model Provider.")
except ProviderTokenNotInitError as ex: except ProviderTokenNotInitError as ex:
raise ProviderNotInitializeError(ex.description) raise ProviderNotInitializeError(ex.description)
# check segment # check segment

+ 1
- 1
api/controllers/web/app.py Просмотреть файл

# get all tools # get all tools
tools = agent_config.get('tools', []) tools = agent_config.get('tools', [])
url_prefix = (current_app.config.get("CONSOLE_API_URL") url_prefix = (current_app.config.get("CONSOLE_API_URL")
+ f"/console/api/workspaces/current/tool-provider/builtin/")
+ "/console/api/workspaces/current/tool-provider/builtin/")
for tool in tools: for tool in tools:
keys = list(tool.keys()) keys = list(tool.keys())
if len(keys) >= 4: if len(keys) >= 4:

+ 1
- 1
api/core/app_runner/assistant_app_runner.py Просмотреть файл

""" """
app_record = db.session.query(App).filter(App.id == application_generate_entity.app_id).first() app_record = db.session.query(App).filter(App.id == application_generate_entity.app_id).first()
if not app_record: if not app_record:
raise ValueError(f"App not found")
raise ValueError("App not found")


app_orchestration_config = application_generate_entity.app_orchestration_config_entity app_orchestration_config = application_generate_entity.app_orchestration_config_entity



+ 1
- 1
api/core/app_runner/basic_app_runner.py Просмотреть файл

""" """
app_record = db.session.query(App).filter(App.id == application_generate_entity.app_id).first() app_record = db.session.query(App).filter(App.id == application_generate_entity.app_id).first()
if not app_record: if not app_record:
raise ValueError(f"App not found")
raise ValueError("App not found")


app_orchestration_config = application_generate_entity.app_orchestration_config_entity app_orchestration_config = application_generate_entity.app_orchestration_config_entity



+ 1
- 1
api/core/features/assistant_base_runner.py Просмотреть файл

result += f"result link: {response.message}. please tell user to check it." result += f"result link: {response.message}. please tell user to check it."
elif response.type == ToolInvokeMessage.MessageType.IMAGE_LINK or \ elif response.type == ToolInvokeMessage.MessageType.IMAGE_LINK or \
response.type == ToolInvokeMessage.MessageType.IMAGE: response.type == ToolInvokeMessage.MessageType.IMAGE:
result += f"image has been created and sent to user already, you should tell user to check it now."
result += "image has been created and sent to user already, you should tell user to check it now."
else: else:
result += f"tool response: {response.message}." result += f"tool response: {response.message}."



+ 2
- 2
api/core/features/assistant_cot_runner.py Просмотреть файл



message_file_ids = [message_file.id for message_file, _ in message_files] message_file_ids = [message_file.id for message_file, _ in message_files]
except ToolProviderCredentialValidationError as e: except ToolProviderCredentialValidationError as e:
error_response = f"Please check your tool provider credentials"
error_response = "Please check your tool provider credentials"
except ( except (
ToolNotFoundError, ToolNotSupportedError, ToolProviderNotFoundError ToolNotFoundError, ToolNotSupportedError, ToolProviderNotFoundError
) as e: ) as e:
next_iteration = agent_prompt_message.next_iteration next_iteration = agent_prompt_message.next_iteration


if not isinstance(first_prompt, str) or not isinstance(next_iteration, str): if not isinstance(first_prompt, str) or not isinstance(next_iteration, str):
raise ValueError(f"first_prompt or next_iteration is required in CoT agent mode")
raise ValueError("first_prompt or next_iteration is required in CoT agent mode")
# check instruction, tools, and tool_names slots # check instruction, tools, and tool_names slots
if not first_prompt.find("{{instruction}}") >= 0: if not first_prompt.find("{{instruction}}") >= 0:

+ 1
- 1
api/core/features/assistant_fc_runner.py Просмотреть файл

message_file_ids.append(message_file.id) message_file_ids.append(message_file.id)
except ToolProviderCredentialValidationError as e: except ToolProviderCredentialValidationError as e:
error_response = f"Please check your tool provider credentials"
error_response = "Please check your tool provider credentials"
except ( except (
ToolNotFoundError, ToolNotSupportedError, ToolProviderNotFoundError ToolNotFoundError, ToolNotSupportedError, ToolProviderNotFoundError
) as e: ) as e:

+ 1
- 1
api/core/index/vector_index/vector_index.py Просмотреть файл

vector_type = self._dataset.index_struct_dict['type'] vector_type = self._dataset.index_struct_dict['type']


if not vector_type: if not vector_type:
raise ValueError(f"Vector store must be specified.")
raise ValueError("Vector store must be specified.")


if vector_type == "weaviate": if vector_type == "weaviate":
from core.index.vector_index.weaviate_vector_index import WeaviateConfig, WeaviateVectorIndex from core.index.vector_index.weaviate_vector_index import WeaviateConfig, WeaviateVectorIndex

+ 6
- 6
api/core/model_manager.py Просмотреть файл

:return: full response or stream response chunk generator result :return: full response or stream response chunk generator result
""" """
if not isinstance(self.model_type_instance, LargeLanguageModel): if not isinstance(self.model_type_instance, LargeLanguageModel):
raise Exception(f"Model type instance is not LargeLanguageModel")
raise Exception("Model type instance is not LargeLanguageModel")


self.model_type_instance = cast(LargeLanguageModel, self.model_type_instance) self.model_type_instance = cast(LargeLanguageModel, self.model_type_instance)
return self.model_type_instance.invoke( return self.model_type_instance.invoke(
:return: embeddings result :return: embeddings result
""" """
if not isinstance(self.model_type_instance, TextEmbeddingModel): if not isinstance(self.model_type_instance, TextEmbeddingModel):
raise Exception(f"Model type instance is not TextEmbeddingModel")
raise Exception("Model type instance is not TextEmbeddingModel")


self.model_type_instance = cast(TextEmbeddingModel, self.model_type_instance) self.model_type_instance = cast(TextEmbeddingModel, self.model_type_instance)
return self.model_type_instance.invoke( return self.model_type_instance.invoke(
:return: rerank result :return: rerank result
""" """
if not isinstance(self.model_type_instance, RerankModel): if not isinstance(self.model_type_instance, RerankModel):
raise Exception(f"Model type instance is not RerankModel")
raise Exception("Model type instance is not RerankModel")


self.model_type_instance = cast(RerankModel, self.model_type_instance) self.model_type_instance = cast(RerankModel, self.model_type_instance)
return self.model_type_instance.invoke( return self.model_type_instance.invoke(
:return: false if text is safe, true otherwise :return: false if text is safe, true otherwise
""" """
if not isinstance(self.model_type_instance, ModerationModel): if not isinstance(self.model_type_instance, ModerationModel):
raise Exception(f"Model type instance is not ModerationModel")
raise Exception("Model type instance is not ModerationModel")


self.model_type_instance = cast(ModerationModel, self.model_type_instance) self.model_type_instance = cast(ModerationModel, self.model_type_instance)
return self.model_type_instance.invoke( return self.model_type_instance.invoke(
:return: text for given audio file :return: text for given audio file
""" """
if not isinstance(self.model_type_instance, Speech2TextModel): if not isinstance(self.model_type_instance, Speech2TextModel):
raise Exception(f"Model type instance is not Speech2TextModel")
raise Exception("Model type instance is not Speech2TextModel")


self.model_type_instance = cast(Speech2TextModel, self.model_type_instance) self.model_type_instance = cast(Speech2TextModel, self.model_type_instance)
return self.model_type_instance.invoke( return self.model_type_instance.invoke(
:return: text for given audio file :return: text for given audio file
""" """
if not isinstance(self.model_type_instance, TTSModel): if not isinstance(self.model_type_instance, TTSModel):
raise Exception(f"Model type instance is not TTSModel")
raise Exception("Model type instance is not TTSModel")


self.model_type_instance = cast(TTSModel, self.model_type_instance) self.model_type_instance = cast(TTSModel, self.model_type_instance)
return self.model_type_instance.invoke( return self.model_type_instance.invoke(

+ 4
- 4
api/core/model_runtime/callbacks/logging_callback.py Просмотреть файл

""" """
self.print_text("\n[on_llm_before_invoke]\n", color='blue') self.print_text("\n[on_llm_before_invoke]\n", color='blue')
self.print_text(f"Model: {model}\n", color='blue') self.print_text(f"Model: {model}\n", color='blue')
self.print_text(f"Parameters:\n", color='blue')
self.print_text("Parameters:\n", color='blue')
for key, value in model_parameters.items(): for key, value in model_parameters.items():
self.print_text(f"\t{key}: {value}\n", color='blue') self.print_text(f"\t{key}: {value}\n", color='blue')


self.print_text(f"\tstop: {stop}\n", color='blue') self.print_text(f"\tstop: {stop}\n", color='blue')


if tools: if tools:
self.print_text(f"\tTools:\n", color='blue')
self.print_text("\tTools:\n", color='blue')
for tool in tools: for tool in tools:
self.print_text(f"\t\t{tool.name}\n", color='blue') self.print_text(f"\t\t{tool.name}\n", color='blue')


if user: if user:
self.print_text(f"User: {user}\n", color='blue') self.print_text(f"User: {user}\n", color='blue')


self.print_text(f"Prompt messages:\n", color='blue')
self.print_text("Prompt messages:\n", color='blue')
for prompt_message in prompt_messages: for prompt_message in prompt_messages:
if prompt_message.name: if prompt_message.name:
self.print_text(f"\tname: {prompt_message.name}\n", color='blue') self.print_text(f"\tname: {prompt_message.name}\n", color='blue')
self.print_text(f"Content: {result.message.content}\n", color='yellow') self.print_text(f"Content: {result.message.content}\n", color='yellow')


if result.message.tool_calls: if result.message.tool_calls:
self.print_text(f"Tool calls:\n", color='yellow')
self.print_text("Tool calls:\n", color='yellow')
for tool_call in result.message.tool_calls: for tool_call in result.message.tool_calls:
self.print_text(f"\t{tool_call.id}\n", color='yellow') self.print_text(f"\t{tool_call.id}\n", color='yellow')
self.print_text(f"\t{tool_call.function.name}\n", color='yellow') self.print_text(f"\t{tool_call.function.name}\n", color='yellow')

+ 1
- 1
api/core/model_runtime/model_providers/baichuan/llm/llm.py Просмотреть файл

stop: List[str] | None = None, stream: bool = True, user: str | None = None) \ stop: List[str] | None = None, stream: bool = True, user: str | None = None) \
-> LLMResult | Generator: -> LLMResult | Generator:
if tools is not None and len(tools) > 0: if tools is not None and len(tools) > 0:
raise InvokeBadRequestError(f"Baichuan model doesn't support tools")
raise InvokeBadRequestError("Baichuan model doesn't support tools")
instance = BaichuanModel( instance = BaichuanModel(
api_key=credentials['api_key'], api_key=credentials['api_key'],

+ 3
- 3
api/core/model_runtime/model_providers/openai_api_compatible/llm/llm.py Просмотреть файл

try: try:
json_result = response.json() json_result = response.json()
except json.JSONDecodeError as e: except json.JSONDecodeError as e:
raise CredentialsValidateFailedError(f'Credentials validation failed: JSON decode error')
raise CredentialsValidateFailedError('Credentials validation failed: JSON decode error')


if (completion_type is LLMMode.CHAT if (completion_type is LLMMode.CHAT
and ('object' not in json_result or json_result['object'] != 'chat.completion')): and ('object' not in json_result or json_result['object'] != 'chat.completion')):
raise CredentialsValidateFailedError( raise CredentialsValidateFailedError(
f'Credentials validation failed: invalid response object, must be \'chat.completion\'')
'Credentials validation failed: invalid response object, must be \'chat.completion\'')
elif (completion_type is LLMMode.COMPLETION elif (completion_type is LLMMode.COMPLETION
and ('object' not in json_result or json_result['object'] != 'text_completion')): and ('object' not in json_result or json_result['object'] != 'text_completion')):
raise CredentialsValidateFailedError( raise CredentialsValidateFailedError(
f'Credentials validation failed: invalid response object, must be \'text_completion\'')
'Credentials validation failed: invalid response object, must be \'text_completion\'')
except CredentialsValidateFailedError: except CredentialsValidateFailedError:
raise raise
except Exception as ex: except Exception as ex:

+ 2
- 2
api/core/model_runtime/model_providers/openai_api_compatible/text_embedding/text_embedding.py Просмотреть файл

try: try:
json_result = response.json() json_result = response.json()
except json.JSONDecodeError as e: except json.JSONDecodeError as e:
raise CredentialsValidateFailedError(f'Credentials validation failed: JSON decode error')
raise CredentialsValidateFailedError('Credentials validation failed: JSON decode error')


if 'model' not in json_result: if 'model' not in json_result:
raise CredentialsValidateFailedError( raise CredentialsValidateFailedError(
f'Credentials validation failed: invalid response')
'Credentials validation failed: invalid response')
except CredentialsValidateFailedError: except CredentialsValidateFailedError:
raise raise
except Exception as ex: except Exception as ex:

+ 8
- 8
api/core/model_runtime/model_providers/wenxin/llm/ernie_bot.py Просмотреть файл

# so, we just disable function calling for now. # so, we just disable function calling for now.


if tools is not None and len(tools) > 0: if tools is not None and len(tools) > 0:
raise BadRequestError(f'function calling is not supported yet.')
raise BadRequestError('function calling is not supported yet.')


if stop is not None: if stop is not None:
if len(stop) > 4: if len(stop) > 4:
raise BadRequestError(f'stop list should not exceed 4 items.')
raise BadRequestError('stop list should not exceed 4 items.')


for s in stop: for s in stop:
if len(s) > 20: if len(s) > 20:
raise BadRequestError(f'stop item should not exceed 20 characters.')
raise BadRequestError('stop item should not exceed 20 characters.')
def _build_request_body(self, model: str, messages: List[ErnieMessage], stream: bool, parameters: Dict[str, Any], def _build_request_body(self, model: str, messages: List[ErnieMessage], stream: bool, parameters: Dict[str, Any],
tools: List[PromptMessageTool], stop: List[str], user: str) -> Dict[str, Any]: tools: List[PromptMessageTool], stop: List[str], user: str) -> Dict[str, Any]:
stop: List[str], user: str) \ stop: List[str], user: str) \
-> Dict[str, Any]: -> Dict[str, Any]:
if len(messages) % 2 == 0: if len(messages) % 2 == 0:
raise BadRequestError(f'The number of messages should be odd.')
raise BadRequestError('The number of messages should be odd.')
if messages[0].role == 'function': if messages[0].role == 'function':
raise BadRequestError(f'The first message should be user message.')
raise BadRequestError('The first message should be user message.')
""" """
TODO: implement function calling TODO: implement function calling
parameters: Dict[str, Any], stop: List[str], user: str) \ parameters: Dict[str, Any], stop: List[str], user: str) \
-> Dict[str, Any]: -> Dict[str, Any]:
if len(messages) == 0: if len(messages) == 0:
raise BadRequestError(f'The number of messages should not be zero.')
raise BadRequestError('The number of messages should not be zero.')
# check if the first element is system, shift it # check if the first element is system, shift it
system_message = '' system_message = ''
system_message = message.content system_message = message.content


if len(messages) % 2 == 0: if len(messages) % 2 == 0:
raise BadRequestError(f'The number of messages should be odd.')
raise BadRequestError('The number of messages should be odd.')
if messages[0].role != 'user': if messages[0].role != 'user':
raise BadRequestError(f'The first message should be user message.')
raise BadRequestError('The first message should be user message.')
body = { body = {
'messages': [message.to_dict() for message in messages], 'messages': [message.to_dict() for message in messages],
'stream': stream, 'stream': stream,

+ 1
- 1
api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/_client.py Просмотреть файл

if base_url is None: if base_url is None:
base_url = os.environ.get("ZHIPUAI_BASE_URL") base_url = os.environ.get("ZHIPUAI_BASE_URL")
if base_url is None: if base_url is None:
base_url = f"https://open.bigmodel.cn/api/paas/v4"
base_url = "https://open.bigmodel.cn/api/paas/v4"
from .__version__ import __version__ from .__version__ import __version__
super().__init__( super().__init__(
version=__version__, version=__version__,

+ 2
- 2
api/core/prompt/output_parser/rule_config_generator.py Просмотреть файл

raise ValueError("Expected 'prompt' to be a string.") raise ValueError("Expected 'prompt' to be a string.")
if not isinstance(parsed["variables"], list): if not isinstance(parsed["variables"], list):
raise ValueError( raise ValueError(
f"Expected 'variables' to be a list."
"Expected 'variables' to be a list."
) )
if not isinstance(parsed["opening_statement"], str): if not isinstance(parsed["opening_statement"], str):
raise ValueError( raise ValueError(
f"Expected 'opening_statement' to be a str."
"Expected 'opening_statement' to be a str."
) )
return parsed return parsed
except Exception as e: except Exception as e:

+ 4
- 4
api/core/tools/model/tool_model_manager.py Просмотреть файл

) )


if not model_instance: if not model_instance:
raise InvokeModelError(f'Model not found')
raise InvokeModelError('Model not found')
llm_model = cast(LargeLanguageModel, model_instance.model_type_instance) llm_model = cast(LargeLanguageModel, model_instance.model_type_instance)
schema = llm_model.get_model_schema(model_instance.model, model_instance.credentials) schema = llm_model.get_model_schema(model_instance.model, model_instance.credentials)


if not schema: if not schema:
raise InvokeModelError(f'No model schema found')
raise InvokeModelError('No model schema found')


max_tokens = schema.model_properties.get(ModelPropertyKey.CONTEXT_SIZE, None) max_tokens = schema.model_properties.get(ModelPropertyKey.CONTEXT_SIZE, None)
if max_tokens is None: if max_tokens is None:
) )


if not model_instance: if not model_instance:
raise InvokeModelError(f'Model not found')
raise InvokeModelError('Model not found')
llm_model = cast(LargeLanguageModel, model_instance.model_type_instance) llm_model = cast(LargeLanguageModel, model_instance.model_type_instance)


except InvokeConnectionError as e: except InvokeConnectionError as e:
raise InvokeModelError(f'Invoke connection error: {e}') raise InvokeModelError(f'Invoke connection error: {e}')
except InvokeAuthorizationError as e: except InvokeAuthorizationError as e:
raise InvokeModelError(f'Invoke authorization error')
raise InvokeModelError('Invoke authorization error')
except InvokeServerUnavailableError as e: except InvokeServerUnavailableError as e:
raise InvokeModelError(f'Invoke server unavailable error: {e}') raise InvokeModelError(f'Invoke server unavailable error: {e}')
except Exception as e: except Exception as e:

+ 1
- 1
api/core/tools/provider/builtin/yahoo/tools/analytics.py Просмотреть файл

try: try:
return self.create_text_message(str(summary_df.to_dict())) return self.create_text_message(str(summary_df.to_dict()))
except (HTTPError, ReadTimeout): except (HTTPError, ReadTimeout):
return self.create_text_message(f'There is a internet connection problem. Please try again later.')
return self.create_text_message('There is a internet connection problem. Please try again later.')

+ 1
- 1
api/core/tools/provider/builtin/yahoo/tools/news.py Просмотреть файл

try: try:
return self.run(ticker=query, user_id=user_id) return self.run(ticker=query, user_id=user_id)
except (HTTPError, ReadTimeout): except (HTTPError, ReadTimeout):
return self.create_text_message(f'There is a internet connection problem. Please try again later.')
return self.create_text_message('There is a internet connection problem. Please try again later.')


def run(self, ticker: str, user_id: str) -> ToolInvokeMessage: def run(self, ticker: str, user_id: str) -> ToolInvokeMessage:
company = yfinance.Ticker(ticker) company = yfinance.Ticker(ticker)

+ 1
- 1
api/core/tools/provider/builtin/yahoo/tools/ticker.py Просмотреть файл

try: try:
return self.create_text_message(self.run(ticker=query)) return self.create_text_message(self.run(ticker=query))
except (HTTPError, ReadTimeout): except (HTTPError, ReadTimeout):
return self.create_text_message(f'There is a internet connection problem. Please try again later.')
return self.create_text_message('There is a internet connection problem. Please try again later.')
def run(self, ticker: str) -> str: def run(self, ticker: str) -> str:
return str(Ticker(ticker).info) return str(Ticker(ticker).info)

+ 1
- 1
api/core/tools/tool/tool.py Просмотреть файл

result += f"result link: {response.message}. please tell user to check it." result += f"result link: {response.message}. please tell user to check it."
elif response.type == ToolInvokeMessage.MessageType.IMAGE_LINK or \ elif response.type == ToolInvokeMessage.MessageType.IMAGE_LINK or \
response.type == ToolInvokeMessage.MessageType.IMAGE: response.type == ToolInvokeMessage.MessageType.IMAGE:
result += f"image has been created and sent to user already, you should tell user to check it now."
result += "image has been created and sent to user already, you should tell user to check it now."
elif response.type == ToolInvokeMessage.MessageType.BLOB: elif response.type == ToolInvokeMessage.MessageType.BLOB:
if len(response.message) > 114: if len(response.message) > 114:
result += str(response.message[:114]) + '...' result += str(response.message[:114]) + '...'

+ 1
- 1
api/libs/helper.py Просмотреть файл

datetime.strptime(value, self.format) datetime.strptime(value, self.format)
except ValueError: except ValueError:
error = ('Invalid {arg}: {val}. {arg} must be conform to the format {format}' error = ('Invalid {arg}: {val}. {arg} must be conform to the format {format}'
.format(arg=self.argument, val=value, lo=self.format))
.format(arg=self.argument, val=value, format=self.format))
raise ValueError(error) raise ValueError(error)


return value return value

+ 7
- 2
api/pyproject.toml Просмотреть файл

[tool.ruff.lint] [tool.ruff.lint]
ignore-init-module-imports = true ignore-init-module-imports = true
select = [ select = [
"F401", # unused-import
"F", # pyflakes rules
"I001", # unsorted-imports "I001", # unsorted-imports
"I002", # missing-required-import "I002", # missing-required-import
"F811", # redefined-while-unused
]
ignore = [
"F403", # undefined-local-with-import-star
"F405", # undefined-local-with-import-star-usage
"F821", # undefined-name
"F841", # unused-variable
] ]

+ 4
- 4
api/services/dataset_service.py Просмотреть файл

) )
except LLMBadRequestError: except LLMBadRequestError:
raise ValueError( raise ValueError(
f"No Embedding Model available. Please configure a valid provider "
f"in the Settings -> Model Provider.")
"No Embedding Model available. Please configure a valid provider "
"in the Settings -> Model Provider.")
except ProviderTokenNotInitError as ex: except ProviderTokenNotInitError as ex:
raise ValueError(f"The dataset in unavailable, due to: " raise ValueError(f"The dataset in unavailable, due to: "
f"{ex.description}") f"{ex.description}")
filtered_data['collection_binding_id'] = dataset_collection_binding.id filtered_data['collection_binding_id'] = dataset_collection_binding.id
except LLMBadRequestError: except LLMBadRequestError:
raise ValueError( raise ValueError(
f"No Embedding Model available. Please configure a valid provider "
f"in the Settings -> Model Provider.")
"No Embedding Model available. Please configure a valid provider "
"in the Settings -> Model Provider.")
except ProviderTokenNotInitError as ex: except ProviderTokenNotInitError as ex:
raise ValueError(ex.description) raise ValueError(ex.description)



+ 4
- 4
api/services/tools_manage_service.py Просмотреть файл

:param provider: the provider dict :param provider: the provider dict
""" """
url_prefix = (current_app.config.get("CONSOLE_API_URL") url_prefix = (current_app.config.get("CONSOLE_API_URL")
+ f"/console/api/workspaces/current/tool-provider/builtin/")
+ "/console/api/workspaces/current/tool-provider/builtin/")
if 'icon' in provider: if 'icon' in provider:
if provider['type'] == UserToolProvider.ProviderType.BUILTIN.value: if provider['type'] == UserToolProvider.ProviderType.BUILTIN.value:
tool_bundles, schema_type = ToolManageService.convert_schema_to_tool_bundles(schema, extra_info) tool_bundles, schema_type = ToolManageService.convert_schema_to_tool_bundles(schema, extra_info)
if len(tool_bundles) > 10: if len(tool_bundles) > 10:
raise ValueError(f'the number of apis should be less than 10')
raise ValueError('the number of apis should be less than 10')


# create db provider # create db provider
db_provider = ApiToolProvider( db_provider = ApiToolProvider(
# try to parse schema, avoid SSRF attack # try to parse schema, avoid SSRF attack
ToolManageService.parser_api_schema(schema) ToolManageService.parser_api_schema(schema)
except Exception as e: except Exception as e:
raise ValueError(f'invalid schema, please check the url you provided')
raise ValueError('invalid schema, please check the url you provided')
return { return {
'schema': schema 'schema': schema
try: try:
tool_bundles, _ = ApiBasedToolSchemaParser.auto_parse_to_tool_bundle(schema) tool_bundles, _ = ApiBasedToolSchemaParser.auto_parse_to_tool_bundle(schema)
except Exception as e: except Exception as e:
raise ValueError(f'invalid schema')
raise ValueError('invalid schema')
# get tool bundle # get tool bundle
tool_bundle = next(filter(lambda tb: tb.operation_id == tool_name, tool_bundles), None) tool_bundle = next(filter(lambda tb: tb.operation_id == tool_name, tool_bundles), None)

Загрузка…
Отмена
Сохранить