
[Chore/Refactor] Switch from MyPy to Basedpyright for type checking (#25047)

Signed-off-by: -LAN- <laipz8200@outlook.com>
tags/1.8.1
-LAN- 1 month ago
commit 9d5956cef8
84 changed files with 2414 additions and 2385 deletions
  1. .github/workflows/api-tests.yml (+2 -8)
  2. .gitignore (+1 -2)
  3. CLAUDE.md (+1 -1)
  4. api/README.md (+1 -1)
  5. api/commands.py (+1 -1)
  6. api/configs/remote_settings_sources/nacos/__init__.py (+1 -1)
  7. api/configs/remote_settings_sources/nacos/http_request.py (+1 -1)
  8. api/controllers/console/auth/login.py (+4 -4)
  9. api/controllers/service_api/app/file_preview.py (+1 -1)
  10. api/controllers/service_api/dataset/document.py (+1 -1)
  11. api/core/app/app_config/features/more_like_this/manager.py (+1 -1)
  12. api/core/app/apps/advanced_chat/generate_task_pipeline.py (+15 -22)
  13. api/core/app/apps/base_app_queue_manager.py (+1 -1)
  14. api/core/app/apps/workflow/generate_task_pipeline.py (+9 -10)
  15. api/core/app/entities/app_invoke_entities.py (+2 -2)
  16. api/core/app/task_pipeline/message_cycle_manager.py (+1 -1)
  17. api/core/callback_handler/index_tool_callback_handler.py (+1 -1)
  18. api/core/extension/extensible.py (+2 -2)
  19. api/core/extension/extension.py (+0 -6)
  20. api/core/external_data_tool/factory.py (+0 -1)
  21. api/core/helper/marketplace.py (+1 -1)
  22. api/core/indexing_runner.py (+1 -2)
  23. api/core/llm_generator/llm_generator.py (+1 -1)
  24. api/core/mcp/auth/auth_flow.py (+2 -2)
  25. api/core/mcp/mcp_client.py (+2 -3)
  26. api/core/moderation/factory.py (+0 -1)
  27. api/core/moderation/output_moderation.py (+1 -1)
  28. api/core/ops/ops_trace_manager.py (+2 -2)
  29. api/core/plugin/utils/chunk_merger.py (+2 -2)
  30. api/core/rag/datasource/vdb/analyticdb/analyticdb_vector_sql.py (+2 -2)
  31. api/core/rag/datasource/vdb/clickzetta/clickzetta_vector.py (+6 -6)
  32. api/core/rag/datasource/vdb/couchbase/couchbase_vector.py (+3 -3)
  33. api/core/rag/datasource/vdb/matrixone/matrixone_vector.py (+1 -1)
  34. api/core/rag/datasource/vdb/opensearch/opensearch_vector.py (+1 -1)
  35. api/core/rag/datasource/vdb/tablestore/tablestore_vector.py (+1 -1)
  36. api/core/rag/extractor/unstructured/unstructured_doc_extractor.py (+1 -1)
  37. api/core/rag/index_processor/processor/qa_index_processor.py (+2 -2)
  38. api/core/rag/retrieval/dataset_retrieval.py (+5 -7)
  39. api/core/rag/retrieval/output_parser/structured_chat.py (+1 -1)
  40. api/core/rag/retrieval/router/multi_dataset_function_call_router.py (+1 -1)
  41. api/core/rag/retrieval/router/multi_dataset_react_route.py (+2 -2)
  42. api/core/repositories/celery_workflow_execution_repository.py (+1 -1)
  43. api/core/repositories/celery_workflow_node_execution_repository.py (+2 -2)
  44. api/core/repositories/sqlalchemy_workflow_node_execution_repository.py (+1 -1)
  45. api/core/tools/custom_tool/provider.py (+1 -1)
  46. api/core/tools/entities/values.py (+1 -1)
  47. api/core/tools/tool_manager.py (+7 -10)
  48. api/core/workflow/nodes/answer/answer_stream_processor.py (+1 -1)
  49. api/core/workflow/nodes/document_extractor/node.py (+3 -3)
  50. api/core/workflow/nodes/knowledge_retrieval/knowledge_retrieval_node.py (+1 -1)
  51. api/core/workflow/nodes/loop/loop_node.py (+1 -1)
  52. api/core/workflow/nodes/parameter_extractor/entities.py (+0 -4)
  53. api/core/workflow/nodes/variable_assigner/common/helpers.py (+1 -1)
  54. api/events/event_handlers/__init__.py (+27 -9)
  55. api/events/event_handlers/update_app_dataset_join_when_app_published_workflow_updated.py (+1 -1)
  56. api/events/event_handlers/update_provider_when_message_created.py (+1 -1)
  57. api/extensions/ext_sentry.py (+1 -1)
  58. api/extensions/storage/clickzetta_volume/clickzetta_volume_storage.py (+5 -5)
  59. api/extensions/storage/clickzetta_volume/file_lifecycle.py (+11 -11)
  60. api/extensions/storage/clickzetta_volume/volume_permissions.py (+7 -7)
  61. api/libs/helper.py (+1 -1)
  62. api/libs/sendgrid.py (+3 -3)
  63. api/libs/smtp.py (+3 -3)
  64. api/models/dataset.py (+1 -1)
  65. api/pyproject.toml (+1 -2)
  66. api/pyrightconfig.json (+48 -0)
  67. api/repositories/sqlalchemy_api_workflow_run_repository.py (+4 -4)
  68. api/schedule/clean_workflow_runlogs_precise.py (+2 -2)
  69. api/schedule/queue_monitor_task.py (+2 -2)
  70. api/services/annotation_service.py (+1 -1)
  71. api/services/app_generate_service.py (+1 -1)
  72. api/services/app_service.py (+2 -2)
  73. api/services/external_knowledge_service.py (+1 -1)
  74. api/services/plugin/data_migration.py (+1 -1)
  75. api/services/tools/tools_transform_service.py (+1 -1)
  76. api/tasks/annotation/delete_annotation_index_task.py (+1 -1)
  77. api/tasks/batch_create_segment_to_index_task.py (+1 -1)
  78. api/tasks/clean_dataset_task.py (+2 -2)
  79. api/tasks/delete_account_task.py (+1 -1)
  80. api/tasks/process_tenant_plugin_autoupgrade_check_task.py (+1 -1)
  81. api/uv.lock (+2168 -2178)
  82. dev/basedpyright-check (+9 -0)
  83. dev/reformat (+2 -2)
  84. web/.husky/pre-commit (+0 -1)

.github/workflows/api-tests.yml (+2 -8)

@@ -62,14 +62,8 @@ jobs:
- name: Run dify config tests
run: uv run --project api dev/pytest/pytest_config_tests.py

- name: MyPy Cache
uses: actions/cache@v4
with:
path: api/.mypy_cache
key: mypy-${{ matrix.python-version }}-${{ runner.os }}-${{ hashFiles('api/uv.lock') }}

- name: Run MyPy Checks
run: dev/mypy-check
- name: Run Basedpyright Checks
run: dev/basedpyright-check

- name: Set up dotenvs
run: |

.gitignore (+1 -2)

@@ -123,7 +123,7 @@ venv.bak/
# mkdocs documentation
/site

# mypy
# type checking
.mypy_cache/
.dmypy.json
dmypy.json
@@ -195,7 +195,6 @@ sdks/python-client/dify_client.egg-info
.vscode/*
!.vscode/launch.json.template
!.vscode/README.md
pyrightconfig.json
api/.vscode
# vscode Code History Extension
.history

CLAUDE.md (+1 -1)

@@ -32,7 +32,7 @@ uv run --project api pytest tests/integration_tests/ # Integration tests
./dev/reformat # Run all formatters and linters
uv run --project api ruff check --fix ./ # Fix linting issues
uv run --project api ruff format ./ # Format code
uv run --project api mypy . # Type checking
uv run --directory api basedpyright # Type checking
```

### Frontend (Web)

api/README.md (+1 -1)

@@ -108,5 +108,5 @@ uv run celery -A app.celery beat
../dev/reformat # Run all formatters and linters
uv run ruff check --fix ./ # Fix linting issues
uv run ruff format ./ # Format code
uv run mypy . # Type checking
uv run basedpyright . # Type checking
```

api/commands.py (+1 -1)

@@ -571,7 +571,7 @@ def old_metadata_migration():
for document in documents:
if document.doc_metadata:
doc_metadata = document.doc_metadata
for key, value in doc_metadata.items():
for key in doc_metadata:
for field in BuiltInField:
if field.value == key:
break
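
Most of the small edits in this commit follow one pattern: basedpyright's strict mode reports bindings that are assigned but never read, which the previous MyPy setup did not. A minimal runnable sketch of the loop case fixed above, with hypothetical data:

```python
doc_metadata = {"language": "en", "source": "upload"}

# Before: `value` is bound but never read, so the checker flags an unused variable.
for key, value in doc_metadata.items():
    print(key)

# After: iterate over the keys alone and bind nothing that goes unused.
for key in doc_metadata:
    print(key)
```

Where a value has to be produced anyway, the commit assigns it to `_` instead: for example, `_ = int(version_str)` in file_lifecycle.py below keeps the ValueError-based validation while discarding the parsed number.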

api/configs/remote_settings_sources/nacos/__init__.py (+1 -1)

@@ -29,7 +29,7 @@ class NacosSettingsSource(RemoteSettingsSource):
try:
content = NacosHttpClient().http_request("/nacos/v1/cs/configs", method="GET", headers={}, params=params)
self.remote_configs = self._parse_config(content)
except Exception as e:
except Exception:
logger.exception("[get-access-token] exception occurred")
raise
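
The same rule drives the many `except Exception as e:` → `except Exception:` edits throughout this diff: the bound `e` was never referenced, because `logging.Logger.exception()` reads the active exception from the interpreter state rather than from a local. A small runnable sketch:

```python
import logging

logging.basicConfig()
logger = logging.getLogger(__name__)

def fetch_config() -> None:
    try:
        raise TimeoutError("nacos unreachable")  # stand-in for the real HTTP call
    except Exception:  # no `as e` needed: exception() logs the current traceback
        logger.exception("[get-access-token] exception occurred")
        raise
```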


api/configs/remote_settings_sources/nacos/http_request.py (+1 -1)

@@ -77,6 +77,6 @@ class NacosHttpClient:
self.token = response_data.get("accessToken")
self.token_ttl = response_data.get("tokenTtl", 18000)
self.token_expire_time = current_time + self.token_ttl - 10
except Exception as e:
except Exception:
logger.exception("[get-access-token] exception occur")
raise

api/controllers/console/auth/login.py (+4 -4)

@@ -130,7 +130,7 @@ class ResetPasswordSendEmailApi(Resource):
language = "en-US"
try:
account = AccountService.get_user_through_email(args["email"])
except AccountRegisterError as are:
except AccountRegisterError:
raise AccountInFreezeError()

if account is None:
@@ -162,7 +162,7 @@ class EmailCodeLoginSendEmailApi(Resource):
language = "en-US"
try:
account = AccountService.get_user_through_email(args["email"])
except AccountRegisterError as are:
except AccountRegisterError:
raise AccountInFreezeError()

if account is None:
@@ -200,7 +200,7 @@ class EmailCodeLoginApi(Resource):
AccountService.revoke_email_code_login_token(args["token"])
try:
account = AccountService.get_user_through_email(user_email)
except AccountRegisterError as are:
except AccountRegisterError:
raise AccountInFreezeError()
if account:
tenants = TenantService.get_join_tenants(account)
@@ -223,7 +223,7 @@ class EmailCodeLoginApi(Resource):
)
except WorkSpaceNotAllowedCreateError:
raise NotAllowedCreateWorkspace()
except AccountRegisterError as are:
except AccountRegisterError:
raise AccountInFreezeError()
except WorkspacesLimitExceededError:
raise WorkspacesLimitExceeded()

api/controllers/service_api/app/file_preview.py (+1 -1)

@@ -59,7 +59,7 @@ class FilePreviewApi(Resource):
args = file_preview_parser.parse_args()

# Validate file ownership and get file objects
message_file, upload_file = self._validate_file_ownership(file_id, app_model.id)
_, upload_file = self._validate_file_ownership(file_id, app_model.id)

# Get file content generator
try:

api/controllers/service_api/dataset/document.py (+1 -1)

@@ -410,7 +410,7 @@ class DocumentUpdateByFileApi(DatasetApiResource):
DocumentService.document_create_args_validate(knowledge_config)

try:
documents, batch = DocumentService.save_document_with_dataset_id(
documents, _ = DocumentService.save_document_with_dataset_id(
dataset=dataset,
knowledge_config=knowledge_config,
account=dataset.created_by_account,

api/core/app/app_config/features/more_like_this/manager.py (+1 -1)

@@ -26,7 +26,7 @@ class MoreLikeThisConfigManager:
def validate_and_set_defaults(cls, config: dict) -> tuple[dict, list[str]]:
try:
return AppConfigModel.model_validate(config).model_dump(), ["more_like_this"]
except ValidationError as e:
except ValidationError:
raise ValueError(
"more_like_this must be of dict type and enabled in more_like_this must be of boolean type"
)

api/core/app/apps/advanced_chat/generate_task_pipeline.py (+15 -22)

@@ -310,13 +310,8 @@ class AdvancedChatAppGenerateTaskPipeline:
err = self._base_task_pipeline._handle_error(event=event, session=session, message_id=self._message_id)
yield self._base_task_pipeline._error_to_stream_response(err)

def _handle_workflow_started_event(
self, event: QueueWorkflowStartedEvent, *, graph_runtime_state: Optional[GraphRuntimeState] = None, **kwargs
) -> Generator[StreamResponse, None, None]:
def _handle_workflow_started_event(self, **kwargs) -> Generator[StreamResponse, None, None]:
"""Handle workflow started events."""
# Override graph runtime state - this is a side effect but necessary
graph_runtime_state = event.graph_runtime_state

with self._database_session() as session:
workflow_execution = self._workflow_cycle_manager.handle_workflow_run_start()
self._workflow_run_id = workflow_execution.id_
@@ -337,15 +332,14 @@ class AdvancedChatAppGenerateTaskPipeline:
"""Handle node retry events."""
self._ensure_workflow_initialized()

with self._database_session() as session:
workflow_node_execution = self._workflow_cycle_manager.handle_workflow_node_execution_retried(
workflow_execution_id=self._workflow_run_id, event=event
)
node_retry_resp = self._workflow_response_converter.workflow_node_retry_to_stream_response(
event=event,
task_id=self._application_generate_entity.task_id,
workflow_node_execution=workflow_node_execution,
)
workflow_node_execution = self._workflow_cycle_manager.handle_workflow_node_execution_retried(
workflow_execution_id=self._workflow_run_id, event=event
)
node_retry_resp = self._workflow_response_converter.workflow_node_retry_to_stream_response(
event=event,
task_id=self._application_generate_entity.task_id,
workflow_node_execution=workflow_node_execution,
)

if node_retry_resp:
yield node_retry_resp
@@ -379,13 +373,12 @@ class AdvancedChatAppGenerateTaskPipeline:
self._workflow_response_converter.fetch_files_from_node_outputs(event.outputs or {})
)

with self._database_session() as session:
workflow_node_execution = self._workflow_cycle_manager.handle_workflow_node_execution_success(event=event)
node_finish_resp = self._workflow_response_converter.workflow_node_finish_to_stream_response(
event=event,
task_id=self._application_generate_entity.task_id,
workflow_node_execution=workflow_node_execution,
)
workflow_node_execution = self._workflow_cycle_manager.handle_workflow_node_execution_success(event=event)
node_finish_resp = self._workflow_response_converter.workflow_node_finish_to_stream_response(
event=event,
task_id=self._application_generate_entity.task_id,
workflow_node_execution=workflow_node_execution,
)

self._save_output_for_event(event, workflow_node_execution.id)
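
In this file and in workflow/generate_task_pipeline.py below, the `with self._database_session() as session:` wrappers are removed around calls that never used `session`, eliminating an unused-binding diagnostic along with a level of nesting. An illustrative sketch with simplified stand-ins, not the pipeline's real API:

```python
from collections.abc import Iterator
from contextlib import contextmanager

@contextmanager
def database_session() -> Iterator[str]:
    yield "session"

def handle_node_retry() -> str:
    return "retry-response"

# Before: `session` is bound but unused inside the block.
def handle_before() -> str:
    with database_session() as session:
        return handle_node_retry()

# After: the wrapper added nothing, so the call stands on its own.
def handle_after() -> str:
    return handle_node_retry()
```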


api/core/app/apps/base_app_queue_manager.py (+1 -1)

@@ -159,7 +159,7 @@ class AppQueueManager:
def _check_for_sqlalchemy_models(self, data: Any):
# from entity to dict or list
if isinstance(data, dict):
for key, value in data.items():
for value in data.values():
self._check_for_sqlalchemy_models(value)
elif isinstance(data, list):
for item in data:

api/core/app/apps/workflow/generate_task_pipeline.py (+9 -10)

@@ -300,16 +300,15 @@ class WorkflowAppGenerateTaskPipeline:
"""Handle node retry events."""
self._ensure_workflow_initialized()

with self._database_session() as session:
workflow_node_execution = self._workflow_cycle_manager.handle_workflow_node_execution_retried(
workflow_execution_id=self._workflow_run_id,
event=event,
)
response = self._workflow_response_converter.workflow_node_retry_to_stream_response(
event=event,
task_id=self._application_generate_entity.task_id,
workflow_node_execution=workflow_node_execution,
)
workflow_node_execution = self._workflow_cycle_manager.handle_workflow_node_execution_retried(
workflow_execution_id=self._workflow_run_id,
event=event,
)
response = self._workflow_response_converter.workflow_node_retry_to_stream_response(
event=event,
task_id=self._application_generate_entity.task_id,
workflow_node_execution=workflow_node_execution,
)

if response:
yield response

api/core/app/entities/app_invoke_entities.py (+2 -2)

@@ -1,5 +1,5 @@
from collections.abc import Mapping, Sequence
from enum import Enum
from enum import StrEnum
from typing import Any, Optional

from pydantic import BaseModel, ConfigDict, Field, ValidationInfo, field_validator
@@ -11,7 +11,7 @@ from core.file import File, FileUploadConfig
from core.model_runtime.entities.model_entities import AIModelEntity


class InvokeFrom(Enum):
class InvokeFrom(StrEnum):
"""
Invoke From.
"""

api/core/app/task_pipeline/message_cycle_manager.py (+1 -1)

@@ -101,7 +101,7 @@ class MessageCycleManager:
try:
name = LLMGenerator.generate_conversation_name(app_model.tenant_id, query)
conversation.name = name
except Exception as e:
except Exception:
if dify_config.DEBUG:
logger.exception("generate conversation name failed, conversation_id: %s", conversation_id)
pass

api/core/callback_handler/index_tool_callback_handler.py (+1 -1)

@@ -67,7 +67,7 @@ class DatasetIndexToolCallbackHandler:
)
child_chunk = db.session.scalar(child_chunk_stmt)
if child_chunk:
segment = (
_ = (
db.session.query(DocumentSegment)
.where(DocumentSegment.id == child_chunk.segment_id)
.update(

api/core/extension/extensible.py (+2 -2)

@@ -91,7 +91,7 @@ class Extensible:

# Find extension class
extension_class = None
for name, obj in vars(mod).items():
for obj in vars(mod).values():
if isinstance(obj, type) and issubclass(obj, cls) and obj != cls:
extension_class = obj
break
@@ -123,7 +123,7 @@ class Extensible:
)
)

except Exception as e:
except Exception:
logger.exception("Error scanning extensions")
raise


api/core/extension/extension.py (+0 -6)

@@ -41,9 +41,3 @@ class Extension:
assert module_extension.extension_class is not None
t: type = module_extension.extension_class
return t

def validate_form_schema(self, module: ExtensionModule, extension_name: str, config: dict) -> None:
module_extension = self.module_extension(module, extension_name)
form_schema = module_extension.form_schema

# TODO validate form_schema

api/core/external_data_tool/factory.py (+0 -1)

@@ -22,7 +22,6 @@ class ExternalDataToolFactory:
:param config: the form config data
:return:
"""
code_based_extension.validate_form_schema(ExtensionModule.EXTERNAL_DATA_TOOL, name, config)
extension_class = code_based_extension.extension_class(ExtensionModule.EXTERNAL_DATA_TOOL, name)
# FIXME mypy issue here, figure out how to fix it
extension_class.validate_config(tenant_id, config) # type: ignore

api/core/helper/marketplace.py (+1 -1)

@@ -42,7 +42,7 @@ def batch_fetch_plugin_manifests_ignore_deserialization_error(
for plugin in response.json()["data"]["plugins"]:
try:
result.append(MarketplacePluginDeclaration(**plugin))
except Exception as e:
except Exception:
pass

return result

api/core/indexing_runner.py (+1 -2)

@@ -5,7 +5,7 @@ import re
import threading
import time
import uuid
from typing import Any, Optional, cast
from typing import Any, Optional

from flask import current_app
from sqlalchemy import select
@@ -397,7 +397,6 @@ class IndexingRunner:
)

# replace doc id to document model id
text_docs = cast(list[Document], text_docs)
for text_doc in text_docs:
if text_doc.metadata is not None:
text_doc.metadata["document_id"] = dataset_document.id

api/core/llm_generator/llm_generator.py (+1 -1)

@@ -66,7 +66,7 @@ class LLMGenerator:
try:
result_dict = json.loads(cleaned_answer)
answer = result_dict["Your Output"]
except json.JSONDecodeError as e:
except json.JSONDecodeError:
logger.exception("Failed to generate name after answer, use query instead")
answer = query
name = answer.strip()

api/core/mcp/auth/auth_flow.py (+2 -2)

@@ -101,7 +101,7 @@ def handle_callback(state_key: str, authorization_code: str) -> OAuthCallbackSta

def check_support_resource_discovery(server_url: str) -> tuple[bool, str]:
"""Check if the server supports OAuth 2.0 Resource Discovery."""
b_scheme, b_netloc, b_path, b_params, b_query, b_fragment = urlparse(server_url, "", True)
b_scheme, b_netloc, b_path, _, b_query, b_fragment = urlparse(server_url, "", True)
url_for_resource_discovery = f"{b_scheme}://{b_netloc}/.well-known/oauth-protected-resource{b_path}"
if b_query:
url_for_resource_discovery += f"?{b_query}"
@@ -117,7 +117,7 @@ def check_support_resource_discovery(server_url: str) -> tuple[bool, str]:
else:
return False, ""
return False, ""
except httpx.RequestError as e:
except httpx.RequestError:
# Not support resource discovery, fall back to well-known OAuth metadata
return False, ""


api/core/mcp/mcp_client.py (+2 -3)

@@ -2,7 +2,7 @@ import logging
from collections.abc import Callable
from contextlib import AbstractContextManager, ExitStack
from types import TracebackType
from typing import Any, Optional, cast
from typing import Any, Optional
from urllib.parse import urlparse

from core.mcp.client.sse_client import sse_client
@@ -116,8 +116,7 @@ class MCPClient:

self._session_context = ClientSession(*streams)
self._session = self._exit_stack.enter_context(self._session_context)
session = cast(ClientSession, self._session)
session.initialize()
self._session.initialize()
return

except MCPAuthError:
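
Several hunks (here, document_extractor/node.py, tool_manager.py, and the workflow-run repository) delete `typing.cast` calls that existed only to appease MyPy; basedpyright infers these types on its own. `ExitStack.enter_context()` in particular is generic over what the context manager yields, so the result is already typed as `ClientSession`. A simplified runnable sketch:

```python
from contextlib import ExitStack

class ClientSession:
    def __enter__(self) -> "ClientSession":
        return self

    def __exit__(self, *exc: object) -> None:
        return None

    def initialize(self) -> None:
        print("session initialized")

with ExitStack() as stack:
    session = stack.enter_context(ClientSession())  # inferred as ClientSession, no cast
    session.initialize()
```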

api/core/moderation/factory.py (+0 -1)

@@ -20,7 +20,6 @@ class ModerationFactory:
:param config: the form config data
:return:
"""
code_based_extension.validate_form_schema(ExtensionModule.MODERATION, name, config)
extension_class = code_based_extension.extension_class(ExtensionModule.MODERATION, name)
# FIXME: mypy error, try to fix it instead of using type: ignore
extension_class.validate_config(tenant_id, config) # type: ignore

api/core/moderation/output_moderation.py (+1 -1)

@@ -135,7 +135,7 @@ class OutputModeration(BaseModel):

result: ModerationOutputsResult = moderation_factory.moderation_for_outputs(moderation_buffer)
return result
except Exception as e:
except Exception:
logger.exception("Moderation Output error, app_id: %s", app_id)

return None

api/core/ops/ops_trace_manager.py (+2 -2)

@@ -849,7 +849,7 @@ class TraceQueueManager:
if self.trace_instance:
trace_task.app_id = self.app_id
trace_manager_queue.put(trace_task)
except Exception as e:
except Exception:
logger.exception("Error adding trace task, trace_type %s", trace_task.trace_type)
finally:
self.start_timer()
@@ -868,7 +868,7 @@ class TraceQueueManager:
tasks = self.collect_tasks()
if tasks:
self.send_to_celery(tasks)
except Exception as e:
except Exception:
logger.exception("Error processing trace tasks")

def start_timer(self):

api/core/plugin/utils/chunk_merger.py (+2 -2)

@@ -1,6 +1,6 @@
from collections.abc import Generator
from dataclasses import dataclass, field
from typing import TypeVar, Union, cast
from typing import TypeVar, Union

from core.agent.entities import AgentInvokeMessage
from core.tools.entities.tool_entities import ToolInvokeMessage
@@ -85,7 +85,7 @@ def merge_blob_chunks(
message=ToolInvokeMessage.BlobMessage(blob=files[chunk_id].data[: files[chunk_id].bytes_written]),
meta=resp.meta,
)
yield cast(MessageType, merged_message)
yield merged_message
# Clean up the buffer
del files[chunk_id]
else:

api/core/rag/datasource/vdb/analyticdb/analyticdb_vector_sql.py (+2 -2)

@@ -228,7 +228,7 @@ class AnalyticdbVectorBySql:
)
documents = []
for record in cur:
id, vector, score, page_content, metadata = record
_, vector, score, page_content, metadata = record
if score >= score_threshold:
metadata["score"] = score
doc = Document(
@@ -260,7 +260,7 @@ class AnalyticdbVectorBySql:
)
documents = []
for record in cur:
id, vector, page_content, metadata, score = record
_, vector, page_content, metadata, score = record
metadata["score"] = score
doc = Document(
page_content=page_content,

api/core/rag/datasource/vdb/clickzetta/clickzetta_vector.py (+6 -6)

@@ -701,7 +701,7 @@ class ClickzettaVector(BaseVector):
len(data_rows),
vector_dimension,
)
except (RuntimeError, ValueError, TypeError, ConnectionError) as e:
except (RuntimeError, ValueError, TypeError, ConnectionError):
logger.exception("Parameterized SQL execution failed for %d documents", len(data_rows))
logger.exception("SQL template: %s", insert_sql)
logger.exception("Sample data row: %s", data_rows[0] if data_rows else "None")
@@ -787,7 +787,7 @@ class ClickzettaVector(BaseVector):
document_ids_filter = kwargs.get("document_ids_filter")

# Handle filter parameter from canvas (workflow)
filter_param = kwargs.get("filter", {})
_ = kwargs.get("filter", {})

# Build filter clause
filter_clauses = []
@@ -879,7 +879,7 @@ class ClickzettaVector(BaseVector):
document_ids_filter = kwargs.get("document_ids_filter")

# Handle filter parameter from canvas (workflow)
filter_param = kwargs.get("filter", {})
_ = kwargs.get("filter", {})

# Build filter clause
filter_clauses = []
@@ -938,7 +938,7 @@ class ClickzettaVector(BaseVector):
metadata = {}
else:
metadata = {}
except (json.JSONDecodeError, TypeError) as e:
except (json.JSONDecodeError, TypeError):
logger.exception("JSON parsing failed")
# Fallback: extract document_id with regex

@@ -956,7 +956,7 @@ class ClickzettaVector(BaseVector):
metadata["score"] = 1.0 # Clickzetta doesn't provide relevance scores
doc = Document(page_content=row[1], metadata=metadata)
documents.append(doc)
except (RuntimeError, ValueError, TypeError, ConnectionError) as e:
except (RuntimeError, ValueError, TypeError, ConnectionError):
logger.exception("Full-text search failed")
# Fallback to LIKE search if full-text search fails
return self._search_by_like(query, **kwargs)
@@ -978,7 +978,7 @@ class ClickzettaVector(BaseVector):
document_ids_filter = kwargs.get("document_ids_filter")

# Handle filter parameter from canvas (workflow)
filter_param = kwargs.get("filter", {})
_ = kwargs.get("filter", {})

# Build filter clause
filter_clauses = []

api/core/rag/datasource/vdb/couchbase/couchbase_vector.py (+3 -3)

@@ -212,10 +212,10 @@ class CouchbaseVector(BaseVector):

documents_to_insert = [
{"text": text, "embedding": vector, "metadata": metadata}
for id, text, vector, metadata in zip(uuids, texts, embeddings, metadatas)
for _, text, vector, metadata in zip(uuids, texts, embeddings, metadatas)
]
for doc, id in zip(documents_to_insert, uuids):
result = self._scope.collection(self._collection_name).upsert(id, doc)
_ = self._scope.collection(self._collection_name).upsert(id, doc)

doc_ids.extend(uuids)

@@ -241,7 +241,7 @@ class CouchbaseVector(BaseVector):
"""
try:
self._cluster.query(query, named_parameters={"doc_ids": ids}).execute()
except Exception as e:
except Exception:
logger.exception("Failed to delete documents, ids: %s", ids)

def delete_by_document_id(self, document_id: str):

api/core/rag/datasource/vdb/matrixone/matrixone_vector.py (+1 -1)

@@ -99,7 +99,7 @@ class MatrixoneVector(BaseVector):
return client
try:
client.create_full_text_index()
except Exception as e:
except Exception:
logger.exception("Failed to create full text index")
redis_client.set(collection_exist_cache_key, 1, ex=3600)
return client

api/core/rag/datasource/vdb/opensearch/opensearch_vector.py (+1 -1)

@@ -197,7 +197,7 @@ class OpenSearchVector(BaseVector):

try:
response = self._client.search(index=self._collection_name.lower(), body=query)
except Exception as e:
except Exception:
logger.exception("Error executing vector search, query: %s", query)
raise


api/core/rag/datasource/vdb/tablestore/tablestore_vector.py (+1 -1)

@@ -71,7 +71,7 @@ class TableStoreVector(BaseVector):
table_result = result.get_result_by_table(self._table_name)
for item in table_result:
if item.is_ok and item.row:
kv = {k: v for k, v, t in item.row.attribute_columns}
kv = {k: v for k, v, _ in item.row.attribute_columns}
docs.append(
Document(
page_content=kv[Field.CONTENT_KEY.value], metadata=json.loads(kv[Field.METADATA_KEY.value])

api/core/rag/extractor/unstructured/unstructured_doc_extractor.py (+1 -1)

@@ -23,7 +23,7 @@ class UnstructuredWordExtractor(BaseExtractor):
unstructured_version = tuple(int(x) for x in __unstructured_version__.split("."))
# check the file extension
try:
import magic # noqa: F401
import magic # noqa: F401 # pyright: ignore[reportUnusedImport]

is_doc = detect_filetype(self._file_path) == FileType.DOC
except ImportError:

api/core/rag/index_processor/processor/qa_index_processor.py (+2 -2)

@@ -113,7 +113,7 @@ class QAIndexProcessor(BaseIndexProcessor):
# Skip the first row
df = pd.read_csv(file)
text_docs = []
for index, row in df.iterrows():
for _, row in df.iterrows():
data = Document(page_content=row.iloc[0], metadata={"answer": row.iloc[1]})
text_docs.append(data)
if len(text_docs) == 0:
@@ -183,7 +183,7 @@ class QAIndexProcessor(BaseIndexProcessor):
qa_document.metadata["doc_hash"] = hash
qa_documents.append(qa_document)
format_documents.extend(qa_documents)
except Exception as e:
except Exception:
logger.exception("Failed to format qa document")

all_qa_documents.extend(format_documents)

api/core/rag/retrieval/dataset_retrieval.py (+5 -7)

@@ -9,7 +9,6 @@ from typing import Any, Optional, Union, cast
from flask import Flask, current_app
from sqlalchemy import Float, and_, or_, select, text
from sqlalchemy import cast as sqlalchemy_cast
from sqlalchemy.orm import Session

from core.app.app_config.entities import (
DatasetEntity,
@@ -526,7 +525,7 @@ class DatasetRetrieval:
)
child_chunk = db.session.scalar(child_chunk_stmt)
if child_chunk:
segment = (
_ = (
db.session.query(DocumentSegment)
.where(DocumentSegment.id == child_chunk.segment_id)
.update(
@@ -593,9 +592,8 @@ class DatasetRetrieval:
metadata_condition: Optional[MetadataCondition] = None,
):
with flask_app.app_context():
with Session(db.engine) as session:
dataset_stmt = select(Dataset).where(Dataset.id == dataset_id)
dataset = session.scalar(dataset_stmt)
dataset_stmt = select(Dataset).where(Dataset.id == dataset_id)
dataset = db.session.scalar(dataset_stmt)

if not dataset:
return []
@@ -987,7 +985,7 @@ class DatasetRetrieval:
)

# handle invoke result
result_text, usage = self._handle_invoke_result(invoke_result=invoke_result)
result_text, _ = self._handle_invoke_result(invoke_result=invoke_result)

result_text_json = parse_and_check_json_markdown(result_text, [])
automatic_metadata_filters = []
@@ -1002,7 +1000,7 @@ class DatasetRetrieval:
"condition": item.get("comparison_operator"),
}
)
except Exception as e:
except Exception:
return None
return automatic_metadata_filters


api/core/rag/retrieval/output_parser/structured_chat.py (+1 -1)

@@ -19,5 +19,5 @@ class StructuredChatOutputParser:
return ReactAction(response["action"], response.get("action_input", {}), text)
else:
return ReactFinish({"output": text}, text)
except Exception as e:
except Exception:
raise ValueError(f"Could not parse LLM output: {text}")

api/core/rag/retrieval/router/multi_dataset_function_call_router.py (+1 -1)

@@ -38,5 +38,5 @@ class FunctionCallMultiDatasetRouter:
# get retrieval model config
return result.message.tool_calls[0].function.name
return None
except Exception as e:
except Exception:
return None

api/core/rag/retrieval/router/multi_dataset_react_route.py (+2 -2)

@@ -77,7 +77,7 @@ class ReactMultiDatasetRouter:
user_id=user_id,
tenant_id=tenant_id,
)
except Exception as e:
except Exception:
return None

def _react_invoke(
@@ -120,7 +120,7 @@ class ReactMultiDatasetRouter:
memory=None,
model_config=model_config,
)
result_text, usage = self._invoke_llm(
result_text, _ = self._invoke_llm(
completion_param=model_config.parameters,
model_instance=model_instance,
prompt_messages=prompt_messages,

api/core/repositories/celery_workflow_execution_repository.py (+1 -1)

@@ -119,7 +119,7 @@ class CeleryWorkflowExecutionRepository(WorkflowExecutionRepository):

logger.debug("Queued async save for workflow execution: %s", execution.id_)

except Exception as e:
except Exception:
logger.exception("Failed to queue save operation for execution %s", execution.id_)
# In case of Celery failure, we could implement a fallback to synchronous save
# For now, we'll re-raise the exception

api/core/repositories/celery_workflow_node_execution_repository.py (+2 -2)

@@ -142,7 +142,7 @@ class CeleryWorkflowNodeExecutionRepository(WorkflowNodeExecutionRepository):

logger.debug("Cached and queued async save for workflow node execution: %s", execution.id)

except Exception as e:
except Exception:
logger.exception("Failed to cache or queue save operation for node execution %s", execution.id)
# In case of Celery failure, we could implement a fallback to synchronous save
# For now, we'll re-raise the exception
@@ -185,6 +185,6 @@ class CeleryWorkflowNodeExecutionRepository(WorkflowNodeExecutionRepository):
logger.debug("Retrieved %d workflow node executions for run %s from cache", len(result), workflow_run_id)
return result

except Exception as e:
except Exception:
logger.exception("Failed to get workflow node executions for run %s from cache", workflow_run_id)
return []

api/core/repositories/sqlalchemy_workflow_node_execution_repository.py (+1 -1)

@@ -250,7 +250,7 @@ class SQLAlchemyWorkflowNodeExecutionRepository(WorkflowNodeExecutionRepository)
logger.debug("Updating cache for node_execution_id: %s", db_model.node_execution_id)
self._node_execution_cache[db_model.node_execution_id] = db_model

except Exception as e:
except Exception:
logger.exception("Failed to save workflow node execution after all retries")
raise


api/core/tools/custom_tool/provider.py (+1 -1)

@@ -191,7 +191,7 @@ class ApiToolProviderController(ToolProviderController):
self.tools = tools
return tools

def get_tool(self, tool_name: str):
def get_tool(self, tool_name: str) -> ApiTool:
"""
get tool by name


api/core/tools/entities/values.py (+1 -1)

@@ -107,5 +107,5 @@ default_tool_label_dict = {
),
}

default_tool_labels = [v for k, v in default_tool_label_dict.items()]
default_tool_labels = list(default_tool_label_dict.values())
default_tool_label_name_list = [label.name for label in default_tool_labels]

api/core/tools/tool_manager.py (+7 -10)

@@ -303,16 +303,13 @@ class ToolManager:
tenant_id=tenant_id,
controller=api_provider,
)
return cast(
ApiTool,
api_provider.get_tool(tool_name).fork_tool_runtime(
runtime=ToolRuntime(
tenant_id=tenant_id,
credentials=encrypter.decrypt(credentials),
invoke_from=invoke_from,
tool_invoke_from=tool_invoke_from,
)
),
return api_provider.get_tool(tool_name).fork_tool_runtime(
runtime=ToolRuntime(
tenant_id=tenant_id,
credentials=encrypter.decrypt(credentials),
invoke_from=invoke_from,
tool_invoke_from=tool_invoke_from,
)
)
elif provider_type == ToolProviderType.WORKFLOW:
workflow_provider_stmt = select(WorkflowToolProvider).where(
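
This cast removal is the flip side of the `get_tool() -> ApiTool` annotation added in custom_tool/provider.py above: once the provider declares its return type, the downstream `cast(ApiTool, ...)` wrapper becomes redundant. A sketch with simplified stand-in classes:

```python
class ApiTool:
    def fork_tool_runtime(self, runtime: str) -> "ApiTool":
        return self

class ApiToolProviderController:
    def get_tool(self, tool_name: str) -> ApiTool:  # annotation added by this commit
        return ApiTool()

provider = ApiToolProviderController()
tool = provider.get_tool("weather").fork_tool_runtime(runtime="rt")  # statically ApiTool
```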

api/core/workflow/nodes/answer/answer_stream_processor.py (+1 -1)

@@ -68,7 +68,7 @@ class AnswerStreamProcessor(StreamProcessor):

def reset(self) -> None:
self.route_position = {}
for answer_node_id, route_chunks in self.generate_routes.answer_generate_route.items():
for answer_node_id, _ in self.generate_routes.answer_generate_route.items():
self.route_position[answer_node_id] = 0
self.rest_node_ids = self.graph.node_ids.copy()
self.current_stream_chunk_generating_node_ids = {}

api/core/workflow/nodes/document_extractor/node.py (+3 -3)

@@ -5,7 +5,7 @@ import logging
import os
import tempfile
from collections.abc import Mapping, Sequence
from typing import Any, Optional, cast
from typing import Any, Optional

import chardet
import docx
@@ -428,9 +428,9 @@ def _download_file_content(file: File) -> bytes:
raise FileDownloadError("Missing URL for remote file")
response = ssrf_proxy.get(file.remote_url)
response.raise_for_status()
return cast(bytes, response.content)
return response.content
else:
return cast(bytes, file_manager.download(file))
return file_manager.download(file)
except Exception as e:
raise FileDownloadError(f"Error downloading file: {str(e)}") from e


api/core/workflow/nodes/knowledge_retrieval/knowledge_retrieval_node.py (+1 -1)

@@ -571,7 +571,7 @@ class KnowledgeRetrievalNode(BaseNode):
"condition": item.get("comparison_operator"),
}
)
except Exception as e:
except Exception:
return []
return automatic_metadata_filters


api/core/workflow/nodes/loop/loop_node.py (+1 -1)

@@ -324,7 +324,7 @@ class LoopNode(BaseNode):

# Process conditions if at least one variable is available
if available_conditions:
input_conditions, group_result, check_break_result = condition_processor.process_conditions(
_, _, check_break_result = condition_processor.process_conditions(
variable_pool=self.graph_runtime_state.variable_pool,
conditions=available_conditions,
operator=logical_operator,

api/core/workflow/nodes/parameter_extractor/entities.py (+0 -4)

@@ -43,10 +43,6 @@ def _validate_type(parameter_type: str) -> SegmentType:
return SegmentType(parameter_type)


class _ParameterConfigError(Exception):
pass


class ParameterConfig(BaseModel):
"""
Parameter Config.

api/core/workflow/nodes/variable_assigner/common/helpers.py (+1 -1)

@@ -25,7 +25,7 @@ _T = TypeVar("_T", bound=MutableMapping[str, Any])
def variable_to_processed_data(selector: Sequence[str], seg: Segment) -> UpdatedVariable:
if len(selector) < SELECTORS_LENGTH:
raise Exception("selector too short")
node_id, var_name = selector[:2]
_, var_name = selector[:2]
return UpdatedVariable(
name=var_name,
selector=list(selector[:2]),

api/events/event_handlers/__init__.py (+27 -9)

@@ -1,12 +1,30 @@
from .clean_when_dataset_deleted import handle
from .clean_when_document_deleted import handle
from .create_document_index import handle
from .create_installed_app_when_app_created import handle
from .create_site_record_when_app_created import handle
from .delete_tool_parameters_cache_when_sync_draft_workflow import handle
from .update_app_dataset_join_when_app_model_config_updated import handle
from .update_app_dataset_join_when_app_published_workflow_updated import handle
from .clean_when_dataset_deleted import handle as handle_clean_when_dataset_deleted
from .clean_when_document_deleted import handle as handle_clean_when_document_deleted
from .create_document_index import handle as handle_create_document_index
from .create_installed_app_when_app_created import handle as handle_create_installed_app_when_app_created
from .create_site_record_when_app_created import handle as handle_create_site_record_when_app_created
from .delete_tool_parameters_cache_when_sync_draft_workflow import (
handle as handle_delete_tool_parameters_cache_when_sync_draft_workflow,
)
from .update_app_dataset_join_when_app_model_config_updated import (
handle as handle_update_app_dataset_join_when_app_model_config_updated,
)
from .update_app_dataset_join_when_app_published_workflow_updated import (
handle as handle_update_app_dataset_join_when_app_published_workflow_updated,
)

# Consolidated handler replaces both deduct_quota_when_message_created and
# update_provider_last_used_at_when_message_created
from .update_provider_when_message_created import handle
from .update_provider_when_message_created import handle as handle_update_provider_when_message_created

__all__ = [
"handle_clean_when_dataset_deleted",
"handle_clean_when_document_deleted",
"handle_create_document_index",
"handle_create_installed_app_when_app_created",
"handle_create_site_record_when_app_created",
"handle_delete_tool_parameters_cache_when_sync_draft_workflow",
"handle_update_app_dataset_join_when_app_model_config_updated",
"handle_update_app_dataset_join_when_app_published_workflow_updated",
"handle_update_provider_when_message_created",
]
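
The rewritten api/events/event_handlers/__init__.py fixes a real shadowing hazard: eight modules each export a function literally named `handle`, so the old chain of `from .x import handle` lines rebound the same name again and again, leaving the re-exported `handle` pointing at only the last import. The handlers still worked because importing each module is what registers them, but basedpyright reports the redeclarations; aliasing each import and listing the aliases in `__all__` makes the re-exports explicit. A runnable sketch of the rebinding:

```python
# Successive imports of one name behave like successive assignments.
def handle(sender: str) -> str:  # stands in for clean_when_dataset_deleted.handle
    return f"dataset cleaned for {sender}"

first_binding = handle

def handle(sender: str) -> str:  # stands in for create_document_index.handle
    return f"index created for {sender}"

assert handle("app") == "index created for app"          # only the last binding survives
assert first_binding("app") == "dataset cleaned for app"
```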

api/events/event_handlers/update_app_dataset_join_when_app_published_workflow_updated.py (+1 -1)

@@ -61,7 +61,7 @@ def get_dataset_ids_from_workflow(published_workflow: Workflow) -> set[str]:
try:
node_data = KnowledgeRetrievalNodeData(**node.get("data", {}))
dataset_ids.update(dataset_id for dataset_id in node_data.dataset_ids)
except Exception as e:
except Exception:
continue

return dataset_ids

api/events/event_handlers/update_provider_when_message_created.py (+1 -1)

@@ -204,7 +204,7 @@ def _calculate_quota_usage(
elif quota_unit == QuotaUnit.TIMES:
return 1
return None
except Exception as e:
except Exception:
logger.exception("Failed to calculate quota usage")
return None


api/extensions/ext_sentry.py (+1 -1)

@@ -15,7 +15,7 @@ def init_app(app: DifyApp):

def before_send(event, hint):
if "exc_info" in hint:
exc_type, exc_value, tb = hint["exc_info"]
_, exc_value, _ = hint["exc_info"]
if parse_error.defaultErrorResponse in str(exc_value):
return None


api/extensions/storage/clickzetta_volume/clickzetta_volume_storage.py (+5 -5)

@@ -139,7 +139,7 @@ class ClickZettaVolumeStorage(BaseStorage):
schema=self._config.schema_name,
)
logger.debug("ClickZetta connection established")
except Exception as e:
except Exception:
logger.exception("Failed to connect to ClickZetta")
raise

@@ -150,7 +150,7 @@ class ClickZettaVolumeStorage(BaseStorage):
self._connection, self._config.volume_type, self._config.volume_name
)
logger.debug("Permission manager initialized")
except Exception as e:
except Exception:
logger.exception("Failed to initialize permission manager")
raise

@@ -213,7 +213,7 @@ class ClickZettaVolumeStorage(BaseStorage):
if fetch:
return cursor.fetchall()
return None
except Exception as e:
except Exception:
logger.exception("SQL execution failed: %s", sql)
raise

@@ -349,7 +349,7 @@ class ClickZettaVolumeStorage(BaseStorage):

# Find the downloaded file (may be in subdirectories)
downloaded_file = None
for root, dirs, files in os.walk(temp_dir):
for root, _, files in os.walk(temp_dir):
for file in files:
if file == filename or file == os.path.basename(filename):
downloaded_file = Path(root) / file
@@ -524,6 +524,6 @@ class ClickZettaVolumeStorage(BaseStorage):
logger.debug("Scanned %d items in path %s", len(result), path)
return result

except Exception as e:
except Exception:
logger.exception("Error scanning path %s", path)
return []

api/extensions/storage/clickzetta_volume/file_lifecycle.py (+11 -11)

@@ -145,7 +145,7 @@ class FileLifecycleManager:
logger.info("File %s saved with lifecycle management, version %s", filename, new_version)
return file_metadata

except Exception as e:
except Exception:
logger.exception("Failed to save file with lifecycle")
raise

@@ -163,7 +163,7 @@ class FileLifecycleManager:
if filename in metadata_dict:
return FileMetadata.from_dict(metadata_dict[filename])
return None
except Exception as e:
except Exception:
logger.exception("Failed to get file metadata for %s", filename)
return None

@@ -192,7 +192,7 @@ class FileLifecycleManager:
# Parse version number
version_str = file_path.split(".v")[-1].split(".")[0]
try:
version_num = int(version_str)
_ = int(version_str)
# Simplified processing here, should actually read metadata from version file
# Temporarily create basic metadata information
except ValueError:
@@ -203,7 +203,7 @@ class FileLifecycleManager:

return sorted(versions, key=lambda x: x.version or 0, reverse=True)

except Exception as e:
except Exception:
logger.exception("Failed to list file versions for %s", filename)
return []

@@ -237,7 +237,7 @@ class FileLifecycleManager:
self.save_with_lifecycle(filename, version_data, {"restored_from": str(version)})
return True

except Exception as e:
except Exception:
logger.exception("Failed to restore %s to version %s", filename, version)
return False

@@ -270,7 +270,7 @@ class FileLifecycleManager:
logger.info("File %s archived successfully", filename)
return True

except Exception as e:
except Exception:
logger.exception("Failed to archive file %s", filename)
return False

@@ -314,7 +314,7 @@ class FileLifecycleManager:
logger.info("File %s soft deleted successfully", filename)
return True

except Exception as e:
except Exception:
logger.exception("Failed to soft delete file %s", filename)
return False

@@ -372,7 +372,7 @@ class FileLifecycleManager:

return cleaned_count

except Exception as e:
except Exception:
logger.exception("Failed to cleanup old versions")
return 0

@@ -427,7 +427,7 @@ class FileLifecycleManager:

return stats

except Exception as e:
except Exception:
logger.exception("Failed to get storage statistics")
return {}

@@ -465,7 +465,7 @@ class FileLifecycleManager:
metadata_content = json.dumps(metadata_dict, indent=2, ensure_ascii=False)
self._storage.save(self._metadata_file, metadata_content.encode("utf-8"))
logger.debug("Metadata saved successfully")
except Exception as e:
except Exception:
logger.exception("Failed to save metadata")
raise

@@ -508,7 +508,7 @@ class FileLifecycleManager:
result = self._permission_manager.validate_operation(mapped_operation, self._dataset_id)
return bool(result)

except Exception as e:
except Exception:
logger.exception("Permission check failed for %s operation %s", filename, operation)
# Safe default: deny access when permission check fails
return False

api/extensions/storage/clickzetta_volume/volume_permissions.py (+7 -7)

@@ -84,7 +84,7 @@ class VolumePermissionManager:
logger.warning("Unknown volume type: %s", self._volume_type)
return False

except Exception as e:
except Exception:
logger.exception("Permission check failed")
return False

@@ -119,7 +119,7 @@ class VolumePermissionManager:
)
return False

except Exception as e:
except Exception:
logger.exception("User Volume permission check failed")
# For User Volume, if permission check fails, it might be a configuration issue, provide friendlier error message
logger.info("User Volume permission check failed, but permission checking is disabled in this version")
@@ -158,7 +158,7 @@ class VolumePermissionManager:

return has_permission

except Exception as e:
except Exception:
logger.exception("Table volume permission check failed for %s", table_name)
return False

@@ -216,7 +216,7 @@ class VolumePermissionManager:

return has_permission

except Exception as e:
except Exception:
logger.exception("External volume permission check failed for %s", self._volume_name)
logger.info("External Volume permission check failed, but permission checking is disabled in this version")
return False
@@ -292,7 +292,7 @@ class VolumePermissionManager:
if result:
self._current_username = result[0]
return str(self._current_username)
except Exception as e:
except Exception:
logger.exception("Failed to get current username")

return "unknown"
@@ -316,7 +316,7 @@ class VolumePermissionManager:
for grant in grants:
if len(grant) >= 3: # Typical format: (privilege, object_type, object_name, ...)
privilege = grant[0].upper()
object_type = grant[1].upper() if len(grant) > 1 else ""
_ = grant[1].upper() if len(grant) > 1 else ""

# Collect all relevant permissions
if privilege in ["SELECT", "INSERT", "UPDATE", "DELETE", "ALL"]:
@@ -521,7 +521,7 @@ class VolumePermissionManager:
logger.warning("Unknown volume type for permission inheritance: %s", self._volume_type)
return False

except Exception as e:
except Exception:
logger.exception("Permission inheritance check failed")
return False


api/libs/helper.py (+1 -1)

@@ -185,7 +185,7 @@ def timezone(timezone_string):
def generate_string(n):
letters_digits = string.ascii_letters + string.digits
result = ""
for i in range(n):
for _ in range(n):
result += secrets.choice(letters_digits)

return result

api/libs/sendgrid.py (+3 -3)

@@ -33,15 +33,15 @@ class SendGridClient:
logger.debug(response.body)
logger.debug(response.headers)

except TimeoutError as e:
except TimeoutError:
logger.exception("SendGridClient Timeout occurred while sending email")
raise
except (UnauthorizedError, ForbiddenError) as e:
except (UnauthorizedError, ForbiddenError):
logger.exception(
"SendGridClient Authentication failed. "
"Verify that your credentials and the 'from' email address are correct"
)
raise
except Exception as e:
except Exception:
logger.exception("SendGridClient Unexpected error occurred while sending email to %s", _to)
raise

api/libs/smtp.py (+3 -3)

@@ -45,13 +45,13 @@ class SMTPClient:
msg.attach(MIMEText(mail["html"], "html"))

smtp.sendmail(self._from, mail["to"], msg.as_string())
except smtplib.SMTPException as e:
except smtplib.SMTPException:
logger.exception("SMTP error occurred")
raise
except TimeoutError as e:
except TimeoutError:
logger.exception("Timeout occurred while sending email")
raise
except Exception as e:
except Exception:
logger.exception("Unexpected error occurred while sending email to %s", mail["to"])
raise
finally:

api/models/dataset.py (+1 -1)

@@ -915,7 +915,7 @@ class DatasetKeywordTable(Base):
if keyword_table_text:
return json.loads(keyword_table_text.decode("utf-8"), cls=SetDecoder)
return None
except Exception as e:
except Exception:
logger.exception("Failed to load keyword table from file: %s", file_key)
return None


api/pyproject.toml (+1 -2)

@@ -111,7 +111,7 @@ dev = [
"faker~=32.1.0",
"lxml-stubs~=0.5.1",
"ty~=0.0.1a19",
"mypy~=1.17.1",
"basedpyright~=1.31.0",
"ruff~=0.12.3",
"pytest~=8.3.2",
"pytest-benchmark~=4.0.0",
@@ -218,4 +218,3 @@ vdb = [
"xinference-client~=1.2.2",
"mo-vector~=0.1.13",
]


api/pyrightconfig.json (+48 -0)

@@ -0,0 +1,48 @@
{
"include": ["."],
"exclude": ["tests/", "migrations/", ".venv/"],
"typeCheckingMode": "strict",
"pythonVersion": "3.11",
"pythonPlatform": "All",
"reportMissingTypeStubs": false,
"reportGeneralTypeIssues": "none",
"reportOptionalMemberAccess": "none",
"reportOptionalIterable": "none",
"reportOptionalOperand": "none",
"reportOptionalSubscript": "none",
"reportTypedDictNotRequiredAccess": "none",
"reportPrivateImportUsage": "none",
"reportUnsupportedDunderAll": "none",
"reportUnnecessaryTypeIgnoreComment": "none",
"reportMatchNotExhaustive": "none",
"reportImplicitOverride": "none",
"reportCallInDefaultInitializer": "none",
"reportUnnecessaryIsInstance": "none",
"reportUnnecessaryComparison": "none",
"reportUnknownParameterType": "none",
"reportMissingParameterType": "none",
"reportUnknownArgumentType": "none",
"reportUnknownVariableType": "none",
"reportUnknownMemberType": "none",
"reportMissingTypeArgument": "none",
"reportUntypedFunctionDecorator": "none",
"reportUnknownLambdaType": "none",
"reportPrivateUsage": "none",
"reportConstantRedefinition": "none",
"reportIncompatibleMethodOverride": "none",
"reportIncompatibleVariableOverride": "none",
"reportOverlappingOverload": "none",
"reportPossiblyUnboundVariable": "none",
"reportUnusedImport": "none",
"reportUnusedFunction": "none",
"reportArgumentType": "none",
"reportAssignmentType": "none",
"reportAttributeAccessIssue": "none",
"reportCallIssue": "none",
"reportIndexIssue": "none",
"reportRedeclaration": "none",
"reportReturnType": "none",
"reportOperatorIssue": "none",
"reportTypeCommentUsage": "none",
"reportDeprecated": "none"
}
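
The config starts from "typeCheckingMode": "strict" and then mutes a long list of diagnostics, which reads as an incremental-adoption baseline: rules the codebase cannot yet satisfy are set to "none", while anything absent from the overrides keeps its strict default. Notably, `reportUnusedVariable` and `reportUnnecessaryCast` are not muted, and those two defaults account for most of this diff (dropping `as e`, renaming unused targets to `_`, deleting `cast()` calls). A sketch of code that should still be flagged under this config, assuming default strict behavior for the unlisted rules:

```python
from typing import cast

def demo(values: dict[str, int]) -> int:
    total = 0
    for key, value in values.items():  # `key` unused -> reportUnusedVariable fires
        total += value
    return cast(int, total)            # int cast to int -> reportUnnecessaryCast fires
```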

api/repositories/sqlalchemy_api_workflow_run_repository.py (+4 -4)

@@ -22,7 +22,7 @@ Implementation Notes:
import logging
from collections.abc import Sequence
from datetime import datetime
from typing import Optional, cast
from typing import Optional

from sqlalchemy import delete, select
from sqlalchemy.orm import Session, sessionmaker
@@ -117,7 +117,7 @@ class DifyAPISQLAlchemyWorkflowRunRepository(APIWorkflowRunRepository):
WorkflowRun.app_id == app_id,
WorkflowRun.id == run_id,
)
return cast(Optional[WorkflowRun], session.scalar(stmt))
return session.scalar(stmt)

def get_expired_runs_batch(
self,
@@ -137,7 +137,7 @@ class DifyAPISQLAlchemyWorkflowRunRepository(APIWorkflowRunRepository):
)
.limit(batch_size)
)
return cast(Sequence[WorkflowRun], session.scalars(stmt).all())
return session.scalars(stmt).all()

def delete_runs_by_ids(
self,
@@ -154,7 +154,7 @@ class DifyAPISQLAlchemyWorkflowRunRepository(APIWorkflowRunRepository):
result = session.execute(stmt)
session.commit()

deleted_count = cast(int, result.rowcount)
deleted_count = result.rowcount
logger.info("Deleted %s workflow runs by IDs", deleted_count)
return deleted_count


api/schedule/clean_workflow_runlogs_precise.py (+2 -2)

@@ -77,7 +77,7 @@ def clean_workflow_runlogs_precise():

logger.info("Cleanup completed: %s expired workflow run logs deleted", total_deleted)

except Exception as e:
except Exception:
db.session.rollback()
logger.exception("Unexpected error in workflow log cleanup")
raise
@@ -149,7 +149,7 @@ def _delete_batch_with_retry(workflow_run_ids: list[str], attempt_count: int) ->
db.session.commit()
return True

except Exception as e:
except Exception:
db.session.rollback()
logger.exception("Batch deletion failed (attempt %s)", attempt_count + 1)
return False

api/schedule/queue_monitor_task.py (+2 -2)

@@ -63,10 +63,10 @@ def queue_monitor_task():
"alert_time": current_time,
},
)
except Exception as e:
except Exception:
logger.exception(click.style("Exception occurred during sending email", fg="red"))

except Exception as e:
except Exception:
logger.exception(click.style("Exception occurred during queue monitoring", fg="red"))
finally:
if db.session.is_active:

api/services/annotation_service.py (+1 -1)

@@ -330,7 +330,7 @@ class AppAnnotationService:
# Skip the first row
df = pd.read_csv(file, dtype=str)
result = []
for index, row in df.iterrows():
for _, row in df.iterrows():
content = {"question": row.iloc[0], "answer": row.iloc[1]}
result.append(content)
if len(result) == 0:

api/services/app_generate_service.py (+1 -1)

@@ -227,7 +227,7 @@ class AppGenerateService:
# If workflow_id is specified, get the specific workflow version
if workflow_id:
try:
workflow_uuid = uuid.UUID(workflow_id)
_ = uuid.UUID(workflow_id)
except ValueError:
raise WorkflowIdFormatError(f"Invalid workflow_id format: '{workflow_id}'. ")
workflow = workflow_service.get_published_workflow_by_id(app_model=app_model, workflow_id=workflow_id)

api/services/app_service.py (+2 -2)

@@ -96,7 +96,7 @@ class AppService:
)
except (ProviderTokenNotInitError, LLMBadRequestError):
model_instance = None
except Exception as e:
except Exception:
logger.exception("Get default model instance failed, tenant_id: %s", tenant_id)
model_instance = None

@@ -201,7 +201,7 @@ class AppService:

# override tool parameters
tool["tool_parameters"] = masked_parameter
except Exception as e:
except Exception:
pass

# override agent mode

api/services/external_knowledge_service.py (+1 -1)

@@ -89,7 +89,7 @@ class ExternalDatasetService:
raise ValueError(f"invalid endpoint: {endpoint}")
try:
response = httpx.post(endpoint, headers={"Authorization": f"Bearer {api_key}"})
except Exception as e:
except Exception:
raise ValueError(f"failed to connect to the endpoint: {endpoint}")
if response.status_code == 502:
raise ValueError(f"Bad Gateway: failed to connect to the endpoint: {endpoint}")

api/services/plugin/data_migration.py (+1 -1)

@@ -175,7 +175,7 @@ limit 1000"""
# update jina to langgenius/jina_tool/jina etc.
updated_value = provider_cls(provider_name).to_string()
batch_updates.append((updated_value, record_id))
except Exception as e:
except Exception:
failed_ids.append(record_id)
click.echo(
click.style(

api/services/tools/tools_transform_service.py (+1 -1)

@@ -128,7 +128,7 @@ class ToolTransformService:
)
}

for name, value in schema.items():
for name in schema:
if result.masked_credentials:
result.masked_credentials[name] = ""


api/tasks/annotation/delete_annotation_index_task.py (+1 -1)

@@ -38,7 +38,7 @@ def delete_annotation_index_task(annotation_id: str, app_id: str, tenant_id: str
logger.exception("Delete annotation index failed when annotation deleted.")
end_at = time.perf_counter()
logger.info(click.style(f"App annotations index deleted : {app_id} latency: {end_at - start_at}", fg="green"))
except Exception as e:
except Exception:
logger.exception("Annotation deleted index failed")
finally:
db.session.close()

api/tasks/batch_create_segment_to_index_task.py (+1 -1)

@@ -79,7 +79,7 @@ def batch_create_segment_to_index_task(
# Skip the first row
df = pd.read_csv(file_path)
content = []
for index, row in df.iterrows():
for _, row in df.iterrows():
if dataset_document.doc_form == "qa_model":
data = {"content": row.iloc[0], "answer": row.iloc[1]}
else:

api/tasks/clean_dataset_task.py (+2 -2)

@@ -75,7 +75,7 @@ def clean_dataset_task(
index_processor = IndexProcessorFactory(doc_form).init_index_processor()
index_processor.clean(dataset, None, with_keywords=True, delete_child_chunks=True)
logger.info(click.style(f"Successfully cleaned vector database for dataset: {dataset_id}", fg="green"))
except Exception as index_cleanup_error:
except Exception:
logger.exception(click.style(f"Failed to clean vector database for dataset {dataset_id}", fg="red"))
# Continue with document and segment deletion even if vector cleanup fails
logger.info(
@@ -145,7 +145,7 @@ def clean_dataset_task(
try:
db.session.rollback()
logger.info(click.style(f"Rolled back database session for dataset: {dataset_id}", fg="yellow"))
except Exception as rollback_error:
except Exception:
logger.exception("Failed to rollback database session")

logger.exception("Cleaned dataset when dataset deleted failed")

api/tasks/delete_account_task.py (+1 -1)

@@ -15,7 +15,7 @@ def delete_account_task(account_id):
account = db.session.query(Account).where(Account.id == account_id).first()
try:
BillingService.delete_account(account_id)
except Exception as e:
except Exception:
logger.exception("Failed to delete account %s from billing service.", account_id)
raise


api/tasks/process_tenant_plugin_autoupgrade_check_task.py (+1 -1)

@@ -146,7 +146,7 @@ def process_tenant_plugin_autoupgrade_check_task(
fg="green",
)
)
task_start_resp = manager.upgrade_plugin(
_ = manager.upgrade_plugin(
tenant_id,
original_unique_identifier,
new_unique_identifier,

api/uv.lock (+2168 -2178)
File diff suppressed because it is too large


dev/basedpyright-check (+9 -0)

@@ -0,0 +1,9 @@
#!/bin/bash

set -x

SCRIPT_DIR="$(dirname "$(realpath "$0")")"
cd "$SCRIPT_DIR/.."

# run basedpyright checks
uv --directory api run basedpyright

dev/reformat (+2 -2)

@@ -14,5 +14,5 @@ uv run --directory api --dev ruff format ./
# run dotenv-linter linter
uv run --project api --dev dotenv-linter ./api/.env.example ./web/.env.example

# run mypy check
dev/mypy-check
# run basedpyright check
dev/basedpyright-check

web/.husky/pre-commit (+0 -1)

@@ -35,7 +35,6 @@ if $api_modified; then

status=${status:-0}


if [ $status -ne 0 ]; then
echo "Ruff linter on api module error, exit code: $status"
echo "Please run 'dev/reformat' to fix the fixable linting errors."
