ソースを参照

Merge branch 'main' into feat/rag-2

tags/2.0.0-beta.1
twwu 3ヶ月前
コミット
682b65034c
100個のファイルの変更9161行の追加316行の削除
  1. 1
    1
      README.md
  2. 5
    0
      api/.env.example
  3. 7
    0
      api/configs/feature/__init__.py
  4. 1
    0
      api/controllers/console/__init__.py
  5. 107
    0
      api/controllers/console/app/mcp_server.py
  6. 188
    3
      api/controllers/console/workspace/tool_providers.py
  7. 8
    0
      api/controllers/mcp/__init__.py
  8. 104
    0
      api/controllers/mcp/mcp.py
  9. 16
    8
      api/core/agent/base_agent_runner.py
  10. 1
    1
      api/core/agent/plugin_entities.py
  11. 3
    1
      api/core/agent/strategy/plugin.py
  12. 3
    0
      api/core/entities/parameter_entities.py
  13. 3
    1
      api/core/file/helpers.py
  14. 0
    0
      api/core/mcp/__init__.py
  15. 342
    0
      api/core/mcp/auth/auth_flow.py
  16. 81
    0
      api/core/mcp/auth/auth_provider.py
  17. 361
    0
      api/core/mcp/client/sse_client.py
  18. 476
    0
      api/core/mcp/client/streamable_client.py
  19. 19
    0
      api/core/mcp/entities.py
  20. 10
    0
      api/core/mcp/error.py
  21. 150
    0
      api/core/mcp/mcp_client.py
  22. 228
    0
      api/core/mcp/server/streamable_http.py
  23. 397
    0
      api/core/mcp/session/base_session.py
  24. 365
    0
      api/core/mcp/session/client_session.py
  25. 1217
    0
      api/core/mcp/types.py
  26. 114
    0
      api/core/mcp/utils.py
  27. 2
    0
      api/core/model_runtime/entities/provider_entities.py
  28. 41
    0
      api/core/plugin/entities/parameters.py
  29. 2
    0
      api/core/plugin/entities/plugin.py
  30. 1
    0
      api/core/plugin/entities/plugin_daemon.py
  31. 1
    1
      api/core/plugin/entities/request.py
  32. 2
    0
      api/core/rag/datasource/vdb/qdrant/qdrant_vector.py
  33. 18
    2
      api/core/rag/datasource/vdb/vector_factory.py
  34. 25
    3
      api/core/tools/custom_tool/provider.py
  35. 18
    3
      api/core/tools/custom_tool/tool.py
  36. 20
    3
      api/core/tools/entities/api_entities.py
  37. 11
    1
      api/core/tools/entities/tool_entities.py
  38. 130
    0
      api/core/tools/mcp_tool/provider.py
  39. 92
    0
      api/core/tools/mcp_tool/tool.py
  40. 3
    2
      api/core/tools/signature.py
  41. 3
    2
      api/core/tools/tool_file_manager.py
  42. 135
    32
      api/core/tools/tool_manager.py
  43. 12
    11
      api/core/tools/utils/configuration.py
  44. 6
    1
      api/core/tools/workflow_as_tool/tool.py
  45. 3
    3
      api/core/workflow/callbacks/workflow_logging_callback.py
  46. 1
    1
      api/core/workflow/graph_engine/entities/graph.py
  47. 21
    3
      api/core/workflow/nodes/agent/agent_node.py
  48. 2
    0
      api/core/workflow/nodes/node_mapping.py
  49. 23
    0
      api/core/workflow/nodes/tool/entities.py
  50. 29
    25
      api/core/workflow/nodes/tool/tool_node.py
  51. 2
    0
      api/extensions/ext_blueprints.py
  52. 16
    1
      api/extensions/ext_login.py
  53. 28
    0
      api/extensions/ext_redis.py
  54. 1
    1
      api/factories/agent_factory.py
  55. 24
    0
      api/fields/app_fields.py
  56. 4
    2
      api/libs/passport.py
  57. 64
    0
      api/migrations/versions/2025_06_25_0936-58eb7bdb93fe_add_mcp_server_tool_and_app_server.py
  58. 2
    0
      api/models/__init__.py
  59. 33
    0
      api/models/model.py
  60. 106
    0
      api/models/tools.py
  61. 4
    2
      api/pyproject.toml
  62. 8
    1
      api/services/account_service.py
  63. 2
    2
      api/services/enterprise/enterprise_service.py
  64. 1
    1
      api/services/entities/knowledge_entities/knowledge_entities.py
  65. 10
    7
      api/services/plugin/plugin_service.py
  66. 231
    0
      api/services/tools/mcp_tools_mange_service.py
  67. 108
    6
      api/services/tools/tools_transform_service.py
  68. 14
    0
      api/tasks/remove_app_and_related_data_task.py
  69. 0
    1
      api/tests/integration_tests/vdb/couchbase/test_couchbase.py
  70. 0
    1
      api/tests/integration_tests/vdb/matrixone/test_matrixone.py
  71. 0
    1
      api/tests/integration_tests/vdb/opengauss/test_opengauss.py
  72. 0
    1
      api/tests/integration_tests/vdb/pyvastbase/test_vastbase_vector.py
  73. 4
    7
      api/tests/integration_tests/workflow/nodes/test_llm.py
  74. 471
    0
      api/tests/unit_tests/core/mcp/client/test_session.py
  75. 349
    0
      api/tests/unit_tests/core/mcp/client/test_sse.py
  76. 450
    0
      api/tests/unit_tests/core/mcp/client/test_streamable_http.py
  77. 53
    0
      api/tests/unit_tests/extensions/test_redis.py
  78. 0
    4
      api/tests/unit_tests/factories/test_variable_factory.py
  79. 232
    0
      api/tests/unit_tests/libs/test_login.py
  80. 205
    0
      api/tests/unit_tests/libs/test_passport.py
  81. 59
    0
      api/tests/unit_tests/services/services_test_help.py
  82. 1545
    0
      api/tests/unit_tests/services/test_account_service.py
  83. 0
    1
      api/tests/unit_tests/services/test_dataset_service_update_dataset.py
  84. 2
    2
      api/tests/unit_tests/utils/structured_output_parser/test_structured_output_parser.py
  85. 139
    108
      api/uv.lock
  86. 5
    0
      docker/.env.example
  87. 3
    3
      docker/docker-compose-template.yaml
  88. 4
    3
      docker/docker-compose.yaml
  89. 4
    1
      docker/nginx/conf.d/default.conf.template
  90. 8
    0
      web/app/(commonLayout)/app/(appDetailLayout)/[appId]/overview/cardView.tsx
  91. 1
    2
      web/app/(shareLayout)/webapp-reset-password/set-password/page.tsx
  92. 1
    2
      web/app/account/account-page/index.tsx
  93. 5
    7
      web/app/components/app-sidebar/app-info.tsx
  94. 15
    18
      web/app/components/app-sidebar/basic.tsx
  95. 50
    18
      web/app/components/app/configuration/config/agent/agent-tools/index.tsx
  96. 7
    5
      web/app/components/app/configuration/config/agent/agent-tools/setting-built-in-tool.tsx
  97. 78
    0
      web/app/components/app/configuration/config/config-audio.tsx
  98. 3
    0
      web/app/components/app/configuration/config/index.tsx
  99. 2
    0
      web/app/components/app/configuration/index.tsx
  100. 0
    0
      web/app/components/app/overview/appCard.tsx

+ 1
- 1
README.md ファイルの表示

@@ -54,7 +54,7 @@
<a href="./README_BN.md"><img alt="README in বাংলা" src="https://img.shields.io/badge/বাংলা-d9d9d9"></a>
</p>

Dify is an open-source LLM app development platform. Its intuitive interface combines agentic AI workflow, RAG pipeline, agent capabilities, model management, observability features, and more, allowing you to quickly move from prototype to production.
Dify is an open-source platform for developing LLM applications. Its intuitive interface combines agentic AI workflows, RAG pipelines, agent capabilities, model management, observability features, and more — allowing you to quickly move from prototype to production.

## Quick start


+ 5
- 0
api/.env.example ファイルの表示

@@ -17,6 +17,11 @@ APP_WEB_URL=http://127.0.0.1:3000
# Files URL
FILES_URL=http://127.0.0.1:5001

# INTERNAL_FILES_URL is used for plugin daemon communication within Docker network.
# Set this to the internal Docker service URL for proper plugin file access.
# Example: INTERNAL_FILES_URL=http://api:5001
INTERNAL_FILES_URL=http://127.0.0.1:5001

# The time in seconds after the signature is rejected
FILES_ACCESS_TIMEOUT=300


+ 7
- 0
api/configs/feature/__init__.py ファイルの表示

@@ -237,6 +237,13 @@ class FileAccessConfig(BaseSettings):
default="",
)

INTERNAL_FILES_URL: str = Field(
description="Internal base URL for file access within Docker network,"
" used for plugin daemon and internal service communication."
" Falls back to FILES_URL if not specified.",
default="",
)

FILES_ACCESS_TIMEOUT: int = Field(
description="Expiration time in seconds for file access URLs",
default=300,

+ 1
- 0
api/controllers/console/__init__.py ファイルの表示

@@ -56,6 +56,7 @@ from .app import (
conversation,
conversation_variables,
generator,
mcp_server,
message,
model_config,
ops_trace,

+ 107
- 0
api/controllers/console/app/mcp_server.py ファイルの表示

@@ -0,0 +1,107 @@
import json
from enum import StrEnum

from flask_login import current_user
from flask_restful import Resource, marshal_with, reqparse
from werkzeug.exceptions import NotFound

from controllers.console import api
from controllers.console.app.wraps import get_app_model
from controllers.console.wraps import account_initialization_required, setup_required
from extensions.ext_database import db
from fields.app_fields import app_server_fields
from libs.login import login_required
from models.model import AppMCPServer


class AppMCPServerStatus(StrEnum):
ACTIVE = "active"
INACTIVE = "inactive"


class AppMCPServerController(Resource):
    """Console CRUD endpoints for an app's MCP server configuration."""

    @setup_required
    @login_required
    @account_initialization_required
    @get_app_model
    @marshal_with(app_server_fields)
    def get(self, app_model):
        """Return the MCP server record bound to this app, or None if absent."""
        server = db.session.query(AppMCPServer).filter(AppMCPServer.app_id == app_model.id).first()
        return server

    @setup_required
    @login_required
    @account_initialization_required
    @get_app_model
    @marshal_with(app_server_fields)
    def post(self, app_model):
        """Create an MCP server for the app; requires editor rights."""
        # The role of the current user in the ta table must be editor, admin, or owner
        if not current_user.is_editor:
            raise NotFound()
        parser = reqparse.RequestParser()
        parser.add_argument("description", type=str, required=True, location="json")
        parser.add_argument("parameters", type=dict, required=True, location="json")
        args = parser.parse_args()
        server = AppMCPServer(
            name=app_model.name,
            description=args["description"],
            # Stored as JSON text; keep non-ASCII readable in the DB.
            parameters=json.dumps(args["parameters"], ensure_ascii=False),
            status=AppMCPServerStatus.ACTIVE,
            app_id=app_model.id,
            tenant_id=current_user.current_tenant_id,
            server_code=AppMCPServer.generate_server_code(16),
        )
        db.session.add(server)
        db.session.commit()
        return server

    @setup_required
    @login_required
    @account_initialization_required
    @get_app_model
    @marshal_with(app_server_fields)
    def put(self, app_model):
        """Update description/parameters/status of a server; requires editor rights.

        Raises:
            NotFound: caller lacks editor rights or the server does not exist
                in the caller's tenant.
            ValueError: status is not a valid AppMCPServerStatus value.
        """
        if not current_user.is_editor:
            raise NotFound()
        parser = reqparse.RequestParser()
        parser.add_argument("id", type=str, required=True, location="json")
        parser.add_argument("description", type=str, required=True, location="json")
        parser.add_argument("parameters", type=dict, required=True, location="json")
        parser.add_argument("status", type=str, required=False, location="json")
        args = parser.parse_args()
        # Scope the lookup to the caller's tenant (as the refresh endpoint does)
        # so a server belonging to another workspace cannot be modified by
        # guessing its id.
        server = (
            db.session.query(AppMCPServer)
            .filter(AppMCPServer.id == args["id"])
            .filter(AppMCPServer.tenant_id == current_user.current_tenant_id)
            .first()
        )
        if not server:
            raise NotFound()
        server.description = args["description"]
        server.parameters = json.dumps(args["parameters"], ensure_ascii=False)
        if args["status"]:
            if args["status"] not in [status.value for status in AppMCPServerStatus]:
                raise ValueError("Invalid status")
            server.status = args["status"]
        db.session.commit()
        return server

class AppMCPServerRefreshController(Resource):
    """Rotate an app MCP server's public server_code, invalidating shared URLs."""

    @setup_required
    @login_required
    @account_initialization_required
    @marshal_with(app_server_fields)
    def get(self, server_id):
        # Only editors may rotate the code; NotFound hides the endpoint from others.
        if not current_user.is_editor:
            raise NotFound()
        # Lookup is tenant-scoped so a server id from another workspace 404s.
        server = (
            db.session.query(AppMCPServer)
            .filter(AppMCPServer.id == server_id)
            .filter(AppMCPServer.tenant_id == current_user.current_tenant_id)
            .first()
        )
        if not server:
            raise NotFound()
        # A fresh 16-character code makes every previously issued MCP URL stale.
        server.server_code = AppMCPServer.generate_server_code(16)
        db.session.commit()
        return server


api.add_resource(AppMCPServerController, "/apps/<uuid:app_id>/server")
api.add_resource(AppMCPServerRefreshController, "/apps/<uuid:server_id>/server/refresh")

+ 188
- 3
api/controllers/console/workspace/tool_providers.py ファイルの表示

@@ -1,6 +1,7 @@
import io
from urllib.parse import urlparse

from flask import send_file
from flask import redirect, send_file
from flask_login import current_user
from flask_restful import Resource, reqparse
from sqlalchemy.orm import Session
@@ -9,17 +10,34 @@ from werkzeug.exceptions import Forbidden
from configs import dify_config
from controllers.console import api
from controllers.console.wraps import account_initialization_required, enterprise_license_required, setup_required
from core.mcp.auth.auth_flow import auth, handle_callback
from core.mcp.auth.auth_provider import OAuthClientProvider
from core.mcp.error import MCPAuthError, MCPError
from core.mcp.mcp_client import MCPClient
from core.model_runtime.utils.encoders import jsonable_encoder
from extensions.ext_database import db
from libs.helper import alphanumeric, uuid_value
from libs.login import login_required
from services.tools.api_tools_manage_service import ApiToolManageService
from services.tools.builtin_tools_manage_service import BuiltinToolManageService
from services.tools.mcp_tools_mange_service import MCPToolManageService
from services.tools.tool_labels_service import ToolLabelsService
from services.tools.tools_manage_service import ToolCommonService
from services.tools.tools_transform_service import ToolTransformService
from services.tools.workflow_tools_manage_service import WorkflowToolManageService


def is_valid_url(url: str) -> bool:
    """Return True only for absolute http/https URLs with a network location."""
    if not url:
        return False
    try:
        parts = urlparse(url)
    except Exception:
        # Input so malformed that urlparse itself rejects it is simply invalid.
        return False
    has_scheme_and_host = bool(parts.scheme) and bool(parts.netloc)
    return has_scheme_and_host and parts.scheme in ("http", "https")


class ToolProviderListApi(Resource):
@setup_required
@login_required
@@ -34,7 +52,7 @@ class ToolProviderListApi(Resource):
req.add_argument(
"type",
type=str,
choices=["builtin", "model", "api", "workflow"],
choices=["builtin", "model", "api", "workflow", "mcp"],
required=False,
nullable=True,
location="args",
@@ -613,6 +631,166 @@ class ToolLabelsApi(Resource):
return jsonable_encoder(ToolLabelsService.list_tool_labels())


class ToolProviderMCPApi(Resource):
    """Console endpoints to create, update and delete MCP tool providers."""

    @setup_required
    @login_required
    @account_initialization_required
    def post(self):
        """Register a new MCP provider after validating its server URL."""
        parser = reqparse.RequestParser()
        parser.add_argument("server_url", type=str, required=True, nullable=False, location="json")
        parser.add_argument("name", type=str, required=True, nullable=False, location="json")
        parser.add_argument("icon", type=str, required=True, nullable=False, location="json")
        parser.add_argument("icon_type", type=str, required=True, nullable=False, location="json")
        parser.add_argument("icon_background", type=str, required=False, nullable=True, location="json", default="")
        parser.add_argument("server_identifier", type=str, required=True, nullable=False, location="json")
        args = parser.parse_args()
        user = current_user
        if not is_valid_url(args["server_url"]):
            raise ValueError("Server URL is not valid.")
        return jsonable_encoder(
            MCPToolManageService.create_mcp_provider(
                tenant_id=user.current_tenant_id,
                server_url=args["server_url"],
                name=args["name"],
                icon=args["icon"],
                icon_type=args["icon_type"],
                icon_background=args["icon_background"],
                user_id=user.id,
                server_identifier=args["server_identifier"],
            )
        )

    @setup_required
    @login_required
    @account_initialization_required
    def put(self):
        """Update an existing MCP provider's URL, display info and identifier."""
        parser = reqparse.RequestParser()
        parser.add_argument("server_url", type=str, required=True, nullable=False, location="json")
        parser.add_argument("name", type=str, required=True, nullable=False, location="json")
        parser.add_argument("icon", type=str, required=True, nullable=False, location="json")
        parser.add_argument("icon_type", type=str, required=True, nullable=False, location="json")
        parser.add_argument("icon_background", type=str, required=False, nullable=True, location="json")
        parser.add_argument("provider_id", type=str, required=True, nullable=False, location="json")
        parser.add_argument("server_identifier", type=str, required=True, nullable=False, location="json")
        args = parser.parse_args()
        # A stored URL is echoed back to the client masked with "[__HIDDEN__]";
        # accept that sentinel unchanged, otherwise the URL must validate.
        if not is_valid_url(args["server_url"]) and "[__HIDDEN__]" not in args["server_url"]:
            raise ValueError("Server URL is not valid.")
        MCPToolManageService.update_mcp_provider(
            tenant_id=current_user.current_tenant_id,
            provider_id=args["provider_id"],
            server_url=args["server_url"],
            name=args["name"],
            icon=args["icon"],
            icon_type=args["icon_type"],
            icon_background=args["icon_background"],
            server_identifier=args["server_identifier"],
        )
        return {"result": "success"}

    @setup_required
    @login_required
    @account_initialization_required
    def delete(self):
        """Delete an MCP provider belonging to the caller's tenant."""
        parser = reqparse.RequestParser()
        parser.add_argument("provider_id", type=str, required=True, nullable=False, location="json")
        args = parser.parse_args()
        MCPToolManageService.delete_mcp_tool(tenant_id=current_user.current_tenant_id, provider_id=args["provider_id"])
        return {"result": "success"}


class ToolMCPAuthApi(Resource):
    """Connect/authorize an MCP provider, driving the OAuth flow when required."""

    @setup_required
    @login_required
    @account_initialization_required
    def post(self):
        parser = reqparse.RequestParser()
        parser.add_argument("provider_id", type=str, required=True, nullable=False, location="json")
        parser.add_argument("authorization_code", type=str, required=False, nullable=True, location="json")
        args = parser.parse_args()
        provider_id = args["provider_id"]
        tenant_id = current_user.current_tenant_id
        provider = MCPToolManageService.get_mcp_provider_by_provider_id(provider_id, tenant_id)
        if not provider:
            raise ValueError("provider not found")
        try:
            # Probe the server with a client marked authed=False; if the context
            # manager enters successfully, the connection works and the stored
            # credentials can be marked as authed.
            with MCPClient(
                provider.decrypted_server_url,
                provider_id,
                tenant_id,
                authed=False,
                authorization_code=args["authorization_code"],
                for_list=True,
            ):
                MCPToolManageService.update_mcp_provider_credentials(
                    mcp_provider=provider,
                    credentials=provider.decrypted_credentials,
                    authed=True,
                )
                return {"result": "success"}

        except MCPAuthError:
            # Server demands OAuth: start (or continue) the authorization flow
            # and return its redirect payload to the console UI.
            auth_provider = OAuthClientProvider(provider_id, tenant_id, for_list=True)
            return auth(auth_provider, provider.decrypted_server_url, args["authorization_code"])
        except MCPError as e:
            # Any other MCP failure invalidates whatever credentials were stored.
            MCPToolManageService.update_mcp_provider_credentials(
                mcp_provider=provider,
                credentials={},
                authed=False,
            )
            raise ValueError(f"Failed to connect to MCP server: {e}") from e


class ToolMCPDetailApi(Resource):
    """Fetch a single MCP provider, transformed for console display."""

    @setup_required
    @login_required
    @account_initialization_required
    def get(self, provider_id):
        tenant_id = current_user.current_tenant_id
        provider = MCPToolManageService.get_mcp_provider_by_provider_id(provider_id, tenant_id)
        user_provider = ToolTransformService.mcp_provider_to_user_provider(provider, for_list=True)
        return jsonable_encoder(user_provider)


class ToolMCPListAllApi(Resource):
    """List every MCP tool available to the current workspace."""

    @setup_required
    @login_required
    @account_initialization_required
    def get(self):
        tenant_id = current_user.current_tenant_id
        mcp_tools = MCPToolManageService.retrieve_mcp_tools(tenant_id=tenant_id)
        # Serialize each tool through its own to_dict contract.
        return [mcp_tool.to_dict() for mcp_tool in mcp_tools]


class ToolMCPUpdateApi(Resource):
    """Re-fetch the tool list from the remote MCP server for one provider."""

    @setup_required
    @login_required
    @account_initialization_required
    def get(self, provider_id):
        refreshed_tools = MCPToolManageService.list_mcp_tool_from_remote_server(
            tenant_id=current_user.current_tenant_id,
            provider_id=provider_id,
        )
        return jsonable_encoder(refreshed_tools)


class ToolMCPCallbackApi(Resource):
    """OAuth redirect target for the MCP authorization flow (no login required)."""

    def get(self):
        parser = reqparse.RequestParser()
        parser.add_argument("code", type=str, required=True, nullable=False, location="args")
        parser.add_argument("state", type=str, required=True, nullable=False, location="args")
        args = parser.parse_args()
        # The state key looks up the pending flow; the code is exchanged for
        # tokens inside handle_callback before bouncing back to the console UI.
        handle_callback(args["state"], args["code"])
        return redirect(f"{dify_config.CONSOLE_WEB_URL}/oauth-callback")


# tool provider
api.add_resource(ToolProviderListApi, "/workspaces/current/tool-providers")

@@ -647,8 +825,15 @@ api.add_resource(ToolWorkflowProviderDeleteApi, "/workspaces/current/tool-provid
api.add_resource(ToolWorkflowProviderGetApi, "/workspaces/current/tool-provider/workflow/get")
api.add_resource(ToolWorkflowProviderListToolApi, "/workspaces/current/tool-provider/workflow/tools")

# mcp tool provider
api.add_resource(ToolMCPDetailApi, "/workspaces/current/tool-provider/mcp/tools/<path:provider_id>")
api.add_resource(ToolProviderMCPApi, "/workspaces/current/tool-provider/mcp")
api.add_resource(ToolMCPUpdateApi, "/workspaces/current/tool-provider/mcp/update/<path:provider_id>")
api.add_resource(ToolMCPAuthApi, "/workspaces/current/tool-provider/mcp/auth")
api.add_resource(ToolMCPCallbackApi, "/mcp/oauth/callback")

api.add_resource(ToolBuiltinListApi, "/workspaces/current/tools/builtin")
api.add_resource(ToolApiListApi, "/workspaces/current/tools/api")
api.add_resource(ToolMCPListAllApi, "/workspaces/current/tools/mcp")
api.add_resource(ToolWorkflowListApi, "/workspaces/current/tools/workflow")

api.add_resource(ToolLabelsApi, "/workspaces/current/tool-labels")

+ 8
- 0
api/controllers/mcp/__init__.py ファイルの表示

@@ -0,0 +1,8 @@
from flask import Blueprint

from libs.external_api import ExternalApi

bp = Blueprint("mcp", __name__, url_prefix="/mcp")
api = ExternalApi(bp)

from . import mcp

+ 104
- 0
api/controllers/mcp/mcp.py ファイルの表示

@@ -0,0 +1,104 @@
from flask_restful import Resource, reqparse
from pydantic import ValidationError

from controllers.console.app.mcp_server import AppMCPServerStatus
from controllers.mcp import api
from core.app.app_config.entities import VariableEntity
from core.mcp import types
from core.mcp.server.streamable_http import MCPServerStreamableHTTPRequestHandler
from core.mcp.types import ClientNotification, ClientRequest
from core.mcp.utils import create_mcp_error_response
from extensions.ext_database import db
from libs import helper
from models.model import App, AppMCPServer, AppMode


class MCPAppApi(Resource):
    """Public JSON-RPC endpoint exposing a Dify app as an MCP server."""

    def post(self, server_code):
        """Handle one MCP JSON-RPC message addressed to server_code's app.

        Failures are reported as JSON-RPC error responses (not HTTP errors)
        so MCP clients can parse them.
        """

        def int_or_str(value):
            # JSON-RPC ids may be a number or a string; anything else is dropped.
            if isinstance(value, (int, str)):
                return value
            else:
                return None

        parser = reqparse.RequestParser()
        parser.add_argument("jsonrpc", type=str, required=True, location="json")
        parser.add_argument("method", type=str, required=True, location="json")
        parser.add_argument("params", type=dict, required=False, location="json")
        parser.add_argument("id", type=int_or_str, required=False, location="json")
        args = parser.parse_args()

        # Echoed back in error responses; may be None for notifications.
        request_id = args.get("id")

        # Resolve the public server code to a configured MCP server record.
        server = db.session.query(AppMCPServer).filter(AppMCPServer.server_code == server_code).first()
        if not server:
            return helper.compact_generate_response(
                create_mcp_error_response(request_id, types.INVALID_REQUEST, "Server Not Found")
            )

        if server.status != AppMCPServerStatus.ACTIVE:
            return helper.compact_generate_response(
                create_mcp_error_response(request_id, types.INVALID_REQUEST, "Server is not active")
            )

        app = db.session.query(App).filter(App.id == server.app_id).first()
        if not app:
            return helper.compact_generate_response(
                create_mcp_error_response(request_id, types.INVALID_REQUEST, "App Not Found")
            )

        # The user-input form lives on the workflow for workflow-style apps and
        # on the model config for the others.
        if app.mode in {AppMode.ADVANCED_CHAT.value, AppMode.WORKFLOW.value}:
            workflow = app.workflow
            if workflow is None:
                return helper.compact_generate_response(
                    create_mcp_error_response(request_id, types.INVALID_REQUEST, "App is unavailable")
                )

            user_input_form = workflow.user_input_form(to_old_structure=True)
        else:
            app_model_config = app.app_model_config
            if app_model_config is None:
                return helper.compact_generate_response(
                    create_mcp_error_response(request_id, types.INVALID_REQUEST, "App is unavailable")
                )

            features_dict = app_model_config.to_dict()
            user_input_form = features_dict.get("user_input_form", [])
        # Normalize each raw form item into a VariableEntity; items are shaped
        # either {"type": ..., ...} or {<type>: {...}} (old structure).
        converted_user_input_form: list[VariableEntity] = []
        try:
            for item in user_input_form:
                variable_type = item.get("type", "") or list(item.keys())[0]
                variable = item[variable_type]
                converted_user_input_form.append(
                    VariableEntity(
                        type=variable_type,
                        variable=variable.get("variable"),
                        description=variable.get("description") or "",
                        label=variable.get("label"),
                        required=variable.get("required", False),
                        max_length=variable.get("max_length"),
                        options=variable.get("options") or [],
                    )
                )
        except ValidationError as e:
            return helper.compact_generate_response(
                create_mcp_error_response(request_id, types.INVALID_PARAMS, f"Invalid user_input_form: {str(e)}")
            )

        # Parse the payload as a request first, then fall back to a notification.
        try:
            request: ClientRequest | ClientNotification = ClientRequest.model_validate(args)
        except ValidationError as e:
            try:
                notification = ClientNotification.model_validate(args)
                request = notification
            except ValidationError as e:
                return helper.compact_generate_response(
                    create_mcp_error_response(request_id, types.INVALID_PARAMS, f"Invalid MCP request: {str(e)}")
                )

        mcp_server_handler = MCPServerStreamableHTTPRequestHandler(app, request, converted_user_input_form)
        response = mcp_server_handler.handle()
        return helper.compact_generate_response(response)


api.add_resource(MCPAppApi, "/server/<string:server_code>/mcp")

+ 16
- 8
api/core/agent/base_agent_runner.py ファイルの表示

@@ -161,10 +161,14 @@ class BaseAgentRunner(AppRunner):
if parameter.type == ToolParameter.ToolParameterType.SELECT:
enum = [option.value for option in parameter.options] if parameter.options else []

message_tool.parameters["properties"][parameter.name] = {
"type": parameter_type,
"description": parameter.llm_description or "",
}
message_tool.parameters["properties"][parameter.name] = (
{
"type": parameter_type,
"description": parameter.llm_description or "",
}
if parameter.input_schema is None
else parameter.input_schema
)

if len(enum) > 0:
message_tool.parameters["properties"][parameter.name]["enum"] = enum
@@ -254,10 +258,14 @@ class BaseAgentRunner(AppRunner):
if parameter.type == ToolParameter.ToolParameterType.SELECT:
enum = [option.value for option in parameter.options] if parameter.options else []

prompt_tool.parameters["properties"][parameter.name] = {
"type": parameter_type,
"description": parameter.llm_description or "",
}
prompt_tool.parameters["properties"][parameter.name] = (
{
"type": parameter_type,
"description": parameter.llm_description or "",
}
if parameter.input_schema is None
else parameter.input_schema
)

if len(enum) > 0:
prompt_tool.parameters["properties"][parameter.name]["enum"] = enum

+ 1
- 1
api/core/agent/plugin_entities.py ファイルの表示

@@ -85,7 +85,7 @@ class AgentStrategyEntity(BaseModel):
description: I18nObject = Field(..., description="The description of the agent strategy")
output_schema: Optional[dict] = None
features: Optional[list[AgentFeature]] = None
meta_version: Optional[str] = None
# pydantic configs
model_config = ConfigDict(protected_namespaces=())


+ 3
- 1
api/core/agent/strategy/plugin.py ファイルの表示

@@ -15,10 +15,12 @@ class PluginAgentStrategy(BaseAgentStrategy):

tenant_id: str
declaration: AgentStrategyEntity
meta_version: str | None = None

def __init__(self, tenant_id: str, declaration: AgentStrategyEntity):
def __init__(self, tenant_id: str, declaration: AgentStrategyEntity, meta_version: str | None):
self.tenant_id = tenant_id
self.declaration = declaration
self.meta_version = meta_version

def get_parameters(self) -> Sequence[AgentStrategyParameter]:
return self.declaration.parameters

+ 3
- 0
api/core/entities/parameter_entities.py ファイルの表示

@@ -21,6 +21,9 @@ class CommonParameterType(StrEnum):
DYNAMIC_SELECT = "dynamic-select"

# TOOL_SELECTOR = "tool-selector"
# MCP object and array type parameters
ARRAY = "array"
OBJECT = "object"


class AppSelectorScope(StrEnum):

+ 3
- 1
api/core/file/helpers.py ファイルの表示

@@ -21,7 +21,9 @@ def get_signed_file_url(upload_file_id: str) -> str:


def get_signed_file_url_for_plugin(filename: str, mimetype: str, tenant_id: str, user_id: str) -> str:
url = f"{dify_config.FILES_URL}/files/upload/for-plugin"
# Plugin access should use internal URL for Docker network communication
base_url = dify_config.INTERNAL_FILES_URL or dify_config.FILES_URL
url = f"{base_url}/files/upload/for-plugin"

if user_id is None:
user_id = "DEFAULT-USER"

+ 0
- 0
api/core/mcp/__init__.py ファイルの表示


+ 342
- 0
api/core/mcp/auth/auth_flow.py ファイルの表示

@@ -0,0 +1,342 @@
import base64
import hashlib
import json
import os
import secrets
import urllib.parse
from typing import Optional
from urllib.parse import urljoin

import requests
from pydantic import BaseModel, ValidationError

from core.mcp.auth.auth_provider import OAuthClientProvider
from core.mcp.types import (
OAuthClientInformation,
OAuthClientInformationFull,
OAuthClientMetadata,
OAuthMetadata,
OAuthTokens,
)
from extensions.ext_redis import redis_client

LATEST_PROTOCOL_VERSION = "1.0"
OAUTH_STATE_EXPIRY_SECONDS = 5 * 60 # 5 minutes expiry
OAUTH_STATE_REDIS_KEY_PREFIX = "oauth_state:"


class OAuthCallbackState(BaseModel):
    """Everything needed to resume an OAuth flow after the provider redirects back."""

    provider_id: str
    tenant_id: str
    server_url: str
    # RFC 8414 metadata when discovery succeeded; None falls back to default endpoints.
    metadata: OAuthMetadata | None = None
    client_information: OAuthClientInformation
    # PKCE verifier matching the challenge sent in the authorization request.
    code_verifier: str
    redirect_uri: str


def generate_pkce_challenge() -> tuple[str, str]:
    """Generate a PKCE (code_verifier, S256 code_challenge) pair per RFC 7636."""

    def _b64url_no_pad(raw: bytes) -> str:
        # URL-safe base64 with padding stripped, as PKCE requires.
        encoded = base64.urlsafe_b64encode(raw).decode("utf-8")
        return encoded.replace("=", "").replace("+", "-").replace("/", "_")

    verifier = _b64url_no_pad(os.urandom(40))
    challenge = _b64url_no_pad(hashlib.sha256(verifier.encode("utf-8")).digest())
    return verifier, challenge


def _create_secure_redis_state(state_data: OAuthCallbackState) -> str:
    """Persist OAuth state in Redis and return the opaque key used as `state`."""
    # 32 bytes of randomness: unguessable, and the only handle to the stored data.
    state_key = secrets.token_urlsafe(32)

    # Expire automatically so abandoned flows cannot accumulate.
    redis_client.setex(
        f"{OAUTH_STATE_REDIS_KEY_PREFIX}{state_key}",
        OAUTH_STATE_EXPIRY_SECONDS,
        state_data.model_dump_json(),
    )
    return state_key


def _retrieve_redis_state(state_key: str) -> OAuthCallbackState:
    """Fetch, delete and validate the OAuth state stored under state_key.

    Raises:
        ValueError: if the key is missing/expired, or the payload fails validation.
    """
    redis_key = f"{OAUTH_STATE_REDIS_KEY_PREFIX}{state_key}"

    # Get state data from Redis
    state_data = redis_client.get(redis_key)

    if not state_data:
        raise ValueError("State parameter has expired or does not exist")

    # Single-use: delete immediately after retrieval so a replayed callback
    # cannot reuse the same state key.
    redis_client.delete(redis_key)

    try:
        # Parse and validate the state data
        oauth_state = OAuthCallbackState.model_validate_json(state_data)

        return oauth_state
    except ValidationError as e:
        # Chain the pydantic error so the original field failures stay visible.
        raise ValueError(f"Invalid state parameter: {str(e)}") from e


def handle_callback(state_key: str, authorization_code: str) -> OAuthCallbackState:
    """Handle the callback from the OAuth provider."""
    # Retrieve state data from Redis (state is automatically deleted after retrieval)
    full_state_data = _retrieve_redis_state(state_key)

    # Exchange the one-time authorization code for tokens, using the PKCE
    # verifier and redirect URI captured when the flow was started.
    tokens = exchange_authorization(
        full_state_data.server_url,
        full_state_data.metadata,
        full_state_data.client_information,
        authorization_code,
        full_state_data.code_verifier,
        full_state_data.redirect_uri,
    )
    # Persist the tokens on the provider record so later MCP calls authenticate.
    provider = OAuthClientProvider(full_state_data.provider_id, full_state_data.tenant_id, for_list=True)
    provider.save_tokens(tokens)
    return full_state_data


def discover_oauth_metadata(server_url: str, protocol_version: Optional[str] = None) -> Optional[OAuthMetadata]:
    """Looks up RFC 8414 OAuth 2.0 Authorization Server Metadata.

    Returns:
        Parsed metadata, or None when the server publishes none (HTTP 404).

    Raises:
        ValueError: on non-404 HTTP failures.
    """
    url = urljoin(server_url, "/.well-known/oauth-authorization-server")

    try:
        headers = {"MCP-Protocol-Version": protocol_version or LATEST_PROTOCOL_VERSION}
        # Bounded timeout so a hung metadata endpoint cannot block the worker.
        response = requests.get(url, headers=headers, timeout=10)
        if response.status_code == 404:
            return None
        if not response.ok:
            raise ValueError(f"HTTP {response.status_code} trying to load well-known OAuth metadata")
        return OAuthMetadata.model_validate(response.json())
    except requests.RequestException as e:
        if isinstance(e, requests.ConnectionError):
            # Retry once without the MCP header: some servers refuse requests
            # carrying headers they do not recognize.
            response = requests.get(url, timeout=10)
            if response.status_code == 404:
                return None
            if not response.ok:
                raise ValueError(f"HTTP {response.status_code} trying to load well-known OAuth metadata")
            return OAuthMetadata.model_validate(response.json())
        raise


def start_authorization(
    server_url: str,
    metadata: Optional[OAuthMetadata],
    client_information: OAuthClientInformation,
    redirect_url: str,
    provider_id: str,
    tenant_id: str,
) -> tuple[str, str]:
    """Begins the authorization flow with secure Redis state storage.

    Validates the server's advertised capabilities (when metadata is
    available), generates a PKCE pair, stashes the callback state in Redis
    keyed by an opaque state token, and returns the assembled authorization
    URL together with the PKCE code verifier.
    """
    response_type = "code"
    code_challenge_method = "S256"

    if not metadata:
        # No discovery document: fall back to the conventional endpoint path.
        authorization_url = urljoin(server_url, "/authorize")
    else:
        authorization_url = metadata.authorization_endpoint
        if response_type not in metadata.response_types_supported:
            raise ValueError(f"Incompatible auth server: does not support response type {response_type}")
        supported_methods = metadata.code_challenge_methods_supported
        if not supported_methods or code_challenge_method not in supported_methods:
            raise ValueError(
                f"Incompatible auth server: does not support code challenge method {code_challenge_method}"
            )

    code_verifier, code_challenge = generate_pkce_challenge()

    # Persist everything the callback handler needs; the returned key doubles
    # as the OAuth ``state`` parameter, so nothing sensitive leaves the server.
    state_key = _create_secure_redis_state(
        OAuthCallbackState(
            provider_id=provider_id,
            tenant_id=tenant_id,
            server_url=server_url,
            metadata=metadata,
            client_information=client_information,
            code_verifier=code_verifier,
            redirect_uri=redirect_url,
        )
    )

    query = urllib.parse.urlencode(
        {
            "response_type": response_type,
            "client_id": client_information.client_id,
            "code_challenge": code_challenge,
            "code_challenge_method": code_challenge_method,
            "redirect_uri": redirect_url,
            "state": state_key,
        }
    )

    return f"{authorization_url}?{query}", code_verifier


def exchange_authorization(
    server_url: str,
    metadata: Optional[OAuthMetadata],
    client_information: OAuthClientInformation,
    authorization_code: str,
    code_verifier: str,
    redirect_uri: str,
) -> OAuthTokens:
    """Exchanges an authorization code for an access token.

    Uses the metadata-advertised token endpoint when available, otherwise the
    conventional ``/token`` path relative to the server URL.
    """
    grant_type = "authorization_code"

    if not metadata:
        token_url = urljoin(server_url, "/token")
    else:
        token_url = metadata.token_endpoint
        supported_grants = metadata.grant_types_supported
        if supported_grants and grant_type not in supported_grants:
            raise ValueError(f"Incompatible auth server: does not support grant type {grant_type}")

    form_data = {
        "grant_type": grant_type,
        "client_id": client_information.client_id,
        "code": authorization_code,
        "code_verifier": code_verifier,
        "redirect_uri": redirect_uri,
    }
    if client_information.client_secret:
        # Confidential clients authenticate with their secret in the form body.
        form_data["client_secret"] = client_information.client_secret

    response = requests.post(token_url, data=form_data)
    if not response.ok:
        raise ValueError(f"Token exchange failed: HTTP {response.status_code}")
    return OAuthTokens.model_validate(response.json())


def refresh_authorization(
    server_url: str,
    metadata: Optional[OAuthMetadata],
    client_information: OAuthClientInformation,
    refresh_token: str,
) -> OAuthTokens:
    """Exchange a refresh token for an updated access token.

    Args:
        server_url: Base URL of the authorization server.
        metadata: Optional discovered server metadata; when present its token
            endpoint and supported grant types are honored.
        client_information: Registered OAuth client credentials.
        refresh_token: The refresh token previously issued by the server.

    Returns:
        The newly issued token set.

    Raises:
        ValueError: If the server does not support the refresh_token grant or
            the token endpoint returns an error status.
    """
    grant_type = "refresh_token"

    if metadata:
        token_url = metadata.token_endpoint
        if metadata.grant_types_supported and grant_type not in metadata.grant_types_supported:
            raise ValueError(f"Incompatible auth server: does not support grant type {grant_type}")
    else:
        token_url = urljoin(server_url, "/token")

    params = {
        "grant_type": grant_type,
        "client_id": client_information.client_id,
        "refresh_token": refresh_token,
    }

    if client_information.client_secret:
        params["client_secret"] = client_information.client_secret

    # Bounded timeout so a stalled token endpoint cannot hang the caller.
    response = requests.post(token_url, data=params, timeout=10)
    if not response.ok:
        raise ValueError(f"Token refresh failed: HTTP {response.status_code}")
    # model_validate for consistency with the rest of this module;
    # parse_obj is the deprecated Pydantic v1 API.
    return OAuthTokens.model_validate(response.json())


def register_client(
    server_url: str,
    metadata: Optional[OAuthMetadata],
    client_metadata: OAuthClientMetadata,
) -> OAuthClientInformationFull:
    """Performs OAuth 2.0 Dynamic Client Registration.

    Posts the client metadata to the registration endpoint (from server
    metadata when available, otherwise the conventional ``/register`` path)
    and returns the full client information issued by the server.
    """
    if metadata is not None and not metadata.registration_endpoint:
        raise ValueError("Incompatible auth server: does not support dynamic client registration")
    registration_url = metadata.registration_endpoint if metadata else urljoin(server_url, "/register")

    response = requests.post(
        registration_url,
        json=client_metadata.model_dump(),
        headers={"Content-Type": "application/json"},
    )
    if not response.ok:
        # Surface the HTTP error with full context from requests.
        response.raise_for_status()
    return OAuthClientInformationFull.model_validate(response.json())


def auth(
    provider: OAuthClientProvider,
    server_url: str,
    authorization_code: Optional[str] = None,
    state_param: Optional[str] = None,
    for_list: bool = False,
) -> dict[str, str]:
    """Orchestrates the full auth flow with a server using secure Redis state storage.

    Depending on the provider's current credential state this performs one of:
    dynamic client registration, authorization-code exchange (when
    ``authorization_code`` is supplied), token refresh, or the start of a new
    authorization flow.

    Returns:
        ``{"result": "success"}`` once tokens are saved, or
        ``{"authorization_url": ...}`` when the caller must redirect the user.

    Raises:
        ValueError: On registration failure, missing/invalid state, or a
            failed token refresh.
    """
    # NOTE(review): ``for_list`` is never read in this function — the provider
    # is constructed by the caller. Confirm whether the parameter can be dropped.
    metadata = discover_oauth_metadata(server_url)

    # Handle client registration if needed
    client_information = provider.client_information()
    if not client_information:
        # An auth-code exchange presumes a prior registration; refuse otherwise.
        if authorization_code is not None:
            raise ValueError("Existing OAuth client information is required when exchanging an authorization code")
        try:
            full_information = register_client(server_url, metadata, provider.client_metadata)
        except requests.RequestException as e:
            raise ValueError(f"Could not register OAuth client: {e}")
        provider.save_client_information(full_information)
        client_information = full_information

    # Exchange authorization code for tokens
    if authorization_code is not None:
        if not state_param:
            raise ValueError("State parameter is required when exchanging authorization code")

        try:
            # Retrieve state data from Redis using state key
            # (the entry is deleted on retrieval, so it is single-use).
            full_state_data = _retrieve_redis_state(state_param)

            code_verifier = full_state_data.code_verifier
            redirect_uri = full_state_data.redirect_uri

            if not code_verifier or not redirect_uri:
                raise ValueError("Missing code_verifier or redirect_uri in state data")

        except (json.JSONDecodeError, ValueError) as e:
            raise ValueError(f"Invalid state parameter: {e}")

        tokens = exchange_authorization(
            server_url,
            metadata,
            client_information,
            authorization_code,
            code_verifier,
            redirect_uri,
        )
        provider.save_tokens(tokens)
        return {"result": "success"}

    provider_tokens = provider.tokens()

    # Handle token refresh or new authorization
    if provider_tokens and provider_tokens.refresh_token:
        try:
            new_tokens = refresh_authorization(server_url, metadata, client_information, provider_tokens.refresh_token)
            provider.save_tokens(new_tokens)
            return {"result": "success"}
        except Exception as e:
            raise ValueError(f"Could not refresh OAuth tokens: {e}")

    # Start new authorization flow (user must visit the returned URL).
    authorization_url, code_verifier = start_authorization(
        server_url,
        metadata,
        client_information,
        provider.redirect_url,
        provider.mcp_provider.id,
        provider.mcp_provider.tenant_id,
    )

    provider.save_code_verifier(code_verifier)
    return {"authorization_url": authorization_url}

+ 81
- 0
api/core/mcp/auth/auth_provider.py ファイルの表示

@@ -0,0 +1,81 @@
from typing import Optional

from configs import dify_config
from core.mcp.types import (
OAuthClientInformation,
OAuthClientInformationFull,
OAuthClientMetadata,
OAuthTokens,
)
from models.tools import MCPToolProvider
from services.tools.mcp_tools_mange_service import MCPToolManageService

# Protocol version advertised in discovery requests.
LATEST_PROTOCOL_VERSION = "1.0"


class OAuthClientProvider:
    """Bridges the OAuth flow to a persisted MCP tool provider record.

    Loads and stores client registration data, tokens and the PKCE verifier
    through MCPToolManageService, using the provider's encrypted credentials
    as the backing store.
    """

    mcp_provider: MCPToolProvider

    def __init__(self, provider_id: str, tenant_id: str, for_list: bool = False):
        # ``for_list`` selects the lookup strategy: by provider id (list views)
        # versus by server identifier.
        if for_list:
            self.mcp_provider = MCPToolManageService.get_mcp_provider_by_provider_id(provider_id, tenant_id)
        else:
            self.mcp_provider = MCPToolManageService.get_mcp_provider_by_server_identifier(provider_id, tenant_id)

    @property
    def redirect_url(self) -> str:
        """The URL to redirect the user agent to after authorization."""
        return dify_config.CONSOLE_API_URL + "/console/api/mcp/oauth/callback"

    @property
    def client_metadata(self) -> OAuthClientMetadata:
        """Metadata about this OAuth client."""
        return OAuthClientMetadata(
            redirect_uris=[self.redirect_url],
            token_endpoint_auth_method="none",
            grant_types=["authorization_code", "refresh_token"],
            response_types=["code"],
            client_name="Dify",
            client_uri="https://github.com/langgenius/dify",
        )

    def client_information(self) -> Optional[OAuthClientInformation]:
        """Loads information about this OAuth client, or None if unregistered."""
        client_information = self.mcp_provider.decrypted_credentials.get("client_information", {})
        if not client_information:
            return None
        return OAuthClientInformation.model_validate(client_information)

    def save_client_information(self, client_information: OAuthClientInformationFull) -> None:
        """Saves client information after dynamic registration."""
        MCPToolManageService.update_mcp_provider_credentials(
            self.mcp_provider,
            {"client_information": client_information.model_dump()},
        )

    def tokens(self) -> Optional[OAuthTokens]:
        """Loads any existing OAuth tokens for the current session."""
        credentials = self.mcp_provider.decrypted_credentials
        if not credentials:
            return None
        # NOTE(review): this returns a token object even when "access_token"
        # is absent (empty string) — confirm callers treat that as unauthed.
        return OAuthTokens(
            access_token=credentials.get("access_token", ""),
            token_type=credentials.get("token_type", "Bearer"),
            expires_in=int(credentials.get("expires_in", "3600") or 3600),
            refresh_token=credentials.get("refresh_token", ""),
        )

    def save_tokens(self, tokens: OAuthTokens) -> None:
        """Stores new OAuth tokens for the current session."""
        # update mcp provider credentials and mark the provider as authorized
        token_dict = tokens.model_dump()
        MCPToolManageService.update_mcp_provider_credentials(self.mcp_provider, token_dict, authed=True)

    def save_code_verifier(self, code_verifier: str) -> None:
        """Saves a PKCE code verifier for the current session."""
        MCPToolManageService.update_mcp_provider_credentials(self.mcp_provider, {"code_verifier": code_verifier})

    def code_verifier(self) -> str:
        """Loads the PKCE code verifier for the current session."""
        # get code verifier from mcp provider credentials
        return str(self.mcp_provider.decrypted_credentials.get("code_verifier", ""))

+ 361
- 0
api/core/mcp/client/sse_client.py ファイルの表示

@@ -0,0 +1,361 @@
import logging
import queue
from collections.abc import Generator
from concurrent.futures import ThreadPoolExecutor
from contextlib import contextmanager
from typing import Any, TypeAlias, final
from urllib.parse import urljoin, urlparse

import httpx
from sseclient import SSEClient

from core.mcp import types
from core.mcp.error import MCPAuthError, MCPConnectionError
from core.mcp.types import SessionMessage
from core.mcp.utils import create_ssrf_proxy_mcp_http_client, ssrf_proxy_sse_connect

logger = logging.getLogger(__name__)

DEFAULT_QUEUE_READ_TIMEOUT = 3


@final
class _StatusReady:
    """Status-queue sentinel: the server sent its 'endpoint' event and the
    POST target URL is known."""

    def __init__(self, endpoint_url: str):
        self._endpoint_url = endpoint_url


@final
class _StatusError:
    """Status-queue sentinel: connection setup failed; carries the exception
    to re-raise on the caller's thread."""

    def __init__(self, exc: Exception):
        self._exc = exc


# Type aliases for better readability
ReadQueue: TypeAlias = queue.Queue[SessionMessage | Exception | None]
WriteQueue: TypeAlias = queue.Queue[SessionMessage | Exception | None]
StatusQueue: TypeAlias = queue.Queue[_StatusReady | _StatusError]


def remove_request_params(url: str) -> str:
    """Strip the query string and fragment, keeping scheme, host and path."""
    path_only = urlparse(url).path
    return urljoin(url, path_only)


class SSETransport:
    """SSE client transport implementation.

    Runs two worker threads: one reads SSE events from the server into a read
    queue, the other posts outgoing messages from a write queue to the
    endpoint URL announced by the server's 'endpoint' event. ``None`` on a
    queue is the shutdown sentinel.
    """

    def __init__(
        self,
        url: str,
        headers: dict[str, Any] | None = None,
        timeout: float = 5.0,
        sse_read_timeout: float = 5 * 60,
    ) -> None:
        """Initialize the SSE transport.

        Args:
            url: The SSE endpoint URL.
            headers: Optional headers to include in requests.
            timeout: HTTP timeout for regular operations.
            sse_read_timeout: Timeout for SSE read operations.
        """
        self.url = url
        self.headers = headers or {}
        self.timeout = timeout
        self.sse_read_timeout = sse_read_timeout
        # Filled in once the server announces its 'endpoint' event.
        self.endpoint_url: str | None = None

    def _validate_endpoint_url(self, endpoint_url: str) -> bool:
        """Validate that the endpoint URL matches the connection origin.

        Args:
            endpoint_url: The endpoint URL to validate.

        Returns:
            True if valid, False otherwise.
        """
        # Same-origin check: scheme and host:port must both match.
        url_parsed = urlparse(self.url)
        endpoint_parsed = urlparse(endpoint_url)

        return url_parsed.netloc == endpoint_parsed.netloc and url_parsed.scheme == endpoint_parsed.scheme

    def _handle_endpoint_event(self, sse_data: str, status_queue: StatusQueue) -> None:
        """Handle an 'endpoint' SSE event.

        Args:
            sse_data: The SSE event data.
            status_queue: Queue to put status updates.
        """
        endpoint_url = urljoin(self.url, sse_data)
        logger.info(f"Received endpoint URL: {endpoint_url}")

        if not self._validate_endpoint_url(endpoint_url):
            error_msg = f"Endpoint origin does not match connection origin: {endpoint_url}"
            logger.error(error_msg)
            status_queue.put(_StatusError(ValueError(error_msg)))
            return

        status_queue.put(_StatusReady(endpoint_url))

    def _handle_message_event(self, sse_data: str, read_queue: ReadQueue) -> None:
        """Handle a 'message' SSE event.

        Args:
            sse_data: The SSE event data.
            read_queue: Queue to put parsed messages.
        """
        try:
            message = types.JSONRPCMessage.model_validate_json(sse_data)
            logger.debug(f"Received server message: {message}")
            session_message = SessionMessage(message)
            read_queue.put(session_message)
        except Exception as exc:
            # Parse errors are surfaced to the reader as queued exceptions.
            logger.exception("Error parsing server message")
            read_queue.put(exc)

    def _handle_sse_event(self, sse, read_queue: ReadQueue, status_queue: StatusQueue) -> None:
        """Handle a single SSE event.

        Args:
            sse: The SSE event object.
            read_queue: Queue for message events.
            status_queue: Queue for status events.
        """
        match sse.event:
            case "endpoint":
                self._handle_endpoint_event(sse.data, status_queue)
            case "message":
                self._handle_message_event(sse.data, read_queue)
            case _:
                logger.warning(f"Unknown SSE event: {sse.event}")

    def sse_reader(self, event_source, read_queue: ReadQueue, status_queue: StatusQueue) -> None:
        """Read and process SSE events.

        Runs on a worker thread; always terminates by putting the ``None``
        sentinel on the read queue.

        Args:
            event_source: The SSE event source.
            read_queue: Queue to put received messages.
            status_queue: Queue to put status updates.
        """
        try:
            for sse in event_source.iter_sse():
                self._handle_sse_event(sse, read_queue, status_queue)
        except httpx.ReadError as exc:
            # Expected when the connection is closed during shutdown.
            logger.debug(f"SSE reader shutting down normally: {exc}")
        except Exception as exc:
            read_queue.put(exc)
        finally:
            read_queue.put(None)

    def _send_message(self, client: httpx.Client, endpoint_url: str, message: SessionMessage) -> None:
        """Send a single message to the server.

        Args:
            client: HTTP client to use.
            endpoint_url: The endpoint URL to send to.
            message: The message to send.
        """
        response = client.post(
            endpoint_url,
            json=message.message.model_dump(
                by_alias=True,
                mode="json",
                exclude_none=True,
            ),
        )
        response.raise_for_status()
        logger.debug(f"Client message sent successfully: {response.status_code}")

    def post_writer(self, client: httpx.Client, endpoint_url: str, write_queue: WriteQueue) -> None:
        """Handle writing messages to the server.

        Runs on a worker thread; exits on the ``None`` sentinel and always
        re-posts ``None`` on the way out so other waiters unblock.

        Args:
            client: HTTP client to use.
            endpoint_url: The endpoint URL to send messages to.
            write_queue: Queue to read messages from.
        """
        try:
            while True:
                try:
                    message = write_queue.get(timeout=DEFAULT_QUEUE_READ_TIMEOUT)
                    if message is None:
                        break
                    if isinstance(message, Exception):
                        # NOTE(review): re-queueing the exception makes this
                        # loop pick it up again immediately — confirm this is
                        # intended (it can spin until shutdown).
                        write_queue.put(message)
                        continue

                    self._send_message(client, endpoint_url, message)

                except queue.Empty:
                    # Timeout just means no message yet; keep polling.
                    continue
        except httpx.ReadError as exc:
            logger.debug(f"Post writer shutting down normally: {exc}")
        except Exception as exc:
            logger.exception("Error writing messages")
            write_queue.put(exc)
        finally:
            write_queue.put(None)

    def _wait_for_endpoint(self, status_queue: StatusQueue) -> str:
        """Wait for the endpoint URL from the status queue.

        Args:
            status_queue: Queue to read status from.

        Returns:
            The endpoint URL.

        Raises:
            ValueError: If endpoint URL is not received or there's an error.
        """
        try:
            # NOTE(review): hard-coded 1s wait, independent of self.timeout —
            # confirm slow servers can deliver the endpoint event in time.
            status = status_queue.get(timeout=1)
        except queue.Empty:
            raise ValueError("failed to get endpoint URL")

        if isinstance(status, _StatusReady):
            return status._endpoint_url
        elif isinstance(status, _StatusError):
            raise status._exc
        else:
            raise ValueError("failed to get endpoint URL")

    def connect(
        self,
        executor: ThreadPoolExecutor,
        client: httpx.Client,
        event_source,
    ) -> tuple[ReadQueue, WriteQueue]:
        """Establish connection and start worker threads.

        Args:
            executor: Thread pool executor.
            client: HTTP client.
            event_source: SSE event source.

        Returns:
            Tuple of (read_queue, write_queue).
        """
        read_queue: ReadQueue = queue.Queue()
        write_queue: WriteQueue = queue.Queue()
        status_queue: StatusQueue = queue.Queue()

        # Start SSE reader thread
        executor.submit(self.sse_reader, event_source, read_queue, status_queue)

        # Wait for endpoint URL (blocks until the server's 'endpoint' event)
        endpoint_url = self._wait_for_endpoint(status_queue)
        self.endpoint_url = endpoint_url

        # Start post writer thread
        executor.submit(self.post_writer, client, endpoint_url, write_queue)

        return read_queue, write_queue


@contextmanager
def sse_client(
    url: str,
    headers: dict[str, Any] | None = None,
    timeout: float = 5.0,
    sse_read_timeout: float = 5 * 60,
) -> Generator[tuple[ReadQueue, WriteQueue], None, None]:
    """
    Client transport for SSE.
    `sse_read_timeout` determines how long (in seconds) the client will wait for a new
    event before disconnecting. All other HTTP operations are controlled by `timeout`.

    Args:
        url: The SSE endpoint URL.
        headers: Optional headers to include in requests.
        timeout: HTTP timeout for regular operations.
        sse_read_timeout: Timeout for SSE read operations.

    Yields:
        Tuple of (read_queue, write_queue) for message communication.

    Raises:
        MCPAuthError: If the server responds 401 during connection setup.
        MCPConnectionError: On any other HTTP error status during setup.
    """
    transport = SSETransport(url, headers, timeout, sse_read_timeout)

    # Kept outside the try so the finally block can signal shutdown even when
    # connect() never ran.
    read_queue: ReadQueue | None = None
    write_queue: WriteQueue | None = None

    with ThreadPoolExecutor() as executor:
        try:
            with create_ssrf_proxy_mcp_http_client(headers=transport.headers) as client:
                with ssrf_proxy_sse_connect(
                    url, timeout=httpx.Timeout(timeout, read=sse_read_timeout), client=client
                ) as event_source:
                    event_source.response.raise_for_status()

                    read_queue, write_queue = transport.connect(executor, client, event_source)

                    yield read_queue, write_queue

        except httpx.HTTPStatusError as exc:
            # 401 is mapped to an auth error so callers can trigger OAuth.
            if exc.response.status_code == 401:
                raise MCPAuthError()
            raise MCPConnectionError()
        except Exception:
            logger.exception("Error connecting to SSE endpoint")
            raise
        finally:
            # Clean up queues: None is the worker-thread shutdown sentinel.
            if read_queue:
                read_queue.put(None)
            if write_queue:
                write_queue.put(None)


def send_message(http_client: httpx.Client, endpoint_url: str, session_message: SessionMessage) -> None:
    """
    Send a message to the server using the provided HTTP client.

    Args:
        http_client: The HTTP client to use for sending
        endpoint_url: The endpoint URL to send the message to
        session_message: The message to send

    Raises:
        Exception: Re-raises any failure from the POST after logging it.
    """
    payload = session_message.message.model_dump(
        by_alias=True,
        mode="json",
        exclude_none=True,
    )
    try:
        response = http_client.post(endpoint_url, json=payload)
        response.raise_for_status()
        logger.debug(f"Client message sent successfully: {response.status_code}")
    except Exception:
        logger.exception("Error sending message")
        raise


def read_messages(
    sse_client: SSEClient,
) -> Generator[SessionMessage | Exception, None, None]:
    """
    Read messages from the SSE client.

    Args:
        sse_client: The SSE client to read from

    Yields:
        SessionMessage or Exception for each event received
    """
    try:
        for event in sse_client.events():
            if event.event != "message":
                logger.warning(f"Unknown SSE event: {event.event}")
                continue
            try:
                parsed = types.JSONRPCMessage.model_validate_json(event.data)
                logger.debug(f"Received server message: {parsed}")
                yield SessionMessage(parsed)
            except Exception as exc:
                # Per-message parse failures are yielded, not raised,
                # so the stream keeps flowing.
                logger.exception("Error parsing server message")
                yield exc
    except Exception as exc:
        logger.exception("Error reading SSE messages")
        yield exc

+ 476
- 0
api/core/mcp/client/streamable_client.py ファイルの表示

@@ -0,0 +1,476 @@
"""
StreamableHTTP Client Transport Module

This module implements the StreamableHTTP transport for MCP clients,
providing support for HTTP POST requests with optional SSE streaming responses
and session management.
"""

import logging
import queue
from collections.abc import Callable, Generator
from concurrent.futures import ThreadPoolExecutor
from contextlib import contextmanager
from dataclasses import dataclass
from datetime import timedelta
from typing import Any, cast

import httpx
from httpx_sse import EventSource, ServerSentEvent

from core.mcp.types import (
ClientMessageMetadata,
ErrorData,
JSONRPCError,
JSONRPCMessage,
JSONRPCNotification,
JSONRPCRequest,
JSONRPCResponse,
RequestId,
SessionMessage,
)
from core.mcp.utils import create_ssrf_proxy_mcp_http_client, ssrf_proxy_sse_connect

logger = logging.getLogger(__name__)


SessionMessageOrError = SessionMessage | Exception | None
# Queue types with clearer names for their roles
ServerToClientQueue = queue.Queue[SessionMessageOrError] # Server to client messages
ClientToServerQueue = queue.Queue[SessionMessage | None] # Client to server messages
GetSessionIdCallback = Callable[[], str | None]

MCP_SESSION_ID = "mcp-session-id"
LAST_EVENT_ID = "last-event-id"
CONTENT_TYPE = "content-type"
ACCEPT = "Accept"


JSON = "application/json"
SSE = "text/event-stream"

DEFAULT_QUEUE_READ_TIMEOUT = 3


class StreamableHTTPError(Exception):
    """Base exception for StreamableHTTP transport errors.

    Subclass this for transport-specific failure modes."""

    pass


class ResumptionError(StreamableHTTPError):
    """Raised when resumption request is invalid (e.g. no resumption token
    was supplied in the request metadata)."""

    pass


@dataclass
class RequestContext:
    """Context for a request operation.

    Bundles everything a POST/resumption handler needs so the worker-thread
    methods take a single argument."""

    client: httpx.Client  # HTTP client the request is sent on
    headers: dict[str, str]  # base request headers (before session ID merge)
    session_id: str | None  # current MCP session ID, if negotiated
    session_message: SessionMessage  # outgoing message being processed
    metadata: ClientMessageMetadata | None  # resumption token / callbacks
    server_to_client_queue: ServerToClientQueue  # Renamed for clarity
    sse_read_timeout: timedelta  # read timeout for SSE responses


class StreamableHTTPTransport:
    """StreamableHTTP client transport implementation.

    Sends JSON-RPC messages via HTTP POST; responses arrive either as plain
    JSON or as an SSE stream on the same response. Optionally maintains a
    server-assigned session ID and a GET-based stream for server-initiated
    messages.
    """

    def __init__(
        self,
        url: str,
        headers: dict[str, Any] | None = None,
        timeout: timedelta = timedelta(seconds=30),
        sse_read_timeout: timedelta = timedelta(seconds=60 * 5),
    ) -> None:
        """Initialize the StreamableHTTP transport.

        Args:
            url: The endpoint URL.
            headers: Optional headers to include in requests.
            timeout: HTTP timeout for regular operations.
            sse_read_timeout: Timeout for SSE read operations.
        """
        self.url = url
        self.headers = headers or {}
        self.timeout = timeout
        self.sse_read_timeout = sse_read_timeout
        self.session_id: str | None = None
        # Every request accepts both JSON and SSE responses.
        self.request_headers = {
            ACCEPT: f"{JSON}, {SSE}",
            CONTENT_TYPE: JSON,
            **self.headers,
        }

    def _update_headers_with_session(self, base_headers: dict[str, str]) -> dict[str, str]:
        """Update headers with session ID if available."""
        headers = base_headers.copy()
        if self.session_id:
            headers[MCP_SESSION_ID] = self.session_id
        return headers

    def _is_initialization_request(self, message: JSONRPCMessage) -> bool:
        """Check if the message is an initialization request."""
        return isinstance(message.root, JSONRPCRequest) and message.root.method == "initialize"

    def _is_initialized_notification(self, message: JSONRPCMessage) -> bool:
        """Check if the message is an initialized notification."""
        return isinstance(message.root, JSONRPCNotification) and message.root.method == "notifications/initialized"

    def _maybe_extract_session_id_from_response(
        self,
        response: httpx.Response,
    ) -> None:
        """Extract and store session ID from response headers."""
        new_session_id = response.headers.get(MCP_SESSION_ID)
        if new_session_id:
            self.session_id = new_session_id
            logger.info(f"Received session ID: {self.session_id}")

    def _handle_sse_event(
        self,
        sse: ServerSentEvent,
        server_to_client_queue: ServerToClientQueue,
        original_request_id: RequestId | None = None,
        resumption_callback: Callable[[str], None] | None = None,
    ) -> bool:
        """Handle an SSE event, returning True if the response is complete."""
        if sse.event == "message":
            try:
                message = JSONRPCMessage.model_validate_json(sse.data)
                logger.debug(f"SSE message: {message}")

                # If this is a response and we have original_request_id, replace it
                # (resumed requests get fresh IDs server-side).
                if original_request_id is not None and isinstance(message.root, JSONRPCResponse | JSONRPCError):
                    message.root.id = original_request_id

                session_message = SessionMessage(message)
                # Put message in queue that goes to client
                server_to_client_queue.put(session_message)

                # Call resumption token callback if we have an ID
                if sse.id and resumption_callback:
                    resumption_callback(sse.id)

                # If this is a response or error return True indicating completion
                # Otherwise, return False to continue listening
                return isinstance(message.root, JSONRPCResponse | JSONRPCError)

            except Exception as exc:
                # Put exception in queue that goes to client
                server_to_client_queue.put(exc)
                return False
        elif sse.event == "ping":
            logger.debug("Received ping event")
            return False
        else:
            logger.warning(f"Unknown SSE event: {sse.event}")
            return False

    def handle_get_stream(
        self,
        client: httpx.Client,
        server_to_client_queue: ServerToClientQueue,
    ) -> None:
        """Handle GET stream for server-initiated messages."""
        try:
            # A GET stream only makes sense once a session exists.
            if not self.session_id:
                return

            headers = self._update_headers_with_session(self.request_headers)

            # NOTE(review): timedelta.seconds drops whole days/fractions;
            # total_seconds() would be safer — confirm timeouts stay < 1 day.
            with ssrf_proxy_sse_connect(
                self.url,
                headers=headers,
                timeout=httpx.Timeout(self.timeout.seconds, read=self.sse_read_timeout.seconds),
                client=client,
                method="GET",
            ) as event_source:
                event_source.response.raise_for_status()
                logger.debug("GET SSE connection established")

                for sse in event_source.iter_sse():
                    self._handle_sse_event(sse, server_to_client_queue)

        except Exception as exc:
            # The GET stream is best-effort; failures never kill the transport.
            logger.debug(f"GET stream error (non-fatal): {exc}")

    def _handle_resumption_request(self, ctx: RequestContext) -> None:
        """Handle a resumption request using GET with SSE."""
        headers = self._update_headers_with_session(ctx.headers)
        if ctx.metadata and ctx.metadata.resumption_token:
            headers[LAST_EVENT_ID] = ctx.metadata.resumption_token
        else:
            raise ResumptionError("Resumption request requires a resumption token")

        # Extract original request ID to map responses
        original_request_id = None
        if isinstance(ctx.session_message.message.root, JSONRPCRequest):
            original_request_id = ctx.session_message.message.root.id

        with ssrf_proxy_sse_connect(
            self.url,
            headers=headers,
            timeout=httpx.Timeout(self.timeout.seconds, read=ctx.sse_read_timeout.seconds),
            client=ctx.client,
            method="GET",
        ) as event_source:
            event_source.response.raise_for_status()
            logger.debug("Resumption GET SSE connection established")

            for sse in event_source.iter_sse():
                is_complete = self._handle_sse_event(
                    sse,
                    ctx.server_to_client_queue,
                    original_request_id,
                    ctx.metadata.on_resumption_token_update if ctx.metadata else None,
                )
                if is_complete:
                    break

    def _handle_post_request(self, ctx: RequestContext) -> None:
        """Handle a POST request with response processing."""
        headers = self._update_headers_with_session(ctx.headers)
        message = ctx.session_message.message
        is_initialization = self._is_initialization_request(message)

        with ctx.client.stream(
            "POST",
            self.url,
            json=message.model_dump(by_alias=True, mode="json", exclude_none=True),
            headers=headers,
        ) as response:
            # 202: notification accepted, nothing more to read.
            if response.status_code == 202:
                logger.debug("Received 202 Accepted")
                return

            # 404: the server no longer knows this session.
            if response.status_code == 404:
                if isinstance(message.root, JSONRPCRequest):
                    self._send_session_terminated_error(
                        ctx.server_to_client_queue,
                        message.root.id,
                    )
                return

            response.raise_for_status()
            if is_initialization:
                self._maybe_extract_session_id_from_response(response)

            content_type = cast(str, response.headers.get(CONTENT_TYPE, "").lower())

            # Dispatch on response content type: single JSON body vs SSE stream.
            if content_type.startswith(JSON):
                self._handle_json_response(response, ctx.server_to_client_queue)
            elif content_type.startswith(SSE):
                self._handle_sse_response(response, ctx)
            else:
                self._handle_unexpected_content_type(
                    content_type,
                    ctx.server_to_client_queue,
                )

    def _handle_json_response(
        self,
        response: httpx.Response,
        server_to_client_queue: ServerToClientQueue,
    ) -> None:
        """Handle JSON response from the server."""
        try:
            content = response.read()
            message = JSONRPCMessage.model_validate_json(content)
            session_message = SessionMessage(message)
            server_to_client_queue.put(session_message)
        except Exception as exc:
            server_to_client_queue.put(exc)

    def _handle_sse_response(self, response: httpx.Response, ctx: RequestContext) -> None:
        """Handle SSE response from the server."""
        try:
            event_source = EventSource(response)
            for sse in event_source.iter_sse():
                is_complete = self._handle_sse_event(
                    sse,
                    ctx.server_to_client_queue,
                    resumption_callback=(ctx.metadata.on_resumption_token_update if ctx.metadata else None),
                )
                # Stop reading once a response/error completes the exchange.
                if is_complete:
                    break
        except Exception as e:
            ctx.server_to_client_queue.put(e)

    def _handle_unexpected_content_type(
        self,
        content_type: str,
        server_to_client_queue: ServerToClientQueue,
    ) -> None:
        """Handle unexpected content type in response."""
        error_msg = f"Unexpected content type: {content_type}"
        logger.error(error_msg)
        server_to_client_queue.put(ValueError(error_msg))

    def _send_session_terminated_error(
        self,
        server_to_client_queue: ServerToClientQueue,
        request_id: RequestId,
    ) -> None:
        """Send a session terminated error response."""
        # NOTE(review): JSON-RPC "Invalid Request" is -32600; the positive
        # 32600 here looks like a sign slip — confirm the intended code.
        jsonrpc_error = JSONRPCError(
            jsonrpc="2.0",
            id=request_id,
            error=ErrorData(code=32600, message="Session terminated by server"),
        )
        session_message = SessionMessage(JSONRPCMessage(jsonrpc_error))
        server_to_client_queue.put(session_message)

    def post_writer(
        self,
        client: httpx.Client,
        client_to_server_queue: ClientToServerQueue,
        server_to_client_queue: ServerToClientQueue,
        start_get_stream: Callable[[], None],
    ) -> None:
        """Handle writing requests to the server.

        This method processes messages from the client_to_server_queue and sends them to the server.
        Responses are written to the server_to_client_queue.
        Runs on a worker thread; a ``None`` message is the shutdown sentinel.
        """
        while True:
            try:
                # Read message from client queue with timeout to check stop_event periodically
                session_message = client_to_server_queue.get(timeout=DEFAULT_QUEUE_READ_TIMEOUT)
                if session_message is None:
                    break

                message = session_message.message
                metadata = (
                    session_message.metadata if isinstance(session_message.metadata, ClientMessageMetadata) else None
                )

                # Check if this is a resumption request
                is_resumption = bool(metadata and metadata.resumption_token)

                logger.debug(f"Sending client message: {message}")

                # Handle initialized notification: the GET stream for
                # server-initiated messages may start only after this point.
                if self._is_initialized_notification(message):
                    start_get_stream()

                ctx = RequestContext(
                    client=client,
                    headers=self.request_headers,
                    session_id=self.session_id,
                    session_message=session_message,
                    metadata=metadata,
                    server_to_client_queue=server_to_client_queue,  # Queue to write responses to client
                    sse_read_timeout=self.sse_read_timeout,
                )

                if is_resumption:
                    self._handle_resumption_request(ctx)
                else:
                    self._handle_post_request(ctx)
            except queue.Empty:
                continue
            except Exception as exc:
                server_to_client_queue.put(exc)

    def terminate_session(self, client: httpx.Client) -> None:
        """Terminate the session by sending a DELETE request."""
        if not self.session_id:
            return

        try:
            headers = self._update_headers_with_session(self.request_headers)
            response = client.delete(self.url, headers=headers)

            if response.status_code == 405:
                # Server does not implement DELETE; nothing else to do.
                logger.debug("Server does not allow session termination")
            elif response.status_code != 200:
                logger.warning(f"Session termination failed: {response.status_code}")
        except Exception as exc:
            # Best-effort: termination failures are logged, never raised.
            logger.warning(f"Session termination failed: {exc}")

    def get_session_id(self) -> str | None:
        """Get the current session ID."""
        return self.session_id


@contextmanager
def streamablehttp_client(
    url: str,
    headers: dict[str, Any] | None = None,
    timeout: timedelta = timedelta(seconds=30),
    sse_read_timeout: timedelta = timedelta(seconds=60 * 5),
    terminate_on_close: bool = True,
) -> Generator[
    tuple[
        ServerToClientQueue,  # Queue for receiving messages FROM server
        ClientToServerQueue,  # Queue for sending messages TO server
        GetSessionIdCallback,
    ],
    None,
    None,
]:
    """
    Client transport for StreamableHTTP.

    `sse_read_timeout` determines how long (in seconds) the client will wait for a new
    event before disconnecting. All other HTTP operations are controlled by `timeout`.

    Args:
        url: The endpoint URL.
        headers: Optional headers to include in requests.
        timeout: HTTP timeout for regular operations.
        sse_read_timeout: Timeout for SSE read operations.
        terminate_on_close: Whether to DELETE the session when the context exits.

    Yields:
        Tuple containing:
            - server_to_client_queue: Queue for reading messages FROM the server
            - client_to_server_queue: Queue for sending messages TO the server
            - get_session_id_callback: Function to retrieve the current session ID
    """
    transport = StreamableHTTPTransport(url, headers, timeout, sse_read_timeout)

    # Create queues with clear directional meaning
    server_to_client_queue: ServerToClientQueue = queue.Queue()  # For messages FROM server TO client
    client_to_server_queue: ClientToServerQueue = queue.Queue()  # For messages FROM client TO server

    with ThreadPoolExecutor(max_workers=2) as executor:
        try:
            # FIX: use total_seconds() rather than timedelta.seconds —
            # .seconds is only the seconds *component* (0..86399) and silently
            # drops days and sub-second fractions, so e.g. timedelta(days=1)
            # became a 0-second timeout.
            with create_ssrf_proxy_mcp_http_client(
                headers=transport.request_headers,
                timeout=httpx.Timeout(
                    transport.timeout.total_seconds(),
                    read=transport.sse_read_timeout.total_seconds(),
                ),
            ) as client:
                # Define callbacks that need access to thread pool
                def start_get_stream() -> None:
                    """Start a worker thread to handle server-initiated messages."""
                    executor.submit(transport.handle_get_stream, client, server_to_client_queue)

                # Start the post_writer worker thread
                executor.submit(
                    transport.post_writer,
                    client,
                    client_to_server_queue,  # Queue for messages FROM client TO server
                    server_to_client_queue,  # Queue for messages FROM server TO client
                    start_get_stream,
                )

                try:
                    yield (
                        server_to_client_queue,  # Queue for receiving messages FROM server
                        client_to_server_queue,  # Queue for sending messages TO server
                        transport.get_session_id,
                    )
                finally:
                    # Best-effort session teardown before stopping the workers.
                    if transport.session_id and terminate_on_close:
                        transport.terminate_session(client)

                    # Signal threads to stop
                    client_to_server_queue.put(None)
        finally:
            # Clear any remaining items and add None sentinel to unblock any waiting threads
            try:
                while not client_to_server_queue.empty():
                    client_to_server_queue.get_nowait()
            except queue.Empty:
                pass

            client_to_server_queue.put(None)
            server_to_client_queue.put(None)

+ 19
- 0
api/core/mcp/entities.py ファイルの表示

@@ -0,0 +1,19 @@
from dataclasses import dataclass
from typing import Any, Generic, TypeVar

from core.mcp.session.base_session import BaseSession
from core.mcp.types import LATEST_PROTOCOL_VERSION, RequestId, RequestParams

# MCP protocol revisions this implementation can negotiate during `initialize`.
SUPPORTED_PROTOCOL_VERSIONS: list[str] = ["2024-11-05", LATEST_PROTOCOL_VERSION]


# Type variables binding a request context to its owning session type and to
# whatever lifespan state the host application threads through.
SessionT = TypeVar("SessionT", bound=BaseSession[Any, Any, Any, Any, Any])
LifespanContextT = TypeVar("LifespanContextT")


@dataclass
class RequestContext(Generic[SessionT, LifespanContextT]):
    """Per-request bundle handed to request handlers/callbacks."""

    # JSON-RPC id of the request being handled.
    request_id: RequestId
    # Optional request metadata (e.g. progress token) sent by the peer.
    meta: RequestParams.Meta | None
    # The session the request arrived on; use it to send replies/notifications.
    session: SessionT
    # Application-defined lifespan state; None when the host provides none.
    lifespan_context: LifespanContextT

+ 10
- 0
api/core/mcp/error.py ファイルの表示

@@ -0,0 +1,10 @@
class MCPError(Exception):
    """Base class for all MCP-related errors."""


class MCPConnectionError(MCPError):
    """Raised when communication with an MCP server fails."""


class MCPAuthError(MCPConnectionError):
    """Raised when an MCP request is rejected for authentication reasons (e.g. HTTP 401)."""

+ 150
- 0
api/core/mcp/mcp_client.py ファイルの表示

@@ -0,0 +1,150 @@
import logging
from collections.abc import Callable
from contextlib import AbstractContextManager, ExitStack
from types import TracebackType
from typing import Any, Optional, cast
from urllib.parse import urlparse

from core.mcp.client.sse_client import sse_client
from core.mcp.client.streamable_client import streamablehttp_client
from core.mcp.error import MCPAuthError, MCPConnectionError
from core.mcp.session.client_session import ClientSession
from core.mcp.types import Tool

logger = logging.getLogger(__name__)


class MCPClient:
    """Blocking MCP client supporting both streamable-HTTP and SSE transports.

    Intended to be used as a context manager:

        with MCPClient(url, provider_id, tenant_id) as client:
            tools = client.list_tools()

    The transport is chosen from the URL's trailing path segment ("mcp" or
    "sse"); for any other path, SSE is probed first with a fallback to
    streamable HTTP.
    """

    def __init__(
        self,
        server_url: str,
        provider_id: str,
        tenant_id: str,
        authed: bool = True,
        authorization_code: Optional[str] = None,
        for_list: bool = False,
    ):
        # Initialize info
        self.provider_id = provider_id
        self.tenant_id = tenant_id
        self.client_type = "streamable"
        self.server_url = server_url

        # Authentication info
        self.authed = authed
        self.authorization_code = authorization_code
        if authed:
            # Imported lazily to avoid import cycles with the auth package.
            from core.mcp.auth.auth_provider import OAuthClientProvider

            self.provider = OAuthClientProvider(self.provider_id, self.tenant_id, for_list=for_list)
            self.token = self.provider.tokens()

        # Initialize session and client objects
        self._session: Optional[ClientSession] = None
        self._streams_context: Optional[AbstractContextManager[Any]] = None
        self._session_context: Optional[ClientSession] = None
        self.exit_stack = ExitStack()

        # Whether the client has been initialized
        self._initialized = False

    def __enter__(self):
        self._initialize()
        self._initialized = True
        return self

    def __exit__(
        self, exc_type: Optional[type], exc_value: Optional[BaseException], traceback: Optional[TracebackType]
    ):
        self.cleanup()

    def _initialize(
        self,
    ):
        """Initialize the client with fallback to SSE if streamable connection fails"""
        connection_methods: dict[str, Callable[..., AbstractContextManager[Any]]] = {
            "mcp": streamablehttp_client,
            "sse": sse_client,
        }

        # Pick the transport from the URL's last path segment; unknown paths
        # probe SSE first and fall back to streamable HTTP on connection errors.
        parsed_url = urlparse(self.server_url)
        path = parsed_url.path
        method_name = path.rstrip("/").split("/")[-1] if path else ""
        try:
            client_factory = connection_methods[method_name]
            self.connect_server(client_factory, method_name)
        except KeyError:
            try:
                self.connect_server(sse_client, "sse")
            except MCPConnectionError:
                self.connect_server(streamablehttp_client, "mcp")

    def connect_server(
        self, client_factory: Callable[..., AbstractContextManager[Any]], method_name: str, first_try: bool = True
    ):
        """Open the transport, build a ClientSession and run the MCP handshake.

        On an auth failure (with `authed=True`) the OAuth flow is (re)run once
        and the connection retried a single time.

        Raises:
            MCPAuthError: authentication still fails after the retry.
            MCPConnectionError: the transport could not be established.
            ValueError: the OAuth flow itself failed.
        """
        from core.mcp.auth.auth_flow import auth

        try:
            headers = (
                {"Authorization": f"{self.token.token_type.capitalize()} {self.token.access_token}"}
                if self.authed and self.token
                else {}
            )
            self._streams_context = client_factory(url=self.server_url, headers=headers)
            if self._streams_context is None:
                raise MCPConnectionError("Failed to create connection context")

            # Use exit_stack to manage context managers properly
            if method_name == "mcp":
                # streamablehttp_client also yields a get_session_id callback we don't need here.
                read_stream, write_stream, _ = self.exit_stack.enter_context(self._streams_context)
                streams = (read_stream, write_stream)
            else:  # sse_client
                streams = self.exit_stack.enter_context(self._streams_context)

            self._session_context = ClientSession(*streams)
            self._session = self.exit_stack.enter_context(self._session_context)
            session = cast(ClientSession, self._session)
            session.initialize()
            return

        except MCPAuthError:
            if not self.authed:
                raise
            try:
                auth(self.provider, self.server_url, self.authorization_code)
            except Exception as e:
                # Chain the original exception so the OAuth failure stays debuggable.
                raise ValueError(f"Failed to authenticate: {e}") from e
            self.token = self.provider.tokens()
            if first_try:
                return self.connect_server(client_factory, method_name, first_try=False)
            # Bug fix: previously this path fell through and returned None,
            # leaving the client half-initialized while callers assumed success.
            # Re-raise the auth error after the single retry is exhausted.
            raise

    def list_tools(self) -> list[Tool]:
        """Return the tools advertised by the connected MCP server."""
        if not self._initialized or not self._session:
            raise ValueError("Session not initialized.")
        response = self._session.list_tools()
        tools = response.tools
        return tools

    def invoke_tool(self, tool_name: str, tool_args: dict):
        """Call a tool by name with the given argument mapping."""
        if not self._initialized or not self._session:
            raise ValueError("Session not initialized.")
        return self._session.call_tool(tool_name, tool_args)

    def cleanup(self):
        """Clean up resources"""
        try:
            # ExitStack will handle proper cleanup of all managed context managers
            self.exit_stack.close()
            self._session = None
            self._session_context = None
            self._streams_context = None
            self._initialized = False
        except Exception as e:
            logging.exception("Error during cleanup")
            raise ValueError(f"Error during cleanup: {e}") from e

+ 228
- 0
api/core/mcp/server/streamable_http.py ファイルの表示

@@ -0,0 +1,228 @@
import json
import logging
from collections.abc import Mapping
from typing import Any, cast

from configs import dify_config
from controllers.web.passport import generate_session_id
from core.app.app_config.entities import VariableEntity, VariableEntityType
from core.app.entities.app_invoke_entities import InvokeFrom
from core.app.features.rate_limiting.rate_limit import RateLimitGenerator
from core.mcp import types
from core.mcp.types import INTERNAL_ERROR, INVALID_PARAMS, METHOD_NOT_FOUND
from core.mcp.utils import create_mcp_error_response
from core.model_runtime.utils.encoders import jsonable_encoder
from extensions.ext_database import db
from models.model import App, AppMCPServer, AppMode, EndUser
from services.app_generate_service import AppGenerateService

"""
Apply to MCP HTTP streamable server with stateless http
"""
logger = logging.getLogger(__name__)


class MCPServerStreamableHTTPRequestHandler:
    """Serves one MCP JSON-RPC request/notification for a Dify app exposed as a tool.

    Stateless handler for the streamable-HTTP MCP server: each instance handles a
    single parsed client message and renders the reply as SSE-framed bytes.
    """

    def __init__(
        self, app: App, request: types.ClientRequest | types.ClientNotification, user_input_form: list[VariableEntity]
    ):
        self.app = app
        self.request = request
        mcp_server = db.session.query(AppMCPServer).filter(AppMCPServer.app_id == self.app.id).first()
        if not mcp_server:
            raise ValueError("MCP server not found")
        self.mcp_server: AppMCPServer = mcp_server
        self.end_user = self.retrieve_end_user()
        self.user_input_form = user_input_form

    @property
    def request_type(self):
        """Concrete type of the wrapped request/notification (used for dispatch)."""
        return type(self.request.root)

    @property
    def parameter_schema(self):
        """JSON Schema for the tool's input, derived from the app's input form."""
        parameters, required = self._convert_input_form_to_parameters(self.user_input_form)
        if self.app.mode in {AppMode.COMPLETION.value, AppMode.WORKFLOW.value}:
            return {
                "type": "object",
                "properties": parameters,
                "required": required,
            }
        # Chat-style apps additionally take the user's question as `query`.
        return {
            "type": "object",
            "properties": {
                "query": {"type": "string", "description": "User Input/Question content"},
                **parameters,
            },
            "required": ["query", *required],
        }

    @property
    def capabilities(self):
        """Capabilities advertised during `initialize` (tools only, no change events)."""
        return types.ServerCapabilities(
            tools=types.ToolsCapability(listChanged=False),
        )

    def response(self, response: types.Result | str):
        """Yield the result as SSE bytes; a bare string becomes a `ping` event."""
        if isinstance(response, str):
            sse_content = f"event: ping\ndata: {response}\n\n".encode()
            yield sse_content
            return
        json_response = types.JSONRPCResponse(
            jsonrpc="2.0",
            # Echo the incoming request id; default to 1 when the client omitted it.
            id=(self.request.root.model_extra or {}).get("id", 1),
            result=response.model_dump(by_alias=True, mode="json", exclude_none=True),
        )
        json_data = json.dumps(jsonable_encoder(json_response))

        sse_content = f"event: message\ndata: {json_data}\n\n".encode()

        yield sse_content

    def error_response(self, code: int, message: str, data=None):
        """Build a JSON-RPC error reply bound to the incoming request id (default 1)."""
        request_id = (self.request.root.model_extra or {}).get("id", 1) or 1
        return create_mcp_error_response(request_id, code, message, data)

    def handle(self):
        """Dispatch the request to its handler; map failures to JSON-RPC error codes."""
        handle_map = {
            types.InitializeRequest: self.initialize,
            types.ListToolsRequest: self.list_tools,
            types.CallToolRequest: self.invoke_tool,
            types.InitializedNotification: self.handle_notification,
            types.PingRequest: self.handle_ping,
        }
        try:
            if self.request_type in handle_map:
                return self.response(handle_map[self.request_type]())
            else:
                return self.error_response(METHOD_NOT_FOUND, f"Method not found: {self.request_type}")
        except ValueError as e:
            logger.exception("Invalid params")
            return self.error_response(INVALID_PARAMS, str(e))
        except Exception as e:
            logger.exception("Internal server error")
            return self.error_response(INTERNAL_ERROR, f"Internal server error: {str(e)}")

    def handle_notification(self):
        """`notifications/initialized` carries no payload; reply with a ping event."""
        return "ping"

    def handle_ping(self):
        """`ping` returns an empty result."""
        return types.EmptyResult()

    def initialize(self):
        """Handle `initialize`: lazily create the end user, report server capabilities."""
        request = cast(types.InitializeRequest, self.request.root)
        client_info = request.params.clientInfo
        client_name = f"{client_info.name}@{client_info.version}"
        if not self.end_user:
            end_user = EndUser(
                tenant_id=self.app.tenant_id,
                app_id=self.app.id,
                type="mcp",
                name=client_name,
                session_id=generate_session_id(),
                external_user_id=self.mcp_server.id,
            )
            db.session.add(end_user)
            db.session.commit()
        return types.InitializeResult(
            protocolVersion=types.SERVER_LATEST_PROTOCOL_VERSION,
            capabilities=self.capabilities,
            serverInfo=types.Implementation(name="Dify", version=dify_config.project.version),
            instructions=self.mcp_server.description,
        )

    def list_tools(self):
        """Expose the app itself as a single MCP tool."""
        if not self.end_user:
            raise ValueError("User not found")
        return types.ListToolsResult(
            tools=[
                types.Tool(
                    name=self.app.name,
                    description=self.mcp_server.description,
                    inputSchema=self.parameter_schema,
                )
            ],
        )

    def invoke_tool(self):
        """Run the app with the tool-call arguments and return its answer as text."""
        if not self.end_user:
            raise ValueError("User not found")
        request = cast(types.CallToolRequest, self.request.root)
        args = request.params.arguments
        if not args:
            raise ValueError("No arguments provided")
        # Map the flat tool arguments onto the input shape each app mode expects.
        if self.app.mode in {AppMode.WORKFLOW.value}:
            args = {"inputs": args}
        elif self.app.mode in {AppMode.COMPLETION.value}:
            args = {"query": "", "inputs": args}
        else:
            args = {"query": args["query"], "inputs": {k: v for k, v in args.items() if k != "query"}}
        response = AppGenerateService.generate(
            self.app,
            self.end_user,
            args,
            InvokeFrom.SERVICE_API,
            streaming=self.app.mode == AppMode.AGENT_CHAT.value,
        )
        answer = ""
        if isinstance(response, RateLimitGenerator):
            # Agent-chat streams SSE lines; accumulate only `agent_thought` events.
            for item in response.generator:
                data = item
                if isinstance(data, str) and data.startswith("data: "):
                    try:
                        json_str = data[6:].strip()
                        parsed_data = json.loads(json_str)
                        if parsed_data.get("event") == "agent_thought":
                            answer += parsed_data.get("thought", "")
                    except json.JSONDecodeError:
                        continue
        if isinstance(response, Mapping):
            if self.app.mode in {
                AppMode.ADVANCED_CHAT.value,
                AppMode.COMPLETION.value,
                AppMode.CHAT.value,
                AppMode.AGENT_CHAT.value,
            }:
                answer = response["answer"]
            elif self.app.mode in {AppMode.WORKFLOW.value}:
                answer = json.dumps(response["data"]["outputs"], ensure_ascii=False)
            else:
                raise ValueError("Invalid app mode")
        # Not support image yet
        return types.CallToolResult(content=[types.TextContent(text=answer, type="text")])

    def retrieve_end_user(self):
        """Look up the synthetic `mcp` end user previously created for this server."""
        return (
            db.session.query(EndUser)
            .filter(EndUser.external_user_id == self.mcp_server.id, EndUser.type == "mcp")
            .first()
        )

    def _convert_input_form_to_parameters(self, user_input_form: list[VariableEntity]):
        """Translate the app's input-form variables into JSON Schema properties."""
        parameters: dict[str, dict[str, Any]] = {}
        required = []
        for item in user_input_form:
            # NOTE(review): file-type variables are skipped below but still leave an
            # empty `{}` property entry in the schema — confirm this is intended.
            parameters[item.variable] = {}
            if item.type in (
                VariableEntityType.FILE,
                VariableEntityType.FILE_LIST,
                VariableEntityType.EXTERNAL_DATA_TOOL,
            ):
                continue
            if item.required:
                required.append(item.variable)
            # if the workflow republished, the parameters not changed
            # we should not raise error here
            try:
                description = self.mcp_server.parameters_dict[item.variable]
            except KeyError:
                description = ""
            parameters[item.variable]["description"] = description
            if item.type in (VariableEntityType.TEXT_INPUT, VariableEntityType.PARAGRAPH):
                parameters[item.variable]["type"] = "string"
            elif item.type == VariableEntityType.SELECT:
                parameters[item.variable]["type"] = "string"
                parameters[item.variable]["enum"] = item.options
            elif item.type == VariableEntityType.NUMBER:
                # Bug fix: "float" is not a valid JSON Schema type; numeric values
                # are declared with "number" per the JSON Schema specification.
                parameters[item.variable]["type"] = "number"
        return parameters, required

+ 397
- 0
api/core/mcp/session/base_session.py ファイルの表示

@@ -0,0 +1,397 @@
import logging
import queue
from collections.abc import Callable
from concurrent.futures import ThreadPoolExecutor
from contextlib import ExitStack
from datetime import timedelta
from types import TracebackType
from typing import Any, Generic, Self, TypeVar

from httpx import HTTPStatusError
from pydantic import BaseModel

from core.mcp.error import MCPAuthError, MCPConnectionError
from core.mcp.types import (
CancelledNotification,
ClientNotification,
ClientRequest,
ClientResult,
ErrorData,
JSONRPCError,
JSONRPCMessage,
JSONRPCNotification,
JSONRPCRequest,
JSONRPCResponse,
MessageMetadata,
RequestId,
RequestParams,
ServerMessageMetadata,
ServerNotification,
ServerRequest,
ServerResult,
SessionMessage,
)

# Session-level type parameters: request/result/notification types differ between
# the client and server sides of the protocol, so each direction gets its own var.
SendRequestT = TypeVar("SendRequestT", ClientRequest, ServerRequest)
SendResultT = TypeVar("SendResultT", ClientResult, ServerResult)
SendNotificationT = TypeVar("SendNotificationT", ClientNotification, ServerNotification)
ReceiveRequestT = TypeVar("ReceiveRequestT", ClientRequest, ServerRequest)
ReceiveResultT = TypeVar("ReceiveResultT", bound=BaseModel)
ReceiveNotificationT = TypeVar("ReceiveNotificationT", ClientNotification, ServerNotification)
# Polling interval (seconds) used when blocking on queues so loops can
# periodically re-check worker health instead of blocking forever.
DEFAULT_RESPONSE_READ_TIMEOUT = 1.0


class RequestResponder(Generic[ReceiveRequestT, SendResultT]):
    """Handles responding to MCP requests and manages request lifecycle.

    This class MUST be used as a context manager to ensure proper cleanup and
    cancellation handling:

    Example:
        with request_responder as resp:
            resp.respond(result)

    The context manager ensures:
    1. Proper cancellation scope setup and cleanup
    2. Request completion tracking
    3. Cleanup of in-flight requests
    """

    request: ReceiveRequestT
    _session: Any
    _on_complete: Callable[["RequestResponder[ReceiveRequestT, SendResultT]"], Any]

    def __init__(
        self,
        request_id: RequestId,
        request_meta: RequestParams.Meta | None,
        request: ReceiveRequestT,
        session: """BaseSession[
            SendRequestT,
            SendNotificationT,
            SendResultT,
            ReceiveRequestT,
            ReceiveNotificationT
        ]""",
        on_complete: Callable[["RequestResponder[ReceiveRequestT, SendResultT]"], Any],
    ) -> None:
        self.request_id = request_id
        self.request_meta = request_meta
        self.request = request
        self._session = session
        # Set once a response (or cancellation) has been sent; __exit__ uses it
        # to decide whether to fire the completion callback.
        self._completed = False
        # Typically removes this responder from the session's in-flight table.
        self._on_complete = on_complete
        self._entered = False  # Track if we're in a context manager

    def __enter__(self) -> "RequestResponder[ReceiveRequestT, SendResultT]":
        """Enter the context manager, enabling request cancellation tracking."""
        self._entered = True
        return self

    def __exit__(
        self,
        exc_type: type[BaseException] | None,
        exc_val: BaseException | None,
        exc_tb: TracebackType | None,
    ) -> None:
        """Exit the context manager, performing cleanup and notifying completion."""
        try:
            # Only a responded/cancelled request triggers the completion callback;
            # an unanswered one remains in flight for the session to handle.
            if self._completed:
                self._on_complete(self)
        finally:
            self._entered = False

    def respond(self, response: SendResultT | ErrorData) -> None:
        """Send a response for this request.

        Must be called within a context manager block.
        Raises:
            RuntimeError: If not used within a context manager
            AssertionError: If request was already responded to
        """
        if not self._entered:
            raise RuntimeError("RequestResponder must be used as a context manager")
        assert not self._completed, "Request already responded to"

        self._completed = True

        self._session._send_response(request_id=self.request_id, response=response)

    def cancel(self) -> None:
        """Cancel this request and mark it as completed."""
        if not self._entered:
            raise RuntimeError("RequestResponder must be used as a context manager")

        self._completed = True  # Mark as completed so it's removed from in_flight
        # Send an error response to indicate cancellation
        self._session._send_response(
            request_id=self.request_id,
            response=ErrorData(code=0, message="Request cancelled", data=None),
        )


class BaseSession(
    Generic[
        SendRequestT,
        SendNotificationT,
        SendResultT,
        ReceiveRequestT,
        ReceiveNotificationT,
    ],
):
    """
    Implements an MCP "session" on top of read/write streams, including features
    like request/response linking, notifications, and progress.

    This class is a context manager that automatically starts processing
    messages when entered.
    """

    _response_streams: dict[RequestId, queue.Queue[JSONRPCResponse | JSONRPCError]]
    _request_id: int
    _in_flight: dict[RequestId, RequestResponder[ReceiveRequestT, SendResultT]]
    _receive_request_type: type[ReceiveRequestT]
    _receive_notification_type: type[ReceiveNotificationT]

    def __init__(
        self,
        read_stream: queue.Queue,
        write_stream: queue.Queue,
        receive_request_type: type[ReceiveRequestT],
        receive_notification_type: type[ReceiveNotificationT],
        # If none, reading will never time out
        read_timeout_seconds: timedelta | None = None,
    ) -> None:
        self._read_stream = read_stream
        self._write_stream = write_stream
        self._response_streams = {}
        self._request_id = 0
        self._receive_request_type = receive_request_type
        self._receive_notification_type = receive_notification_type
        self._session_read_timeout_seconds = read_timeout_seconds
        self._in_flight = {}
        self._exit_stack = ExitStack()

    def __enter__(self) -> Self:
        # Spawn the background receive loop; it runs until a None sentinel
        # arrives on the read stream or an unhandled error escapes it.
        self._executor = ThreadPoolExecutor()
        self._receiver_future = self._executor.submit(self._receive_loop)
        return self

    def check_receiver_status(self) -> None:
        """If the receiver thread has died, re-raise its exception in the caller."""
        if self._receiver_future.done():
            self._receiver_future.result()

    def __exit__(
        self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None
    ) -> None:
        self._exit_stack.close()
        # Sentinels wake any thread blocked on either queue so it can exit.
        self._read_stream.put(None)
        self._write_stream.put(None)
        # Bug fix: the executor created in __enter__ was never shut down, leaking
        # its worker thread until garbage collection. wait=False avoids blocking
        # here; the receiver exits on the sentinel within its poll interval.
        self._executor.shutdown(wait=False)

    def send_request(
        self,
        request: SendRequestT,
        result_type: type[ReceiveResultT],
        request_read_timeout_seconds: timedelta | None = None,
        metadata: MessageMetadata = None,
    ) -> ReceiveResultT:
        """
        Sends a request and wait for a response. Raises an McpError if the
        response contains an error. If a request read timeout is provided, it
        will take precedence over the session read timeout.

        Do not use this method to emit notifications! Use send_notification()
        instead.
        """
        self.check_receiver_status()

        request_id = self._request_id
        self._request_id = request_id + 1

        # Dedicated queue onto which the receive loop routes this request's reply.
        response_queue: queue.Queue[JSONRPCResponse | JSONRPCError] = queue.Queue()
        self._response_streams[request_id] = response_queue

        try:
            jsonrpc_request = JSONRPCRequest(
                jsonrpc="2.0",
                id=request_id,
                **request.model_dump(by_alias=True, mode="json", exclude_none=True),
            )

            self._write_stream.put(SessionMessage(message=JSONRPCMessage(jsonrpc_request), metadata=metadata))
            # Request-level timeout takes precedence over the session-level one;
            # otherwise poll at the default interval so a dead receiver surfaces.
            timeout = DEFAULT_RESPONSE_READ_TIMEOUT
            if request_read_timeout_seconds is not None:
                timeout = float(request_read_timeout_seconds.total_seconds())
            elif self._session_read_timeout_seconds is not None:
                timeout = float(self._session_read_timeout_seconds.total_seconds())
            while True:
                try:
                    response_or_error = response_queue.get(timeout=timeout)
                    break
                except queue.Empty:
                    self.check_receiver_status()
                    continue

            if response_or_error is None:
                raise MCPConnectionError(
                    ErrorData(
                        code=500,
                        message="No response received",
                    )
                )
            elif isinstance(response_or_error, JSONRPCError):
                # A 401 maps to the dedicated auth error so callers can re-authenticate.
                if response_or_error.error.code == 401:
                    raise MCPAuthError(
                        ErrorData(code=response_or_error.error.code, message=response_or_error.error.message)
                    )
                else:
                    raise MCPConnectionError(
                        ErrorData(code=response_or_error.error.code, message=response_or_error.error.message)
                    )
            else:
                return result_type.model_validate(response_or_error.result)

        finally:
            self._response_streams.pop(request_id, None)

    def send_notification(
        self,
        notification: SendNotificationT,
        related_request_id: RequestId | None = None,
    ) -> None:
        """
        Emits a notification, which is a one-way message that does not expect
        a response.
        """
        self.check_receiver_status()

        # Some transport implementations may need to set the related_request_id
        # to attribute to the notifications to the request that triggered them.
        jsonrpc_notification = JSONRPCNotification(
            jsonrpc="2.0",
            **notification.model_dump(by_alias=True, mode="json", exclude_none=True),
        )
        session_message = SessionMessage(
            message=JSONRPCMessage(jsonrpc_notification),
            metadata=ServerMessageMetadata(related_request_id=related_request_id) if related_request_id else None,
        )
        self._write_stream.put(session_message)

    def _send_response(self, request_id: RequestId, response: SendResultT | ErrorData) -> None:
        """Write either a JSON-RPC error or a success result for `request_id`."""
        if isinstance(response, ErrorData):
            jsonrpc_error = JSONRPCError(jsonrpc="2.0", id=request_id, error=response)
            session_message = SessionMessage(message=JSONRPCMessage(jsonrpc_error))
            self._write_stream.put(session_message)
        else:
            jsonrpc_response = JSONRPCResponse(
                jsonrpc="2.0",
                id=request_id,
                result=response.model_dump(by_alias=True, mode="json", exclude_none=True),
            )
            session_message = SessionMessage(message=JSONRPCMessage(jsonrpc_response))
            self._write_stream.put(session_message)

    def _receive_loop(self) -> None:
        """
        Main message processing loop.
        In a real synchronous implementation, this would likely run in a separate thread.
        """
        while True:
            try:
                # Attempt to receive a message (this would be blocking in a synchronous context)
                message = self._read_stream.get(timeout=DEFAULT_RESPONSE_READ_TIMEOUT)
                if message is None:
                    break
                if isinstance(message, HTTPStatusError):
                    # NOTE(review): the transport surfaces HTTP errors without a
                    # request id, so this assumes they belong to the most recently
                    # issued request (_request_id - 1) — confirm for pipelined use.
                    response_queue = self._response_streams.get(self._request_id - 1)
                    if response_queue is not None:
                        response_queue.put(
                            JSONRPCError(
                                jsonrpc="2.0",
                                id=self._request_id - 1,
                                error=ErrorData(code=message.response.status_code, message=message.args[0]),
                            )
                        )
                    else:
                        self._handle_incoming(RuntimeError(f"Received response with an unknown request ID: {message}"))
                elif isinstance(message, Exception):
                    self._handle_incoming(message)
                elif isinstance(message.message.root, JSONRPCRequest):
                    validated_request = self._receive_request_type.model_validate(
                        message.message.root.model_dump(by_alias=True, mode="json", exclude_none=True)
                    )

                    responder = RequestResponder(
                        request_id=message.message.root.id,
                        request_meta=validated_request.root.params.meta if validated_request.root.params else None,
                        request=validated_request,
                        session=self,
                        on_complete=lambda r: self._in_flight.pop(r.request_id, None),
                    )

                    self._in_flight[responder.request_id] = responder
                    self._received_request(responder)

                    # If the subclass hook answered the request, don't forward it.
                    if not responder._completed:
                        self._handle_incoming(responder)

                elif isinstance(message.message.root, JSONRPCNotification):
                    try:
                        notification = self._receive_notification_type.model_validate(
                            message.message.root.model_dump(by_alias=True, mode="json", exclude_none=True)
                        )
                        # Handle cancellation notifications
                        if isinstance(notification.root, CancelledNotification):
                            cancelled_id = notification.root.params.requestId
                            if cancelled_id in self._in_flight:
                                self._in_flight[cancelled_id].cancel()
                        else:
                            self._received_notification(notification)
                            self._handle_incoming(notification)
                    except Exception as e:
                        # For other validation errors, log and continue
                        logging.warning(f"Failed to validate notification: {e}. Message was: {message.message.root}")
                else:  # Response or error
                    response_queue = self._response_streams.get(message.message.root.id)
                    if response_queue is not None:
                        response_queue.put(message.message.root)
                    else:
                        self._handle_incoming(RuntimeError(f"Server Error: {message}"))
            except queue.Empty:
                # Periodic wake-up so the loop can notice the shutdown sentinel.
                continue
            except Exception as e:
                logging.exception("Error in message processing loop")
                raise

    def _received_request(self, responder: RequestResponder[ReceiveRequestT, SendResultT]) -> None:
        """
        Can be overridden by subclasses to handle a request without needing to
        listen on the message stream.

        If the request is responded to within this method, it will not be
        forwarded on to the message stream.
        """
        pass

    def _received_notification(self, notification: ReceiveNotificationT) -> None:
        """
        Can be overridden by subclasses to handle a notification without needing
        to listen on the message stream.
        """
        pass

    def send_progress_notification(
        self, progress_token: str | int, progress: float, total: float | None = None
    ) -> None:
        """
        Sends a progress notification for a request that is currently being
        processed.
        """
        pass

    def _handle_incoming(
        self,
        req: RequestResponder[ReceiveRequestT, SendResultT] | ReceiveNotificationT | Exception,
    ) -> None:
        """A generic handler for incoming messages. Overwritten by subclasses."""
        pass

+ 365
- 0
api/core/mcp/session/client_session.py ファイルの表示

@@ -0,0 +1,365 @@
from datetime import timedelta
from typing import Any, Protocol

from pydantic import AnyUrl, TypeAdapter

from configs import dify_config
from core.mcp import types
from core.mcp.entities import SUPPORTED_PROTOCOL_VERSIONS, RequestContext
from core.mcp.session.base_session import BaseSession, RequestResponder

# Client identification reported to servers during the `initialize` handshake.
DEFAULT_CLIENT_INFO = types.Implementation(name="Dify", version=dify_config.project.version)


class SamplingFnT(Protocol):
    """Callback signature for handling server-initiated `sampling/createMessage` requests."""

    def __call__(
        self,
        context: RequestContext["ClientSession", Any],
        params: types.CreateMessageRequestParams,
    ) -> types.CreateMessageResult | types.ErrorData: ...


class ListRootsFnT(Protocol):
    """Callback signature for handling server-initiated `roots/list` requests."""

    def __call__(self, context: RequestContext["ClientSession", Any]) -> types.ListRootsResult | types.ErrorData: ...


class LoggingFnT(Protocol):
    """Callback signature for consuming `notifications/message` log events from the server."""

    def __call__(
        self,
        params: types.LoggingMessageNotificationParams,
    ) -> None: ...


class MessageHandlerFnT(Protocol):
    """Callback signature for any incoming message not handled elsewhere:
    unanswered server requests, server notifications, or transport exceptions."""

    def __call__(
        self,
        message: RequestResponder[types.ServerRequest, types.ClientResult] | types.ServerNotification | Exception,
    ) -> None: ...


def _default_message_handler(
    message: RequestResponder[types.ServerRequest, types.ClientResult] | types.ServerNotification | Exception,
) -> None:
    """Default handler: surface transport exceptions as ValueError, ignore everything else."""
    if isinstance(message, Exception):
        raise ValueError(str(message))


def _default_sampling_callback(
    context: RequestContext["ClientSession", Any],
    params: types.CreateMessageRequestParams,
) -> types.CreateMessageResult | types.ErrorData:
    """Default sampling handler: this client does not support sampling, so refuse."""
    refusal = types.ErrorData(code=types.INVALID_REQUEST, message="Sampling not supported")
    return refusal


def _default_list_roots_callback(
    context: RequestContext["ClientSession", Any],
) -> types.ListRootsResult | types.ErrorData:
    """Default roots handler: this client does not expose roots, so refuse."""
    refusal = types.ErrorData(code=types.INVALID_REQUEST, message="List roots not supported")
    return refusal


def _default_logging_callback(
    params: types.LoggingMessageNotificationParams,
) -> None:
    """Default log handler: server log notifications are silently dropped."""
    pass


# Validates raw callback results into either a typed client result or an ErrorData.
ClientResponse: TypeAdapter[types.ClientResult | types.ErrorData] = TypeAdapter(types.ClientResult | types.ErrorData)


class ClientSession(
BaseSession[
types.ClientRequest,
types.ClientNotification,
types.ClientResult,
types.ServerRequest,
types.ServerNotification,
]
):
def __init__(
    self,
    read_stream,
    write_stream,
    read_timeout_seconds: timedelta | None = None,
    sampling_callback: SamplingFnT | None = None,
    list_roots_callback: ListRootsFnT | None = None,
    logging_callback: LoggingFnT | None = None,
    message_handler: MessageHandlerFnT | None = None,
    client_info: types.Implementation | None = None,
) -> None:
    """Create a client session over the given streams; omitted callbacks fall back to safe defaults."""
    super().__init__(
        read_stream,
        write_stream,
        types.ServerRequest,
        types.ServerNotification,
        read_timeout_seconds=read_timeout_seconds,
    )
    # Substitute library defaults for anything the caller left unset.
    self._message_handler = message_handler or _default_message_handler
    self._logging_callback = logging_callback or _default_logging_callback
    self._list_roots_callback = list_roots_callback or _default_list_roots_callback
    self._sampling_callback = sampling_callback or _default_sampling_callback
    self._client_info = client_info or DEFAULT_CLIENT_INFO

def initialize(self) -> types.InitializeResult:
    """Run the MCP initialization handshake.

    Sends `initialize` with our capabilities, verifies the server's protocol
    version is supported, then emits `notifications/initialized`.

    Raises:
        RuntimeError: if the server answers with an unsupported protocol version.
    """
    capabilities = types.ClientCapabilities(
        sampling=types.SamplingCapability(),
        experimental=None,
        # TODO: Should this be based on whether we _will_ send notifications,
        # or only whether they're supported?
        roots=types.RootsCapability(listChanged=True),
    )
    params = types.InitializeRequestParams(
        protocolVersion=types.LATEST_PROTOCOL_VERSION,
        capabilities=capabilities,
        clientInfo=self._client_info,
    )
    result = self.send_request(
        types.ClientRequest(types.InitializeRequest(method="initialize", params=params)),
        types.InitializeResult,
    )

    if result.protocolVersion not in SUPPORTED_PROTOCOL_VERSIONS:
        raise RuntimeError(f"Unsupported protocol version from the server: {result.protocolVersion}")

    self.send_notification(
        types.ClientNotification(types.InitializedNotification(method="notifications/initialized"))
    )

    return result

def send_ping(self) -> types.EmptyResult:
    """Issue a `ping` request and return the (empty) acknowledgement."""
    ping_request = types.ClientRequest(types.PingRequest(method="ping"))
    return self.send_request(ping_request, types.EmptyResult)

def send_progress_notification(
    self, progress_token: str | int, progress: float, total: float | None = None
) -> None:
    """Emit a `notifications/progress` update for an in-flight request."""
    params = types.ProgressNotificationParams(
        progressToken=progress_token,
        progress=progress,
        total=total,
    )
    notification = types.ProgressNotification(method="notifications/progress", params=params)
    self.send_notification(types.ClientNotification(notification))

def set_logging_level(self, level: types.LoggingLevel) -> types.EmptyResult:
    """Ask the server to emit log notifications at `level` and above (`logging/setLevel`)."""
    request = types.SetLevelRequest(method="logging/setLevel", params=types.SetLevelRequestParams(level=level))
    return self.send_request(types.ClientRequest(request), types.EmptyResult)

def list_resources(self) -> types.ListResourcesResult:
    """Fetch the server's resource catalogue via `resources/list`."""
    request = types.ListResourcesRequest(method="resources/list")
    return self.send_request(types.ClientRequest(request), types.ListResourcesResult)

def list_resource_templates(self) -> types.ListResourceTemplatesResult:
    """Fetch parameterized resource templates via `resources/templates/list`."""
    request = types.ListResourceTemplatesRequest(method="resources/templates/list")
    return self.send_request(types.ClientRequest(request), types.ListResourceTemplatesResult)

def read_resource(self, uri: AnyUrl) -> types.ReadResourceResult:
    """Read the contents of the resource at `uri` via `resources/read`."""
    request = types.ReadResourceRequest(
        method="resources/read",
        params=types.ReadResourceRequestParams(uri=uri),
    )
    return self.send_request(types.ClientRequest(request), types.ReadResourceResult)

def subscribe_resource(self, uri: AnyUrl) -> types.EmptyResult:
    """Subscribe to change notifications for `uri` via `resources/subscribe`."""
    request = types.SubscribeRequest(
        method="resources/subscribe",
        params=types.SubscribeRequestParams(uri=uri),
    )
    return self.send_request(types.ClientRequest(request), types.EmptyResult)

def unsubscribe_resource(self, uri: AnyUrl) -> types.EmptyResult:
    """Cancel a previous subscription for `uri` via `resources/unsubscribe`."""
    request = types.UnsubscribeRequest(
        method="resources/unsubscribe",
        params=types.UnsubscribeRequestParams(uri=uri),
    )
    return self.send_request(types.ClientRequest(request), types.EmptyResult)

def call_tool(
    self,
    name: str,
    arguments: dict[str, Any] | None = None,
    read_timeout_seconds: timedelta | None = None,
) -> types.CallToolResult:
    """Invoke the server tool `name` via `tools/call`, optionally overriding the read timeout."""
    request = types.CallToolRequest(
        method="tools/call",
        params=types.CallToolRequestParams(name=name, arguments=arguments),
    )
    return self.send_request(
        types.ClientRequest(request),
        types.CallToolResult,
        request_read_timeout_seconds=read_timeout_seconds,
    )

def list_prompts(self) -> types.ListPromptsResult:
    """Fetch the server's prompt listing (prompts/list)."""
    request = types.ClientRequest(types.ListPromptsRequest(method="prompts/list"))
    return self.send_request(request, types.ListPromptsResult)

def get_prompt(self, name: str, arguments: dict[str, str] | None = None) -> types.GetPromptResult:
    """Fetch the prompt `name`, rendered with optional `arguments` (prompts/get)."""
    get = types.GetPromptRequest(
        method="prompts/get",
        params=types.GetPromptRequestParams(name=name, arguments=arguments),
    )
    return self.send_request(types.ClientRequest(get), types.GetPromptResult)

def complete(
    self,
    ref: types.ResourceReference | types.PromptReference,
    argument: dict[str, str],
) -> types.CompleteResult:
    """Request completion suggestions for `argument` against the prompt or
    resource referenced by `ref` (completion/complete)."""
    params = types.CompleteRequestParams(
        ref=ref,
        # `argument` is unpacked into the CompletionArgument model's fields.
        argument=types.CompletionArgument(**argument),
    )
    request = types.ClientRequest(types.CompleteRequest(method="completion/complete", params=params))
    return self.send_request(request, types.CompleteResult)

def list_tools(self) -> types.ListToolsResult:
    """Fetch the server's tool listing (tools/list)."""
    request = types.ClientRequest(types.ListToolsRequest(method="tools/list"))
    return self.send_request(request, types.ListToolsResult)

def send_roots_list_changed(self) -> None:
    """Tell the server the client's roots list changed (notifications/roots/list_changed)."""
    changed = types.RootsListChangedNotification(method="notifications/roots/list_changed")
    self.send_notification(types.ClientNotification(changed))

def _received_request(self, responder: RequestResponder[types.ServerRequest, types.ClientResult]) -> None:
    """Dispatch a server-initiated request to the matching client callback and respond."""
    ctx = RequestContext[ClientSession, Any](
        request_id=responder.request_id,
        meta=responder.request_meta,
        session=self,
        lifespan_context=None,
    )

    root = responder.request.root
    if isinstance(root, types.CreateMessageRequest):
        with responder:
            sampling_result = self._sampling_callback(ctx, root.params)
            responder.respond(ClientResponse.validate_python(sampling_result))
    elif isinstance(root, types.ListRootsRequest):
        with responder:
            roots_result = self._list_roots_callback(ctx)
            responder.respond(ClientResponse.validate_python(roots_result))
    elif isinstance(root, types.PingRequest):
        # Pings are answered immediately with an empty result; no callback runs.
        with responder:
            responder.respond(types.ClientResult(root=types.EmptyResult()))

def _handle_incoming(
    self,
    req: RequestResponder[types.ServerRequest, types.ClientResult] | types.ServerNotification | Exception,
) -> None:
    """Handle incoming messages by forwarding to the message handler."""
    # Requests, notifications, and transport-level exceptions are all funneled
    # through the single message-handler callback configured on this session.
    self._message_handler(req)

def _received_notification(self, notification: types.ServerNotification) -> None:
    """Handle notifications pushed by the server."""
    root = notification.root
    # Only logging notifications get dedicated handling; all other
    # notification types are deliberately ignored here.
    if isinstance(root, types.LoggingMessageNotification):
        self._logging_callback(root.params)

+ 1217
- 0
api/core/mcp/types.py
ファイル差分が大きすぎるため省略します
ファイルの表示


+ 114
- 0
api/core/mcp/utils.py ファイルの表示

@@ -0,0 +1,114 @@
import json

import httpx

from configs import dify_config
from core.mcp.types import ErrorData, JSONRPCError
from core.model_runtime.utils.encoders import jsonable_encoder

HTTP_REQUEST_NODE_SSL_VERIFY = dify_config.HTTP_REQUEST_NODE_SSL_VERIFY

STATUS_FORCELIST = [429, 500, 502, 503, 504]


def create_ssrf_proxy_mcp_http_client(
    headers: dict[str, str] | None = None,
    timeout: httpx.Timeout | None = None,
) -> httpx.Client:
    """Create an HTTPX client with SSRF proxy configuration for MCP connections.

    Args:
        headers: Optional headers to include in the client
        timeout: Optional timeout configuration

    Returns:
        Configured httpx.Client with proxy settings
    """
    # Options shared by all three configurations below.
    common_options: dict = {
        "verify": HTTP_REQUEST_NODE_SSL_VERIFY,
        "headers": headers or {},
        "timeout": timeout,
        "follow_redirects": True,
    }

    # A single catch-all proxy takes precedence over per-scheme proxies.
    if dify_config.SSRF_PROXY_ALL_URL:
        return httpx.Client(proxy=dify_config.SSRF_PROXY_ALL_URL, **common_options)

    # Per-scheme proxies are only used when BOTH http and https are configured.
    if dify_config.SSRF_PROXY_HTTP_URL and dify_config.SSRF_PROXY_HTTPS_URL:
        proxy_mounts = {
            "http://": httpx.HTTPTransport(proxy=dify_config.SSRF_PROXY_HTTP_URL, verify=HTTP_REQUEST_NODE_SSL_VERIFY),
            "https://": httpx.HTTPTransport(
                proxy=dify_config.SSRF_PROXY_HTTPS_URL, verify=HTTP_REQUEST_NODE_SSL_VERIFY
            ),
        }
        return httpx.Client(mounts=proxy_mounts, **common_options)

    # No proxy configured: direct client.
    return httpx.Client(**common_options)


def ssrf_proxy_sse_connect(url, **kwargs):
    """Connect to an SSE endpoint with SSRF proxy protection.

    This function creates an SSE connection using the configured proxy settings
    to prevent SSRF attacks when connecting to external endpoints.

    Args:
        url: The SSE endpoint URL
        **kwargs: Additional arguments passed to the SSE connection; the keys
            `client`, `timeout`, `headers` and `method` are consumed here.

    Returns:
        A context manager yielding an EventSource for SSE streaming. When the
        httpx client was created internally, it is closed when the returned
        context exits (fix: previously it was only closed when connect_sse()
        itself raised, leaking the client on the success path).
    """
    from contextlib import contextmanager

    from httpx_sse import connect_sse

    # Extract client if provided, otherwise create one with SSRF proxy settings.
    client = kwargs.pop("client", None)
    client_provided = client is not None
    if client is None:
        timeout = kwargs.pop(
            "timeout",
            httpx.Timeout(
                timeout=dify_config.SSRF_DEFAULT_TIME_OUT,
                connect=dify_config.SSRF_DEFAULT_CONNECT_TIME_OUT,
                read=dify_config.SSRF_DEFAULT_READ_TIME_OUT,
                write=dify_config.SSRF_DEFAULT_WRITE_TIME_OUT,
            ),
        )
        headers = kwargs.pop("headers", {})
        client = create_ssrf_proxy_mcp_http_client(headers=headers, timeout=timeout)

    # Extract method if provided, default to GET.
    method = kwargs.pop("method", "GET")

    if client_provided:
        # The caller owns the client's lifecycle; just hand back the SSE context.
        return connect_sse(client, method, url, **kwargs)

    @contextmanager
    def _connect_and_close():
        # Ensure the internally created client is closed both on error during
        # connection setup and after the SSE stream context is exited normally.
        try:
            with connect_sse(client, method, url, **kwargs) as event_source:
                yield event_source
        finally:
            client.close()

    return _connect_and_close()


def create_mcp_error_response(request_id: int | str | None, code: int, message: str, data=None):
    """Create an MCP error response as a generator of SSE-encoded bytes.

    Args:
        request_id: Id of the request being answered; `1` is substituted only
            when no id was supplied at all (None).
        code: JSON-RPC error code.
        message: Human-readable error message.
        data: Optional extra error payload.

    Yields:
        bytes: a single `event: message` SSE frame carrying the JSONRPCError.
    """
    error_data = ErrorData(code=code, message=message, data=data)
    json_response = JSONRPCError(
        jsonrpc="2.0",
        # `request_id or 1` would also replace a legitimate falsy id such as 0;
        # only fall back to 1 when the id is genuinely absent.
        id=request_id if request_id is not None else 1,
        error=error_data,
    )
    json_data = json.dumps(jsonable_encoder(json_response))
    sse_content = f"event: message\ndata: {json_data}\n\n".encode()
    yield sse_content

+ 2
- 0
api/core/model_runtime/entities/provider_entities.py ファイルの表示

@@ -123,6 +123,8 @@ class ProviderEntity(BaseModel):
description: Optional[I18nObject] = None
icon_small: Optional[I18nObject] = None
icon_large: Optional[I18nObject] = None
icon_small_dark: Optional[I18nObject] = None
icon_large_dark: Optional[I18nObject] = None
background: Optional[str] = None
help: Optional[ProviderHelpEntity] = None
supported_model_types: Sequence[ModelType]

+ 41
- 0
api/core/plugin/entities/parameters.py ファイルの表示

@@ -43,6 +43,19 @@ class PluginParameterType(enum.StrEnum):
# deprecated, should not use.
SYSTEM_FILES = CommonParameterType.SYSTEM_FILES.value

# MCP object and array type parameters
ARRAY = CommonParameterType.ARRAY.value
OBJECT = CommonParameterType.OBJECT.value


class MCPServerParameterType(enum.StrEnum):
    """
    Complex parameter types (array and object) supported by MCP servers.
    """

    ARRAY = "array"
    OBJECT = "object"


class PluginParameterAutoGenerate(BaseModel):
class Type(enum.StrEnum):
@@ -138,6 +151,34 @@ def cast_parameter_value(typ: enum.StrEnum, value: Any, /):
if value and not isinstance(value, list):
raise ValueError("The tools selector must be a list.")
return value
case PluginParameterType.ARRAY:
if not isinstance(value, list):
# Try to parse JSON string for arrays
if isinstance(value, str):
try:
import json

parsed_value = json.loads(value)
if isinstance(parsed_value, list):
return parsed_value
except (json.JSONDecodeError, ValueError):
pass
return [value]
return value
case PluginParameterType.OBJECT:
if not isinstance(value, dict):
# Try to parse JSON string for objects
if isinstance(value, str):
try:
import json

parsed_value = json.loads(value)
if isinstance(parsed_value, dict):
return parsed_value
except (json.JSONDecodeError, ValueError):
pass
return {}
return value
case _:
return str(value)
except ValueError:

+ 2
- 0
api/core/plugin/entities/plugin.py ファイルの表示

@@ -75,12 +75,14 @@ class PluginDeclaration(BaseModel):

class Meta(BaseModel):
minimum_dify_version: Optional[str] = Field(default=None, pattern=r"^\d{1,4}(\.\d{1,4}){1,3}(-\w{1,16})?$")
version: Optional[str] = Field(default=None)

version: str = Field(..., pattern=r"^\d{1,4}(\.\d{1,4}){1,3}(-\w{1,16})?$")
author: Optional[str] = Field(..., pattern=r"^[a-zA-Z0-9_-]{1,64}$")
name: str = Field(..., pattern=r"^[a-z0-9_-]{1,128}$")
description: I18nObject
icon: str
icon_dark: Optional[str] = Field(default=None)
label: I18nObject
category: PluginCategory
created_at: datetime.datetime

+ 1
- 0
api/core/plugin/entities/plugin_daemon.py ファイルの表示

@@ -62,6 +62,7 @@ class PluginAgentProviderEntity(BaseModel):
plugin_unique_identifier: str
plugin_id: str
declaration: AgentProviderEntityWithPlugin
meta: PluginDeclaration.Meta


class PluginBasicBooleanResponse(BaseModel):

+ 1
- 1
api/core/plugin/entities/request.py ファイルの表示

@@ -32,7 +32,7 @@ class RequestInvokeTool(BaseModel):
Request to invoke a tool
"""

tool_type: Literal["builtin", "workflow", "api"]
tool_type: Literal["builtin", "workflow", "api", "mcp"]
provider: str
tool: str
tool_parameters: dict

+ 2
- 0
api/core/rag/datasource/vdb/qdrant/qdrant_vector.py ファイルの表示

@@ -47,6 +47,7 @@ class QdrantConfig(BaseModel):
grpc_port: int = 6334
prefer_grpc: bool = False
replication_factor: int = 1
write_consistency_factor: int = 1

def to_qdrant_params(self):
if self.endpoint and self.endpoint.startswith("path:"):
@@ -127,6 +128,7 @@ class QdrantVector(BaseVector):
hnsw_config=hnsw_config,
timeout=int(self._client_config.timeout),
replication_factor=self._client_config.replication_factor,
write_consistency_factor=self._client_config.write_consistency_factor,
)

# create group_id payload index

+ 18
- 2
api/core/rag/datasource/vdb/vector_factory.py ファイルの表示

@@ -1,3 +1,5 @@
import logging
import time
from abc import ABC, abstractmethod
from typing import Any, Optional

@@ -13,6 +15,8 @@ from extensions.ext_database import db
from extensions.ext_redis import redis_client
from models.dataset import Dataset, Whitelist

logger = logging.getLogger(__name__)


class AbstractVectorFactory(ABC):
@abstractmethod
@@ -173,8 +177,20 @@ class Vector:

def create(self, texts: Optional[list] = None, **kwargs):
    """Embed `texts` in batches of 1000 and write each batch to the vector store.

    No-op when `texts` is empty or None. Timing for each batch and for the
    whole run is logged at INFO level.
    """
    if texts:
        start = time.time()
        logger.info(f"start embedding {len(texts)} texts {start}")
        batch_size = 1000
        # Fix: batch count is ceil(len(texts) / batch_size); the previous
        # expression `len(texts) + batch_size - 1` omitted the integer
        # division, so the "i/total" log line reported a nonsense total.
        total_batches = (len(texts) + batch_size - 1) // batch_size
        for i in range(0, len(texts), batch_size):
            batch = texts[i : i + batch_size]
            batch_start = time.time()
            logger.info(f"Processing batch {i // batch_size + 1}/{total_batches} ({len(batch)} texts)")
            batch_embeddings = self._embeddings.embed_documents([document.page_content for document in batch])
            logger.info(
                f"Embedding batch {i // batch_size + 1}/{total_batches} took {time.time() - batch_start:.3f}s"
            )
            self._vector_processor.create(texts=batch, embeddings=batch_embeddings, **kwargs)
        logger.info(f"Embedding {len(texts)} texts took {time.time() - start:.3f}s")

def add_texts(self, documents: list[Document], **kwargs):
if kwargs.get("duplicate_check", False):

+ 25
- 3
api/core/tools/custom_tool/provider.py ファイルの表示

@@ -39,19 +39,22 @@ class ApiToolProviderController(ToolProviderController):
type=ProviderConfig.Type.SELECT,
options=[
ProviderConfig.Option(value="none", label=I18nObject(en_US="None", zh_Hans="无")),
ProviderConfig.Option(value="api_key", label=I18nObject(en_US="api_key", zh_Hans="api_key")),
ProviderConfig.Option(value="api_key_header", label=I18nObject(en_US="Header", zh_Hans="请求头")),
ProviderConfig.Option(
value="api_key_query", label=I18nObject(en_US="Query Param", zh_Hans="查询参数")
),
],
default="none",
help=I18nObject(en_US="The auth type of the api provider", zh_Hans="api provider 的认证类型"),
)
]
if auth_type == ApiProviderAuthType.API_KEY:
if auth_type == ApiProviderAuthType.API_KEY_HEADER:
credentials_schema = [
*credentials_schema,
ProviderConfig(
name="api_key_header",
required=False,
default="api_key",
default="Authorization",
type=ProviderConfig.Type.TEXT_INPUT,
help=I18nObject(en_US="The header name of the api key", zh_Hans="携带 api key 的 header 名称"),
),
@@ -74,6 +77,25 @@ class ApiToolProviderController(ToolProviderController):
],
),
]
elif auth_type == ApiProviderAuthType.API_KEY_QUERY:
credentials_schema = [
*credentials_schema,
ProviderConfig(
name="api_key_query_param",
required=False,
default="key",
type=ProviderConfig.Type.TEXT_INPUT,
help=I18nObject(
en_US="The query parameter name of the api key", zh_Hans="携带 api key 的查询参数名称"
),
),
ProviderConfig(
name="api_key_value",
required=True,
type=ProviderConfig.Type.SECRET_INPUT,
help=I18nObject(en_US="The api key", zh_Hans="api key 的值"),
),
]
elif auth_type == ApiProviderAuthType.NONE:
pass


+ 18
- 3
api/core/tools/custom_tool/tool.py ファイルの表示

@@ -78,8 +78,8 @@ class ApiTool(Tool):
if "auth_type" not in credentials:
raise ToolProviderCredentialValidationError("Missing auth_type")

if credentials["auth_type"] == "api_key":
api_key_header = "api_key"
if credentials["auth_type"] in ("api_key_header", "api_key"): # backward compatibility:
api_key_header = "Authorization"

if "api_key_header" in credentials:
api_key_header = credentials["api_key_header"]
@@ -100,6 +100,11 @@ class ApiTool(Tool):

headers[api_key_header] = credentials["api_key_value"]

elif credentials["auth_type"] == "api_key_query":
# For query parameter authentication, we don't add anything to headers
# The query parameter will be added in do_http_request method
pass

needed_parameters = [parameter for parameter in (self.api_bundle.parameters or []) if parameter.required]
for parameter in needed_parameters:
if parameter.required and parameter.name not in parameters:
@@ -154,6 +159,15 @@ class ApiTool(Tool):
cookies = {}
files = []

# Add API key to query parameters if auth_type is api_key_query
if self.runtime and self.runtime.credentials:
credentials = self.runtime.credentials
if credentials.get("auth_type") == "api_key_query":
api_key_query_param = credentials.get("api_key_query_param", "key")
api_key_value = credentials.get("api_key_value")
if api_key_value:
params[api_key_query_param] = api_key_value

# check parameters
for parameter in self.api_bundle.openapi.get("parameters", []):
value = self.get_parameter_value(parameter, parameters)
@@ -213,7 +227,8 @@ class ApiTool(Tool):
elif "default" in property:
body[name] = property["default"]
else:
body[name] = None
# omit optional parameters that weren't provided, instead of setting them to None
pass
break

# replace path parameters

+ 20
- 3
api/core/tools/entities/api_entities.py ファイルの表示

@@ -1,4 +1,5 @@
from typing import Literal, Optional
from datetime import datetime
from typing import Any, Literal, Optional

from pydantic import BaseModel, Field, field_validator

@@ -18,7 +19,7 @@ class ToolApiEntity(BaseModel):
output_schema: Optional[dict] = None


ToolProviderTypeApiLiteral = Optional[Literal["builtin", "api", "workflow"]]
ToolProviderTypeApiLiteral = Optional[Literal["builtin", "api", "workflow", "mcp"]]


class ToolProviderApiEntity(BaseModel):
@@ -27,6 +28,7 @@ class ToolProviderApiEntity(BaseModel):
name: str # identifier
description: I18nObject
icon: str | dict
icon_dark: Optional[str | dict] = Field(default=None, description="The dark icon of the tool")
label: I18nObject # label
type: ToolProviderType
masked_credentials: Optional[dict] = None
@@ -37,6 +39,10 @@ class ToolProviderApiEntity(BaseModel):
plugin_unique_identifier: Optional[str] = Field(default="", description="The unique identifier of the tool")
tools: list[ToolApiEntity] = Field(default_factory=list)
labels: list[str] = Field(default_factory=list)
# MCP
server_url: Optional[str] = Field(default="", description="The server url of the tool")
updated_at: int = Field(default_factory=lambda: int(datetime.now().timestamp()))
server_identifier: Optional[str] = Field(default="", description="The server identifier of the MCP tool")

@field_validator("tools", mode="before")
@classmethod
@@ -52,8 +58,13 @@ class ToolProviderApiEntity(BaseModel):
for parameter in tool.get("parameters"):
if parameter.get("type") == ToolParameter.ToolParameterType.SYSTEM_FILES.value:
parameter["type"] = "files"
if parameter.get("input_schema") is None:
parameter.pop("input_schema", None)
# -------------

optional_fields = self.optional_field("server_url", self.server_url)
if self.type == ToolProviderType.MCP.value:
optional_fields.update(self.optional_field("updated_at", self.updated_at))
optional_fields.update(self.optional_field("server_identifier", self.server_identifier))
return {
"id": self.id,
"author": self.author,
@@ -62,6 +73,7 @@ class ToolProviderApiEntity(BaseModel):
"plugin_unique_identifier": self.plugin_unique_identifier,
"description": self.description.to_dict(),
"icon": self.icon,
"icon_dark": self.icon_dark,
"label": self.label.to_dict(),
"type": self.type.value,
"team_credentials": self.masked_credentials,
@@ -69,4 +81,9 @@ class ToolProviderApiEntity(BaseModel):
"allow_delete": self.allow_delete,
"tools": tools,
"labels": self.labels,
**optional_fields,
}

def optional_field(self, key: str, value: Any) -> dict:
"""Return dict with key-value if value is truthy, empty dict otherwise."""
return {key: value} if value else {}

+ 11
- 1
api/core/tools/entities/tool_entities.py ファイルの表示

@@ -8,6 +8,7 @@ from pydantic import BaseModel, ConfigDict, Field, ValidationInfo, field_seriali

from core.entities.provider_entities import ProviderConfig
from core.plugin.entities.parameters import (
MCPServerParameterType,
PluginParameter,
PluginParameterOption,
PluginParameterType,
@@ -49,6 +50,7 @@ class ToolProviderType(enum.StrEnum):
API = "api"
APP = "app"
DATASET_RETRIEVAL = "dataset-retrieval"
MCP = "mcp"

@classmethod
def value_of(cls, value: str) -> "ToolProviderType":
@@ -94,7 +96,8 @@ class ApiProviderAuthType(Enum):
"""

NONE = "none"
API_KEY = "api_key"
API_KEY_HEADER = "api_key_header"
API_KEY_QUERY = "api_key_query"

@classmethod
def value_of(cls, value: str) -> "ApiProviderAuthType":
@@ -242,6 +245,10 @@ class ToolParameter(PluginParameter):
MODEL_SELECTOR = PluginParameterType.MODEL_SELECTOR.value
DYNAMIC_SELECT = PluginParameterType.DYNAMIC_SELECT.value

# MCP object and array type parameters
ARRAY = MCPServerParameterType.ARRAY.value
OBJECT = MCPServerParameterType.OBJECT.value

# deprecated, should not use.
SYSTEM_FILES = PluginParameterType.SYSTEM_FILES.value

@@ -260,6 +267,8 @@ class ToolParameter(PluginParameter):
human_description: Optional[I18nObject] = Field(default=None, description="The description presented to the user")
form: ToolParameterForm = Field(..., description="The form of the parameter, schema/form/llm")
llm_description: Optional[str] = None
# MCP object and array type parameters use this field to store the schema
input_schema: Optional[dict] = None

@classmethod
def get_simple_instance(
@@ -309,6 +318,7 @@ class ToolProviderIdentity(BaseModel):
name: str = Field(..., description="The name of the tool")
description: I18nObject = Field(..., description="The description of the tool")
icon: str = Field(..., description="The icon of the tool")
icon_dark: Optional[str] = Field(default=None, description="The dark icon of the tool")
label: I18nObject = Field(..., description="The label of the tool")
tags: Optional[list[ToolLabelEnum]] = Field(
default=[],

+ 130
- 0
api/core/tools/mcp_tool/provider.py ファイルの表示

@@ -0,0 +1,130 @@
import json
from typing import Any

from core.mcp.types import Tool as RemoteMCPTool
from core.tools.__base.tool_provider import ToolProviderController
from core.tools.__base.tool_runtime import ToolRuntime
from core.tools.entities.common_entities import I18nObject
from core.tools.entities.tool_entities import (
ToolDescription,
ToolEntity,
ToolIdentity,
ToolProviderEntityWithPlugin,
ToolProviderIdentity,
ToolProviderType,
)
from core.tools.mcp_tool.tool import MCPTool
from models.tools import MCPToolProvider
from services.tools.tools_transform_service import ToolTransformService


class MCPToolProviderController(ToolProviderController):
    """Tool provider controller backed by a remote MCP server whose tool
    declarations are persisted (as JSON) on an MCPToolProvider DB row."""

    provider_id: str
    entity: ToolProviderEntityWithPlugin

    def __init__(self, entity: ToolProviderEntityWithPlugin, provider_id: str, tenant_id: str, server_url: str) -> None:
        super().__init__(entity)
        self.entity = entity
        self.tenant_id = tenant_id
        self.provider_id = provider_id
        # Decrypted URL of the remote MCP server; passed to every MCPTool.
        self.server_url = server_url

    @property
    def provider_type(self) -> ToolProviderType:
        """
        returns the type of the provider

        :return: type of the provider
        """
        return ToolProviderType.MCP

    @classmethod
    def _from_db(cls, db_provider: MCPToolProvider) -> "MCPToolProviderController":
        """
        Build a controller from a persisted MCPToolProvider row.

        The row stores remote tool declarations as a JSON string; each is
        converted into a ToolEntity whose parameters are derived from the
        MCP tool's input schema.
        """
        tools_data = json.loads(db_provider.tools)
        remote_mcp_tools = [RemoteMCPTool(**tool) for tool in tools_data]
        user = db_provider.load_user()
        author = user.name if user else "Anonymous"
        tools = [
            ToolEntity(
                identity=ToolIdentity(
                    author=author,
                    name=remote_mcp_tool.name,
                    label=I18nObject(en_US=remote_mcp_tool.name, zh_Hans=remote_mcp_tool.name),
                    provider=db_provider.server_identifier,
                    icon=db_provider.icon,
                ),
                parameters=ToolTransformService.convert_mcp_schema_to_parameter(remote_mcp_tool.inputSchema),
                description=ToolDescription(
                    human=I18nObject(
                        en_US=remote_mcp_tool.description or "", zh_Hans=remote_mcp_tool.description or ""
                    ),
                    llm=remote_mcp_tool.description or "",
                ),
                output_schema=None,
                has_runtime_parameters=len(remote_mcp_tool.inputSchema) > 0,
            )
            for remote_mcp_tool in remote_mcp_tools
        ]

        return cls(
            entity=ToolProviderEntityWithPlugin(
                identity=ToolProviderIdentity(
                    author=author,
                    name=db_provider.name,
                    label=I18nObject(en_US=db_provider.name, zh_Hans=db_provider.name),
                    description=I18nObject(en_US="", zh_Hans=""),
                    icon=db_provider.icon,
                ),
                # MCP providers are not plugin-backed, hence no plugin id.
                plugin_id=None,
                credentials_schema=[],
                tools=tools,
            ),
            provider_id=db_provider.server_identifier or "",
            tenant_id=db_provider.tenant_id or "",
            server_url=db_provider.decrypted_server_url,
        )

    def _validate_credentials(self, user_id: str, credentials: dict[str, Any]) -> None:
        """
        validate the credentials of the provider
        """
        # MCP providers expose no credential schema, so nothing to validate.
        pass

    def _build_tool(self, tool_entity: ToolEntity) -> MCPTool:
        # Single place that wires a ToolEntity to an invocable MCPTool,
        # shared by get_tool and get_tools.
        return MCPTool(
            entity=tool_entity,
            runtime=ToolRuntime(tenant_id=self.tenant_id),
            tenant_id=self.tenant_id,
            icon=self.entity.identity.icon,
            server_url=self.server_url,
            provider_id=self.provider_id,
        )

    def get_tool(self, tool_name: str) -> MCPTool:  # type: ignore
        """
        return tool with given name

        :raises ValueError: when no tool with that name is declared.
        """
        tool_entity = next(
            (tool_entity for tool_entity in self.entity.tools if tool_entity.identity.name == tool_name), None
        )

        if not tool_entity:
            raise ValueError(f"Tool with name {tool_name} not found")

        return self._build_tool(tool_entity)

    def get_tools(self) -> list[MCPTool]:  # type: ignore
        """
        get all tools
        """
        return [self._build_tool(tool_entity) for tool_entity in self.entity.tools]

+ 92
- 0
api/core/tools/mcp_tool/tool.py ファイルの表示

@@ -0,0 +1,92 @@
import base64
import json
from collections.abc import Generator
from typing import Any, Optional

from core.mcp.error import MCPAuthError, MCPConnectionError
from core.mcp.mcp_client import MCPClient
from core.mcp.types import ImageContent, TextContent
from core.tools.__base.tool import Tool
from core.tools.__base.tool_runtime import ToolRuntime
from core.tools.entities.tool_entities import ToolEntity, ToolInvokeMessage, ToolParameter, ToolProviderType


class MCPTool(Tool):
    """Invocable tool that proxies calls to a tool hosted on a remote MCP server."""

    # Owning tenant; used when opening the MCP client session.
    tenant_id: str
    icon: str
    # Lazily populated elsewhere; initialized to None in __init__.
    runtime_parameters: Optional[list[ToolParameter]]
    # URL of the remote MCP server this tool lives on.
    server_url: str
    provider_id: str

    def __init__(
        self, entity: ToolEntity, runtime: ToolRuntime, tenant_id: str, icon: str, server_url: str, provider_id: str
    ) -> None:
        super().__init__(entity, runtime)
        self.tenant_id = tenant_id
        self.icon = icon
        self.runtime_parameters = None
        self.server_url = server_url
        self.provider_id = provider_id

    def tool_provider_type(self) -> ToolProviderType:
        """Identify this tool as MCP-provided."""
        return ToolProviderType.MCP

    def _invoke(
        self,
        user_id: str,
        tool_parameters: dict[str, Any],
        conversation_id: Optional[str] = None,
        app_id: Optional[str] = None,
        message_id: Optional[str] = None,
    ) -> Generator[ToolInvokeMessage, None, None]:
        """Invoke the remote tool and yield its content as ToolInvokeMessages.

        Opens a fresh MCPClient session per invocation; MCP errors are mapped
        to ToolInvokeError. Text content that parses as JSON is emitted as
        JSON messages, other text as text messages, images as blob messages.
        Content types other than text/image are silently dropped.
        """
        # Imported locally, presumably to avoid a circular import — TODO confirm.
        from core.tools.errors import ToolInvokeError

        try:
            with MCPClient(self.server_url, self.provider_id, self.tenant_id, authed=True) as mcp_client:
                # Strip None/blank parameters before sending them to the server.
                tool_parameters = self._handle_none_parameter(tool_parameters)
                result = mcp_client.invoke_tool(tool_name=self.entity.identity.name, tool_args=tool_parameters)
        except MCPAuthError as e:
            raise ToolInvokeError("Please auth the tool first") from e
        except MCPConnectionError as e:
            raise ToolInvokeError(f"Failed to connect to MCP server: {e}") from e
        except Exception as e:
            raise ToolInvokeError(f"Failed to invoke tool: {e}") from e

        for content in result.content:
            if isinstance(content, TextContent):
                try:
                    # Prefer structured JSON output when the text parses as
                    # a dict (one message) or list (one message per item).
                    content_json = json.loads(content.text)
                    if isinstance(content_json, dict):
                        yield self.create_json_message(content_json)
                    elif isinstance(content_json, list):
                        for item in content_json:
                            yield self.create_json_message(item)
                    else:
                        # Valid JSON but a scalar — fall back to plain text.
                        yield self.create_text_message(content.text)
                except json.JSONDecodeError:
                    yield self.create_text_message(content.text)

            elif isinstance(content, ImageContent):
                # Image payloads arrive base64-encoded in the MCP message.
                yield self.create_blob_message(
                    blob=base64.b64decode(content.data), meta={"mime_type": content.mimeType}
                )

    def fork_tool_runtime(self, runtime: ToolRuntime) -> "MCPTool":
        """Clone this tool with a different runtime (all other state shared)."""
        return MCPTool(
            entity=self.entity,
            runtime=runtime,
            tenant_id=self.tenant_id,
            icon=self.icon,
            server_url=self.server_url,
            provider_id=self.provider_id,
        )

    def _handle_none_parameter(self, parameter: dict[str, Any]) -> dict[str, Any]:
        """
        in mcp tool invoke, if the parameter is empty, it will be set to None
        """
        # Drop None values and whitespace-only strings so the remote server
        # only receives parameters that actually carry a value.
        return {
            key: value
            for key, value in parameter.items()
            if value is not None and not (isinstance(value, str) and value.strip() == "")
        }

+ 3
- 2
api/core/tools/signature.py ファイルの表示

@@ -9,9 +9,10 @@ from configs import dify_config

def sign_tool_file(tool_file_id: str, extension: str) -> str:
"""
sign file to get a temporary url
sign file to get a temporary url for plugin access
"""
base_url = dify_config.FILES_URL
# Use internal URL for plugin/tool file access in Docker environments
base_url = dify_config.INTERNAL_FILES_URL or dify_config.FILES_URL
file_preview_url = f"{base_url}/files/tools/{tool_file_id}{extension}"

timestamp = str(int(time.time()))

+ 3
- 2
api/core/tools/tool_file_manager.py ファイルの表示

@@ -35,9 +35,10 @@ class ToolFileManager:
@staticmethod
def sign_file(tool_file_id: str, extension: str) -> str:
"""
sign file to get a temporary url
sign file to get a temporary url for plugin access
"""
base_url = dify_config.FILES_URL
# Use internal URL for plugin/tool file access in Docker environments
base_url = dify_config.INTERNAL_FILES_URL or dify_config.FILES_URL
file_preview_url = f"{base_url}/files/tools/{tool_file_id}{extension}"

timestamp = str(int(time.time()))

+ 135
- 32
api/core/tools/tool_manager.py ファイルの表示

@@ -4,7 +4,7 @@ import mimetypes
from collections.abc import Generator
from os import listdir, path
from threading import Lock
from typing import TYPE_CHECKING, Any, Union, cast
from typing import TYPE_CHECKING, Any, Literal, Optional, Union, cast

from yarl import URL

@@ -13,9 +13,13 @@ from core.plugin.entities.plugin import ToolProviderID
from core.plugin.impl.tool import PluginToolManager
from core.tools.__base.tool_provider import ToolProviderController
from core.tools.__base.tool_runtime import ToolRuntime
from core.tools.mcp_tool.provider import MCPToolProviderController
from core.tools.mcp_tool.tool import MCPTool
from core.tools.plugin_tool.provider import PluginToolProviderController
from core.tools.plugin_tool.tool import PluginTool
from core.tools.workflow_as_tool.provider import WorkflowToolProviderController
from core.workflow.entities.variable_pool import VariablePool
from services.tools.mcp_tools_mange_service import MCPToolManageService

if TYPE_CHECKING:
from core.workflow.nodes.tool.entities import ToolEntity
@@ -49,7 +53,7 @@ from core.tools.utils.configuration import (
)
from core.tools.workflow_as_tool.tool import WorkflowTool
from extensions.ext_database import db
from models.tools import ApiToolProvider, BuiltinToolProvider, WorkflowToolProvider
from models.tools import ApiToolProvider, BuiltinToolProvider, MCPToolProvider, WorkflowToolProvider
from services.tools.tools_transform_service import ToolTransformService

logger = logging.getLogger(__name__)
@@ -156,7 +160,7 @@ class ToolManager:
tenant_id: str,
invoke_from: InvokeFrom = InvokeFrom.DEBUGGER,
tool_invoke_from: ToolInvokeFrom = ToolInvokeFrom.AGENT,
) -> Union[BuiltinTool, PluginTool, ApiTool, WorkflowTool]:
) -> Union[BuiltinTool, PluginTool, ApiTool, WorkflowTool, MCPTool]:
"""
get the tool runtime

@@ -292,6 +296,8 @@ class ToolManager:
raise NotImplementedError("app provider not implemented")
elif provider_type == ToolProviderType.PLUGIN:
return cls.get_plugin_provider(provider_id, tenant_id).get_tool(tool_name)
elif provider_type == ToolProviderType.MCP:
return cls.get_mcp_provider_controller(tenant_id, provider_id).get_tool(tool_name)
else:
raise ToolProviderNotFoundError(f"provider type {provider_type.value} not found")

@@ -302,6 +308,7 @@ class ToolManager:
app_id: str,
agent_tool: AgentToolEntity,
invoke_from: InvokeFrom = InvokeFrom.DEBUGGER,
variable_pool: Optional[VariablePool] = None,
) -> Tool:
"""
get the agent tool runtime
@@ -316,24 +323,9 @@ class ToolManager:
)
runtime_parameters = {}
parameters = tool_entity.get_merged_runtime_parameters()
for parameter in parameters:
# check file types
if (
parameter.type
in {
ToolParameter.ToolParameterType.SYSTEM_FILES,
ToolParameter.ToolParameterType.FILE,
ToolParameter.ToolParameterType.FILES,
}
and parameter.required
):
raise ValueError(f"file type parameter {parameter.name} not supported in agent")

if parameter.form == ToolParameter.ToolParameterForm.FORM:
# save tool parameter to tool entity memory
value = parameter.init_frontend_parameter(agent_tool.tool_parameters.get(parameter.name))
runtime_parameters[parameter.name] = value

runtime_parameters = cls._convert_tool_parameters_type(
parameters, variable_pool, agent_tool.tool_parameters, typ="agent"
)
# decrypt runtime parameters
encryption_manager = ToolParameterConfigurationManager(
tenant_id=tenant_id,
@@ -357,10 +349,12 @@ class ToolManager:
node_id: str,
workflow_tool: "ToolEntity",
invoke_from: InvokeFrom = InvokeFrom.DEBUGGER,
variable_pool: Optional[VariablePool] = None,
) -> Tool:
"""
get the workflow tool runtime
"""

tool_runtime = cls.get_tool_runtime(
provider_type=workflow_tool.provider_type,
provider_id=workflow_tool.provider_id,
@@ -369,15 +363,11 @@ class ToolManager:
invoke_from=invoke_from,
tool_invoke_from=ToolInvokeFrom.WORKFLOW,
)
runtime_parameters = {}
parameters = tool_runtime.get_merged_runtime_parameters()

for parameter in parameters:
# save tool parameter to tool entity memory
if parameter.form == ToolParameter.ToolParameterForm.FORM:
value = parameter.init_frontend_parameter(workflow_tool.tool_configurations.get(parameter.name))
runtime_parameters[parameter.name] = value

parameters = tool_runtime.get_merged_runtime_parameters()
runtime_parameters = cls._convert_tool_parameters_type(
parameters, variable_pool, workflow_tool.tool_configurations, typ="workflow"
)
# decrypt runtime parameters
encryption_manager = ToolParameterConfigurationManager(
tenant_id=tenant_id,
@@ -569,7 +559,7 @@ class ToolManager:

filters = []
if not typ:
filters.extend(["builtin", "api", "workflow"])
filters.extend(["builtin", "api", "workflow", "mcp"])
else:
filters.append(typ)

@@ -663,6 +653,10 @@ class ToolManager:
labels=labels.get(provider_controller.provider_id, []),
)
result_providers[f"workflow_provider.{user_provider.name}"] = user_provider
if "mcp" in filters:
mcp_providers = MCPToolManageService.retrieve_mcp_tools(tenant_id, for_list=True)
for mcp_provider in mcp_providers:
result_providers[f"mcp_provider.{mcp_provider.name}"] = mcp_provider

return BuiltinToolProviderSort.sort(list(result_providers.values()))

@@ -690,14 +684,47 @@ class ToolManager:
if provider is None:
raise ToolProviderNotFoundError(f"api provider {provider_id} not found")

auth_type = ApiProviderAuthType.NONE
provider_auth_type = provider.credentials.get("auth_type")
if provider_auth_type in ("api_key_header", "api_key"): # backward compatibility
auth_type = ApiProviderAuthType.API_KEY_HEADER
elif provider_auth_type == "api_key_query":
auth_type = ApiProviderAuthType.API_KEY_QUERY

controller = ApiToolProviderController.from_db(
provider,
ApiProviderAuthType.API_KEY if provider.credentials["auth_type"] == "api_key" else ApiProviderAuthType.NONE,
auth_type,
)
controller.load_bundled_tools(provider.tools)

return controller, provider.credentials

@classmethod
def get_mcp_provider_controller(cls, tenant_id: str, provider_id: str) -> MCPToolProviderController:
"""
get the api provider

:param tenant_id: the id of the tenant
:param provider_id: the id of the provider

:return: the provider controller, the credentials
"""
provider: MCPToolProvider | None = (
db.session.query(MCPToolProvider)
.filter(
MCPToolProvider.server_identifier == provider_id,
MCPToolProvider.tenant_id == tenant_id,
)
.first()
)

if provider is None:
raise ToolProviderNotFoundError(f"mcp provider {provider_id} not found")

controller = MCPToolProviderController._from_db(provider)

return controller

@classmethod
def user_get_api_provider(cls, provider: str, tenant_id: str) -> dict:
"""
@@ -725,9 +752,16 @@ class ToolManager:
credentials = {}

# package tool provider controller
auth_type = ApiProviderAuthType.NONE
credentials_auth_type = credentials.get("auth_type")
if credentials_auth_type in ("api_key_header", "api_key"): # backward compatibility
auth_type = ApiProviderAuthType.API_KEY_HEADER
elif credentials_auth_type == "api_key_query":
auth_type = ApiProviderAuthType.API_KEY_QUERY

controller = ApiToolProviderController.from_db(
provider_obj,
ApiProviderAuthType.API_KEY if credentials["auth_type"] == "api_key" else ApiProviderAuthType.NONE,
auth_type,
)
# init tool configuration
tool_configuration = ProviderConfigEncrypter(
@@ -826,6 +860,22 @@ class ToolManager:
except Exception:
return {"background": "#252525", "content": "\ud83d\ude01"}

@classmethod
def generate_mcp_tool_icon_url(cls, tenant_id: str, provider_id: str) -> dict[str, str] | str:
try:
mcp_provider: MCPToolProvider | None = (
db.session.query(MCPToolProvider)
.filter(MCPToolProvider.tenant_id == tenant_id, MCPToolProvider.server_identifier == provider_id)
.first()
)

if mcp_provider is None:
raise ToolProviderNotFoundError(f"mcp provider {provider_id} not found")

return mcp_provider.provider_icon
except Exception:
return {"background": "#252525", "content": "\ud83d\ude01"}

@classmethod
def get_tool_icon(
cls,
@@ -863,8 +913,61 @@ class ToolManager:
except Exception:
return {"background": "#252525", "content": "\ud83d\ude01"}
raise ValueError(f"plugin provider {provider_id} not found")
elif provider_type == ToolProviderType.MCP:
return cls.generate_mcp_tool_icon_url(tenant_id, provider_id)
else:
raise ValueError(f"provider type {provider_type} not found")

@classmethod
def _convert_tool_parameters_type(
cls,
parameters: list[ToolParameter],
variable_pool: Optional[VariablePool],
tool_configurations: dict[str, Any],
typ: Literal["agent", "workflow", "tool"] = "workflow",
) -> dict[str, Any]:
"""
Convert tool parameters type
"""
from core.workflow.nodes.tool.entities import ToolNodeData
from core.workflow.nodes.tool.exc import ToolParameterError

runtime_parameters = {}
for parameter in parameters:
if (
parameter.type
in {
ToolParameter.ToolParameterType.SYSTEM_FILES,
ToolParameter.ToolParameterType.FILE,
ToolParameter.ToolParameterType.FILES,
}
and parameter.required
and typ == "agent"
):
raise ValueError(f"file type parameter {parameter.name} not supported in agent")
# save tool parameter to tool entity memory
if parameter.form == ToolParameter.ToolParameterForm.FORM:
if variable_pool:
config = tool_configurations.get(parameter.name, {})
if not (config and isinstance(config, dict) and config.get("value") is not None):
continue
tool_input = ToolNodeData.ToolInput(**tool_configurations.get(parameter.name, {}))
if tool_input.type == "variable":
variable = variable_pool.get(tool_input.value)
if variable is None:
raise ToolParameterError(f"Variable {tool_input.value} does not exist")
parameter_value = variable.value
elif tool_input.type in {"mixed", "constant"}:
segment_group = variable_pool.convert_template(str(tool_input.value))
parameter_value = segment_group.text
else:
raise ToolParameterError(f"Unknown tool input type '{tool_input.type}'")
runtime_parameters[parameter.name] = parameter_value

else:
value = parameter.init_frontend_parameter(tool_configurations.get(parameter.name))
runtime_parameters[parameter.name] = value
return runtime_parameters


ToolManager.load_hardcoded_providers_cache()

+ 12
- 11
api/core/tools/utils/configuration.py ファイルの表示

@@ -72,21 +72,21 @@ class ProviderConfigEncrypter(BaseModel):

return data

def decrypt(self, data: dict[str, str]) -> dict[str, str]:
def decrypt(self, data: dict[str, str], use_cache: bool = True) -> dict[str, str]:
"""
decrypt tool credentials with tenant id

return a deep copy of credentials with decrypted values
"""
cache = ToolProviderCredentialsCache(
tenant_id=self.tenant_id,
identity_id=f"{self.provider_type}.{self.provider_identity}",
cache_type=ToolProviderCredentialsCacheType.PROVIDER,
)
cached_credentials = cache.get()
if cached_credentials:
return cached_credentials
if use_cache:
cache = ToolProviderCredentialsCache(
tenant_id=self.tenant_id,
identity_id=f"{self.provider_type}.{self.provider_identity}",
cache_type=ToolProviderCredentialsCacheType.PROVIDER,
)
cached_credentials = cache.get()
if cached_credentials:
return cached_credentials
data = self._deep_copy(data)
# get fields need to be decrypted
fields = dict[str, BasicProviderConfig]()
@@ -105,7 +105,8 @@ class ProviderConfigEncrypter(BaseModel):
except Exception:
pass

cache.set(data)
if use_cache:
cache.set(data)
return data

def delete_tool_credentials_cache(self):

+ 6
- 1
api/core/tools/workflow_as_tool/tool.py ファイルの表示

@@ -8,7 +8,12 @@ from flask_login import current_user
from core.file import FILE_MODEL_IDENTITY, File, FileTransferMethod
from core.tools.__base.tool import Tool
from core.tools.__base.tool_runtime import ToolRuntime
from core.tools.entities.tool_entities import ToolEntity, ToolInvokeMessage, ToolParameter, ToolProviderType
from core.tools.entities.tool_entities import (
ToolEntity,
ToolInvokeMessage,
ToolParameter,
ToolProviderType,
)
from core.tools.errors import ToolInvokeError
from extensions.ext_database import db
from factories.file_factory import build_from_mapping

+ 3
- 3
api/core/workflow/callbacks/workflow_logging_callback.py ファイルの表示

@@ -232,14 +232,14 @@ class WorkflowLoggingCallback(WorkflowCallback):
Publish loop started
"""
self.print_text("\n[LoopRunStartedEvent]", color="blue")
self.print_text(f"Loop Node ID: {event.loop_id}", color="blue")
self.print_text(f"Loop Node ID: {event.loop_node_id}", color="blue")

def on_workflow_loop_next(self, event: LoopRunNextEvent) -> None:
"""
Publish loop next
"""
self.print_text("\n[LoopRunNextEvent]", color="blue")
self.print_text(f"Loop Node ID: {event.loop_id}", color="blue")
self.print_text(f"Loop Node ID: {event.loop_node_id}", color="blue")
self.print_text(f"Loop Index: {event.index}", color="blue")

def on_workflow_loop_completed(self, event: LoopRunSucceededEvent | LoopRunFailedEvent) -> None:
@@ -250,7 +250,7 @@ class WorkflowLoggingCallback(WorkflowCallback):
"\n[LoopRunSucceededEvent]" if isinstance(event, LoopRunSucceededEvent) else "\n[LoopRunFailedEvent]",
color="blue",
)
self.print_text(f"Node ID: {event.loop_id}", color="blue")
self.print_text(f"Loop Node ID: {event.loop_node_id}", color="blue")

def print_text(self, text: str, color: Optional[str] = None, end: str = "\n") -> None:
"""Print text with highlighting and no end characters."""

+ 1
- 1
api/core/workflow/graph_engine/entities/graph.py ファイルの表示

@@ -336,7 +336,7 @@ class Graph(BaseModel):

parallel = GraphParallel(
start_from_node_id=start_node_id,
parent_parallel_id=parent_parallel.id if parent_parallel else None,
parent_parallel_id=parent_parallel_id,
parent_parallel_start_node_id=parent_parallel.start_from_node_id if parent_parallel else None,
)
parallel_mapping[parallel.id] = parallel

+ 21
- 3
api/core/workflow/nodes/agent/agent_node.py ファイルの表示

@@ -3,11 +3,13 @@ import uuid
from collections.abc import Generator, Mapping, Sequence
from typing import Any, Optional, cast

from packaging.version import Version
from sqlalchemy import select
from sqlalchemy.orm import Session

from core.agent.entities import AgentToolEntity
from core.agent.plugin_entities import AgentStrategyParameter
from core.agent.strategy.plugin import PluginAgentStrategy
from core.memory.token_buffer_memory import TokenBufferMemory
from core.model_manager import ModelInstance, ModelManager
from core.model_runtime.entities.model_entities import AIModelEntity, ModelType
@@ -73,12 +75,14 @@ class AgentNode(ToolNode):
agent_parameters=agent_parameters,
variable_pool=self.graph_runtime_state.variable_pool,
node_data=node_data,
strategy=strategy,
)
parameters_for_log = self._generate_agent_parameters(
agent_parameters=agent_parameters,
variable_pool=self.graph_runtime_state.variable_pool,
node_data=node_data,
for_log=True,
strategy=strategy,
)

# get conversation id
@@ -155,6 +159,7 @@ class AgentNode(ToolNode):
variable_pool: VariablePool,
node_data: AgentNodeData,
for_log: bool = False,
strategy: PluginAgentStrategy,
) -> dict[str, Any]:
"""
Generate parameters based on the given tool parameters, variable pool, and node data.
@@ -207,7 +212,7 @@ class AgentNode(ToolNode):
if parameter.type == "array[tools]":
value = cast(list[dict[str, Any]], value)
value = [tool for tool in value if tool.get("enabled", False)]
value = self._filter_mcp_type_tool(strategy, value)
for tool in value:
if "schemas" in tool:
tool.pop("schemas")
@@ -244,9 +249,9 @@ class AgentNode(ToolNode):
)

extra = tool.get("extra", {})
runtime_variable_pool = variable_pool if self.node_data.version != "1" else None
tool_runtime = ToolManager.get_agent_tool_runtime(
self.tenant_id, self.app_id, entity, self.invoke_from
self.tenant_id, self.app_id, entity, self.invoke_from, runtime_variable_pool
)
if tool_runtime.entity.description:
tool_runtime.entity.description.llm = (
@@ -398,3 +403,16 @@ class AgentNode(ToolNode):
except ValueError:
model_schema.features.remove(feature)
return model_schema

def _filter_mcp_type_tool(self, strategy: PluginAgentStrategy, tools: list[dict[str, Any]]) -> list[dict[str, Any]]:
"""
Filter MCP type tool
:param strategy: plugin agent strategy
:param tool: tool
:return: filtered tool dict
"""
meta_version = strategy.meta_version
if meta_version and Version(meta_version) > Version("0.0.1"):
return tools
else:
return [tool for tool in tools if tool.get("type") != ToolProviderType.MCP.value]

+ 2
- 0
api/core/workflow/nodes/node_mapping.py ファイルの表示

@@ -75,6 +75,7 @@ NODE_TYPE_CLASSES_MAPPING: Mapping[NodeType, Mapping[str, type[BaseNode]]] = {
},
NodeType.TOOL: {
LATEST_VERSION: ToolNode,
"2": ToolNode,
"1": ToolNode,
},
NodeType.VARIABLE_AGGREGATOR: {
@@ -124,6 +125,7 @@ NODE_TYPE_CLASSES_MAPPING: Mapping[NodeType, Mapping[str, type[BaseNode]]] = {
},
NodeType.AGENT: {
LATEST_VERSION: AgentNode,
"2": AgentNode,
"1": AgentNode,
},
NodeType.DATASOURCE: {

+ 23
- 0
api/core/workflow/nodes/tool/entities.py ファイルの表示

@@ -41,6 +41,10 @@ class ToolNodeData(BaseNodeData, ToolEntity):
def check_type(cls, value, validation_info: ValidationInfo):
typ = value
value = validation_info.data.get("value")

if value is None:
return typ

if typ == "mixed" and not isinstance(value, str):
raise ValueError("value must be a string")
elif typ == "variable":
@@ -54,3 +58,22 @@ class ToolNodeData(BaseNodeData, ToolEntity):
return typ

tool_parameters: dict[str, ToolInput]

@field_validator("tool_parameters", mode="before")
@classmethod
def filter_none_tool_inputs(cls, value):
if not isinstance(value, dict):
return value

return {
key: tool_input
for key, tool_input in value.items()
if tool_input is not None and cls._has_valid_value(tool_input)
}

@staticmethod
def _has_valid_value(tool_input):
"""Check if the value is valid"""
if isinstance(tool_input, dict):
return tool_input.get("value") is not None
return getattr(tool_input, "value", None) is not None

+ 29
- 25
api/core/workflow/nodes/tool/tool_node.py ファイルの表示

@@ -67,8 +67,9 @@ class ToolNode(BaseNode[ToolNodeData]):
try:
from core.tools.tool_manager import ToolManager

variable_pool = self.graph_runtime_state.variable_pool if self.node_data.version != "1" else None
tool_runtime = ToolManager.get_workflow_tool_runtime(
self.tenant_id, self.app_id, self.node_id, self.node_data, self.invoke_from
self.tenant_id, self.app_id, self.node_id, self.node_data, self.invoke_from, variable_pool
)
except ToolNodeError as e:
yield RunCompletedEvent(
@@ -95,7 +96,6 @@ class ToolNode(BaseNode[ToolNodeData]):
node_data=self.node_data,
for_log=True,
)

# get conversation id
conversation_id = self.graph_runtime_state.variable_pool.get(["sys", SystemVariableKey.CONVERSATION_ID])

@@ -285,7 +285,8 @@ class ToolNode(BaseNode[ToolNodeData]):
for key, value in msg_metadata.items()
if key in WorkflowNodeExecutionMetadataKey.__members__.values()
}
json.append(message.message.json_object)
if message.message.json_object is not None:
json.append(message.message.json_object)
elif message.type == ToolInvokeMessage.MessageType.LINK:
assert isinstance(message.message, ToolInvokeMessage.TextMessage)
stream_text = f"Link: {message.message.text}\n"
@@ -328,6 +329,7 @@ class ToolNode(BaseNode[ToolNodeData]):
icon = current_plugin.declaration.icon
except StopIteration:
pass
icon_dark = None
try:
builtin_tool = next(
provider
@@ -338,10 +340,12 @@ class ToolNode(BaseNode[ToolNodeData]):
if provider.name == dict_metadata["provider"]
)
icon = builtin_tool.icon
icon_dark = builtin_tool.icon_dark
except StopIteration:
pass

dict_metadata["icon"] = icon
dict_metadata["icon_dark"] = icon_dark
message.message.metadata = dict_metadata
agent_log = AgentLogEvent(
id=message.message.id,
@@ -369,31 +373,31 @@ class ToolNode(BaseNode[ToolNodeData]):
agent_logs.append(agent_log)

yield agent_log

# Add agent_logs to outputs['json'] to ensure frontend can access thinking process
json_output: dict[str, Any] = {}
if json:
if isinstance(json, list) and len(json) == 1:
# If json is a list with only one element, convert it to a dictionary
json_output = json[0] if isinstance(json[0], dict) else {"data": json[0]}
elif isinstance(json, list):
# If json is a list with multiple elements, create a dictionary containing all data
json_output = {"data": json}
json_output: list[dict[str, Any]] = []

# Step 1: append each agent log as its own dict.
if agent_logs:
# Add agent_logs to json output
json_output["agent_logs"] = [
{
"id": log.id,
"parent_id": log.parent_id,
"error": log.error,
"status": log.status,
"data": log.data,
"label": log.label,
"metadata": log.metadata,
"node_id": log.node_id,
}
for log in agent_logs
]
for log in agent_logs:
json_output.append(
{
"id": log.id,
"parent_id": log.parent_id,
"error": log.error,
"status": log.status,
"data": log.data,
"label": log.label,
"metadata": log.metadata,
"node_id": log.node_id,
}
)
# Step 2: normalize JSON into {"data": [...]}.change json to list[dict]
if json:
json_output.extend(json)
else:
json_output.append({"data": []})

yield RunCompletedEvent(
run_result=NodeRunResult(
status=WorkflowNodeExecutionStatus.SUCCEEDED,

+ 2
- 0
api/extensions/ext_blueprints.py ファイルの表示

@@ -10,6 +10,7 @@ def init_app(app: DifyApp):
from controllers.console import bp as console_app_bp
from controllers.files import bp as files_bp
from controllers.inner_api import bp as inner_api_bp
from controllers.mcp import bp as mcp_bp
from controllers.service_api import bp as service_api_bp
from controllers.web import bp as web_bp

@@ -46,3 +47,4 @@ def init_app(app: DifyApp):
app.register_blueprint(files_bp)

app.register_blueprint(inner_api_bp)
app.register_blueprint(mcp_bp)

+ 16
- 1
api/extensions/ext_login.py ファイルの表示

@@ -10,7 +10,7 @@ from dify_app import DifyApp
from extensions.ext_database import db
from libs.passport import PassportService
from models.account import Account, Tenant, TenantAccountJoin
from models.model import EndUser
from models.model import AppMCPServer, EndUser
from services.account_service import AccountService

login_manager = flask_login.LoginManager()
@@ -74,6 +74,21 @@ def load_user_from_request(request_from_flask_login):
if not end_user:
raise NotFound("End user not found.")
return end_user
elif request.blueprint == "mcp":
server_code = request.view_args.get("server_code") if request.view_args else None
if not server_code:
raise Unauthorized("Invalid Authorization token.")
app_mcp_server = db.session.query(AppMCPServer).filter(AppMCPServer.server_code == server_code).first()
if not app_mcp_server:
raise NotFound("App MCP server not found.")
end_user = (
db.session.query(EndUser)
.filter(EndUser.external_user_id == app_mcp_server.id, EndUser.type == "mcp")
.first()
)
if not end_user:
raise NotFound("End user not found.")
return end_user


@user_logged_in.connect

+ 28
- 0
api/extensions/ext_redis.py ファイルの表示

@@ -1,6 +1,10 @@
import functools
import logging
from collections.abc import Callable
from typing import Any, Union

import redis
from redis import RedisError
from redis.cache import CacheConfig
from redis.cluster import ClusterNode, RedisCluster
from redis.connection import Connection, SSLConnection
@@ -9,6 +13,8 @@ from redis.sentinel import Sentinel
from configs import dify_config
from dify_app import DifyApp

logger = logging.getLogger(__name__)


class RedisClientWrapper:
"""
@@ -115,3 +121,25 @@ def init_app(app: DifyApp):
redis_client.initialize(redis.Redis(connection_pool=pool))

app.extensions["redis"] = redis_client


def redis_fallback(default_return: Any = None):
"""
decorator to handle Redis operation exceptions and return a default value when Redis is unavailable.

Args:
default_return: The value to return when a Redis operation fails. Defaults to None.
"""

def decorator(func: Callable):
@functools.wraps(func)
def wrapper(*args, **kwargs):
try:
return func(*args, **kwargs)
except RedisError as e:
logger.warning(f"Redis operation failed in {func.__name__}: {str(e)}", exc_info=True)
return default_return

return wrapper

return decorator

+ 1
- 1
api/factories/agent_factory.py ファイルの表示

@@ -10,6 +10,6 @@ def get_plugin_agent_strategy(
agent_provider = manager.fetch_agent_strategy_provider(tenant_id, agent_strategy_provider_name)
for agent_strategy in agent_provider.declaration.strategies:
if agent_strategy.identity.name == agent_strategy_name:
return PluginAgentStrategy(tenant_id, agent_strategy)
return PluginAgentStrategy(tenant_id, agent_strategy, agent_provider.meta.version)

raise ValueError(f"Agent strategy {agent_strategy_name} not found")

+ 24
- 0
api/fields/app_fields.py ファイルの表示

@@ -1,8 +1,21 @@
import json

from flask_restful import fields

from fields.workflow_fields import workflow_partial_fields
from libs.helper import AppIconUrlField, TimestampField


class JsonStringField(fields.Raw):
def format(self, value):
if isinstance(value, str):
try:
return json.loads(value)
except (json.JSONDecodeError, TypeError):
return value
return value


app_detail_kernel_fields = {
"id": fields.String,
"name": fields.String,
@@ -218,3 +231,14 @@ app_import_fields = {
app_import_check_dependencies_fields = {
"leaked_dependencies": fields.List(fields.Nested(leaked_dependency_fields)),
}

app_server_fields = {
"id": fields.String,
"name": fields.String,
"server_code": fields.String,
"description": fields.String,
"status": fields.String,
"parameters": JsonStringField,
"created_at": TimestampField,
"updated_at": TimestampField,
}

+ 4
- 2
api/libs/passport.py ファイルの表示

@@ -14,9 +14,11 @@ class PassportService:
def verify(self, token):
try:
return jwt.decode(token, self.sk, algorithms=["HS256"])
except jwt.exceptions.ExpiredSignatureError:
raise Unauthorized("Token has expired.")
except jwt.exceptions.InvalidSignatureError:
raise Unauthorized("Invalid token signature.")
except jwt.exceptions.DecodeError:
raise Unauthorized("Invalid token.")
except jwt.exceptions.ExpiredSignatureError:
raise Unauthorized("Token has expired.")
except jwt.exceptions.PyJWTError: # Catch-all for other JWT errors
raise Unauthorized("Invalid token.")

+ 64
- 0
api/migrations/versions/2025_06_25_0936-58eb7bdb93fe_add_mcp_server_tool_and_app_server.py ファイルの表示

@@ -0,0 +1,64 @@
"""add mcp server tool and app server

Revision ID: 58eb7bdb93fe
Revises: 0ab65e1cc7fa
Create Date: 2025-06-25 09:36:07.510570

"""
from alembic import op
import models as models
import sqlalchemy as sa


# revision identifiers, used by Alembic.
revision = '58eb7bdb93fe'
down_revision = '0ab65e1cc7fa'
branch_labels = None
depends_on = None


def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('app_mcp_servers',
sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('app_id', models.types.StringUUID(), nullable=False),
sa.Column('name', sa.String(length=255), nullable=False),
sa.Column('description', sa.String(length=255), nullable=False),
sa.Column('server_code', sa.String(length=255), nullable=False),
sa.Column('status', sa.String(length=255), server_default=sa.text("'normal'::character varying"), nullable=False),
sa.Column('parameters', sa.Text(), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
sa.PrimaryKeyConstraint('id', name='app_mcp_server_pkey'),
sa.UniqueConstraint('tenant_id', 'app_id', name='unique_app_mcp_server_tenant_app_id'),
sa.UniqueConstraint('server_code', name='unique_app_mcp_server_server_code')
)
op.create_table('tool_mcp_providers',
sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
sa.Column('name', sa.String(length=40), nullable=False),
sa.Column('server_identifier', sa.String(length=24), nullable=False),
sa.Column('server_url', sa.Text(), nullable=False),
sa.Column('server_url_hash', sa.String(length=64), nullable=False),
sa.Column('icon', sa.String(length=255), nullable=True),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('user_id', models.types.StringUUID(), nullable=False),
sa.Column('encrypted_credentials', sa.Text(), nullable=True),
sa.Column('authed', sa.Boolean(), nullable=False),
sa.Column('tools', sa.Text(), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
sa.PrimaryKeyConstraint('id', name='tool_mcp_provider_pkey'),
sa.UniqueConstraint('tenant_id', 'name', name='unique_mcp_provider_name'),
sa.UniqueConstraint('tenant_id', 'server_identifier', name='unique_mcp_provider_server_identifier'),
sa.UniqueConstraint('tenant_id', 'server_url_hash', name='unique_mcp_provider_server_url')
)

# ### end Alembic commands ###


def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_table('tool_mcp_providers')
op.drop_table('app_mcp_servers')
# ### end Alembic commands ###

+ 2
- 0
api/models/__init__.py ファイルの表示

@@ -34,6 +34,7 @@ from .model import (
App,
AppAnnotationHitHistory,
AppAnnotationSetting,
AppMCPServer,
AppMode,
AppModelConfig,
Conversation,
@@ -104,6 +105,7 @@ __all__ = [
"AppAnnotationHitHistory",
"AppAnnotationSetting",
"AppDatasetJoin",
"AppMCPServer", # Added
"AppMode",
"AppModelConfig",
"BuiltinToolProvider",

+ 33
- 0
api/models/model.py ファイルの表示

@@ -1457,6 +1457,39 @@ class EndUser(Base, UserMixin):
updated_at = db.Column(db.DateTime, nullable=False, server_default=func.current_timestamp())


class AppMCPServer(Base):
__tablename__ = "app_mcp_servers"
__table_args__ = (
db.PrimaryKeyConstraint("id", name="app_mcp_server_pkey"),
db.UniqueConstraint("tenant_id", "app_id", name="unique_app_mcp_server_tenant_app_id"),
db.UniqueConstraint("server_code", name="unique_app_mcp_server_server_code"),
)
id = db.Column(StringUUID, server_default=db.text("uuid_generate_v4()"))
tenant_id = db.Column(StringUUID, nullable=False)
app_id = db.Column(StringUUID, nullable=False)
name = db.Column(db.String(255), nullable=False)
description = db.Column(db.String(255), nullable=False)
server_code = db.Column(db.String(255), nullable=False)
status = db.Column(db.String(255), nullable=False, server_default=db.text("'normal'::character varying"))
parameters = db.Column(db.Text, nullable=False)

created_at = db.Column(db.DateTime, nullable=False, server_default=func.current_timestamp())
updated_at = db.Column(db.DateTime, nullable=False, server_default=func.current_timestamp())

@staticmethod
def generate_server_code(n):
while True:
result = generate_string(n)
while db.session.query(AppMCPServer).filter(AppMCPServer.server_code == result).count() > 0:
result = generate_string(n)

return result

@property
def parameters_dict(self) -> dict[str, Any]:
return cast(dict[str, Any], json.loads(self.parameters))


class Site(Base):
__tablename__ = "sites"
__table_args__ = (

+ 106
- 0
api/models/tools.py ファイルの表示

@@ -1,12 +1,16 @@
import json
from datetime import datetime
from typing import Any, cast
from urllib.parse import urlparse

import sqlalchemy as sa
from deprecated import deprecated
from sqlalchemy import ForeignKey, func
from sqlalchemy.orm import Mapped, mapped_column

from core.file import helpers as file_helpers
from core.helper import encrypter
from core.mcp.types import Tool
from core.tools.entities.common_entities import I18nObject
from core.tools.entities.tool_bundle import ApiToolBundle
from core.tools.entities.tool_entities import ApiProviderSchemaType, WorkflowToolParameterConfiguration
@@ -223,6 +227,108 @@ class WorkflowToolProvider(Base):
return db.session.query(App).filter(App.id == self.app_id).first()


class MCPToolProvider(Base):
"""
The table stores the mcp providers.
"""

__tablename__ = "tool_mcp_providers"
__table_args__ = (
db.PrimaryKeyConstraint("id", name="tool_mcp_provider_pkey"),
db.UniqueConstraint("tenant_id", "server_url_hash", name="unique_mcp_provider_server_url"),
db.UniqueConstraint("tenant_id", "name", name="unique_mcp_provider_name"),
db.UniqueConstraint("tenant_id", "server_identifier", name="unique_mcp_provider_server_identifier"),
)

id: Mapped[str] = mapped_column(StringUUID, server_default=db.text("uuid_generate_v4()"))
# name of the mcp provider
name: Mapped[str] = mapped_column(db.String(40), nullable=False)
# server identifier of the mcp provider
server_identifier: Mapped[str] = mapped_column(db.String(24), nullable=False)
# encrypted url of the mcp provider
server_url: Mapped[str] = mapped_column(db.Text, nullable=False)
# hash of server_url for uniqueness check
server_url_hash: Mapped[str] = mapped_column(db.String(64), nullable=False)
# icon of the mcp provider
icon: Mapped[str] = mapped_column(db.String(255), nullable=True)
# tenant id
tenant_id: Mapped[str] = mapped_column(StringUUID, nullable=False)
# who created this tool
user_id: Mapped[str] = mapped_column(StringUUID, nullable=False)
# encrypted credentials
encrypted_credentials: Mapped[str] = mapped_column(db.Text, nullable=True)
# authed
authed: Mapped[bool] = mapped_column(db.Boolean, nullable=False, default=False)
# tools
tools: Mapped[str] = mapped_column(db.Text, nullable=False, default="[]")
created_at: Mapped[datetime] = mapped_column(
db.DateTime, nullable=False, server_default=db.text("CURRENT_TIMESTAMP(0)")
)
updated_at: Mapped[datetime] = mapped_column(
db.DateTime, nullable=False, server_default=db.text("CURRENT_TIMESTAMP(0)")
)

def load_user(self) -> Account | None:
return db.session.query(Account).filter(Account.id == self.user_id).first()

@property
def tenant(self) -> Tenant | None:
return db.session.query(Tenant).filter(Tenant.id == self.tenant_id).first()

@property
def credentials(self) -> dict:
try:
return cast(dict, json.loads(self.encrypted_credentials)) or {}
except Exception:
return {}

@property
def mcp_tools(self) -> list[Tool]:
return [Tool(**tool) for tool in json.loads(self.tools)]

@property
def provider_icon(self) -> dict[str, str] | str:
try:
return cast(dict[str, str], json.loads(self.icon))
except json.JSONDecodeError:
return file_helpers.get_signed_file_url(self.icon)

@property
def decrypted_server_url(self) -> str:
return cast(str, encrypter.decrypt_token(self.tenant_id, self.server_url))

@property
def masked_server_url(self) -> str:
def mask_url(url: str, mask_char: str = "*") -> str:
"""
mask the url to a simple string
"""
parsed = urlparse(url)
base_url = f"{parsed.scheme}://{parsed.netloc}"

if parsed.path and parsed.path != "/":
return f"{base_url}/{mask_char * 6}"
else:
return base_url

return mask_url(self.decrypted_server_url)

@property
def decrypted_credentials(self) -> dict:
from core.tools.mcp_tool.provider import MCPToolProviderController
from core.tools.utils.configuration import ProviderConfigEncrypter

provider_controller = MCPToolProviderController._from_db(self)

tool_configuration = ProviderConfigEncrypter(
tenant_id=self.tenant_id,
config=list(provider_controller.get_credentials_schema()),
provider_type=provider_controller.provider_type.value,
provider_identity=provider_controller.provider_id,
)
return tool_configuration.decrypt(self.credentials, use_cache=False)


class ToolModelInvoke(Base):
"""
store the invoke logs from tool invoke

+ 4
- 2
api/pyproject.toml ファイルの表示

@@ -1,6 +1,6 @@
[project]
name = "dify-api"
version = "1.5.1"
version = "1.6.0"
requires-python = ">=3.11,<3.13"

dependencies = [
@@ -82,6 +82,8 @@ dependencies = [
"weave~=0.51.0",
"yarl~=1.18.3",
"webvtt-py~=0.5.1",
"sseclient-py>=1.8.0",
"httpx-sse>=0.4.0",
"sendgrid~=6.12.3",
]
# Before adding new dependency, consider place it in
@@ -106,7 +108,7 @@ dev = [
"faker~=32.1.0",
"lxml-stubs~=0.5.1",
"mypy~=1.16.0",
"ruff~=0.11.5",
"ruff~=0.12.3",
"pytest~=8.3.2",
"pytest-benchmark~=4.0.0",
"pytest-cov~=4.1.0",

+ 8
- 1
api/services/account_service.py ファイルの表示

@@ -16,7 +16,7 @@ from configs import dify_config
from constants.languages import language_timezone_mapping, languages
from events.tenant_event import tenant_was_created
from extensions.ext_database import db
from extensions.ext_redis import redis_client
from extensions.ext_redis import redis_client, redis_fallback
from libs.helper import RateLimiter, TokenManager
from libs.passport import PassportService
from libs.password import compare_password, hash_password, valid_password
@@ -495,6 +495,7 @@ class AccountService:
return account

@staticmethod
@redis_fallback(default_return=None)
def add_login_error_rate_limit(email: str) -> None:
key = f"login_error_rate_limit:{email}"
count = redis_client.get(key)
@@ -504,6 +505,7 @@ class AccountService:
redis_client.setex(key, dify_config.LOGIN_LOCKOUT_DURATION, count)

@staticmethod
@redis_fallback(default_return=False)
def is_login_error_rate_limit(email: str) -> bool:
key = f"login_error_rate_limit:{email}"
count = redis_client.get(key)
@@ -516,11 +518,13 @@ class AccountService:
return False

@staticmethod
@redis_fallback(default_return=None)
def reset_login_error_rate_limit(email: str):
key = f"login_error_rate_limit:{email}"
redis_client.delete(key)

@staticmethod
@redis_fallback(default_return=None)
def add_forgot_password_error_rate_limit(email: str) -> None:
key = f"forgot_password_error_rate_limit:{email}"
count = redis_client.get(key)
@@ -530,6 +534,7 @@ class AccountService:
redis_client.setex(key, dify_config.FORGOT_PASSWORD_LOCKOUT_DURATION, count)

@staticmethod
@redis_fallback(default_return=False)
def is_forgot_password_error_rate_limit(email: str) -> bool:
key = f"forgot_password_error_rate_limit:{email}"
count = redis_client.get(key)
@@ -542,11 +547,13 @@ class AccountService:
return False

@staticmethod
@redis_fallback(default_return=None)
def reset_forgot_password_error_rate_limit(email: str):
key = f"forgot_password_error_rate_limit:{email}"
redis_client.delete(key)

@staticmethod
@redis_fallback(default_return=False)
def is_email_send_ip_limit(ip_address: str):
minute_key = f"email_send_ip_limit_minute:{ip_address}"
freeze_key = f"email_send_ip_limit_freeze:{ip_address}"

+ 2
- 2
api/services/enterprise/enterprise_service.py ファイルの表示

@@ -29,7 +29,7 @@ class EnterpriseService:
raise ValueError("No data found.")
try:
# parse the UTC timestamp from the response
return datetime.fromisoformat(data.replace("Z", "+00:00"))
return datetime.fromisoformat(data)
except ValueError as e:
raise ValueError(f"Invalid date format: {data}") from e

@@ -40,7 +40,7 @@ class EnterpriseService:
raise ValueError("No data found.")
try:
# parse the UTC timestamp from the response
return datetime.fromisoformat(data.replace("Z", "+00:00"))
return datetime.fromisoformat(data)
except ValueError as e:
raise ValueError(f"Invalid date format: {data}") from e


+ 1
- 1
api/services/entities/knowledge_entities/knowledge_entities.py ファイルの表示

@@ -95,7 +95,7 @@ class WeightKeywordSetting(BaseModel):


class WeightModel(BaseModel):
weight_type: Optional[str] = None
weight_type: Optional[Literal["semantic_first", "keyword_first", "customized"]] = None
vector_setting: Optional[WeightVectorSetting] = None
keyword_setting: Optional[WeightKeywordSetting] = None


+ 10
- 7
api/services/plugin/plugin_service.py ファイルの表示

@@ -427,6 +427,9 @@ class PluginService:

manager = PluginInstaller()

# collect actual plugin_unique_identifiers
actual_plugin_unique_identifiers = []
metas = []
features = FeatureService.get_system_features()

# check if already downloaded
@@ -437,6 +440,8 @@ class PluginService:
# check if the plugin is available to install
PluginService._check_plugin_installation_scope(plugin_decode_response.verification)
# already downloaded, skip
actual_plugin_unique_identifiers.append(plugin_unique_identifier)
metas.append({"plugin_unique_identifier": plugin_unique_identifier})
except Exception:
# plugin not installed, download and upload pkg
pkg = download_plugin_pkg(plugin_unique_identifier)
@@ -447,17 +452,15 @@ class PluginService:
)
# check if the plugin is available to install
PluginService._check_plugin_installation_scope(response.verification)
# use response plugin_unique_identifier
actual_plugin_unique_identifiers.append(response.unique_identifier)
metas.append({"plugin_unique_identifier": response.unique_identifier})

return manager.install_from_identifiers(
tenant_id,
plugin_unique_identifiers,
actual_plugin_unique_identifiers,
PluginInstallationSource.Marketplace,
[
{
"plugin_unique_identifier": plugin_unique_identifier,
}
for plugin_unique_identifier in plugin_unique_identifiers
],
metas,
)

@staticmethod

+ 231
- 0
api/services/tools/mcp_tools_mange_service.py ファイルの表示

@@ -0,0 +1,231 @@
import hashlib
import json
from datetime import datetime
from typing import Any

from sqlalchemy import or_
from sqlalchemy.exc import IntegrityError

from core.helper import encrypter
from core.mcp.error import MCPAuthError, MCPError
from core.mcp.mcp_client import MCPClient
from core.tools.entities.api_entities import ToolProviderApiEntity
from core.tools.entities.common_entities import I18nObject
from core.tools.entities.tool_entities import ToolProviderType
from core.tools.mcp_tool.provider import MCPToolProviderController
from core.tools.utils.configuration import ProviderConfigEncrypter
from extensions.ext_database import db
from models.tools import MCPToolProvider
from services.tools.tools_transform_service import ToolTransformService

UNCHANGED_SERVER_URL_PLACEHOLDER = "[__HIDDEN__]"


class MCPToolManageService:
    """
    Service class for managing MCP (Model Context Protocol) tool providers.

    Handles CRUD on ``MCPToolProvider`` rows, remote tool discovery through
    ``MCPClient``, and credential (re-)encryption.
    """

    @staticmethod
    def get_mcp_provider_by_provider_id(provider_id: str, tenant_id: str) -> MCPToolProvider:
        """Load a tenant's MCP provider by primary key.

        :raises ValueError: when no matching provider exists for the tenant
        """
        res = (
            db.session.query(MCPToolProvider)
            .filter(MCPToolProvider.tenant_id == tenant_id, MCPToolProvider.id == provider_id)
            .first()
        )
        if not res:
            raise ValueError("MCP tool not found")
        return res

    @staticmethod
    def get_mcp_provider_by_server_identifier(server_identifier: str, tenant_id: str) -> MCPToolProvider:
        """Load a tenant's MCP provider by its server identifier.

        :raises ValueError: when no matching provider exists for the tenant
        """
        res = (
            db.session.query(MCPToolProvider)
            .filter(MCPToolProvider.tenant_id == tenant_id, MCPToolProvider.server_identifier == server_identifier)
            .first()
        )
        if not res:
            raise ValueError("MCP tool not found")
        return res

    @staticmethod
    def create_mcp_provider(
        tenant_id: str,
        name: str,
        server_url: str,
        user_id: str,
        icon: str,
        icon_type: str,
        icon_background: str,
        server_identifier: str,
    ) -> ToolProviderApiEntity:
        """Create a new MCP provider for the tenant.

        The server URL is stored encrypted; a SHA-256 hash of the plaintext URL
        is kept alongside for uniqueness checks without decryption.

        :raises ValueError: when name, server URL, or server identifier collides
            with an existing provider in the same tenant
        """
        server_url_hash = hashlib.sha256(server_url.encode()).hexdigest()
        # Single query covering all three uniqueness constraints.
        existing_provider = (
            db.session.query(MCPToolProvider)
            .filter(
                MCPToolProvider.tenant_id == tenant_id,
                or_(
                    MCPToolProvider.name == name,
                    MCPToolProvider.server_url_hash == server_url_hash,
                    MCPToolProvider.server_identifier == server_identifier,
                ),
            )
            .first()
        )
        if existing_provider:
            # Report which field collided so the caller gets an actionable message.
            if existing_provider.name == name:
                raise ValueError(f"MCP tool {name} already exists")
            elif existing_provider.server_url_hash == server_url_hash:
                raise ValueError(f"MCP tool {server_url} already exists")
            elif existing_provider.server_identifier == server_identifier:
                raise ValueError(f"MCP tool {server_identifier} already exists")
        encrypted_server_url = encrypter.encrypt_token(tenant_id, server_url)
        mcp_tool = MCPToolProvider(
            tenant_id=tenant_id,
            name=name,
            server_url=encrypted_server_url,
            server_url_hash=server_url_hash,
            user_id=user_id,
            authed=False,
            tools="[]",
            # Emoji icons are stored as JSON {content, background}; other icon
            # types are stored as the raw icon value.
            icon=json.dumps({"content": icon, "background": icon_background}) if icon_type == "emoji" else icon,
            server_identifier=server_identifier,
        )
        db.session.add(mcp_tool)
        db.session.commit()
        return ToolTransformService.mcp_provider_to_user_provider(mcp_tool, for_list=True)

    @staticmethod
    def retrieve_mcp_tools(tenant_id: str, for_list: bool = False) -> list[ToolProviderApiEntity]:
        """Return all of the tenant's MCP providers, ordered by name."""
        mcp_providers = (
            db.session.query(MCPToolProvider)
            .filter(MCPToolProvider.tenant_id == tenant_id)
            .order_by(MCPToolProvider.name)
            .all()
        )
        return [
            ToolTransformService.mcp_provider_to_user_provider(mcp_provider, for_list=for_list)
            for mcp_provider in mcp_providers
        ]

    @classmethod
    def list_mcp_tool_from_remote_server(cls, tenant_id: str, provider_id: str):
        """Fetch the live tool list from the remote MCP server and persist it.

        Marks the provider as authed on success.

        :raises ValueError: when the server requires auth or cannot be reached
        """
        mcp_provider = cls.get_mcp_provider_by_provider_id(provider_id, tenant_id)

        try:
            with MCPClient(
                mcp_provider.decrypted_server_url, provider_id, tenant_id, authed=mcp_provider.authed, for_list=True
            ) as mcp_client:
                tools = mcp_client.list_tools()
        except MCPAuthError as e:
            # Chain the original error so the auth failure context is preserved.
            raise ValueError("Please auth the tool first") from e
        except MCPError as e:
            raise ValueError(f"Failed to connect to MCP server: {e}") from e
        mcp_provider.tools = json.dumps([tool.model_dump() for tool in tools])
        mcp_provider.authed = True
        # NOTE(review): naive local timestamp — confirm whether UTC is expected here.
        mcp_provider.updated_at = datetime.now()
        db.session.commit()
        user = mcp_provider.load_user()
        return ToolProviderApiEntity(
            id=mcp_provider.id,
            name=mcp_provider.name,
            tools=ToolTransformService.mcp_tool_to_user_tool(mcp_provider, tools),
            type=ToolProviderType.MCP,
            icon=mcp_provider.icon,
            author=user.name if user else "Anonymous",
            server_url=mcp_provider.masked_server_url,
            updated_at=int(mcp_provider.updated_at.timestamp()),
            description=I18nObject(en_US="", zh_Hans=""),
            label=I18nObject(en_US=mcp_provider.name, zh_Hans=mcp_provider.name),
            plugin_unique_identifier=mcp_provider.server_identifier,
        )

    @classmethod
    def delete_mcp_tool(cls, tenant_id: str, provider_id: str):
        """Delete the tenant's MCP provider row."""
        mcp_tool = cls.get_mcp_provider_by_provider_id(provider_id, tenant_id)

        db.session.delete(mcp_tool)
        db.session.commit()

    @classmethod
    def update_mcp_provider(
        cls,
        tenant_id: str,
        provider_id: str,
        name: str,
        server_url: str,
        icon: str,
        icon_type: str,
        icon_background: str,
        server_identifier: str,
    ):
        """Update provider metadata; re-encrypt and re-connect when the URL changes.

        ``server_url`` containing the hidden-value placeholder means "unchanged".

        :raises ValueError: on unique-constraint violations or re-connect failure
        """
        mcp_provider = cls.get_mcp_provider_by_provider_id(provider_id, tenant_id)
        mcp_provider.updated_at = datetime.now()
        mcp_provider.name = name
        mcp_provider.icon = (
            json.dumps({"content": icon, "background": icon_background}) if icon_type == "emoji" else icon
        )
        mcp_provider.server_identifier = server_identifier

        if UNCHANGED_SERVER_URL_PLACEHOLDER not in server_url:
            encrypted_server_url = encrypter.encrypt_token(tenant_id, server_url)
            mcp_provider.server_url = encrypted_server_url
            server_url_hash = hashlib.sha256(server_url.encode()).hexdigest()

            # Only re-validate the connection when the URL actually changed.
            if server_url_hash != mcp_provider.server_url_hash:
                cls._re_connect_mcp_provider(mcp_provider, provider_id, tenant_id)
            mcp_provider.server_url_hash = server_url_hash
        try:
            db.session.commit()
        except IntegrityError as e:
            db.session.rollback()
            # Map constraint names back to user-facing duplicate errors.
            error_msg = str(e.orig)
            if "unique_mcp_provider_name" in error_msg:
                raise ValueError(f"MCP tool {name} already exists") from e
            elif "unique_mcp_provider_server_url" in error_msg:
                raise ValueError(f"MCP tool {server_url} already exists") from e
            elif "unique_mcp_provider_server_identifier" in error_msg:
                raise ValueError(f"MCP tool {server_identifier} already exists") from e
            else:
                raise

    @classmethod
    def update_mcp_provider_credentials(
        cls, mcp_provider: MCPToolProvider, credentials: dict[str, Any], authed: bool = False
    ):
        """Merge encrypted credentials into the provider and set its auth state.

        When ``authed`` is False the cached tool list is cleared, since it can no
        longer be trusted.
        """
        provider_controller = MCPToolProviderController._from_db(mcp_provider)
        tool_configuration = ProviderConfigEncrypter(
            tenant_id=mcp_provider.tenant_id,
            config=list(provider_controller.get_credentials_schema()),
            provider_type=provider_controller.provider_type.value,
            provider_identity=provider_controller.provider_id,
        )
        credentials = tool_configuration.encrypt(credentials)
        mcp_provider.updated_at = datetime.now()
        # Merge new encrypted values over the existing credential dict.
        mcp_provider.encrypted_credentials = json.dumps({**mcp_provider.credentials, **credentials})
        mcp_provider.authed = authed
        if not authed:
            mcp_provider.tools = "[]"
        db.session.commit()

    @classmethod
    def _re_connect_mcp_provider(cls, mcp_provider: MCPToolProvider, provider_id: str, tenant_id: str):
        """Re-connect to the MCP server after a URL change and refresh its tools.

        On auth failure the provider is left un-authed with an empty tool list;
        on other MCP errors a ValueError is raised. Credentials are always reset
        because they belonged to the previous server URL.
        """
        try:
            with MCPClient(
                mcp_provider.decrypted_server_url,
                provider_id,
                tenant_id,
                authed=False,
                for_list=True,
            ) as mcp_client:
                tools = mcp_client.list_tools()
                mcp_provider.authed = True
                mcp_provider.tools = json.dumps([tool.model_dump() for tool in tools])
        except MCPAuthError:
            mcp_provider.authed = False
            mcp_provider.tools = "[]"
        except MCPError as e:
            raise ValueError(f"Failed to re-connect MCP server: {e}") from e
        # reset credentials
        mcp_provider.encrypted_credentials = "{}"

+ 108
- 6
api/services/tools/tools_transform_service.py ファイルの表示

@@ -1,10 +1,11 @@
import json
import logging
from typing import Optional, Union, cast
from typing import Any, Optional, Union, cast

from yarl import URL

from configs import dify_config
from core.mcp.types import Tool as MCPTool
from core.plugin.entities.plugin_daemon import PluginDatasourceProviderEntity
from core.tools.__base.tool import Tool
from core.tools.__base.tool_runtime import ToolRuntime
@@ -22,7 +23,7 @@ from core.tools.plugin_tool.provider import PluginToolProviderController
from core.tools.utils.configuration import ProviderConfigEncrypter
from core.tools.workflow_as_tool.provider import WorkflowToolProviderController
from core.tools.workflow_as_tool.tool import WorkflowTool
from models.tools import ApiToolProvider, BuiltinToolProvider, WorkflowToolProvider
from models.tools import ApiToolProvider, BuiltinToolProvider, MCPToolProvider, WorkflowToolProvider

logger = logging.getLogger(__name__)

@@ -53,7 +54,8 @@ class ToolTransformService:
return icon
except Exception:
return {"background": "#252525", "content": "\ud83d\ude01"}

elif provider_type == ToolProviderType.MCP.value:
return icon
return ""

@staticmethod
@@ -74,10 +76,18 @@ class ToolTransformService:
provider.icon = ToolTransformService.get_plugin_icon_url(
tenant_id=tenant_id, filename=provider.icon
)
if isinstance(provider.icon_dark, str) and provider.icon_dark:
provider.icon_dark = ToolTransformService.get_plugin_icon_url(
tenant_id=tenant_id, filename=provider.icon_dark
)
else:
provider.icon = ToolTransformService.get_tool_provider_icon_url(
provider_type=provider.type.value, provider_name=provider.name, icon=provider.icon
)
if provider.icon_dark:
provider.icon_dark = ToolTransformService.get_tool_provider_icon_url(
provider_type=provider.type.value, provider_name=provider.name, icon=provider.icon_dark
)
elif isinstance(provider, PluginDatasourceProviderEntity):
if provider.plugin_id:
if isinstance(provider.declaration.identity.icon, str):
@@ -107,6 +117,7 @@ class ToolTransformService:
name=provider_controller.entity.identity.name,
description=provider_controller.entity.identity.description,
icon=provider_controller.entity.identity.icon,
icon_dark=provider_controller.entity.identity.icon_dark,
label=provider_controller.entity.identity.label,
type=ToolProviderType.BUILT_IN,
masked_credentials={},
@@ -161,11 +172,16 @@ class ToolTransformService:
convert provider controller to user provider
"""
# package tool provider controller
auth_type = ApiProviderAuthType.NONE
credentials_auth_type = db_provider.credentials.get("auth_type")
if credentials_auth_type in ("api_key_header", "api_key"): # backward compatibility
auth_type = ApiProviderAuthType.API_KEY_HEADER
elif credentials_auth_type == "api_key_query":
auth_type = ApiProviderAuthType.API_KEY_QUERY

controller = ApiToolProviderController.from_db(
db_provider=db_provider,
auth_type=ApiProviderAuthType.API_KEY
if db_provider.credentials["auth_type"] == "api_key"
else ApiProviderAuthType.NONE,
auth_type=auth_type,
)

return controller
@@ -190,6 +206,7 @@ class ToolTransformService:
name=provider_controller.entity.identity.name,
description=provider_controller.entity.identity.description,
icon=provider_controller.entity.identity.icon,
icon_dark=provider_controller.entity.identity.icon_dark,
label=provider_controller.entity.identity.label,
type=ToolProviderType.WORKFLOW,
masked_credentials={},
@@ -200,6 +217,41 @@ class ToolTransformService:
labels=labels or [],
)

@staticmethod
def mcp_provider_to_user_provider(db_provider: MCPToolProvider, for_list: bool = False) -> ToolProviderApiEntity:
    """Convert an MCPToolProvider DB row into the API-facing provider entity.

    When ``for_list`` is True the entity's id is the DB primary key; otherwise
    the server identifier is exposed as the id.
    """
    creator = db_provider.load_user()
    author_name = creator.name if creator else "Anonymous"
    display_label = I18nObject(en_US=db_provider.name, zh_Hans=db_provider.name)
    parsed_tools = [MCPTool(**tool) for tool in json.loads(db_provider.tools)]
    return ToolProviderApiEntity(
        id=db_provider.id if for_list else db_provider.server_identifier,
        author=author_name,
        name=db_provider.name,
        icon=db_provider.provider_icon,
        type=ToolProviderType.MCP,
        is_team_authorization=db_provider.authed,
        server_url=db_provider.masked_server_url,
        tools=ToolTransformService.mcp_tool_to_user_tool(db_provider, parsed_tools),
        updated_at=int(db_provider.updated_at.timestamp()),
        label=display_label,
        description=I18nObject(en_US="", zh_Hans=""),
        server_identifier=db_provider.server_identifier,
    )

@staticmethod
def mcp_tool_to_user_tool(mcp_provider: MCPToolProvider, tools: list[MCPTool]) -> list[ToolApiEntity]:
    """Convert MCP tool definitions into API-facing tool entities.

    :param mcp_provider: owning provider row, used only to resolve the author name
    :param tools: parsed MCP tool definitions
    :return: one ToolApiEntity per MCP tool
    """
    user = mcp_provider.load_user()
    # Resolve the author once instead of per tool in the comprehension.
    author = user.name if user else "Anonymous"
    return [
        ToolApiEntity(
            author=author,
            name=tool.name,
            label=I18nObject(en_US=tool.name, zh_Hans=tool.name),
            # The MCP spec marks a tool's description as optional; fall back to
            # "" so I18nObject never receives None.
            description=I18nObject(en_US=tool.description or "", zh_Hans=tool.description or ""),
            parameters=ToolTransformService.convert_mcp_schema_to_parameter(tool.inputSchema),
            labels=[],
        )
        for tool in tools
    ]

@classmethod
def api_provider_to_user_provider(
cls,
@@ -317,3 +369,53 @@ class ToolTransformService:
parameters=tool.parameters,
labels=labels or [],
)

@staticmethod
def convert_mcp_schema_to_parameter(schema: dict) -> list["ToolParameter"]:
    """
    Convert an MCP tool's JSON schema ("inputSchema") into tool parameters.

    :param schema: JSON schema dictionary (expects a top-level object schema)
    :return: list of ToolParameter instances; empty when the schema is not an
             object schema or declares no properties
    """

    def create_parameter(
        name: str, description: str, param_type: str, required: bool, input_schema: dict | None = None
    ) -> ToolParameter:
        """Create a ToolParameter instance with given attributes"""
        # Only complex (array/object) properties carry their raw schema through.
        input_schema_dict: dict[str, Any] = {"input_schema": input_schema} if input_schema else {}
        return ToolParameter(
            name=name,
            llm_description=description,
            label=I18nObject(en_US=name),
            form=ToolParameter.ToolParameterForm.LLM,
            required=required,
            type=ToolParameter.ToolParameterType(param_type),
            human_description=I18nObject(en_US=description),
            **input_schema_dict,
        )

    def process_properties(props: dict, required: list, prefix: str = "") -> list[ToolParameter]:
        """Process properties recursively"""
        TYPE_MAPPING = {"integer": "number", "float": "number"}
        COMPLEX_TYPES = ["array", "object"]

        parameters = []
        for name, prop in props.items():
            current_description = prop.get("description", "")
            prop_type = prop.get("type", "string")

            if isinstance(prop_type, list):
                # JSON Schema allows a union of types, commonly
                # ["string", "null"] for nullable fields. Prefer the first
                # non-"null" entry; blindly taking element 0 could yield
                # "null", which is not a valid ToolParameterType.
                prop_type = next((t for t in prop_type if t != "null"), "string")
            if prop_type in TYPE_MAPPING:
                prop_type = TYPE_MAPPING[prop_type]
            input_schema = prop if prop_type in COMPLEX_TYPES else None
            parameters.append(
                create_parameter(name, current_description, prop_type, name in required, input_schema)
            )

        return parameters

    if schema.get("type") == "object" and "properties" in schema:
        return process_properties(schema["properties"], schema.get("required", []))
    return []

+ 14
- 0
api/tasks/remove_app_and_related_data_task.py ファイルの表示

@@ -13,6 +13,7 @@ from models import (
AppAnnotationHitHistory,
AppAnnotationSetting,
AppDatasetJoin,
AppMCPServer,
AppModelConfig,
Conversation,
EndUser,
@@ -41,6 +42,7 @@ def remove_app_and_related_data_task(self, tenant_id: str, app_id: str):
# Delete related data
_delete_app_model_configs(tenant_id, app_id)
_delete_app_site(tenant_id, app_id)
_delete_app_mcp_servers(tenant_id, app_id)
_delete_app_api_tokens(tenant_id, app_id)
_delete_installed_apps(tenant_id, app_id)
_delete_recommended_apps(tenant_id, app_id)
@@ -89,6 +91,18 @@ def _delete_app_site(tenant_id: str, app_id: str):
_delete_records("""select id from sites where app_id=:app_id limit 1000""", {"app_id": app_id}, del_site, "site")


def _delete_app_mcp_servers(tenant_id: str, app_id: str):
    """Delete all MCP server records registered for the given app, in batches."""

    def _remove_one(mcp_server_id: str):
        # Row-level delete; batching/iteration is handled by _delete_records.
        db.session.query(AppMCPServer).filter(AppMCPServer.id == mcp_server_id).delete(synchronize_session=False)

    _delete_records(
        """select id from app_mcp_servers where app_id=:app_id limit 1000""",
        {"app_id": app_id},
        _remove_one,
        "app mcp server",
    )


def _delete_app_api_tokens(tenant_id: str, app_id: str):
def del_api_token(api_token_id: str):
db.session.query(ApiToken).filter(ApiToken.id == api_token_id).delete(synchronize_session=False)

+ 0
- 1
api/tests/integration_tests/vdb/couchbase/test_couchbase.py ファイルの表示

@@ -4,7 +4,6 @@ import time
from core.rag.datasource.vdb.couchbase.couchbase_vector import CouchbaseConfig, CouchbaseVector
from tests.integration_tests.vdb.test_vector_store import (
AbstractVectorTest,
get_example_text,
setup_mock_redis,
)


+ 0
- 1
api/tests/integration_tests/vdb/matrixone/test_matrixone.py ファイルの表示

@@ -1,7 +1,6 @@
from core.rag.datasource.vdb.matrixone.matrixone_vector import MatrixoneConfig, MatrixoneVector
from tests.integration_tests.vdb.test_vector_store import (
AbstractVectorTest,
get_example_text,
setup_mock_redis,
)


+ 0
- 1
api/tests/integration_tests/vdb/opengauss/test_opengauss.py ファイルの表示

@@ -5,7 +5,6 @@ import psycopg2 # type: ignore
from core.rag.datasource.vdb.opengauss.opengauss import OpenGauss, OpenGaussConfig
from tests.integration_tests.vdb.test_vector_store import (
AbstractVectorTest,
get_example_text,
setup_mock_redis,
)


+ 0
- 1
api/tests/integration_tests/vdb/pyvastbase/test_vastbase_vector.py ファイルの表示

@@ -1,7 +1,6 @@
from core.rag.datasource.vdb.pyvastbase.vastbase_vector import VastbaseVector, VastbaseVectorConfig
from tests.integration_tests.vdb.test_vector_store import (
AbstractVectorTest,
get_example_text,
setup_mock_redis,
)


+ 4
- 7
api/tests/integration_tests/workflow/nodes/test_llm.py ファイルの表示

@@ -1,5 +1,4 @@
import json
import os
import time
import uuid
from collections.abc import Generator
@@ -113,17 +112,15 @@ def test_execute_llm(flask_req_ctx):
},
)

credentials = {"openai_api_key": os.environ.get("OPENAI_API_KEY")}

# Create a proper LLM result with real entities
mock_usage = LLMUsage(
prompt_tokens=30,
prompt_unit_price=Decimal("0.001"),
prompt_price_unit=Decimal("1000"),
prompt_price_unit=Decimal(1000),
prompt_price=Decimal("0.00003"),
completion_tokens=20,
completion_unit_price=Decimal("0.002"),
completion_price_unit=Decimal("1000"),
completion_price_unit=Decimal(1000),
completion_price=Decimal("0.00004"),
total_tokens=50,
total_price=Decimal("0.00007"),
@@ -222,11 +219,11 @@ def test_execute_llm_with_jinja2(flask_req_ctx, setup_code_executor_mock):
mock_usage = LLMUsage(
prompt_tokens=30,
prompt_unit_price=Decimal("0.001"),
prompt_price_unit=Decimal("1000"),
prompt_price_unit=Decimal(1000),
prompt_price=Decimal("0.00003"),
completion_tokens=20,
completion_unit_price=Decimal("0.002"),
completion_price_unit=Decimal("1000"),
completion_price_unit=Decimal(1000),
completion_price=Decimal("0.00004"),
total_tokens=50,
total_price=Decimal("0.00007"),

+ 471
- 0
api/tests/unit_tests/core/mcp/client/test_session.py ファイルの表示

@@ -0,0 +1,471 @@
import queue
import threading
from typing import Any

from core.mcp import types
from core.mcp.entities import RequestContext
from core.mcp.session.base_session import RequestResponder
from core.mcp.session.client_session import DEFAULT_CLIENT_INFO, ClientSession
from core.mcp.types import (
LATEST_PROTOCOL_VERSION,
ClientNotification,
ClientRequest,
Implementation,
InitializedNotification,
InitializeRequest,
InitializeResult,
JSONRPCMessage,
JSONRPCNotification,
JSONRPCRequest,
JSONRPCResponse,
ServerCapabilities,
ServerResult,
SessionMessage,
)


def test_client_session_initialize():
    """Full handshake: initialize() sends InitializeRequest, consumes the mock
    server's InitializeResult, then emits an InitializedNotification."""
    # Create synchronous queues to replace async streams
    client_to_server: queue.Queue[SessionMessage] = queue.Queue()
    server_to_client: queue.Queue[SessionMessage] = queue.Queue()

    # Captured by the mock server thread for assertion after the handshake.
    initialized_notification = None

    def mock_server():
        # Plays the server side of the handshake on a background thread.
        nonlocal initialized_notification

        # Receive initialization request
        session_message = client_to_server.get(timeout=5.0)
        jsonrpc_request = session_message.message
        assert isinstance(jsonrpc_request.root, JSONRPCRequest)
        request = ClientRequest.model_validate(
            jsonrpc_request.root.model_dump(by_alias=True, mode="json", exclude_none=True)
        )
        assert isinstance(request.root, InitializeRequest)

        # Create response
        result = ServerResult(
            InitializeResult(
                protocolVersion=LATEST_PROTOCOL_VERSION,
                capabilities=ServerCapabilities(
                    logging=None,
                    resources=None,
                    tools=None,
                    experimental=None,
                    prompts=None,
                ),
                serverInfo=Implementation(name="mock-server", version="0.1.0"),
                instructions="The server instructions.",
            )
        )

        # Send response
        server_to_client.put(
            SessionMessage(
                message=JSONRPCMessage(
                    JSONRPCResponse(
                        jsonrpc="2.0",
                        id=jsonrpc_request.root.id,
                        result=result.model_dump(by_alias=True, mode="json", exclude_none=True),
                    )
                )
            )
        )

        # Receive initialized notification
        session_notification = client_to_server.get(timeout=5.0)
        jsonrpc_notification = session_notification.message
        assert isinstance(jsonrpc_notification.root, JSONRPCNotification)
        initialized_notification = ClientNotification.model_validate(
            jsonrpc_notification.root.model_dump(by_alias=True, mode="json", exclude_none=True)
        )

    # Create message handler
    def message_handler(
        message: RequestResponder[types.ServerRequest, types.ClientResult] | types.ServerNotification | Exception,
    ) -> None:
        # Surface transport-level exceptions directly so the test fails loudly.
        if isinstance(message, Exception):
            raise message

    # Start mock server thread
    server_thread = threading.Thread(target=mock_server, daemon=True)
    server_thread.start()

    # Create and use client session
    with ClientSession(
        server_to_client,
        client_to_server,
        message_handler=message_handler,
    ) as session:
        result = session.initialize()

    # Wait for server thread to complete
    server_thread.join(timeout=10.0)

    # Assert results
    assert isinstance(result, InitializeResult)
    assert result.protocolVersion == LATEST_PROTOCOL_VERSION
    assert isinstance(result.capabilities, ServerCapabilities)
    assert result.serverInfo == Implementation(name="mock-server", version="0.1.0")
    assert result.instructions == "The server instructions."

    # Check that client sent initialized notification
    assert initialized_notification
    assert isinstance(initialized_notification.root, InitializedNotification)


def test_client_session_custom_client_info():
    """A client_info passed to ClientSession must be sent in InitializeRequest."""
    # Create synchronous queues to replace async streams
    client_to_server: queue.Queue[SessionMessage] = queue.Queue()
    server_to_client: queue.Queue[SessionMessage] = queue.Queue()

    custom_client_info = Implementation(name="test-client", version="1.2.3")
    # Captured by the mock server for the final assertion.
    received_client_info = None

    def mock_server():
        nonlocal received_client_info

        session_message = client_to_server.get(timeout=5.0)
        jsonrpc_request = session_message.message
        assert isinstance(jsonrpc_request.root, JSONRPCRequest)
        request = ClientRequest.model_validate(
            jsonrpc_request.root.model_dump(by_alias=True, mode="json", exclude_none=True)
        )
        assert isinstance(request.root, InitializeRequest)
        received_client_info = request.root.params.clientInfo

        result = ServerResult(
            InitializeResult(
                protocolVersion=LATEST_PROTOCOL_VERSION,
                capabilities=ServerCapabilities(),
                serverInfo=Implementation(name="mock-server", version="0.1.0"),
            )
        )

        server_to_client.put(
            SessionMessage(
                message=JSONRPCMessage(
                    JSONRPCResponse(
                        jsonrpc="2.0",
                        id=jsonrpc_request.root.id,
                        result=result.model_dump(by_alias=True, mode="json", exclude_none=True),
                    )
                )
            )
        )
        # Receive initialized notification
        client_to_server.get(timeout=5.0)

    # Start mock server thread
    server_thread = threading.Thread(target=mock_server, daemon=True)
    server_thread.start()

    with ClientSession(
        server_to_client,
        client_to_server,
        client_info=custom_client_info,
    ) as session:
        session.initialize()

    # Wait for server thread to complete
    server_thread.join(timeout=10.0)

    # Assert that custom client info was sent
    assert received_client_info == custom_client_info


def test_client_session_default_client_info():
    """Omitting client_info must fall back to DEFAULT_CLIENT_INFO."""
    # Create synchronous queues to replace async streams
    client_to_server: queue.Queue[SessionMessage] = queue.Queue()
    server_to_client: queue.Queue[SessionMessage] = queue.Queue()

    # Captured by the mock server for the final assertion.
    received_client_info = None

    def mock_server():
        nonlocal received_client_info

        session_message = client_to_server.get(timeout=5.0)
        jsonrpc_request = session_message.message
        assert isinstance(jsonrpc_request.root, JSONRPCRequest)
        request = ClientRequest.model_validate(
            jsonrpc_request.root.model_dump(by_alias=True, mode="json", exclude_none=True)
        )
        assert isinstance(request.root, InitializeRequest)
        received_client_info = request.root.params.clientInfo

        result = ServerResult(
            InitializeResult(
                protocolVersion=LATEST_PROTOCOL_VERSION,
                capabilities=ServerCapabilities(),
                serverInfo=Implementation(name="mock-server", version="0.1.0"),
            )
        )

        server_to_client.put(
            SessionMessage(
                message=JSONRPCMessage(
                    JSONRPCResponse(
                        jsonrpc="2.0",
                        id=jsonrpc_request.root.id,
                        result=result.model_dump(by_alias=True, mode="json", exclude_none=True),
                    )
                )
            )
        )
        # Receive initialized notification
        client_to_server.get(timeout=5.0)

    # Start mock server thread
    server_thread = threading.Thread(target=mock_server, daemon=True)
    server_thread.start()

    with ClientSession(
        server_to_client,
        client_to_server,
    ) as session:
        session.initialize()

    # Wait for server thread to complete
    server_thread.join(timeout=10.0)

    # Assert that default client info was used
    assert received_client_info == DEFAULT_CLIENT_INFO


def test_client_session_version_negotiation_success():
    """Server answering with a supported protocol version lets initialize() succeed."""
    # Create synchronous queues to replace async streams
    client_to_server: queue.Queue[SessionMessage] = queue.Queue()
    server_to_client: queue.Queue[SessionMessage] = queue.Queue()

    def mock_server():
        session_message = client_to_server.get(timeout=5.0)
        jsonrpc_request = session_message.message
        assert isinstance(jsonrpc_request.root, JSONRPCRequest)
        request = ClientRequest.model_validate(
            jsonrpc_request.root.model_dump(by_alias=True, mode="json", exclude_none=True)
        )
        assert isinstance(request.root, InitializeRequest)

        # Send supported protocol version
        result = ServerResult(
            InitializeResult(
                protocolVersion=LATEST_PROTOCOL_VERSION,
                capabilities=ServerCapabilities(),
                serverInfo=Implementation(name="mock-server", version="0.1.0"),
            )
        )

        server_to_client.put(
            SessionMessage(
                message=JSONRPCMessage(
                    JSONRPCResponse(
                        jsonrpc="2.0",
                        id=jsonrpc_request.root.id,
                        result=result.model_dump(by_alias=True, mode="json", exclude_none=True),
                    )
                )
            )
        )
        # Receive initialized notification
        client_to_server.get(timeout=5.0)

    # Start mock server thread
    server_thread = threading.Thread(target=mock_server, daemon=True)
    server_thread.start()

    with ClientSession(
        server_to_client,
        client_to_server,
    ) as session:
        result = session.initialize()

    # Wait for server thread to complete
    server_thread.join(timeout=10.0)

    # Should successfully initialize
    assert isinstance(result, InitializeResult)
    assert result.protocolVersion == LATEST_PROTOCOL_VERSION


def test_client_session_version_negotiation_failure():
    """Server answering with an unknown protocol version must make initialize() raise."""
    # Create synchronous queues to replace async streams
    client_to_server: queue.Queue[SessionMessage] = queue.Queue()
    server_to_client: queue.Queue[SessionMessage] = queue.Queue()

    def mock_server():
        session_message = client_to_server.get(timeout=5.0)
        jsonrpc_request = session_message.message
        assert isinstance(jsonrpc_request.root, JSONRPCRequest)
        request = ClientRequest.model_validate(
            jsonrpc_request.root.model_dump(by_alias=True, mode="json", exclude_none=True)
        )
        assert isinstance(request.root, InitializeRequest)

        # Send unsupported protocol version
        result = ServerResult(
            InitializeResult(
                protocolVersion="99.99.99",  # Unsupported version
                capabilities=ServerCapabilities(),
                serverInfo=Implementation(name="mock-server", version="0.1.0"),
            )
        )

        server_to_client.put(
            SessionMessage(
                message=JSONRPCMessage(
                    JSONRPCResponse(
                        jsonrpc="2.0",
                        id=jsonrpc_request.root.id,
                        result=result.model_dump(by_alias=True, mode="json", exclude_none=True),
                    )
                )
            )
        )

    # Start mock server thread
    server_thread = threading.Thread(target=mock_server, daemon=True)
    server_thread.start()

    with ClientSession(
        server_to_client,
        client_to_server,
    ) as session:
        # NOTE(review): function-local import — the rest of the module imports
        # at top level; consider hoisting pytest to the file header.
        import pytest

        with pytest.raises(RuntimeError, match="Unsupported protocol version"):
            session.initialize()

    # Wait for server thread to complete
    server_thread.join(timeout=10.0)


def test_client_capabilities_default():
    """Without custom callbacks, the client advertises default sampling/roots capabilities."""
    # Create synchronous queues to replace async streams
    client_to_server: queue.Queue[SessionMessage] = queue.Queue()
    server_to_client: queue.Queue[SessionMessage] = queue.Queue()

    # Captured by the mock server for the final assertions.
    received_capabilities = None

    def mock_server():
        nonlocal received_capabilities

        session_message = client_to_server.get(timeout=5.0)
        jsonrpc_request = session_message.message
        assert isinstance(jsonrpc_request.root, JSONRPCRequest)
        request = ClientRequest.model_validate(
            jsonrpc_request.root.model_dump(by_alias=True, mode="json", exclude_none=True)
        )
        assert isinstance(request.root, InitializeRequest)
        received_capabilities = request.root.params.capabilities

        result = ServerResult(
            InitializeResult(
                protocolVersion=LATEST_PROTOCOL_VERSION,
                capabilities=ServerCapabilities(),
                serverInfo=Implementation(name="mock-server", version="0.1.0"),
            )
        )

        server_to_client.put(
            SessionMessage(
                message=JSONRPCMessage(
                    JSONRPCResponse(
                        jsonrpc="2.0",
                        id=jsonrpc_request.root.id,
                        result=result.model_dump(by_alias=True, mode="json", exclude_none=True),
                    )
                )
            )
        )
        # Receive initialized notification
        client_to_server.get(timeout=5.0)

    # Start mock server thread
    server_thread = threading.Thread(target=mock_server, daemon=True)
    server_thread.start()

    with ClientSession(
        server_to_client,
        client_to_server,
    ) as session:
        session.initialize()

    # Wait for server thread to complete
    server_thread.join(timeout=10.0)

    # Assert default capabilities
    assert received_capabilities is not None
    assert received_capabilities.sampling is not None
    assert received_capabilities.roots is not None
    assert received_capabilities.roots.listChanged is True


def test_client_capabilities_with_custom_callbacks():
    """Custom sampling/list-roots callbacks must not break initialization."""
    # Create synchronous queues to replace async streams
    client_to_server: queue.Queue[SessionMessage] = queue.Queue()
    server_to_client: queue.Queue[SessionMessage] = queue.Queue()

    def custom_sampling_callback(
        context: RequestContext["ClientSession", Any],
        params: types.CreateMessageRequestParams,
    ) -> types.CreateMessageResult | types.ErrorData:
        # Canned sampling response; never invoked in this test.
        return types.CreateMessageResult(
            model="test-model",
            role="assistant",
            content=types.TextContent(type="text", text="Custom response"),
        )

    def custom_list_roots_callback(
        context: RequestContext["ClientSession", Any],
    ) -> types.ListRootsResult | types.ErrorData:
        # Empty roots list; never invoked in this test.
        return types.ListRootsResult(roots=[])

    def mock_server():
        session_message = client_to_server.get(timeout=5.0)
        jsonrpc_request = session_message.message
        assert isinstance(jsonrpc_request.root, JSONRPCRequest)
        request = ClientRequest.model_validate(
            jsonrpc_request.root.model_dump(by_alias=True, mode="json", exclude_none=True)
        )
        assert isinstance(request.root, InitializeRequest)

        result = ServerResult(
            InitializeResult(
                protocolVersion=LATEST_PROTOCOL_VERSION,
                capabilities=ServerCapabilities(),
                serverInfo=Implementation(name="mock-server", version="0.1.0"),
            )
        )

        server_to_client.put(
            SessionMessage(
                message=JSONRPCMessage(
                    JSONRPCResponse(
                        jsonrpc="2.0",
                        id=jsonrpc_request.root.id,
                        result=result.model_dump(by_alias=True, mode="json", exclude_none=True),
                    )
                )
            )
        )
        # Receive initialized notification
        client_to_server.get(timeout=5.0)

    # Start mock server thread
    server_thread = threading.Thread(target=mock_server, daemon=True)
    server_thread.start()

    with ClientSession(
        server_to_client,
        client_to_server,
        sampling_callback=custom_sampling_callback,
        list_roots_callback=custom_list_roots_callback,
    ) as session:
        result = session.initialize()

    # Wait for server thread to complete
    server_thread.join(timeout=10.0)

    # Verify initialization succeeded
    assert isinstance(result, InitializeResult)
    assert result.protocolVersion == LATEST_PROTOCOL_VERSION

+ 349
- 0
api/tests/unit_tests/core/mcp/client/test_sse.py ファイルの表示

@@ -0,0 +1,349 @@
import json
import queue
import threading
import time
from typing import Any
from unittest.mock import Mock, patch

import httpx
import pytest

from core.mcp import types
from core.mcp.client.sse_client import sse_client
from core.mcp.error import MCPAuthError, MCPConnectionError

SERVER_NAME = "test_server_for_SSE"


def test_sse_message_id_coercion():
    """Test that string message IDs that look like integers are parsed as integers.

    See <https://github.com/modelcontextprotocol/python-sdk/pull/851> for more details.
    """
    raw = '{"jsonrpc": "2.0", "id": "123", "method": "ping", "params": null}'
    parsed = types.JSONRPCMessage.model_validate_json(raw)
    reference = types.JSONRPCMessage(root=types.JSONRPCRequest(method="ping", jsonrpc="2.0", id=123))

    # Both roots must be requests before their fields are compared.
    assert isinstance(parsed.root, types.JSONRPCRequest)
    assert isinstance(reference.root, types.JSONRPCRequest)

    # The string "123" must have been coerced to the integer 123.
    assert parsed.root.id == reference.root.id
    assert parsed.root.method == reference.root.method
    assert parsed.root.jsonrpc == reference.root.jsonrpc


class MockSSEClient:
"""Mock SSE client for testing."""

def __init__(self, url: str, headers: dict[str, Any] | None = None):
self.url = url
self.headers = headers or {}
self.connected = False
self.read_queue: queue.Queue = queue.Queue()
self.write_queue: queue.Queue = queue.Queue()

def connect(self):
"""Simulate connection establishment."""
self.connected = True

# Send endpoint event
endpoint_data = "/messages/?session_id=test-session-123"
self.read_queue.put(("endpoint", endpoint_data))

return self.read_queue, self.write_queue

def send_initialize_response(self):
"""Send a mock initialize response."""
response = {
"jsonrpc": "2.0",
"id": 1,
"result": {
"protocolVersion": types.LATEST_PROTOCOL_VERSION,
"capabilities": {
"logging": None,
"resources": None,
"tools": None,
"experimental": None,
"prompts": None,
},
"serverInfo": {"name": SERVER_NAME, "version": "0.1.0"},
"instructions": "Test server instructions.",
},
}
self.read_queue.put(("message", json.dumps(response)))


def test_sse_client_message_id_handling():
    """Test SSE client properly handles message ID coercion."""
    client = MockSSEClient("http://test.example/sse")
    incoming, _outgoing = client.connect()

    # Queue a response whose ID arrives as the string "456".
    incoming.put(
        ("message", json.dumps({"jsonrpc": "2.0", "id": "456", "result": {"test": "data"}}))
    )

    # The first entry is the endpoint announcement produced by connect(); drop it.
    incoming.get(timeout=1.0)
    event_type, payload = incoming.get(timeout=1.0)
    assert event_type == "message"

    # The string ID must be coerced to an integer response ID on parse.
    parsed = types.JSONRPCMessage.model_validate_json(payload)
    assert isinstance(parsed.root, types.JSONRPCResponse)
    assert parsed.root.id == 456


def test_sse_client_connection_validation():
    """Test SSE client validates endpoint URLs properly.

    Patches the HTTP client factory and the SSE connect helper, feeds the
    client a single "endpoint" event, and checks that both queues are
    handed back.
    """
    test_url = "http://test.example/sse"

    with patch("core.mcp.client.sse_client.create_ssrf_proxy_mcp_http_client") as mock_client_factory:
        with patch("core.mcp.client.sse_client.ssrf_proxy_sse_connect") as mock_sse_connect:
            # Mock the HTTP client
            mock_client = Mock()
            mock_client_factory.return_value.__enter__.return_value = mock_client

            # Mock the SSE connection
            mock_event_source = Mock()
            mock_event_source.response.raise_for_status.return_value = None
            mock_sse_connect.return_value.__enter__.return_value = mock_event_source

            # Minimal stand-in for the SSE event objects yielded by iter_sse.
            class MockSSEEvent:
                def __init__(self, event_type: str, data: str):
                    self.event = event_type
                    self.data = data

            # Simulate the server announcing its message endpoint.
            endpoint_event = MockSSEEvent("endpoint", "/messages/?session_id=test-123")
            mock_event_source.iter_sse.return_value = [endpoint_event]

            # Test connection: the client must hand back both queues.
            try:
                with sse_client(test_url) as (read_queue, write_queue):
                    assert read_queue is not None
                    assert write_queue is not None
            except Exception:
                # Connection might fail due to mocking, but we're testing the validation logic
                pass


def test_sse_client_error_handling():
    """Test SSE client properly handles various error conditions.

    A 401 must surface as MCPAuthError (so callers can start an auth flow);
    any other HTTP status error maps to the generic MCPConnectionError.
    """
    test_url = "http://test.example/sse"

    # Test 401 error handling
    with patch("core.mcp.client.sse_client.create_ssrf_proxy_mcp_http_client"):
        with patch("core.mcp.client.sse_client.ssrf_proxy_sse_connect") as mock_sse_connect:
            # Mock 401 HTTP error
            mock_error = httpx.HTTPStatusError("Unauthorized", request=Mock(), response=Mock(status_code=401))
            mock_sse_connect.side_effect = mock_error

            with pytest.raises(MCPAuthError):
                with sse_client(test_url):
                    pass

    # Test other HTTP errors
    with patch("core.mcp.client.sse_client.create_ssrf_proxy_mcp_http_client"):
        with patch("core.mcp.client.sse_client.ssrf_proxy_sse_connect") as mock_sse_connect:
            # Mock other HTTP error
            mock_error = httpx.HTTPStatusError("Server Error", request=Mock(), response=Mock(status_code=500))
            mock_sse_connect.side_effect = mock_error

            with pytest.raises(MCPConnectionError):
                with sse_client(test_url):
                    pass


def test_sse_client_timeout_configuration():
    """Test SSE client timeout configuration."""
    url = "http://test.example/sse"
    connect_timeout = 10.0
    sse_read_timeout = 300.0
    headers = {"Authorization": "Bearer test-token"}

    with (
        patch("core.mcp.client.sse_client.create_ssrf_proxy_mcp_http_client") as factory,
        patch("core.mcp.client.sse_client.ssrf_proxy_sse_connect") as sse_connect,
    ):
        # Pretend the connection succeeds and yields no events.
        factory.return_value.__enter__.return_value = Mock()
        event_source = Mock()
        event_source.response.raise_for_status.return_value = None
        event_source.iter_sse.return_value = []
        sse_connect.return_value.__enter__.return_value = event_source

        try:
            with sse_client(url, headers=headers, timeout=connect_timeout, sse_read_timeout=sse_read_timeout):
                # Headers must reach the HTTP client factory unchanged.
                factory.assert_called_with(headers=headers)

                # The SSE read timeout must be applied to the connection call.
                call_args = sse_connect.call_args
                assert call_args is not None
                assert call_args[1]["timeout"].read == sse_read_timeout
        except Exception:
            # Teardown may fail under mocking; the configuration was already checked.
            pass


def test_sse_transport_endpoint_validation():
    """Test SSE transport validates endpoint URLs correctly.

    The endpoint announced by the server must share scheme and origin with
    the SSE URL; anything else is rejected (prevents endpoint hijacking).
    """
    from core.mcp.client.sse_client import SSETransport

    transport = SSETransport("http://example.com/sse")

    # Valid endpoint (same origin) — note: plain `assert`, not `== True` (E712).
    assert transport._validate_endpoint_url("http://example.com/messages/session123")

    # Invalid endpoint (different origin)
    assert not transport._validate_endpoint_url("http://malicious.com/messages/session123")

    # Invalid endpoint (different scheme)
    assert not transport._validate_endpoint_url("https://example.com/messages/session123")


def test_sse_transport_message_parsing():
    """Test SSE transport properly parses different message types."""
    from core.mcp.client.sse_client import SSETransport

    transport = SSETransport("http://example.com/sse")
    inbox: queue.Queue = queue.Queue()

    # A well-formed JSON-RPC payload becomes a SessionMessage on the queue.
    transport._handle_message_event('{"jsonrpc": "2.0", "id": 1, "method": "ping"}', inbox)
    parsed = inbox.get(timeout=1.0)
    assert parsed is not None
    assert hasattr(parsed, "message")

    # Malformed JSON is reported as an exception object on the same queue.
    transport._handle_message_event('{"invalid": json}', inbox)
    failure = inbox.get(timeout=1.0)
    assert isinstance(failure, Exception)


def test_sse_client_queue_cleanup():
    """Test that SSE client properly cleans up queues on exit.

    The connection attempt is made to fail; the context manager must raise
    before yielding, so the caller never receives the queues.
    """
    test_url = "http://test.example/sse"

    read_queue = None
    write_queue = None

    with patch("core.mcp.client.sse_client.create_ssrf_proxy_mcp_http_client"):
        with patch("core.mcp.client.sse_client.ssrf_proxy_sse_connect") as mock_sse_connect:
            # Make the connection attempt fail before any queue is handed out.
            mock_sse_connect.side_effect = Exception("Connection failed")

            try:
                with sse_client(test_url) as (rq, wq):
                    read_queue = rq
                    write_queue = wq
            except Exception:
                pass  # Expected to fail

    # The with-body must never have run, so no queue leaked to the caller.
    assert read_queue is None
    assert write_queue is None


def test_sse_client_url_processing():
    """Test SSE client URL processing functions."""
    from core.mcp.client.sse_client import remove_request_params

    # Query strings are stripped while scheme, host, and path are preserved.
    cases = {
        "http://example.com/sse?param1=value1&param2=value2": "http://example.com/sse",
        "http://example.com/sse": "http://example.com/sse",
        "http://example.com/path/to/sse?session=123&token=abc": "http://example.com/path/to/sse",
    }
    for raw_url, expected in cases.items():
        assert remove_request_params(raw_url) == expected


def test_sse_client_headers_propagation():
    """Test that custom headers are properly propagated in SSE client."""
    url = "http://test.example/sse"
    headers = {
        "Authorization": "Bearer test-token",
        "X-Custom-Header": "test-value",
        "User-Agent": "test-client/1.0",
    }

    with (
        patch("core.mcp.client.sse_client.create_ssrf_proxy_mcp_http_client") as factory,
        patch("core.mcp.client.sse_client.ssrf_proxy_sse_connect") as sse_connect,
    ):
        # Pretend the connection succeeds and yields no events.
        factory.return_value.__enter__.return_value = Mock()
        event_source = Mock()
        event_source.response.raise_for_status.return_value = None
        event_source.iter_sse.return_value = []
        sse_connect.return_value.__enter__.return_value = event_source

        try:
            with sse_client(url, headers=headers):
                pass
        except Exception:
            pass  # Expected due to mocking

        # The custom headers must reach the HTTP client factory verbatim.
        factory.assert_called_with(headers=headers)


def test_sse_client_concurrent_access():
    """Test SSE client behavior with concurrent queue access."""
    shared_queue: queue.Queue = queue.Queue()
    total = 10

    def producer():
        # Feed messages with a tiny delay so producer and consumer interleave.
        for index in range(total):
            shared_queue.put(f"message_{index}")
            time.sleep(0.01)

    def consumer():
        collected = []
        while len(collected) < total:
            try:
                collected.append(shared_queue.get(timeout=2.0))
            except queue.Empty:
                break
        return collected

    # Produce from a background thread while consuming on this one.
    worker = threading.Thread(target=producer, daemon=True)
    worker.start()

    received_messages = consumer()
    worker.join(timeout=5.0)

    # Every produced message must have been received.
    assert len(received_messages) == total
    for index in range(total):
        assert f"message_{index}" in received_messages

+ 450
- 0
api/tests/unit_tests/core/mcp/client/test_streamable_http.py ファイルの表示

@@ -0,0 +1,450 @@
"""
Tests for the StreamableHTTP client transport.

Contains tests for only the client side of the StreamableHTTP transport.
"""

import queue
import threading
import time
from typing import Any
from unittest.mock import Mock, patch

from core.mcp import types
from core.mcp.client.streamable_client import streamablehttp_client

# Test constants
SERVER_NAME = "test_streamable_http_server"
TEST_SESSION_ID = "test-session-id-12345"
INIT_REQUEST = {
"jsonrpc": "2.0",
"method": "initialize",
"params": {
"clientInfo": {"name": "test-client", "version": "1.0"},
"protocolVersion": "2025-03-26",
"capabilities": {},
},
"id": "init-1",
}


class MockStreamableHTTPClient:
    """Mock StreamableHTTP client for testing."""

    def __init__(self, url: str, headers: dict[str, Any] | None = None):
        # Connection target and optional request headers.
        self.url = url
        self.headers = headers or {}
        self.connected = False
        # Queues emulate the transport's read/write streams.
        self.read_queue: queue.Queue = queue.Queue()
        self.write_queue: queue.Queue = queue.Queue()
        self.session_id = TEST_SESSION_ID

    def connect(self):
        """Simulate connection establishment."""
        self.connected = True
        # Mirror streamablehttp_client's contract: (read, write, session-id getter).
        return self.read_queue, self.write_queue, lambda: self.session_id

    def _put_response(self, request_id, result):
        # Wrap a raw result dict in a JSON-RPC response session message.
        self.read_queue.put(
            types.SessionMessage(
                message=types.JSONRPCMessage(
                    root=types.JSONRPCResponse(jsonrpc="2.0", id=request_id, result=result)
                )
            )
        )

    def send_initialize_response(self):
        """Send a mock initialize response."""
        self._put_response(
            "init-1",
            {
                "protocolVersion": types.LATEST_PROTOCOL_VERSION,
                "capabilities": {
                    "logging": None,
                    "resources": None,
                    "tools": None,
                    "experimental": None,
                    "prompts": None,
                },
                "serverInfo": {"name": SERVER_NAME, "version": "0.1.0"},
                "instructions": "Test server instructions.",
            },
        )

    def send_tools_response(self):
        """Send a mock tools list response."""
        self._put_response(
            "tools-1",
            {
                "tools": [
                    {
                        "name": "test_tool",
                        "description": "A test tool",
                        "inputSchema": {"type": "object", "properties": {}},
                    }
                ],
            },
        )


def test_streamablehttp_client_message_id_handling():
    """Test StreamableHTTP client properly handles message ID coercion."""
    client = MockStreamableHTTPClient("http://test.example/mcp")
    incoming, _outgoing, _get_session_id = client.connect()

    # Deliver a response whose ID arrives as the string "789".
    incoming.put(
        types.SessionMessage(
            message=types.JSONRPCMessage(root=types.JSONRPCResponse(jsonrpc="2.0", id="789", result={"test": "data"}))
        )
    )

    received = incoming.get(timeout=1.0)
    assert received is not None
    assert isinstance(received, types.SessionMessage)

    # Pydantic's union_mode="left_to_right" coerces the string ID to an int.
    assert isinstance(received.message.root, types.JSONRPCResponse)
    assert received.message.root.id == 789


def test_streamablehttp_client_connection_validation():
    """Test StreamableHTTP client validates connections properly."""
    url = "http://test.example/mcp"

    with patch("core.mcp.client.streamable_client.create_ssrf_proxy_mcp_http_client") as factory:
        http_client = Mock()
        factory.return_value.__enter__.return_value = http_client

        # Every POST is answered with a clean JSON response.
        response = Mock()
        response.status_code = 200
        response.headers = {"content-type": "application/json"}
        response.raise_for_status.return_value = None
        http_client.post.return_value = response

        try:
            with streamablehttp_client(url) as (read_queue, write_queue, get_session_id):
                # All three channel objects must be provided to the caller.
                assert read_queue is not None
                assert write_queue is not None
                assert get_session_id is not None
        except Exception:
            # Connection might fail due to mocking; only the validation logic is exercised.
            pass


def test_streamablehttp_client_timeout_configuration():
    """Test StreamableHTTP client timeout configuration."""
    url = "http://test.example/mcp"
    headers = {"Authorization": "Bearer test-token"}

    with patch("core.mcp.client.streamable_client.create_ssrf_proxy_mcp_http_client") as factory:
        http_client = Mock()
        factory.return_value.__enter__.return_value = http_client

        # Every POST is answered with a clean JSON response.
        response = Mock()
        response.status_code = 200
        response.headers = {"content-type": "application/json"}
        response.raise_for_status.return_value = None
        http_client.post.return_value = response

        try:
            with streamablehttp_client(url, headers=headers):
                # The custom headers must be handed to the client factory.
                factory.assert_called_with(headers=headers)
        except Exception:
            # Connection may fail under mocking; the configuration was already checked.
            pass


def test_streamablehttp_client_session_id_handling():
    """Test StreamableHTTP client properly handles session IDs."""
    client = MockStreamableHTTPClient("http://test.example/mcp")
    _read, _write, get_session_id = client.connect()

    # The getter must report the session ID assigned at connect time.
    session_id = get_session_id()
    assert session_id == TEST_SESSION_ID

    # And the ID must be a usable, non-empty token for follow-up requests.
    assert session_id is not None
    assert len(session_id) > 0


def test_streamablehttp_client_message_parsing():
    """Test StreamableHTTP client properly parses different message types."""
    client = MockStreamableHTTPClient("http://test.example/mcp")
    incoming, _write, _get_session_id = client.connect()

    # The initialize response must arrive as a JSON-RPC SessionMessage.
    client.send_initialize_response()
    init_message = incoming.get(timeout=1.0)
    assert init_message is not None
    assert isinstance(init_message, types.SessionMessage)
    assert isinstance(init_message.message.root, types.JSONRPCResponse)

    # The tools listing is delivered the same way.
    client.send_tools_response()
    tools_message = incoming.get(timeout=1.0)
    assert tools_message is not None
    assert isinstance(tools_message, types.SessionMessage)


def test_streamablehttp_client_queue_cleanup():
    """Test that StreamableHTTP client properly cleans up queues on exit.

    The connection attempt is made to fail; the context manager must raise
    before yielding, so the caller never receives the queues.
    """
    test_url = "http://test.example/mcp"

    read_queue = None
    write_queue = None

    with patch("core.mcp.client.streamable_client.create_ssrf_proxy_mcp_http_client") as mock_client_factory:
        # Fail the connection before any queue can be handed to the caller.
        mock_client_factory.side_effect = Exception("Connection failed")

        try:
            with streamablehttp_client(test_url) as (rq, wq, get_session_id):
                read_queue = rq
                write_queue = wq
        except Exception:
            pass  # Expected to fail

    # The with-body must never have run, so no queue leaked to the caller.
    assert read_queue is None
    assert write_queue is None


def test_streamablehttp_client_headers_propagation():
    """Test that custom headers are properly propagated in StreamableHTTP client."""
    url = "http://test.example/mcp"
    custom_headers = {
        "Authorization": "Bearer test-token",
        "X-Custom-Header": "test-value",
        "User-Agent": "test-client/1.0",
    }

    with patch("core.mcp.client.streamable_client.create_ssrf_proxy_mcp_http_client") as factory:
        http_client = Mock()
        factory.return_value.__enter__.return_value = http_client

        response = Mock()
        response.status_code = 200
        response.headers = {"content-type": "application/json"}
        response.raise_for_status.return_value = None
        http_client.post.return_value = response

        try:
            with streamablehttp_client(url, headers=custom_headers):
                pass
        except Exception:
            pass  # Expected due to mocking

        # The factory must be called exactly once, with every custom header intact.
        factory.assert_called_once()
        assert "headers" in factory.call_args.kwargs
        passed_headers = factory.call_args.kwargs["headers"]
        for key, value in custom_headers.items():
            assert key in passed_headers
            assert passed_headers[key] == value


def test_streamablehttp_client_concurrent_access():
    """Test StreamableHTTP client behavior with concurrent queue access.

    Note: the original version also created an unused write queue; it has
    been removed since only the read queue takes part in the test.
    """
    test_read_queue: queue.Queue = queue.Queue()

    # Simulate concurrent producers and consumers
    def producer():
        for i in range(10):
            test_read_queue.put(f"message_{i}")
            time.sleep(0.01)  # Small delay to simulate real conditions

    def consumer():
        received = []
        for _ in range(10):
            try:
                msg = test_read_queue.get(timeout=2.0)
                received.append(msg)
            except queue.Empty:
                break
        return received

    # Start producer in separate thread
    producer_thread = threading.Thread(target=producer, daemon=True)
    producer_thread.start()

    # Consume messages
    received_messages = consumer()

    # Wait for producer to finish
    producer_thread.join(timeout=5.0)

    # Verify all messages were received
    assert len(received_messages) == 10
    for i in range(10):
        assert f"message_{i}" in received_messages


def test_streamablehttp_client_json_vs_sse_mode():
    """Test StreamableHTTP client handling of JSON vs SSE response modes."""
    url = "http://test.example/mcp"

    with patch("core.mcp.client.streamable_client.create_ssrf_proxy_mcp_http_client") as factory:
        http_client = Mock()
        factory.return_value.__enter__.return_value = http_client

        def make_response(content_type):
            # Build a minimal 200 response advertising the given content type.
            response = Mock()
            response.status_code = 200
            response.headers = {"content-type": content_type}
            response.raise_for_status.return_value = None
            return response

        json_response = make_response("application/json")
        json_response.json.return_value = {"result": "json_mode"}
        sse_response = make_response("text/event-stream")

        # The client must hand out queues in both response modes.
        for server_response in (json_response, sse_response):
            http_client.post.return_value = server_response
            try:
                with streamablehttp_client(url) as (read_queue, write_queue, _get_session_id):
                    assert read_queue is not None
                    assert write_queue is not None
            except Exception:
                pass  # Expected due to mocking


def test_streamablehttp_client_terminate_on_close():
    """Test StreamableHTTP client terminate_on_close parameter."""
    url = "http://test.example/mcp"

    with patch("core.mcp.client.streamable_client.create_ssrf_proxy_mcp_http_client") as factory:
        http_client = Mock()
        factory.return_value.__enter__.return_value = http_client

        response = Mock()
        response.status_code = 200
        response.headers = {"content-type": "application/json"}
        response.raise_for_status.return_value = None
        http_client.post.return_value = response
        http_client.delete.return_value = response

        # Both settings must be accepted without error (True is the default).
        for terminate in (True, False):
            try:
                with streamablehttp_client(url, terminate_on_close=terminate):
                    pass
            except Exception:
                pass  # Expected due to mocking


def test_streamablehttp_client_protocol_version_handling():
    """Test StreamableHTTP client protocol version handling."""
    client = MockStreamableHTTPClient("http://test.example/mcp")
    incoming, _write, _get_session_id = client.connect()

    # Deliver an initialize result pinned to an older protocol version.
    incoming.put(
        types.SessionMessage(
            message=types.JSONRPCMessage(
                root=types.JSONRPCResponse(
                    jsonrpc="2.0",
                    id="init-1",
                    result={
                        "protocolVersion": "2024-11-05",
                        "capabilities": {},
                        "serverInfo": {"name": SERVER_NAME, "version": "0.1.0"},
                    },
                )
            )
        )
    )

    # The version string must round-trip through the queue untouched.
    received = incoming.get(timeout=1.0)
    assert received is not None
    assert isinstance(received.message.root, types.JSONRPCResponse)
    assert received.message.root.result["protocolVersion"] == "2024-11-05"


def test_streamablehttp_client_error_response_handling():
    """Test StreamableHTTP client handling of error responses."""
    client = MockStreamableHTTPClient("http://test.example/mcp")
    incoming, _write, _get_session_id = client.connect()

    # Deliver a JSON-RPC "method not found" error.
    incoming.put(
        types.SessionMessage(
            message=types.JSONRPCMessage(
                root=types.JSONRPCError(
                    jsonrpc="2.0",
                    id="test-1",
                    error=types.ErrorData(code=-32601, message="Method not found", data=None),
                )
            )
        )
    )

    # The error must surface with its code and message intact.
    received = incoming.get(timeout=1.0)
    assert received is not None
    assert isinstance(received.message.root, types.JSONRPCError)
    assert received.message.root.error.code == -32601
    assert received.message.root.error.message == "Method not found"


def test_streamablehttp_client_resumption_token_handling():
    """Test StreamableHTTP client resumption token functionality."""
    url = "http://test.example/mcp"
    resumption_token = "resume-token-123"

    with patch("core.mcp.client.streamable_client.create_ssrf_proxy_mcp_http_client") as factory:
        http_client = Mock()
        factory.return_value.__enter__.return_value = http_client

        # Expose the resumption token through the last-event-id response header.
        response = Mock()
        response.status_code = 200
        response.headers = {"content-type": "application/json", "last-event-id": resumption_token}
        response.raise_for_status.return_value = None
        http_client.post.return_value = response

        try:
            with streamablehttp_client(url) as (read_queue, write_queue, _get_session_id):
                # The client must still hand out its queues in this mode.
                assert read_queue is not None
                assert write_queue is not None
        except Exception:
            pass  # Expected due to mocking

+ 53
- 0
api/tests/unit_tests/extensions/test_redis.py ファイルの表示

@@ -0,0 +1,53 @@
from redis import RedisError

from extensions.ext_redis import redis_fallback


def test_redis_fallback_success():
    """When the wrapped call succeeds, its own return value passes through."""

    @redis_fallback(default_return=None)
    def op():
        return "success"

    assert op() == "success"


def test_redis_fallback_error():
    """A RedisError inside the wrapped call yields the configured default."""

    @redis_fallback(default_return="fallback")
    def op():
        raise RedisError("Redis error")

    assert op() == "fallback"


def test_redis_fallback_none_default():
    """Without an explicit default, a RedisError results in None."""

    @redis_fallback()
    def op():
        raise RedisError("Redis error")

    assert op() is None


def test_redis_fallback_with_args():
    """Positional arguments are accepted even when the call falls back."""

    @redis_fallback(default_return=0)
    def op(x, y):
        raise RedisError("Redis error")

    assert op(1, 2) == 0


def test_redis_fallback_with_kwargs():
    """Keyword arguments are accepted even when the call falls back."""

    @redis_fallback(default_return={})
    def op(x=None, y=None):
        raise RedisError("Redis error")

    assert op(x=1, y=2) == {}


def test_redis_fallback_preserves_function_metadata():
    """The decorator must keep __name__ and __doc__ of the wrapped function."""

    @redis_fallback(default_return=None)
    def documented():
        """Test function docstring"""
        pass

    assert documented.__name__ == "documented"
    assert documented.__doc__ == "Test function docstring"

+ 0
- 4
api/tests/unit_tests/factories/test_variable_factory.py ファイルの表示

@@ -14,9 +14,7 @@ from core.variables import (
ArrayStringVariable,
FloatVariable,
IntegerVariable,
ObjectSegment,
SecretVariable,
SegmentType,
StringVariable,
)
from core.variables.exc import VariableError
@@ -418,8 +416,6 @@ def test_build_segment_file_array_with_different_file_types():

@st.composite
def _generate_file(draw) -> File:
file_id = draw(st.text(min_size=1, max_size=10))
tenant_id = draw(st.text(min_size=1, max_size=10))
file_type, mime_type, extension = draw(
st.sampled_from(
[

+ 232
- 0
api/tests/unit_tests/libs/test_login.py ファイルの表示

@@ -0,0 +1,232 @@
from unittest.mock import MagicMock, patch

import pytest
from flask import Flask, g
from flask_login import LoginManager, UserMixin

from libs.login import _get_user, current_user, login_required


class MockUser(UserMixin):
    """Mock user class for testing.

    Overrides UserMixin's is_authenticated so tests can control the
    authentication state per instance.
    """

    def __init__(self, id: str, is_authenticated: bool = True):
        self.id = id
        # Backing field for the is_authenticated property below.
        self._is_authenticated = is_authenticated

    @property
    def is_authenticated(self):
        # Expose the configurable flag instead of a constant.
        return self._is_authenticated


class TestLoginRequired:
"""Test cases for login_required decorator."""

@pytest.fixture
def setup_app(self, app: Flask):
"""Set up Flask app with login manager."""
# Initialize login manager
login_manager = LoginManager()
login_manager.init_app(app)

# Mock unauthorized handler
login_manager.unauthorized = MagicMock(return_value="Unauthorized")

# Add a dummy user loader to prevent exceptions
@login_manager.user_loader
def load_user(user_id):
return None

return app

def test_authenticated_user_can_access_protected_view(self, setup_app: Flask):
"""Test that authenticated users can access protected views."""

@login_required
def protected_view():
return "Protected content"

with setup_app.test_request_context():
# Mock authenticated user
mock_user = MockUser("test_user", is_authenticated=True)
with patch("libs.login._get_user", return_value=mock_user):
result = protected_view()
assert result == "Protected content"

def test_unauthenticated_user_cannot_access_protected_view(self, setup_app: Flask):
"""Test that unauthenticated users are redirected."""

@login_required
def protected_view():
return "Protected content"

with setup_app.test_request_context():
# Mock unauthenticated user
mock_user = MockUser("test_user", is_authenticated=False)
with patch("libs.login._get_user", return_value=mock_user):
result = protected_view()
assert result == "Unauthorized"
setup_app.login_manager.unauthorized.assert_called_once()

def test_login_disabled_allows_unauthenticated_access(self, setup_app: Flask):
"""Test that LOGIN_DISABLED config bypasses authentication."""

@login_required
def protected_view():
return "Protected content"

with setup_app.test_request_context():
# Mock unauthenticated user and LOGIN_DISABLED
mock_user = MockUser("test_user", is_authenticated=False)
with patch("libs.login._get_user", return_value=mock_user):
with patch("libs.login.dify_config") as mock_config:
mock_config.LOGIN_DISABLED = True

result = protected_view()
assert result == "Protected content"
# Ensure unauthorized was not called
setup_app.login_manager.unauthorized.assert_not_called()

def test_options_request_bypasses_authentication(self, setup_app: Flask):
"""Test that OPTIONS requests are exempt from authentication."""

@login_required
def protected_view():
return "Protected content"

with setup_app.test_request_context(method="OPTIONS"):
# Mock unauthenticated user
mock_user = MockUser("test_user", is_authenticated=False)
with patch("libs.login._get_user", return_value=mock_user):
result = protected_view()
assert result == "Protected content"
# Ensure unauthorized was not called
setup_app.login_manager.unauthorized.assert_not_called()

def test_flask_2_compatibility(self, setup_app: Flask):
    """On Flask 2.x the decorated view is routed through app.ensure_sync."""

    @login_required
    def protected_view():
        return "Protected content"

    # Pretend ensure_sync wraps the view and substitutes its own callable,
    # so the returned content proves the wrapper was used.
    setup_app.ensure_sync = MagicMock(return_value=lambda: "Synced content")

    authenticated = MockUser("test_user", is_authenticated=True)
    with setup_app.test_request_context():
        with patch("libs.login._get_user", return_value=authenticated):
            assert protected_view() == "Synced content"
            setup_app.ensure_sync.assert_called_once()

def test_flask_1_compatibility(self, setup_app: Flask):
    """Test Flask 1.x compatibility without ensure_sync.

    Bug fix: the previous version did ``if hasattr(app, "ensure_sync"):
    delattr(app, "ensure_sync")``. On a real Flask 2.x app ``ensure_sync``
    is a *class* attribute, so ``hasattr`` returns True while ``delattr``
    on the instance raises AttributeError and aborts the test before any
    assertion runs. Only an instance-level attribute (e.g. the MagicMock
    set by the Flask 2.x test) can be removed this way.
    """

    @login_required
    def protected_view():
        return "Protected content"

    # Remove an instance-level ensure_sync if present; never touch the class.
    # NOTE(review): if Flask defines ensure_sync on the class, this test
    # still exercises the Flask 2.x code path — confirm the fixture's intent.
    setup_app.__dict__.pop("ensure_sync", None)

    with setup_app.test_request_context():
        mock_user = MockUser("test_user", is_authenticated=True)
        with patch("libs.login._get_user", return_value=mock_user):
            result = protected_view()
            assert result == "Protected content"


class TestGetUser:
    """Behavior of the libs.login._get_user helper."""

    def test_get_user_returns_user_from_g(self, app: Flask):
        """A user already cached on g._login_user is returned as-is."""
        cached_user = MockUser("test_user")

        with app.test_request_context():
            g._login_user = cached_user
            resolved = _get_user()
            assert resolved == cached_user
            assert resolved.id == "test_user"

    def test_get_user_loads_user_if_not_in_g(self, app: Flask):
        """When g holds no user, _get_user delegates to login_manager._load_user."""
        loaded_user = MockUser("test_user")

        # Install a mocked login manager on the app.
        manager = MagicMock()
        manager._load_user = MagicMock()
        app.login_manager = manager

        with app.test_request_context():
            def populate_g():
                # _load_user is expected to stash the resolved user on g.
                g._login_user = loaded_user

            manager._load_user.side_effect = populate_g

            assert _get_user() == loaded_user
            manager._load_user.assert_called_once()

    def test_get_user_returns_none_without_request_context(self, app: Flask):
        """Outside any request context _get_user yields None."""
        assert _get_user() is None


class TestCurrentUser:
    """Test cases for the current_user proxy."""

    def test_current_user_proxy_returns_authenticated_user(self, app: Flask):
        """current_user forwards attribute access to the user from _get_user."""
        mock_user = MockUser("test_user", is_authenticated=True)

        with app.test_request_context():
            with patch("libs.login._get_user", return_value=mock_user):
                assert current_user.id == "test_user"
                assert current_user.is_authenticated is True

    def test_current_user_proxy_returns_none_when_no_user(self, app: Flask):
        """When _get_user returns None, attribute access raises AttributeError."""
        with app.test_request_context():
            with patch("libs.login._get_user", return_value=None):
                # Idiom fix: pytest.raises replaces the hand-rolled
                # try/except/pytest.fail construct with identical semantics
                # (fails the test if no AttributeError is raised).
                with pytest.raises(AttributeError):
                    _ = current_user.id

    def test_current_user_proxy_thread_safety(self, app: Flask):
        """Each thread resolves its own user through the context-local proxy."""
        import threading

        results: dict[int, str] = {}

        def check_user_in_thread(user_id: str, index: int):
            with app.test_request_context():
                mock_user = MockUser(user_id)
                with patch("libs.login._get_user", return_value=mock_user):
                    results[index] = current_user.id

        # Spin up several threads, each with a distinct user id.
        threads = [
            threading.Thread(target=check_user_in_thread, args=(f"user_{i}", i))
            for i in range(5)
        ]
        for thread in threads:
            thread.start()
        # Wait for all threads to complete.
        for thread in threads:
            thread.join()

        # Verify each thread observed its own user, not a neighbour's.
        for i in range(5):
            assert results[i] == f"user_{i}"

+ 205
- 0
api/tests/unit_tests/libs/test_passport.py ファイルの表示

@@ -0,0 +1,205 @@
from datetime import UTC, datetime, timedelta
from unittest.mock import patch

import jwt
import pytest
from werkzeug.exceptions import Unauthorized

from libs.passport import PassportService


class TestPassportService:
    """Test PassportService JWT operations"""

    @pytest.fixture
    def passport_service(self):
        """Create PassportService instance with test secret key"""
        with patch("libs.passport.dify_config") as mock_config:
            mock_config.SECRET_KEY = "test-secret-key-for-testing"
            # NOTE(review): the patch only covers construction, so the service
            # presumably captures SECRET_KEY in __init__ — confirm in PassportService.
            return PassportService()

    @pytest.fixture
    def another_passport_service(self):
        """Create another PassportService instance with different secret key"""
        with patch("libs.passport.dify_config") as mock_config:
            mock_config.SECRET_KEY = "another-secret-key-for-testing"
            return PassportService()

    # Core functionality tests
    def test_should_issue_and_verify_token(self, passport_service):
        """Test complete JWT lifecycle: issue and verify"""
        payload = {"user_id": "123", "app_code": "test-app"}
        token = passport_service.issue(payload)

        # Verify token format
        assert isinstance(token, str)
        assert len(token.split(".")) == 3  # JWT format: header.payload.signature

        # Verify token content
        decoded = passport_service.verify(token)
        assert decoded == payload

    def test_should_handle_different_payload_types(self, passport_service):
        """Test issuing and verifying tokens with different payload types"""
        test_cases = [
            {"string": "value"},
            {"number": 42},
            {"float": 3.14},
            {"boolean": True},
            {"null": None},
            {"array": [1, 2, 3]},
            {"nested": {"key": "value"}},
            {"unicode": "中文测试"},
            {"emoji": "🔐"},
            {},  # Empty payload
        ]

        # Round-trip each payload: issue then verify must be lossless.
        for payload in test_cases:
            token = passport_service.issue(payload)
            decoded = passport_service.verify(token)
            assert decoded == payload

    # Security tests
    def test_should_reject_modified_token(self, passport_service):
        """Test that any modification to token invalidates it"""
        token = passport_service.issue({"user": "test"})

        # Test multiple modification points
        test_positions = [0, len(token) // 3, len(token) // 2, len(token) - 1]

        for pos in test_positions:
            # Skip '.' separators — altering them changes JWT segment
            # structure rather than the signed content.
            if pos < len(token) and token[pos] != ".":
                # Change one character
                tampered = token[:pos] + ("X" if token[pos] != "X" else "Y") + token[pos + 1 :]
                with pytest.raises(Unauthorized):
                    passport_service.verify(tampered)

    def test_should_reject_token_with_different_secret_key(self, passport_service, another_passport_service):
        """Test key isolation - token from one service should not work with another"""
        payload = {"user_id": "123", "app_code": "test-app"}
        token = passport_service.issue(payload)

        with pytest.raises(Unauthorized) as exc_info:
            another_passport_service.verify(token)
        assert str(exc_info.value) == "401 Unauthorized: Invalid token signature."

    def test_should_use_hs256_algorithm(self, passport_service):
        """Test that HS256 algorithm is used for signing"""
        payload = {"test": "data"}
        token = passport_service.issue(payload)

        # Decode header without relying on JWT internals
        # Use jwt.get_unverified_header which is a public API
        header = jwt.get_unverified_header(token)
        assert header["alg"] == "HS256"

    def test_should_reject_token_with_wrong_algorithm(self, passport_service):
        """Test rejection of token signed with different algorithm"""
        payload = {"user_id": "123"}

        # Create token with different algorithm
        with patch("libs.passport.dify_config") as mock_config:
            mock_config.SECRET_KEY = "test-secret-key-for-testing"
            # Create token with HS512 instead of HS256
            wrong_alg_token = jwt.encode(payload, mock_config.SECRET_KEY, algorithm="HS512")

            # Should fail because service expects HS256
            # InvalidAlgorithmError is now caught by PyJWTError handler
            with pytest.raises(Unauthorized) as exc_info:
                passport_service.verify(wrong_alg_token)
            assert str(exc_info.value) == "401 Unauthorized: Invalid token."

    # Exception handling tests
    def test_should_handle_invalid_tokens(self, passport_service):
        """Test handling of various invalid token formats"""
        invalid_tokens = [
            ("not.a.token", "Invalid token."),
            ("invalid-jwt-format", "Invalid token."),
            ("xxx.yyy.zzz", "Invalid token."),
            ("a.b", "Invalid token."),  # Missing signature
            ("", "Invalid token."),  # Empty string
            (" ", "Invalid token."),  # Whitespace
            (None, "Invalid token."),  # None value
            # Malformed base64
            ("eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.INVALID_BASE64!@#$.signature", "Invalid token."),
        ]

        # Every malformed input must surface as Unauthorized with the
        # expected message fragment.
        for invalid_token, expected_message in invalid_tokens:
            with pytest.raises(Unauthorized) as exc_info:
                passport_service.verify(invalid_token)
            assert expected_message in str(exc_info.value)

    def test_should_reject_expired_token(self, passport_service):
        """Test rejection of expired token"""
        # Craft a token whose 'exp' claim is one hour in the past.
        past_time = datetime.now(UTC) - timedelta(hours=1)
        payload = {"user_id": "123", "exp": past_time.timestamp()}

        with patch("libs.passport.dify_config") as mock_config:
            mock_config.SECRET_KEY = "test-secret-key-for-testing"
            token = jwt.encode(payload, mock_config.SECRET_KEY, algorithm="HS256")

            with pytest.raises(Unauthorized) as exc_info:
                passport_service.verify(token)
            assert str(exc_info.value) == "401 Unauthorized: Token has expired."

    # Configuration tests
    def test_should_handle_empty_secret_key(self):
        """Test behavior when SECRET_KEY is empty"""
        with patch("libs.passport.dify_config") as mock_config:
            mock_config.SECRET_KEY = ""
            service = PassportService()

            # Empty secret key should still work but is insecure
            payload = {"test": "data"}
            token = service.issue(payload)
            decoded = service.verify(token)
            assert decoded == payload

    def test_should_handle_none_secret_key(self):
        """Test behavior when SECRET_KEY is None"""
        with patch("libs.passport.dify_config") as mock_config:
            mock_config.SECRET_KEY = None
            service = PassportService()

            payload = {"test": "data"}
            # JWT library will raise TypeError when secret is None
            with pytest.raises((TypeError, jwt.exceptions.InvalidKeyError)):
                service.issue(payload)

    # Boundary condition tests
    def test_should_handle_large_payload(self, passport_service):
        """Test handling of large payload"""
        # Test with 100KB instead of 1MB for faster tests
        large_data = "x" * (100 * 1024)
        payload = {"data": large_data}

        token = passport_service.issue(payload)
        decoded = passport_service.verify(token)

        assert decoded["data"] == large_data

    def test_should_handle_special_characters_in_payload(self, passport_service):
        """Test handling of special characters in payload"""
        special_payloads = [
            {"special": "!@#$%^&*()"},
            {"quotes": 'He said "Hello"'},
            {"backslash": "path\\to\\file"},
            {"newline": "line1\nline2"},
            {"unicode": "🔐🔑🛡️"},
            {"mixed": "Test123!@#中文🔐"},
        ]

        for payload in special_payloads:
            token = passport_service.issue(payload)
            decoded = passport_service.verify(token)
            assert decoded == payload

    def test_should_catch_generic_pyjwt_errors(self, passport_service):
        """Test that generic PyJWTError exceptions are caught and converted to Unauthorized"""
        # Mock jwt.decode to raise a generic PyJWTError
        with patch("libs.passport.jwt.decode") as mock_decode:
            mock_decode.side_effect = jwt.exceptions.PyJWTError("Generic JWT error")

            with pytest.raises(Unauthorized) as exc_info:
                passport_service.verify("some-token")
            assert str(exc_info.value) == "401 Unauthorized: Invalid token."

+ 59
- 0
api/tests/unit_tests/services/services_test_help.py ファイルの表示

@@ -0,0 +1,59 @@
from unittest.mock import MagicMock


class ServiceDbTestHelper:
    """
    Helper class for service database query tests.
    """

    @staticmethod
    def setup_db_query_filter_by_mock(mock_db, query_results):
        """
        Smart database query mock that responds based on model type and query parameters.

        Args:
            mock_db: Mock database session
            query_results: Dict mapping (model_name, filter_key, filter_value) to return value
                Example: {('Account', 'email', 'test@example.com'): mock_account}
        """

        def fake_query(model):
            query_mock = MagicMock()

            def fake_filter_by(**criteria):
                filtered = MagicMock()

                def resolve_first():
                    # Return the configured result whose model name and
                    # filter key/value match this call's criteria.
                    for (name, key, value), hit in query_results.items():
                        if model.__name__ == name and key in criteria and criteria[key] == value:
                            return hit
                    return None

                filtered.first.side_effect = resolve_first

                def fake_order_by(*args, **kwargs):
                    ordered = MagicMock()

                    def resolve_ordered_first():
                        # order_by results are registered under the sentinel
                        # pair ("order_by", "first_available") in query_results.
                        for (name, key, value), hit in query_results.items():
                            if model.__name__ == name and key == "order_by" and value == "first_available":
                                return hit
                        return None

                    ordered.first.side_effect = resolve_ordered_first
                    return ordered

                filtered.order_by.side_effect = fake_order_by
                return filtered

            query_mock.filter_by.side_effect = fake_filter_by
            return query_mock

        mock_db.session.query.side_effect = fake_query

+ 1545
- 0
api/tests/unit_tests/services/test_account_service.py
ファイル差分が大きすぎるため省略します
ファイルの表示


+ 0
- 1
api/tests/unit_tests/services/test_dataset_service_update_dataset.py ファイルの表示

@@ -10,7 +10,6 @@ from core.model_runtime.entities.model_entities import ModelType
from models.dataset import Dataset, ExternalKnowledgeBindings
from services.dataset_service import DatasetService
from services.errors.account import NoPermissionError
from tests.unit_tests.conftest import redis_mock


class DatasetUpdateTestDataFactory:

+ 2
- 2
api/tests/unit_tests/utils/structured_output_parser/test_structured_output_parser.py ファイルの表示

@@ -27,11 +27,11 @@ def create_mock_usage(prompt_tokens: int = 10, completion_tokens: int = 5) -> LL
return LLMUsage(
prompt_tokens=prompt_tokens,
prompt_unit_price=Decimal("0.001"),
prompt_price_unit=Decimal("1"),
prompt_price_unit=Decimal(1),
prompt_price=Decimal(str(prompt_tokens)) * Decimal("0.001"),
completion_tokens=completion_tokens,
completion_unit_price=Decimal("0.002"),
completion_price_unit=Decimal("1"),
completion_price_unit=Decimal(1),
completion_price=Decimal(str(completion_tokens)) * Decimal("0.002"),
total_tokens=prompt_tokens + completion_tokens,
total_price=Decimal(str(prompt_tokens)) * Decimal("0.001") + Decimal(str(completion_tokens)) * Decimal("0.002"),

+ 139
- 108
api/uv.lock ファイルの表示

@@ -99,28 +99,29 @@ wheels = [

[[package]]
name = "aiosignal"
version = "1.3.2"
version = "1.4.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "frozenlist" },
{ name = "typing-extensions" },
]
sdist = { url = "https://files.pythonhosted.org/packages/ba/b5/6d55e80f6d8a08ce22b982eafa278d823b541c925f11ee774b0b9c43473d/aiosignal-1.3.2.tar.gz", hash = "sha256:a8c255c66fafb1e499c9351d0bf32ff2d8a0321595ebac3b93713656d2436f54", size = 19424, upload-time = "2024-12-13T17:10:40.86Z" }
sdist = { url = "https://files.pythonhosted.org/packages/61/62/06741b579156360248d1ec624842ad0edf697050bbaf7c3e46394e106ad1/aiosignal-1.4.0.tar.gz", hash = "sha256:f47eecd9468083c2029cc99945502cb7708b082c232f9aca65da147157b251c7", size = 25007, upload-time = "2025-07-03T22:54:43.528Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/ec/6a/bc7e17a3e87a2985d3e8f4da4cd0f481060eb78fb08596c42be62c90a4d9/aiosignal-1.3.2-py2.py3-none-any.whl", hash = "sha256:45cde58e409a301715980c2b01d0c28bdde3770d8290b5eb2173759d9acb31a5", size = 7597, upload-time = "2024-12-13T17:10:38.469Z" },
{ url = "https://files.pythonhosted.org/packages/fb/76/641ae371508676492379f16e2fa48f4e2c11741bd63c48be4b12a6b09cba/aiosignal-1.4.0-py3-none-any.whl", hash = "sha256:053243f8b92b990551949e63930a839ff0cf0b0ebbe0597b0f3fb19e1a0fe82e", size = 7490, upload-time = "2025-07-03T22:54:42.156Z" },
]

[[package]]
name = "alembic"
version = "1.16.2"
version = "1.16.3"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "mako" },
{ name = "sqlalchemy" },
{ name = "typing-extensions" },
]
sdist = { url = "https://files.pythonhosted.org/packages/9c/35/116797ff14635e496bbda0c168987f5326a6555b09312e9b817e360d1f56/alembic-1.16.2.tar.gz", hash = "sha256:e53c38ff88dadb92eb22f8b150708367db731d58ad7e9d417c9168ab516cbed8", size = 1963563, upload-time = "2025-06-16T18:05:08.566Z" }
sdist = { url = "https://files.pythonhosted.org/packages/b9/40/28683414cc8711035a65256ca689e159471aa9ef08e8741ad1605bc01066/alembic-1.16.3.tar.gz", hash = "sha256:18ad13c1f40a5796deee4b2346d1a9c382f44b8af98053897484fa6cf88025e4", size = 1967462, upload-time = "2025-07-08T18:57:50.991Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/dd/e2/88e425adac5ad887a087c38d04fe2030010572a3e0e627f8a6e8c33eeda8/alembic-1.16.2-py3-none-any.whl", hash = "sha256:5f42e9bd0afdbd1d5e3ad856c01754530367debdebf21ed6894e34af52b3bb03", size = 242717, upload-time = "2025-06-16T18:05:10.27Z" },
{ url = "https://files.pythonhosted.org/packages/e6/68/1dea77887af7304528ea944c355d769a7ccc4599d3a23bd39182486deb42/alembic-1.16.3-py3-none-any.whl", hash = "sha256:70a7c7829b792de52d08ca0e3aefaf060687cb8ed6bebfa557e597a1a5e5a481", size = 246933, upload-time = "2025-07-08T18:57:52.793Z" },
]

[[package]]
@@ -243,7 +244,7 @@ sdist = { url = "https://files.pythonhosted.org/packages/22/8a/ef8ddf5ee0350984c

[[package]]
name = "alibabacloud-tea-openapi"
version = "0.3.15"
version = "0.3.16"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "alibabacloud-credentials" },
@@ -252,7 +253,7 @@ dependencies = [
{ name = "alibabacloud-tea-util" },
{ name = "alibabacloud-tea-xml" },
]
sdist = { url = "https://files.pythonhosted.org/packages/be/cb/f1b10b1da37e4c0de2aa9ca1e7153a6960a7f2dc496664e85fdc8b621f84/alibabacloud_tea_openapi-0.3.15.tar.gz", hash = "sha256:56a0aa6d51d8cf18c0cf3d219d861f4697f59d3e17fa6726b1101826d93988a2", size = 13021, upload-time = "2025-05-06T12:56:29.402Z" }
sdist = { url = "https://files.pythonhosted.org/packages/09/be/f594e79625e5ccfcfe7f12d7d70709a3c59e920878469c998886211c850d/alibabacloud_tea_openapi-0.3.16.tar.gz", hash = "sha256:6bffed8278597592e67860156f424bde4173a6599d7b6039fb640a3612bae292", size = 13087, upload-time = "2025-07-04T09:30:10.689Z" }

[[package]]
name = "alibabacloud-tea-util"
@@ -370,11 +371,11 @@ wheels = [

[[package]]
name = "asgiref"
version = "3.8.1"
version = "3.9.1"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/29/38/b3395cc9ad1b56d2ddac9970bc8f4141312dbaec28bc7c218b0dfafd0f42/asgiref-3.8.1.tar.gz", hash = "sha256:c343bd80a0bec947a9860adb4c432ffa7db769836c64238fc34bdc3fec84d590", size = 35186, upload-time = "2024-03-22T14:39:36.863Z" }
sdist = { url = "https://files.pythonhosted.org/packages/90/61/0aa957eec22ff70b830b22ff91f825e70e1ef732c06666a805730f28b36b/asgiref-3.9.1.tar.gz", hash = "sha256:a5ab6582236218e5ef1648f242fd9f10626cfd4de8dc377db215d5d5098e3142", size = 36870, upload-time = "2025-07-08T09:07:43.344Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/39/e3/893e8757be2612e6c266d9bb58ad2e3651524b5b40cf56761e985a28b13e/asgiref-3.8.1-py3-none-any.whl", hash = "sha256:3e1e3ecc849832fe52ccf2cb6686b7a55f82bb1d6aee72a58826471390335e47", size = 23828, upload-time = "2024-03-22T14:39:34.521Z" },
{ url = "https://files.pythonhosted.org/packages/7c/3c/0464dcada90d5da0e71018c04a140ad6349558afb30b3051b4264cc5b965/asgiref-3.9.1-py3-none-any.whl", hash = "sha256:f3bba7092a48005b5f5bacd747d36ee4a5a61f4a269a6df590b43144355ebd2c", size = 23790, upload-time = "2025-07-08T09:07:41.548Z" },
]

[[package]]
@@ -559,16 +560,16 @@ wheels = [

[[package]]
name = "boto3-stubs"
version = "1.39.2"
version = "1.39.3"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "botocore-stubs" },
{ name = "types-s3transfer" },
{ name = "typing-extensions", marker = "python_full_version < '3.12'" },
]
sdist = { url = "https://files.pythonhosted.org/packages/06/09/206a17938bfc7ec6e7c0b13ed58ad78146e46c29436d324ed55ceb5136ed/boto3_stubs-1.39.2.tar.gz", hash = "sha256:b1f1baef1658bd575a29ca85cc0877dbb3adeb376ffa8cbf242b876719ae0f95", size = 99939, upload-time = "2025-07-02T19:28:20.423Z" }
sdist = { url = "https://files.pythonhosted.org/packages/f0/ea/85b9940d6eedc04d0c6febf24d27311b6ee54f85ccc37192eb4db0dff5d6/boto3_stubs-1.39.3.tar.gz", hash = "sha256:9aad443b1d690951fd9ccb6fa20ad387bd0b1054c704566ff65dd0043a63fc26", size = 99947, upload-time = "2025-07-03T19:28:15.602Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/39/be/9c65f2bfc6df27ec5f16d28c454e2e3cb9a7af3ef8588440658334325a85/boto3_stubs-1.39.2-py3-none-any.whl", hash = "sha256:ce98d96fe1a7177b05067be3cd933277c88f745de836752f9ef8b4286dbfa53b", size = 69196, upload-time = "2025-07-02T19:28:07.025Z" },
{ url = "https://files.pythonhosted.org/packages/be/b8/0c56297e5f290de17e838c7e4ff338f5b94351c6566aed70ee197a671dc5/boto3_stubs-1.39.3-py3-none-any.whl", hash = "sha256:4daddb19374efa6d1bef7aded9cede0075f380722a9e60ab129ebba14ae66b69", size = 69196, upload-time = "2025-07-03T19:28:09.4Z" },
]

[package.optional-dependencies]
@@ -1216,7 +1217,7 @@ wheels = [

[[package]]
name = "dify-api"
version = "1.5.1"
version = "1.6.0"
source = { virtual = "." }
dependencies = [
{ name = "arize-phoenix-otel" },
@@ -1245,6 +1246,7 @@ dependencies = [
{ name = "googleapis-common-protos" },
{ name = "gunicorn" },
{ name = "httpx", extra = ["socks"] },
{ name = "httpx-sse" },
{ name = "jieba" },
{ name = "json-repair" },
{ name = "langfuse" },
@@ -1289,6 +1291,7 @@ dependencies = [
{ name = "sendgrid" },
{ name = "sentry-sdk", extra = ["flask"] },
{ name = "sqlalchemy" },
{ name = "sseclient-py" },
{ name = "starlette" },
{ name = "tiktoken" },
{ name = "transformers" },
@@ -1425,6 +1428,7 @@ requires-dist = [
{ name = "googleapis-common-protos", specifier = "==1.63.0" },
{ name = "gunicorn", specifier = "~=23.0.0" },
{ name = "httpx", extras = ["socks"], specifier = "~=0.27.0" },
{ name = "httpx-sse", specifier = ">=0.4.0" },
{ name = "jieba", specifier = "==0.42.1" },
{ name = "json-repair", specifier = ">=0.41.1" },
{ name = "langfuse", specifier = "~=2.51.3" },
@@ -1469,6 +1473,7 @@ requires-dist = [
{ name = "sendgrid", specifier = "~=6.12.3" },
{ name = "sentry-sdk", extras = ["flask"], specifier = "~=2.28.0" },
{ name = "sqlalchemy", specifier = "~=2.0.29" },
{ name = "sseclient-py", specifier = ">=1.8.0" },
{ name = "starlette", specifier = "==0.41.0" },
{ name = "tiktoken", specifier = "~=0.9.0" },
{ name = "transformers", specifier = "~=4.51.0" },
@@ -1493,7 +1498,7 @@ dev = [
{ name = "pytest-cov", specifier = "~=4.1.0" },
{ name = "pytest-env", specifier = "~=1.1.3" },
{ name = "pytest-mock", specifier = "~=3.14.0" },
{ name = "ruff", specifier = "~=0.11.5" },
{ name = "ruff", specifier = "~=0.12.3" },
{ name = "scipy-stubs", specifier = ">=1.15.3.0" },
{ name = "types-aiofiles", specifier = "~=24.1.0" },
{ name = "types-beautifulsoup4", specifier = "~=4.12.0" },
@@ -1708,16 +1713,16 @@ wheels = [

[[package]]
name = "fastapi"
version = "0.115.14"
version = "0.116.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "pydantic" },
{ name = "starlette" },
{ name = "typing-extensions" },
]
sdist = { url = "https://files.pythonhosted.org/packages/ca/53/8c38a874844a8b0fa10dd8adf3836ac154082cf88d3f22b544e9ceea0a15/fastapi-0.115.14.tar.gz", hash = "sha256:b1de15cdc1c499a4da47914db35d0e4ef8f1ce62b624e94e0e5824421df99739", size = 296263, upload-time = "2025-06-26T15:29:08.21Z" }
sdist = { url = "https://files.pythonhosted.org/packages/20/38/e1da78736143fd885c36213a3ccc493c384ae8fea6a0f0bc272ef42ebea8/fastapi-0.116.0.tar.gz", hash = "sha256:80dc0794627af0390353a6d1171618276616310d37d24faba6648398e57d687a", size = 296518, upload-time = "2025-07-07T15:09:27.82Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/53/50/b1222562c6d270fea83e9c9075b8e8600b8479150a18e4516a6138b980d1/fastapi-0.115.14-py3-none-any.whl", hash = "sha256:6c0c8bf9420bd58f565e585036d971872472b4f7d3f6c73b698e10cffdefb3ca", size = 95514, upload-time = "2025-06-26T15:29:06.49Z" },
{ url = "https://files.pythonhosted.org/packages/2f/68/d80347fe2360445b5f58cf290e588a4729746e7501080947e6cdae114b1f/fastapi-0.116.0-py3-none-any.whl", hash = "sha256:fdcc9ed272eaef038952923bef2b735c02372402d1203ee1210af4eea7a78d2b", size = 95625, upload-time = "2025-07-07T15:09:26.348Z" },
]

[[package]]
@@ -2532,6 +2537,15 @@ socks = [
{ name = "socksio" },
]

[[package]]
name = "httpx-sse"
version = "0.4.1"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/6e/fa/66bd985dd0b7c109a3bcb89272ee0bfb7e2b4d06309ad7b38ff866734b2a/httpx_sse-0.4.1.tar.gz", hash = "sha256:8f44d34414bc7b21bf3602713005c5df4917884f76072479b21f68befa4ea26e", size = 12998, upload-time = "2025-06-24T13:21:05.71Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/25/0a/6269e3473b09aed2dab8aa1a600c70f31f00ae1349bee30658f7e358a159/httpx_sse-0.4.1-py3-none-any.whl", hash = "sha256:cba42174344c3a5b06f255ce65b350880f962d99ead85e776f23c6618a377a37", size = 8054, upload-time = "2025-06-24T13:21:04.772Z" },
]

[[package]]
name = "huggingface-hub"
version = "0.33.2"
@@ -2574,15 +2588,15 @@ wheels = [

[[package]]
name = "hypothesis"
version = "6.135.24"
version = "6.135.26"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "attrs" },
{ name = "sortedcontainers" },
]
sdist = { url = "https://files.pythonhosted.org/packages/cf/ae/f846b67ce9fc80cf51cece6b7adaa3fe2de4251242d142e241ce5d4aa26f/hypothesis-6.135.24.tar.gz", hash = "sha256:e301aeb2691ec0a1f62bfc405eaa966055d603e328cd854c1ed59e1728e35ab6", size = 454011, upload-time = "2025-07-03T02:46:51.776Z" }
sdist = { url = "https://files.pythonhosted.org/packages/da/83/15c4e30561a0d8c8d076c88cb159187823d877118f34c851ada3b9b02a7b/hypothesis-6.135.26.tar.gz", hash = "sha256:73af0e46cd5039c6806f514fed6a3c185d91ef88b5a1577477099ddbd1a2e300", size = 454523, upload-time = "2025-07-05T04:59:45.443Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/ed/cb/c38acf27826a96712302229622f32dd356b9c4fbe52a3e9f615706027af8/hypothesis-6.135.24-py3-none-any.whl", hash = "sha256:88ed21fbfa481ca9851a9080841b3caca14cd4ed51a165dfae8006325775ee72", size = 520920, upload-time = "2025-07-03T02:46:48.286Z" },
{ url = "https://files.pythonhosted.org/packages/3c/78/db4fdc464219455f8dde90074660c3faf8429101b2d1299cac7d219e3176/hypothesis-6.135.26-py3-none-any.whl", hash = "sha256:fa237cbe2ae2c31d65f7230dcb866139ace635dcfec6c30dddf25974dd8ff4b9", size = 521517, upload-time = "2025-07-05T04:59:42.061Z" },
]

[[package]]
@@ -2892,10 +2906,12 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/9a/55/2cb24ea48aa30c99f805921c1c7860c1f45c0e811e44ee4e6a155668de06/lxml-6.0.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:219e0431ea8006e15005767f0351e3f7f9143e793e58519dc97fe9e07fae5563", size = 4952289, upload-time = "2025-06-28T18:47:25.602Z" },
{ url = "https://files.pythonhosted.org/packages/31/c0/b25d9528df296b9a3306ba21ff982fc5b698c45ab78b94d18c2d6ae71fd9/lxml-6.0.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:bd5913b4972681ffc9718bc2d4c53cde39ef81415e1671ff93e9aa30b46595e7", size = 5111310, upload-time = "2025-06-28T18:47:28.136Z" },
{ url = "https://files.pythonhosted.org/packages/e9/af/681a8b3e4f668bea6e6514cbcb297beb6de2b641e70f09d3d78655f4f44c/lxml-6.0.0-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:390240baeb9f415a82eefc2e13285016f9c8b5ad71ec80574ae8fa9605093cd7", size = 5025457, upload-time = "2025-06-26T16:26:15.068Z" },
{ url = "https://files.pythonhosted.org/packages/99/b6/3a7971aa05b7be7dfebc7ab57262ec527775c2c3c5b2f43675cac0458cad/lxml-6.0.0-cp312-cp312-manylinux_2_27_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:d6e200909a119626744dd81bae409fc44134389e03fbf1d68ed2a55a2fb10991", size = 5657016, upload-time = "2025-07-03T19:19:06.008Z" },
{ url = "https://files.pythonhosted.org/packages/69/f8/693b1a10a891197143c0673fcce5b75fc69132afa81a36e4568c12c8faba/lxml-6.0.0-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ca50bd612438258a91b5b3788c6621c1f05c8c478e7951899f492be42defc0da", size = 5257565, upload-time = "2025-06-26T16:26:17.906Z" },
{ url = "https://files.pythonhosted.org/packages/a8/96/e08ff98f2c6426c98c8964513c5dab8d6eb81dadcd0af6f0c538ada78d33/lxml-6.0.0-cp312-cp312-manylinux_2_31_armv7l.whl", hash = "sha256:c24b8efd9c0f62bad0439283c2c795ef916c5a6b75f03c17799775c7ae3c0c9e", size = 4713390, upload-time = "2025-06-26T16:26:20.292Z" },
{ url = "https://files.pythonhosted.org/packages/a8/83/6184aba6cc94d7413959f6f8f54807dc318fdcd4985c347fe3ea6937f772/lxml-6.0.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:afd27d8629ae94c5d863e32ab0e1d5590371d296b87dae0a751fb22bf3685741", size = 5066103, upload-time = "2025-06-26T16:26:22.765Z" },
{ url = "https://files.pythonhosted.org/packages/ee/01/8bf1f4035852d0ff2e36a4d9aacdbcc57e93a6cd35a54e05fa984cdf73ab/lxml-6.0.0-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:54c4855eabd9fc29707d30141be99e5cd1102e7d2258d2892314cf4c110726c3", size = 4791428, upload-time = "2025-06-26T16:26:26.461Z" },
{ url = "https://files.pythonhosted.org/packages/29/31/c0267d03b16954a85ed6b065116b621d37f559553d9339c7dcc4943a76f1/lxml-6.0.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:c907516d49f77f6cd8ead1322198bdfd902003c3c330c77a1c5f3cc32a0e4d16", size = 5678523, upload-time = "2025-07-03T19:19:09.837Z" },
{ url = "https://files.pythonhosted.org/packages/5c/f7/5495829a864bc5f8b0798d2b52a807c89966523140f3d6fa3a58ab6720ea/lxml-6.0.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:36531f81c8214e293097cd2b7873f178997dae33d3667caaae8bdfb9666b76c0", size = 5281290, upload-time = "2025-06-26T16:26:29.406Z" },
{ url = "https://files.pythonhosted.org/packages/79/56/6b8edb79d9ed294ccc4e881f4db1023af56ba451909b9ce79f2a2cd7c532/lxml-6.0.0-cp312-cp312-win32.whl", hash = "sha256:690b20e3388a7ec98e899fd54c924e50ba6693874aa65ef9cb53de7f7de9d64a", size = 3613495, upload-time = "2025-06-26T16:26:31.588Z" },
{ url = "https://files.pythonhosted.org/packages/0b/1e/cc32034b40ad6af80b6fd9b66301fc0f180f300002e5c3eb5a6110a93317/lxml-6.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:310b719b695b3dd442cdfbbe64936b2f2e231bb91d998e99e6f0daf991a3eba3", size = 4014711, upload-time = "2025-06-26T16:26:33.723Z" },
@@ -3732,7 +3748,7 @@ wheels = [

[[package]]
name = "opik"
version = "1.7.41"
version = "1.7.43"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "boto3-stubs", extra = ["bedrock-runtime"] },
@@ -3751,9 +3767,9 @@ dependencies = [
{ name = "tqdm" },
{ name = "uuid6" },
]
sdist = { url = "https://files.pythonhosted.org/packages/82/81/6cddb705b3f416cfe4f0507916f51d0886087695f9dab49cfc6b00eb0266/opik-1.7.41.tar.gz", hash = "sha256:6ce2f72c7d23a62e2c13d419ce50754f6e17234825dcf26506e7def34dd38e26", size = 323333, upload-time = "2025-07-02T12:35:31.76Z" }
sdist = { url = "https://files.pythonhosted.org/packages/ba/52/cea0317bc3207bc967b48932781995d9cdb2c490e7e05caa00ff660f7205/opik-1.7.43.tar.gz", hash = "sha256:0b02522b0b74d0a67b141939deda01f8bb69690eda6b04a7cecb1c7f0649ccd0", size = 326886, upload-time = "2025-07-07T10:30:07.715Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/e9/46/ee27d06cc2049619806c992bdaa10e25b93d19ecedbc5c0fa772d8ac9a6d/opik-1.7.41-py3-none-any.whl", hash = "sha256:99df9c7b7b504777a51300b27a72bc646903201629611082b9b1f3c3adfbb3bf", size = 614890, upload-time = "2025-07-02T12:35:29.562Z" },
{ url = "https://files.pythonhosted.org/packages/76/ae/f3566bdc3c49a1a8f795b1b6e726ef211c87e31f92d870ca6d63999c9bbf/opik-1.7.43-py3-none-any.whl", hash = "sha256:a66395c8b5ea7c24846f72dafc70c74d5b8f24ffbc4c8a1b3a7f9456e550568d", size = 625356, upload-time = "2025-07-07T10:30:06.389Z" },
]

[[package]]
@@ -3975,6 +3991,8 @@ sdist = { url = "https://files.pythonhosted.org/packages/f3/0d/d0d6dea55cd152ce3
wheels = [
{ url = "https://files.pythonhosted.org/packages/db/26/77f8ed17ca4ffd60e1dcd220a6ec6d71210ba398cfa33a13a1cd614c5613/pillow-11.3.0-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:1cd110edf822773368b396281a2293aeb91c90a2db00d78ea43e7e861631b722", size = 5316531, upload-time = "2025-07-01T09:13:59.203Z" },
{ url = "https://files.pythonhosted.org/packages/cb/39/ee475903197ce709322a17a866892efb560f57900d9af2e55f86db51b0a5/pillow-11.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:9c412fddd1b77a75aa904615ebaa6001f169b26fd467b4be93aded278266b288", size = 4686560, upload-time = "2025-07-01T09:14:01.101Z" },
{ url = "https://files.pythonhosted.org/packages/d5/90/442068a160fd179938ba55ec8c97050a612426fae5ec0a764e345839f76d/pillow-11.3.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:7d1aa4de119a0ecac0a34a9c8bde33f34022e2e8f99104e47a3ca392fd60e37d", size = 5870978, upload-time = "2025-07-03T13:09:55.638Z" },
{ url = "https://files.pythonhosted.org/packages/13/92/dcdd147ab02daf405387f0218dcf792dc6dd5b14d2573d40b4caeef01059/pillow-11.3.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:91da1d88226663594e3f6b4b8c3c8d85bd504117d043740a8e0ec449087cc494", size = 7641168, upload-time = "2025-07-03T13:10:00.37Z" },
{ url = "https://files.pythonhosted.org/packages/6e/db/839d6ba7fd38b51af641aa904e2960e7a5644d60ec754c046b7d2aee00e5/pillow-11.3.0-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:643f189248837533073c405ec2f0bb250ba54598cf80e8c1e043381a60632f58", size = 5973053, upload-time = "2025-07-01T09:14:04.491Z" },
{ url = "https://files.pythonhosted.org/packages/f2/2f/d7675ecae6c43e9f12aa8d58b6012683b20b6edfbdac7abcb4e6af7a3784/pillow-11.3.0-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:106064daa23a745510dabce1d84f29137a37224831d88eb4ce94bb187b1d7e5f", size = 6640273, upload-time = "2025-07-01T09:14:06.235Z" },
{ url = "https://files.pythonhosted.org/packages/45/ad/931694675ede172e15b2ff03c8144a0ddaea1d87adb72bb07655eaffb654/pillow-11.3.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:cd8ff254faf15591e724dc7c4ddb6bf4793efcbe13802a4ae3e863cd300b493e", size = 6082043, upload-time = "2025-07-01T09:14:07.978Z" },
@@ -3984,6 +4002,8 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/c6/df/90bd886fabd544c25addd63e5ca6932c86f2b701d5da6c7839387a076b4a/pillow-11.3.0-cp311-cp311-win_arm64.whl", hash = "sha256:30807c931ff7c095620fe04448e2c2fc673fcbb1ffe2a7da3fb39613489b1ddd", size = 2423079, upload-time = "2025-07-01T09:14:15.268Z" },
{ url = "https://files.pythonhosted.org/packages/40/fe/1bc9b3ee13f68487a99ac9529968035cca2f0a51ec36892060edcc51d06a/pillow-11.3.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:fdae223722da47b024b867c1ea0be64e0df702c5e0a60e27daad39bf960dd1e4", size = 5278800, upload-time = "2025-07-01T09:14:17.648Z" },
{ url = "https://files.pythonhosted.org/packages/2c/32/7e2ac19b5713657384cec55f89065fb306b06af008cfd87e572035b27119/pillow-11.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:921bd305b10e82b4d1f5e802b6850677f965d8394203d182f078873851dada69", size = 4686296, upload-time = "2025-07-01T09:14:19.828Z" },
{ url = "https://files.pythonhosted.org/packages/8e/1e/b9e12bbe6e4c2220effebc09ea0923a07a6da1e1f1bfbc8d7d29a01ce32b/pillow-11.3.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:eb76541cba2f958032d79d143b98a3a6b3ea87f0959bbe256c0b5e416599fd5d", size = 5871726, upload-time = "2025-07-03T13:10:04.448Z" },
{ url = "https://files.pythonhosted.org/packages/8d/33/e9200d2bd7ba00dc3ddb78df1198a6e80d7669cce6c2bdbeb2530a74ec58/pillow-11.3.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:67172f2944ebba3d4a7b54f2e95c786a3a50c21b88456329314caaa28cda70f6", size = 7644652, upload-time = "2025-07-03T13:10:10.391Z" },
{ url = "https://files.pythonhosted.org/packages/41/f1/6f2427a26fc683e00d985bc391bdd76d8dd4e92fac33d841127eb8fb2313/pillow-11.3.0-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:97f07ed9f56a3b9b5f49d3661dc9607484e85c67e27f3e8be2c7d28ca032fec7", size = 5977787, upload-time = "2025-07-01T09:14:21.63Z" },
{ url = "https://files.pythonhosted.org/packages/e4/c9/06dd4a38974e24f932ff5f98ea3c546ce3f8c995d3f0985f8e5ba48bba19/pillow-11.3.0-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:676b2815362456b5b3216b4fd5bd89d362100dc6f4945154ff172e206a22c024", size = 6645236, upload-time = "2025-07-01T09:14:23.321Z" },
{ url = "https://files.pythonhosted.org/packages/40/e7/848f69fb79843b3d91241bad658e9c14f39a32f71a301bcd1d139416d1be/pillow-11.3.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3e184b2f26ff146363dd07bde8b711833d7b0202e27d13540bfe2e35a323a809", size = 6086950, upload-time = "2025-07-01T09:14:25.237Z" },
@@ -3993,6 +4013,8 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/16/8f/b13447d1bf0b1f7467ce7d86f6e6edf66c0ad7cf44cf5c87a37f9bed9936/pillow-11.3.0-cp312-cp312-win_arm64.whl", hash = "sha256:2aceea54f957dd4448264f9bf40875da0415c83eb85f55069d89c0ed436e3542", size = 2423067, upload-time = "2025-07-01T09:14:33.709Z" },
{ url = "https://files.pythonhosted.org/packages/9e/e3/6fa84033758276fb31da12e5fb66ad747ae83b93c67af17f8c6ff4cc8f34/pillow-11.3.0-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:7c8ec7a017ad1bd562f93dbd8505763e688d388cde6e4a010ae1486916e713e6", size = 5270566, upload-time = "2025-07-01T09:16:19.801Z" },
{ url = "https://files.pythonhosted.org/packages/5b/ee/e8d2e1ab4892970b561e1ba96cbd59c0d28cf66737fc44abb2aec3795a4e/pillow-11.3.0-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:9ab6ae226de48019caa8074894544af5b53a117ccb9d3b3dcb2871464c829438", size = 4654618, upload-time = "2025-07-01T09:16:21.818Z" },
{ url = "https://files.pythonhosted.org/packages/f2/6d/17f80f4e1f0761f02160fc433abd4109fa1548dcfdca46cfdadaf9efa565/pillow-11.3.0-pp311-pypy311_pp73-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:fe27fb049cdcca11f11a7bfda64043c37b30e6b91f10cb5bab275806c32f6ab3", size = 4874248, upload-time = "2025-07-03T13:11:20.738Z" },
{ url = "https://files.pythonhosted.org/packages/de/5f/c22340acd61cef960130585bbe2120e2fd8434c214802f07e8c03596b17e/pillow-11.3.0-pp311-pypy311_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:465b9e8844e3c3519a983d58b80be3f668e2a7a5db97f2784e7079fbc9f9822c", size = 6583963, upload-time = "2025-07-03T13:11:26.283Z" },
{ url = "https://files.pythonhosted.org/packages/31/5e/03966aedfbfcbb4d5f8aa042452d3361f325b963ebbadddac05b122e47dd/pillow-11.3.0-pp311-pypy311_pp73-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5418b53c0d59b3824d05e029669efa023bbef0f3e92e75ec8428f3799487f361", size = 4957170, upload-time = "2025-07-01T09:16:23.762Z" },
{ url = "https://files.pythonhosted.org/packages/cc/2d/e082982aacc927fc2cab48e1e731bdb1643a1406acace8bed0900a61464e/pillow-11.3.0-pp311-pypy311_pp73-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:504b6f59505f08ae014f724b6207ff6222662aab5cc9542577fb084ed0676ac7", size = 5581505, upload-time = "2025-07-01T09:16:25.593Z" },
{ url = "https://files.pythonhosted.org/packages/34/e7/ae39f538fd6844e982063c3a5e4598b8ced43b9633baa3a85ef33af8c05c/pillow-11.3.0-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:c84d689db21a1c397d001aa08241044aa2069e7587b398c8cc63020390b1c1b8", size = 6984598, upload-time = "2025-07-01T09:16:27.732Z" },
@@ -4065,7 +4087,7 @@ wheels = [

[[package]]
name = "posthog"
version = "6.0.2"
version = "6.0.3"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "backoff" },
@@ -4075,9 +4097,9 @@ dependencies = [
{ name = "six" },
{ name = "typing-extensions" },
]
sdist = { url = "https://files.pythonhosted.org/packages/d9/10/37ea988b3ae73cbfd1f2d5e523cca31cecfcc40cbd0de6511f40462fdb78/posthog-6.0.2.tar.gz", hash = "sha256:94a28e65d7a2d1b2952e53a1b97fa4d6504b8d7e4c197c57f653621e55b549eb", size = 88141, upload-time = "2025-07-02T19:21:50.306Z" }
sdist = { url = "https://files.pythonhosted.org/packages/39/a2/1b68562124b0d0e615fa8431cc88c84b3db6526275c2c19a419579a49277/posthog-6.0.3.tar.gz", hash = "sha256:9005abb341af8fedd9d82ca0359b3d35a9537555cdc9881bfb469f7c0b4b0ec5", size = 91861, upload-time = "2025-07-07T07:14:08.21Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/85/2c/0c5dbbf9bc30401ae2a1b6b52b8abc19e4060cf28c3288ae9d962e65e3ad/posthog-6.0.2-py3-none-any.whl", hash = "sha256:756cc9adad9e42961454f8ac391b92a2f70ebb6607d29b0c568de08e5d8f1b18", size = 104946, upload-time = "2025-07-02T19:21:48.77Z" },
{ url = "https://files.pythonhosted.org/packages/ca/f1/a8d86245d41c8686f7d828a4959bdf483e8ac331b249b48b8c61fc884a1c/posthog-6.0.3-py3-none-any.whl", hash = "sha256:4b808c907f3623216a9362d91fdafce8e2f57a8387fb3020475c62ec809be56d", size = 108978, upload-time = "2025-07-07T07:14:06.451Z" },
]

[[package]]
@@ -4585,39 +4607,39 @@ wheels = [

[[package]]
name = "python-calamine"
version = "0.3.2"
version = "0.4.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "packaging" },
]
sdist = { url = "https://files.pythonhosted.org/packages/6b/21/387b92059909e741af7837194d84250335d2a057f614752b6364aaaa2f56/python_calamine-0.3.2.tar.gz", hash = "sha256:5cf12f2086373047cdea681711857b672cba77a34a66dd3755d60686fc974e06", size = 117336, upload-time = "2025-04-02T10:06:23.14Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/ef/b7/d59863ebe319150739d0c352c6dea2710a2f90254ed32304d52e8349edce/python_calamine-0.3.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:5251746816069c38eafdd1e4eb7b83870e1fe0ff6191ce9a809b187ffba8ce93", size = 830854, upload-time = "2025-04-02T10:04:14.673Z" },
{ url = "https://files.pythonhosted.org/packages/d3/01/b48c6f2c2e530a1a031199c5c5bf35f7c2cf7f16f3989263e616e3bc86ce/python_calamine-0.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:9775dbc93bc635d48f45433f8869a546cca28c2a86512581a05333f97a18337b", size = 809411, upload-time = "2025-04-02T10:04:16.067Z" },
{ url = "https://files.pythonhosted.org/packages/fe/6d/69c53ffb11b3ee1bf5bd945cc2514848adea492c879a50f38e2ed4424727/python_calamine-0.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ff4318b72ba78e8a04fb4c45342cfa23eab6f81ecdb85548cdab9f2db8ac9c7", size = 872905, upload-time = "2025-04-02T10:04:17.487Z" },
{ url = "https://files.pythonhosted.org/packages/be/ec/b02c4bc04c426d153af1f5ff07e797dd81ada6f47c170e0207d07c90b53a/python_calamine-0.3.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:0cd8eb1ef8644da71788a33d3de602d1c08ff1c4136942d87e25f09580b512ef", size = 876464, upload-time = "2025-04-02T10:04:19.53Z" },
{ url = "https://files.pythonhosted.org/packages/46/ef/8403ee595207de5bd277279b56384b31390987df8a61c280b4176802481a/python_calamine-0.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9dcfd560d8f88f39d23b829f666ebae4bd8daeec7ed57adfb9313543f3c5fa35", size = 942289, upload-time = "2025-04-02T10:04:20.902Z" },
{ url = "https://files.pythonhosted.org/packages/89/97/b4e5b77c70b36613c10f2dbeece75b5d43727335a33bf5176792ec83c3fc/python_calamine-0.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e5e79b9eae4b30c82d045f9952314137c7089c88274e1802947f9e3adb778a59", size = 978699, upload-time = "2025-04-02T10:04:22.263Z" },
{ url = "https://files.pythonhosted.org/packages/5f/e9/03bbafd6b11cdf70c004f2e856978fc252ec5ea7e77529f14f969134c7a8/python_calamine-0.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ce5e8cc518c8e3e5988c5c658f9dcd8229f5541ca63353175bb15b6ad8c456d0", size = 886008, upload-time = "2025-04-02T10:04:23.754Z" },
{ url = "https://files.pythonhosted.org/packages/7b/20/e18f534e49b403ba0b979a4dfead146001d867f5be846b91f81ed5377972/python_calamine-0.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2a0e596b1346c28b2de15c9f86186cceefa4accb8882992aa0b7499c593446ed", size = 925104, upload-time = "2025-04-02T10:04:25.255Z" },
{ url = "https://files.pythonhosted.org/packages/54/4c/58933e69a0a7871487d10b958c1f83384bc430d53efbbfbf1dea141a0d85/python_calamine-0.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:f521de16a9f3e951ec2e5e35d76752fe004088dbac4cdbf4dd62d0ad2bbf650f", size = 1050448, upload-time = "2025-04-02T10:04:26.649Z" },
{ url = "https://files.pythonhosted.org/packages/83/95/5c96d093eaaa2d15c63b43bcf8c87708eaab8428c72b6ebdcafc2604aa47/python_calamine-0.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:417d6825a36bba526ae17bed1b6ca576fbb54e23dc60c97eeb536c622e77c62f", size = 1056840, upload-time = "2025-04-02T10:04:28.18Z" },
{ url = "https://files.pythonhosted.org/packages/23/e0/b03cc3ad4f40fd3be0ebac0b71d273864ddf2bf0e611ec309328fdedded9/python_calamine-0.3.2-cp311-cp311-win32.whl", hash = "sha256:cd3ea1ca768139753633f9f0b16997648db5919894579f363d71f914f85f7ade", size = 663268, upload-time = "2025-04-02T10:04:29.659Z" },
{ url = "https://files.pythonhosted.org/packages/6b/bd/550da64770257fc70a185482f6353c0654a11f381227e146bb0170db040f/python_calamine-0.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:4560100412d8727c49048cca102eadeb004f91cfb9c99ae63cd7d4dc0a61333a", size = 692393, upload-time = "2025-04-02T10:04:31.534Z" },
{ url = "https://files.pythonhosted.org/packages/be/2e/0b4b7a146c3bb41116fe8e59a2f616340786db12aed51c7a9e75817cfa03/python_calamine-0.3.2-cp311-cp311-win_arm64.whl", hash = "sha256:a2526e6ba79087b1634f49064800339edb7316780dd7e1e86d10a0ca9de4e90f", size = 667312, upload-time = "2025-04-02T10:04:32.911Z" },
{ url = "https://files.pythonhosted.org/packages/f2/0f/c2e3e3bae774dae47cba6ffa640ff95525bd6a10a13d3cd998f33aeafc7f/python_calamine-0.3.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:7c063b1f783352d6c6792305b2b0123784882e2436b638a9b9a1e97f6d74fa51", size = 825179, upload-time = "2025-04-02T10:04:34.377Z" },
{ url = "https://files.pythonhosted.org/packages/c7/81/a05285f06d71ea38ab99b09f3119f93f575487c9d24d7a1bab65657b258b/python_calamine-0.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:85016728937e8f5d1810ff3c9603ffd2458d66e34d495202d7759fa8219871cd", size = 804036, upload-time = "2025-04-02T10:04:35.938Z" },
{ url = "https://files.pythonhosted.org/packages/24/b5/320f366ffd91ee5d5f0f77817d4fb684f62a5a68e438dcdb90e4f5f35137/python_calamine-0.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:81f243323bf712bb0b2baf0b938a2e6d6c9fa3b9902a44c0654474d04f999fac", size = 871527, upload-time = "2025-04-02T10:04:38.272Z" },
{ url = "https://files.pythonhosted.org/packages/13/19/063afced19620b829697b90329c62ad73274cc38faaa91d9ee41047f5f8c/python_calamine-0.3.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:0b719dd2b10237b0cfb2062e3eaf199f220918a5623197e8449f37c8de845a7c", size = 875411, upload-time = "2025-04-02T10:04:39.647Z" },
{ url = "https://files.pythonhosted.org/packages/d7/6a/c93c52414ec62cc51c4820aff434f03c4a1c69ced15cec3e4b93885e4012/python_calamine-0.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d5158310b9140e8ee8665c9541a11030901e7275eb036988150c93f01c5133bf", size = 943525, upload-time = "2025-04-02T10:04:41.025Z" },
{ url = "https://files.pythonhosted.org/packages/0a/0a/5bdecee03d235e8d111b1e8ee3ea0c0ed4ae43a402f75cebbe719930cf04/python_calamine-0.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2c1b248e8bf10194c449cb57e6ccb3f2fe3dc86975a6d746908cf2d37b048cc", size = 976332, upload-time = "2025-04-02T10:04:42.454Z" },
{ url = "https://files.pythonhosted.org/packages/05/ad/43ff92366856ee34f958e9cf4f5b98e63b0dc219e06ccba4ad6f63463756/python_calamine-0.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f3a13ad8e5b6843a73933b8d1710bc4df39a9152cb57c11227ad51f47b5838a4", size = 885549, upload-time = "2025-04-02T10:04:43.869Z" },
{ url = "https://files.pythonhosted.org/packages/ff/b9/76afb867e2bb4bfc296446b741cee01ae4ce6a094b43f4ed4eaed5189de4/python_calamine-0.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:fe950975a5758423c982ce1e2fdcb5c9c664d1a20b41ea21e619e5003bb4f96b", size = 926005, upload-time = "2025-04-02T10:04:45.884Z" },
{ url = "https://files.pythonhosted.org/packages/23/cf/5252b237b0e70c263f86741aea02e8e57aedb2bce9898468be1d9d55b9da/python_calamine-0.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:8707622ba816d6c26e36f1506ecda66a6a6cf43e55a43a8ef4c3bf8a805d3cfb", size = 1049380, upload-time = "2025-04-02T10:04:49.202Z" },
{ url = "https://files.pythonhosted.org/packages/1a/4d/f151e8923e53457ca49ceeaa3a34cb23afee7d7b46e6546ab2a29adc9125/python_calamine-0.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:e6eac46475c26e162a037f6711b663767f61f8fca3daffeb35aa3fc7ee6267cc", size = 1056720, upload-time = "2025-04-02T10:04:51.002Z" },
{ url = "https://files.pythonhosted.org/packages/f5/cb/1b5db3e4a8bbaaaa7706b270570d4a65133618fa0ca7efafe5ce680f6cee/python_calamine-0.3.2-cp312-cp312-win32.whl", hash = "sha256:0dee82aedef3db27368a388d6741d69334c1d4d7a8087ddd33f1912166e17e37", size = 663502, upload-time = "2025-04-02T10:04:52.402Z" },
{ url = "https://files.pythonhosted.org/packages/5a/53/920fa8e7b570647c08da0f1158d781db2e318918b06cb28fe0363c3398ac/python_calamine-0.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:ae09b779718809d31ca5d722464be2776b7d79278b1da56e159bbbe11880eecf", size = 692660, upload-time = "2025-04-02T10:04:53.721Z" },
{ url = "https://files.pythonhosted.org/packages/a5/ea/5d0ecf5c345c4d78964a5f97e61848bc912965b276a54fb8ae698a9419a8/python_calamine-0.3.2-cp312-cp312-win_arm64.whl", hash = "sha256:435546e401a5821fa70048b6c03a70db3b27d00037e2c4999c2126d8c40b51df", size = 666205, upload-time = "2025-04-02T10:04:56.377Z" },
sdist = { url = "https://files.pythonhosted.org/packages/cc/03/269f96535705b2f18c8977fa58e76763b4e4727a9b3ae277a9468c8ffe05/python_calamine-0.4.0.tar.gz", hash = "sha256:94afcbae3fec36d2d7475095a59d4dc6fae45829968c743cb799ebae269d7bbf", size = 127737, upload-time = "2025-07-04T06:05:28.626Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/d4/a5/bcd82326d0ff1ab5889e7a5e13c868b483fc56398e143aae8e93149ba43b/python_calamine-0.4.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:d1687f8c4d7852920c7b4e398072f183f88dd273baf5153391edc88b7454b8c0", size = 833019, upload-time = "2025-07-04T06:03:32.214Z" },
{ url = "https://files.pythonhosted.org/packages/f6/1a/a681f1d2f28164552e91ef47bcde6708098aa64a5f5fe3952f22362d340a/python_calamine-0.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:258d04230bebbbafa370a15838049d912d6a0a2c4da128943d8160ca4b6db58e", size = 812268, upload-time = "2025-07-04T06:03:33.855Z" },
{ url = "https://files.pythonhosted.org/packages/3d/92/2fc911431733739d4e7a633cefa903fa49a6b7a61e8765bad29a4a7c47b1/python_calamine-0.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c686e491634934f059553d55f77ac67ca4c235452d5b444f98fe79b3579f1ea5", size = 875733, upload-time = "2025-07-04T06:03:35.154Z" },
{ url = "https://files.pythonhosted.org/packages/f4/f0/48bfae6802eb360028ca6c15e9edf42243aadd0006b6ac3e9edb41a57119/python_calamine-0.4.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4480af7babcc2f919c638a554b06b7b145d9ab3da47fd696d68c2fc6f67f9541", size = 878325, upload-time = "2025-07-04T06:03:36.638Z" },
{ url = "https://files.pythonhosted.org/packages/a4/dc/f8c956e15bac9d5d1e05cd1b907ae780e40522d2fd103c8c6e2f21dff4ed/python_calamine-0.4.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e405b87a8cd1e90a994e570705898634f105442029f25bab7da658ee9cbaa771", size = 1015038, upload-time = "2025-07-04T06:03:37.971Z" },
{ url = "https://files.pythonhosted.org/packages/54/3f/e69ab97c7734fb850fba2f506b775912fd59f04e17488582c8fbf52dbc72/python_calamine-0.4.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a831345ee42615f0dfcb0ed60a3b1601d2f946d4166edae64fd9a6f9bbd57fc1", size = 924969, upload-time = "2025-07-04T06:03:39.253Z" },
{ url = "https://files.pythonhosted.org/packages/79/03/b4c056b468908d87a3de94389166e0f4dba725a70bc39e03bc039ba96f6b/python_calamine-0.4.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9951b8e4cafb3e1623bb5dfc31a18d38ef43589275f9657e99dfcbe4c8c4b33e", size = 888020, upload-time = "2025-07-04T06:03:41.099Z" },
{ url = "https://files.pythonhosted.org/packages/86/4f/b9092f7c970894054083656953184e44cb2dadff8852425e950d4ca419af/python_calamine-0.4.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a6619fe3b5c9633ed8b178684605f8076c9d8d85b29ade15f7a7713fcfdee2d0", size = 930337, upload-time = "2025-07-04T06:03:42.89Z" },
{ url = "https://files.pythonhosted.org/packages/64/da/137239027bf253aabe7063450950085ec9abd827d0cbc5170f585f38f464/python_calamine-0.4.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:2cc45b8e76ee331f6ea88ca23677be0b7a05b502cd4423ba2c2bc8dad53af1be", size = 1054568, upload-time = "2025-07-04T06:03:44.153Z" },
{ url = "https://files.pythonhosted.org/packages/80/96/74c38bcf6b6825d5180c0e147b85be8c52dbfba11848b1e98ba358e32a64/python_calamine-0.4.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1b2cfb7ced1a7c80befa0cfddfe4aae65663eb4d63c4ae484b9b7a80ebe1b528", size = 1058317, upload-time = "2025-07-04T06:03:45.873Z" },
{ url = "https://files.pythonhosted.org/packages/33/95/9d7b8fe8b32d99a6c79534df3132cfe40e9df4a0f5204048bf5e66ddbd93/python_calamine-0.4.0-cp311-cp311-win32.whl", hash = "sha256:04f4e32ee16814fc1fafc49300be8eeb280d94878461634768b51497e1444bd6", size = 663934, upload-time = "2025-07-04T06:03:47.407Z" },
{ url = "https://files.pythonhosted.org/packages/7c/e3/1c6cd9fd499083bea6ff1c30033ee8215b9f64e862babf5be170cacae190/python_calamine-0.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:a8543f69afac2213c0257bb56215b03dadd11763064a9d6b19786f27d1bef586", size = 692535, upload-time = "2025-07-04T06:03:48.699Z" },
{ url = "https://files.pythonhosted.org/packages/94/1c/3105d19fbab6b66874ce8831652caedd73b23b72e88ce18addf8ceca8c12/python_calamine-0.4.0-cp311-cp311-win_arm64.whl", hash = "sha256:54622e35ec7c3b6f07d119da49aa821731c185e951918f152c2dbf3bec1e15d6", size = 671751, upload-time = "2025-07-04T06:03:49.979Z" },
{ url = "https://files.pythonhosted.org/packages/63/60/f951513aaaa470b3a38a87d65eca45e0a02bc329b47864f5a17db563f746/python_calamine-0.4.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:74bca5d44a73acf3dcfa5370820797fcfd225c8c71abcddea987c5b4f5077e98", size = 826603, upload-time = "2025-07-04T06:03:51.245Z" },
{ url = "https://files.pythonhosted.org/packages/76/3f/789955bbc77831c639890758f945eb2b25d6358065edf00da6751226cf31/python_calamine-0.4.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:cf80178f5d1b0ee2ccfffb8549c50855f6249e930664adc5807f4d0d6c2b269c", size = 805826, upload-time = "2025-07-04T06:03:52.482Z" },
{ url = "https://files.pythonhosted.org/packages/00/4c/f87d17d996f647030a40bfd124fe45fe893c002bee35ae6aca9910a923ae/python_calamine-0.4.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:65cfef345386ae86f7720f1be93495a40fd7e7feabb8caa1df5025d7fbc58a1f", size = 874989, upload-time = "2025-07-04T06:03:53.794Z" },
{ url = "https://files.pythonhosted.org/packages/47/d2/3269367303f6c0488cf1bfebded3f9fe968d118a988222e04c9b2636bf2e/python_calamine-0.4.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f23e6214dbf9b29065a5dcfd6a6c674dd0e251407298c9138611c907d53423ff", size = 877504, upload-time = "2025-07-04T06:03:55.095Z" },
{ url = "https://files.pythonhosted.org/packages/f9/6d/c7ac35f5c7125e8bd07eb36773f300fda20dd2da635eae78a8cebb0b6ab7/python_calamine-0.4.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d792d304ee232ab01598e1d3ab22e074a32c2511476b5fb4f16f4222d9c2a265", size = 1014171, upload-time = "2025-07-04T06:03:56.777Z" },
{ url = "https://files.pythonhosted.org/packages/f0/81/5ea8792a2e9ab5e2a05872db3a4d3ed3538ad5af1861282c789e2f13a8cf/python_calamine-0.4.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bf813425918fd68f3e991ef7c4b5015be0a1a95fc4a8ab7e73c016ef1b881bb4", size = 926737, upload-time = "2025-07-04T06:03:58.024Z" },
{ url = "https://files.pythonhosted.org/packages/cc/6e/989e56e6f073fc0981a74ba7a393881eb351bb143e5486aa629b5e5d6a8b/python_calamine-0.4.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bbe2a0ccb4d003635888eea83a995ff56b0748c8c76fc71923544f5a4a7d4cd7", size = 887032, upload-time = "2025-07-04T06:03:59.298Z" },
{ url = "https://files.pythonhosted.org/packages/5d/92/2c9bd64277c6fe4be695d7d5a803b38d953ec8565037486be7506642c27c/python_calamine-0.4.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a7b3bb5f0d910b9b03c240987560f843256626fd443279759df4e91b717826d2", size = 929700, upload-time = "2025-07-04T06:04:01.388Z" },
{ url = "https://files.pythonhosted.org/packages/64/fa/fc758ca37701d354a6bc7d63118699f1c73788a1f2e1b44d720824992764/python_calamine-0.4.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:bd2c0fc2b5eabd08ceac8a2935bffa88dbc6116db971aa8c3f244bad3fd0f644", size = 1053971, upload-time = "2025-07-04T06:04:02.704Z" },
{ url = "https://files.pythonhosted.org/packages/65/52/40d7e08ae0ddba331cdc9f7fb3e92972f8f38d7afbd00228158ff6d1fceb/python_calamine-0.4.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:85b547cb1c5b692a0c2406678d666dbc1cec65a714046104683fe4f504a1721d", size = 1057057, upload-time = "2025-07-04T06:04:04.014Z" },
{ url = "https://files.pythonhosted.org/packages/16/de/e8a071c0adfda73285d891898a24f6e99338328c404f497ff5b0e6bc3d45/python_calamine-0.4.0-cp312-cp312-win32.whl", hash = "sha256:4c2a1e3a0db4d6de4587999a21cc35845648c84fba81c03dd6f3072c690888e4", size = 665540, upload-time = "2025-07-04T06:04:05.679Z" },
{ url = "https://files.pythonhosted.org/packages/5e/f2/7fdfada13f80db12356853cf08697ff4e38800a1809c2bdd26ee60962e7a/python_calamine-0.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:b193c89ffcc146019475cd121c552b23348411e19c04dedf5c766a20db64399a", size = 695366, upload-time = "2025-07-04T06:04:06.977Z" },
{ url = "https://files.pythonhosted.org/packages/20/66/d37412ad854480ce32f50d9f74f2a2f88b1b8a6fbc32f70aabf3211ae89e/python_calamine-0.4.0-cp312-cp312-win_arm64.whl", hash = "sha256:43a0f15e0b60c75a71b21a012b911d5d6f5fa052afad2a8edbc728af43af0fcf", size = 670740, upload-time = "2025-07-04T06:04:08.656Z" },
]

[[package]]
@@ -5066,27 +5088,27 @@ wheels = [

[[package]]
name = "ruff"
version = "0.11.13"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/ed/da/9c6f995903b4d9474b39da91d2d626659af3ff1eeb43e9ae7c119349dba6/ruff-0.11.13.tar.gz", hash = "sha256:26fa247dc68d1d4e72c179e08889a25ac0c7ba4d78aecfc835d49cbfd60bf514", size = 4282054, upload-time = "2025-06-05T21:00:15.721Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/7d/ce/a11d381192966e0b4290842cc8d4fac7dc9214ddf627c11c1afff87da29b/ruff-0.11.13-py3-none-linux_armv6l.whl", hash = "sha256:4bdfbf1240533f40042ec00c9e09a3aade6f8c10b6414cf11b519488d2635d46", size = 10292516, upload-time = "2025-06-05T20:59:32.944Z" },
{ url = "https://files.pythonhosted.org/packages/78/db/87c3b59b0d4e753e40b6a3b4a2642dfd1dcaefbff121ddc64d6c8b47ba00/ruff-0.11.13-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:aef9c9ed1b5ca28bb15c7eac83b8670cf3b20b478195bd49c8d756ba0a36cf48", size = 11106083, upload-time = "2025-06-05T20:59:37.03Z" },
{ url = "https://files.pythonhosted.org/packages/77/79/d8cec175856ff810a19825d09ce700265f905c643c69f45d2b737e4a470a/ruff-0.11.13-py3-none-macosx_11_0_arm64.whl", hash = "sha256:53b15a9dfdce029c842e9a5aebc3855e9ab7771395979ff85b7c1dedb53ddc2b", size = 10436024, upload-time = "2025-06-05T20:59:39.741Z" },
{ url = "https://files.pythonhosted.org/packages/8b/5b/f6d94f2980fa1ee854b41568368a2e1252681b9238ab2895e133d303538f/ruff-0.11.13-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ab153241400789138d13f362c43f7edecc0edfffce2afa6a68434000ecd8f69a", size = 10646324, upload-time = "2025-06-05T20:59:42.185Z" },
{ url = "https://files.pythonhosted.org/packages/6c/9c/b4c2acf24ea4426016d511dfdc787f4ce1ceb835f3c5fbdbcb32b1c63bda/ruff-0.11.13-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6c51f93029d54a910d3d24f7dd0bb909e31b6cd989a5e4ac513f4eb41629f0dc", size = 10174416, upload-time = "2025-06-05T20:59:44.319Z" },
{ url = "https://files.pythonhosted.org/packages/f3/10/e2e62f77c65ede8cd032c2ca39c41f48feabedb6e282bfd6073d81bb671d/ruff-0.11.13-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1808b3ed53e1a777c2ef733aca9051dc9bf7c99b26ece15cb59a0320fbdbd629", size = 11724197, upload-time = "2025-06-05T20:59:46.935Z" },
{ url = "https://files.pythonhosted.org/packages/bb/f0/466fe8469b85c561e081d798c45f8a1d21e0b4a5ef795a1d7f1a9a9ec182/ruff-0.11.13-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:d28ce58b5ecf0f43c1b71edffabe6ed7f245d5336b17805803312ec9bc665933", size = 12511615, upload-time = "2025-06-05T20:59:49.534Z" },
{ url = "https://files.pythonhosted.org/packages/17/0e/cefe778b46dbd0cbcb03a839946c8f80a06f7968eb298aa4d1a4293f3448/ruff-0.11.13-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:55e4bc3a77842da33c16d55b32c6cac1ec5fb0fbec9c8c513bdce76c4f922165", size = 12117080, upload-time = "2025-06-05T20:59:51.654Z" },
{ url = "https://files.pythonhosted.org/packages/5d/2c/caaeda564cbe103bed145ea557cb86795b18651b0f6b3ff6a10e84e5a33f/ruff-0.11.13-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:633bf2c6f35678c56ec73189ba6fa19ff1c5e4807a78bf60ef487b9dd272cc71", size = 11326315, upload-time = "2025-06-05T20:59:54.469Z" },
{ url = "https://files.pythonhosted.org/packages/75/f0/782e7d681d660eda8c536962920c41309e6dd4ebcea9a2714ed5127d44bd/ruff-0.11.13-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4ffbc82d70424b275b089166310448051afdc6e914fdab90e08df66c43bb5ca9", size = 11555640, upload-time = "2025-06-05T20:59:56.986Z" },
{ url = "https://files.pythonhosted.org/packages/5d/d4/3d580c616316c7f07fb3c99dbecfe01fbaea7b6fd9a82b801e72e5de742a/ruff-0.11.13-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:4a9ddd3ec62a9a89578c85842b836e4ac832d4a2e0bfaad3b02243f930ceafcc", size = 10507364, upload-time = "2025-06-05T20:59:59.154Z" },
{ url = "https://files.pythonhosted.org/packages/5a/dc/195e6f17d7b3ea6b12dc4f3e9de575db7983db187c378d44606e5d503319/ruff-0.11.13-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:d237a496e0778d719efb05058c64d28b757c77824e04ffe8796c7436e26712b7", size = 10141462, upload-time = "2025-06-05T21:00:01.481Z" },
{ url = "https://files.pythonhosted.org/packages/f4/8e/39a094af6967faa57ecdeacb91bedfb232474ff8c3d20f16a5514e6b3534/ruff-0.11.13-py3-none-musllinux_1_2_i686.whl", hash = "sha256:26816a218ca6ef02142343fd24c70f7cd8c5aa6c203bca284407adf675984432", size = 11121028, upload-time = "2025-06-05T21:00:04.06Z" },
{ url = "https://files.pythonhosted.org/packages/5a/c0/b0b508193b0e8a1654ec683ebab18d309861f8bd64e3a2f9648b80d392cb/ruff-0.11.13-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:51c3f95abd9331dc5b87c47ac7f376db5616041173826dfd556cfe3d4977f492", size = 11602992, upload-time = "2025-06-05T21:00:06.249Z" },
{ url = "https://files.pythonhosted.org/packages/7c/91/263e33ab93ab09ca06ce4f8f8547a858cc198072f873ebc9be7466790bae/ruff-0.11.13-py3-none-win32.whl", hash = "sha256:96c27935418e4e8e77a26bb05962817f28b8ef3843a6c6cc49d8783b5507f250", size = 10474944, upload-time = "2025-06-05T21:00:08.459Z" },
{ url = "https://files.pythonhosted.org/packages/46/f4/7c27734ac2073aae8efb0119cae6931b6fb48017adf048fdf85c19337afc/ruff-0.11.13-py3-none-win_amd64.whl", hash = "sha256:29c3189895a8a6a657b7af4e97d330c8a3afd2c9c8f46c81e2fc5a31866517e3", size = 11548669, upload-time = "2025-06-05T21:00:11.147Z" },
{ url = "https://files.pythonhosted.org/packages/ec/bf/b273dd11673fed8a6bd46032c0ea2a04b2ac9bfa9c628756a5856ba113b0/ruff-0.11.13-py3-none-win_arm64.whl", hash = "sha256:b4385285e9179d608ff1d2fb9922062663c658605819a6876d8beef0c30b7f3b", size = 10683928, upload-time = "2025-06-05T21:00:13.758Z" },
version = "0.12.3"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/c3/2a/43955b530c49684d3c38fcda18c43caf91e99204c2a065552528e0552d4f/ruff-0.12.3.tar.gz", hash = "sha256:f1b5a4b6668fd7b7ea3697d8d98857390b40c1320a63a178eee6be0899ea2d77", size = 4459341, upload-time = "2025-07-11T13:21:16.086Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/e2/fd/b44c5115539de0d598d75232a1cc7201430b6891808df111b8b0506aae43/ruff-0.12.3-py3-none-linux_armv6l.whl", hash = "sha256:47552138f7206454eaf0c4fe827e546e9ddac62c2a3d2585ca54d29a890137a2", size = 10430499, upload-time = "2025-07-11T13:20:26.321Z" },
{ url = "https://files.pythonhosted.org/packages/43/c5/9eba4f337970d7f639a37077be067e4ec80a2ad359e4cc6c5b56805cbc66/ruff-0.12.3-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:0a9153b000c6fe169bb307f5bd1b691221c4286c133407b8827c406a55282041", size = 11213413, upload-time = "2025-07-11T13:20:30.017Z" },
{ url = "https://files.pythonhosted.org/packages/e2/2c/fac3016236cf1fe0bdc8e5de4f24c76ce53c6dd9b5f350d902549b7719b2/ruff-0.12.3-py3-none-macosx_11_0_arm64.whl", hash = "sha256:fa6b24600cf3b750e48ddb6057e901dd5b9aa426e316addb2a1af185a7509882", size = 10586941, upload-time = "2025-07-11T13:20:33.046Z" },
{ url = "https://files.pythonhosted.org/packages/c5/0f/41fec224e9dfa49a139f0b402ad6f5d53696ba1800e0f77b279d55210ca9/ruff-0.12.3-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e2506961bf6ead54887ba3562604d69cb430f59b42133d36976421bc8bd45901", size = 10783001, upload-time = "2025-07-11T13:20:35.534Z" },
{ url = "https://files.pythonhosted.org/packages/0d/ca/dd64a9ce56d9ed6cad109606ac014860b1c217c883e93bf61536400ba107/ruff-0.12.3-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c4faaff1f90cea9d3033cbbcdf1acf5d7fb11d8180758feb31337391691f3df0", size = 10269641, upload-time = "2025-07-11T13:20:38.459Z" },
{ url = "https://files.pythonhosted.org/packages/63/5c/2be545034c6bd5ce5bb740ced3e7014d7916f4c445974be11d2a406d5088/ruff-0.12.3-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:40dced4a79d7c264389de1c59467d5d5cefd79e7e06d1dfa2c75497b5269a5a6", size = 11875059, upload-time = "2025-07-11T13:20:41.517Z" },
{ url = "https://files.pythonhosted.org/packages/8e/d4/a74ef1e801ceb5855e9527dae105eaff136afcb9cc4d2056d44feb0e4792/ruff-0.12.3-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:0262d50ba2767ed0fe212aa7e62112a1dcbfd46b858c5bf7bbd11f326998bafc", size = 12658890, upload-time = "2025-07-11T13:20:44.442Z" },
{ url = "https://files.pythonhosted.org/packages/13/c8/1057916416de02e6d7c9bcd550868a49b72df94e3cca0aeb77457dcd9644/ruff-0.12.3-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:12371aec33e1a3758597c5c631bae9a5286f3c963bdfb4d17acdd2d395406687", size = 12232008, upload-time = "2025-07-11T13:20:47.374Z" },
{ url = "https://files.pythonhosted.org/packages/f5/59/4f7c130cc25220392051fadfe15f63ed70001487eca21d1796db46cbcc04/ruff-0.12.3-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:560f13b6baa49785665276c963edc363f8ad4b4fc910a883e2625bdb14a83a9e", size = 11499096, upload-time = "2025-07-11T13:20:50.348Z" },
{ url = "https://files.pythonhosted.org/packages/d4/01/a0ad24a5d2ed6be03a312e30d32d4e3904bfdbc1cdbe63c47be9d0e82c79/ruff-0.12.3-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:023040a3499f6f974ae9091bcdd0385dd9e9eb4942f231c23c57708147b06311", size = 11688307, upload-time = "2025-07-11T13:20:52.945Z" },
{ url = "https://files.pythonhosted.org/packages/93/72/08f9e826085b1f57c9a0226e48acb27643ff19b61516a34c6cab9d6ff3fa/ruff-0.12.3-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:883d844967bffff5ab28bba1a4d246c1a1b2933f48cb9840f3fdc5111c603b07", size = 10661020, upload-time = "2025-07-11T13:20:55.799Z" },
{ url = "https://files.pythonhosted.org/packages/80/a0/68da1250d12893466c78e54b4a0ff381370a33d848804bb51279367fc688/ruff-0.12.3-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:2120d3aa855ff385e0e562fdee14d564c9675edbe41625c87eeab744a7830d12", size = 10246300, upload-time = "2025-07-11T13:20:58.222Z" },
{ url = "https://files.pythonhosted.org/packages/6a/22/5f0093d556403e04b6fd0984fc0fb32fbb6f6ce116828fd54306a946f444/ruff-0.12.3-py3-none-musllinux_1_2_i686.whl", hash = "sha256:6b16647cbb470eaf4750d27dddc6ebf7758b918887b56d39e9c22cce2049082b", size = 11263119, upload-time = "2025-07-11T13:21:01.503Z" },
{ url = "https://files.pythonhosted.org/packages/92/c9/f4c0b69bdaffb9968ba40dd5fa7df354ae0c73d01f988601d8fac0c639b1/ruff-0.12.3-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:e1417051edb436230023575b149e8ff843a324557fe0a265863b7602df86722f", size = 11746990, upload-time = "2025-07-11T13:21:04.524Z" },
{ url = "https://files.pythonhosted.org/packages/fe/84/7cc7bd73924ee6be4724be0db5414a4a2ed82d06b30827342315a1be9e9c/ruff-0.12.3-py3-none-win32.whl", hash = "sha256:dfd45e6e926deb6409d0616078a666ebce93e55e07f0fb0228d4b2608b2c248d", size = 10589263, upload-time = "2025-07-11T13:21:07.148Z" },
{ url = "https://files.pythonhosted.org/packages/07/87/c070f5f027bd81f3efee7d14cb4d84067ecf67a3a8efb43aadfc72aa79a6/ruff-0.12.3-py3-none-win_amd64.whl", hash = "sha256:a946cf1e7ba3209bdef039eb97647f1c77f6f540e5845ec9c114d3af8df873e7", size = 11695072, upload-time = "2025-07-11T13:21:11.004Z" },
{ url = "https://files.pythonhosted.org/packages/e0/30/f3eaf6563c637b6e66238ed6535f6775480db973c836336e4122161986fc/ruff-0.12.3-py3-none-win_arm64.whl", hash = "sha256:5f9c7c9c8f84c2d7f27e93674d27136fbf489720251544c4da7fb3d742e011b1", size = 10805855, upload-time = "2025-07-11T13:21:13.547Z" },
]

[[package]]
@@ -5297,6 +5319,15 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/1c/fc/9ba22f01b5cdacc8f5ed0d22304718d2c758fce3fd49a5372b886a86f37c/sqlalchemy-2.0.41-py3-none-any.whl", hash = "sha256:57df5dc6fdb5ed1a88a1ed2195fd31927e705cad62dedd86b46972752a80f576", size = 1911224, upload-time = "2025-05-14T17:39:42.154Z" },
]

[[package]]
name = "sseclient-py"
version = "1.8.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/e8/ed/3df5ab8bb0c12f86c28d0cadb11ed1de44a92ed35ce7ff4fd5518a809325/sseclient-py-1.8.0.tar.gz", hash = "sha256:c547c5c1a7633230a38dc599a21a2dc638f9b5c297286b48b46b935c71fac3e8", size = 7791, upload-time = "2023-09-01T19:39:20.45Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/49/58/97655efdfeb5b4eeab85b1fc5d3fa1023661246c2ab2a26ea8e47402d4f2/sseclient_py-1.8.0-py2.py3-none-any.whl", hash = "sha256:4ecca6dc0b9f963f8384e9d7fd529bf93dd7d708144c4fb5da0e0a1a926fee83", size = 8828, upload-time = "2023-09-01T19:39:17.627Z" },
]

[[package]]
name = "starlette"
version = "0.41.0"
@@ -5599,11 +5630,11 @@ wheels = [

[[package]]
name = "types-aiofiles"
version = "24.1.0.20250606"
version = "24.1.0.20250708"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/64/6e/fac4ffc896cb3faf2ac5d23747b65dd8bae1d9ee23305d1a3b12111c3989/types_aiofiles-24.1.0.20250606.tar.gz", hash = "sha256:48f9e26d2738a21e0b0f19381f713dcdb852a36727da8414b1ada145d40a18fe", size = 14364, upload-time = "2025-06-06T03:09:26.515Z" }
sdist = { url = "https://files.pythonhosted.org/packages/4a/d6/5c44761bc11cb5c7505013a39f397a9016bfb3a5c932032b2db16c38b87b/types_aiofiles-24.1.0.20250708.tar.gz", hash = "sha256:c8207ed7385491ce5ba94da02658164ebd66b69a44e892288c9f20cbbf5284ff", size = 14322, upload-time = "2025-07-08T03:14:44.814Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/71/de/f2fa2ab8a5943898e93d8036941e05bfd1e1f377a675ee52c7c307dccb75/types_aiofiles-24.1.0.20250606-py3-none-any.whl", hash = "sha256:e568c53fb9017c80897a9aa15c74bf43b7ee90e412286ec1e0912b6e79301aee", size = 14276, upload-time = "2025-06-06T03:09:25.662Z" },
{ url = "https://files.pythonhosted.org/packages/44/e9/4e0cc79c630040aae0634ac9393341dc2aff1a5be454be9741cc6cc8989f/types_aiofiles-24.1.0.20250708-py3-none-any.whl", hash = "sha256:07f8f06465fd415d9293467d1c66cd074b2c3b62b679e26e353e560a8cf63720", size = 14320, upload-time = "2025-07-08T03:14:44.009Z" },
]

[[package]]
@@ -5659,11 +5690,11 @@ wheels = [

[[package]]
name = "types-defusedxml"
version = "0.7.0.20250516"
version = "0.7.0.20250708"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/55/9d/3ba8b80536402f1a125bc5a44d82ab686aafa55a85f56160e076b2ac30de/types_defusedxml-0.7.0.20250516.tar.gz", hash = "sha256:164c2945077fa450f24ed09633f8b3a80694687fefbbc1cba5f24e4ba570666b", size = 10298, upload-time = "2025-05-16T03:08:18.951Z" }
sdist = { url = "https://files.pythonhosted.org/packages/b9/4b/79d046a7211e110afd885be04bb9423546df2a662ed28251512d60e51fb6/types_defusedxml-0.7.0.20250708.tar.gz", hash = "sha256:7b785780cc11c18a1af086308bf94bf53a0907943a1d145dbe00189bef323cb8", size = 10541, upload-time = "2025-07-08T03:14:33.325Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/2e/7b/567b0978150edccf7fa3aa8f2566ea9c3ffc9481ce7d64428166934d6d7f/types_defusedxml-0.7.0.20250516-py3-none-any.whl", hash = "sha256:00e793e5c385c3e142d7c2acc3b4ccea2fe0828cee11e35501f0ba40386630a0", size = 12576, upload-time = "2025-05-16T03:08:17.892Z" },
{ url = "https://files.pythonhosted.org/packages/24/f8/870de7fbd5fee5643f05061db948df6bd574a05a42aee91e37ad47c999ef/types_defusedxml-0.7.0.20250708-py3-none-any.whl", hash = "sha256:cc426cbc31c61a0f1b1c2ad9b9ef9ef846645f28fd708cd7727a6353b5c52e54", size = 13478, upload-time = "2025-07-08T03:14:32.633Z" },
]

[[package]]
@@ -5677,11 +5708,11 @@ wheels = [

[[package]]
name = "types-docutils"
version = "0.21.0.20250604"
version = "0.21.0.20250708"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/ef/d0/d28035370d669f14d4e23bd63d093207331f361afa24d2686d2c3fe6be8d/types_docutils-0.21.0.20250604.tar.gz", hash = "sha256:5a9cc7f5a4c5ef694aa0abc61111e0b1376a53dee90d65757f77f31acfcca8f2", size = 40953, upload-time = "2025-06-04T03:10:27.439Z" }
sdist = { url = "https://files.pythonhosted.org/packages/39/86/24394a71a04f416ca03df51863a3d3e2cd0542fdc40989188dca30ffb5bf/types_docutils-0.21.0.20250708.tar.gz", hash = "sha256:5625a82a9a2f26d8384545607c157e023a48ed60d940dfc738db125282864172", size = 42011, upload-time = "2025-07-08T03:14:24.214Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/89/91/887e9591c1ee50dfbf7c2fa2f3f51bc6db683013b6d2b0cd3983adf3d502/types_docutils-0.21.0.20250604-py3-none-any.whl", hash = "sha256:bfa8628176c06a80cdd1d6f3fb32e972e042db53538596488dfe0e9c5962b222", size = 65915, upload-time = "2025-06-04T03:10:26.067Z" },
{ url = "https://files.pythonhosted.org/packages/bd/17/8c1153fc1576a0dcffdd157c69a12863c3f9485054256f6791ea17d95aed/types_docutils-0.21.0.20250708-py3-none-any.whl", hash = "sha256:166630d1aec18b9ca02547873210e04bf7674ba8f8da9cd9e6a5e77dc99372c2", size = 67953, upload-time = "2025-07-08T03:14:23.057Z" },
]

[[package]]
@@ -5733,11 +5764,11 @@ wheels = [

[[package]]
name = "types-html5lib"
version = "1.1.11.20250516"
version = "1.1.11.20250708"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/d0/ed/9f092ff479e2b5598941855f314a22953bb04b5fb38bcba3f880feb833ba/types_html5lib-1.1.11.20250516.tar.gz", hash = "sha256:65043a6718c97f7d52567cc0cdf41efbfc33b1f92c6c0c5e19f60a7ec69ae720", size = 16136, upload-time = "2025-05-16T03:07:12.231Z" }
sdist = { url = "https://files.pythonhosted.org/packages/d4/3b/1f5ba4358cfc1421cced5cdb9d2b08b4b99e4f9a41da88ce079f6d1a7bf1/types_html5lib-1.1.11.20250708.tar.gz", hash = "sha256:24321720fdbac71cee50d5a4bec9b7448495b7217974cffe3fcf1ede4eef7afe", size = 16799, upload-time = "2025-07-08T03:13:53.14Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/cc/3b/cb5b23c7b51bf48b8c9f175abb9dce2f1ecd2d2c25f92ea9f4e3720e9398/types_html5lib-1.1.11.20250516-py3-none-any.whl", hash = "sha256:5e407b14b1bd2b9b1107cbd1e2e19d4a0c46d60febd231c7ab7313d7405663c1", size = 21770, upload-time = "2025-05-16T03:07:11.102Z" },
{ url = "https://files.pythonhosted.org/packages/a8/50/5fc23cf647eee23acdd337c8150861d39980cf11f33dd87f78e87d2a4bad/types_html5lib-1.1.11.20250708-py3-none-any.whl", hash = "sha256:bb898066b155de7081cb182179e2ded31b9e0e234605e2cb46536894e68a6954", size = 22913, upload-time = "2025-07-08T03:13:52.098Z" },
]

[[package]]
@@ -5856,11 +5887,11 @@ wheels = [

[[package]]
name = "types-pymysql"
version = "1.1.0.20250516"
version = "1.1.0.20250708"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/db/11/cdaa90b82cb25c5e04e75f0b0616872aa5775b001096779375084f8dbbcf/types_pymysql-1.1.0.20250516.tar.gz", hash = "sha256:fea4a9776101cf893dfc868f42ce10d2e46dcc498c792cc7c9c0fe00cb744234", size = 19640, upload-time = "2025-05-16T03:06:54.568Z" }
sdist = { url = "https://files.pythonhosted.org/packages/65/a3/db349a06c64b8c041c165fc470b81d37404ec342014625c7a6b7f7a4f680/types_pymysql-1.1.0.20250708.tar.gz", hash = "sha256:2cbd7cfcf9313eda784910578c4f1d06f8cc03a15cd30ce588aa92dd6255011d", size = 21715, upload-time = "2025-07-08T03:13:56.463Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/ab/64/129656e04ddda35d69faae914ce67cf60d83407ddd7afdef1e7c50bbb74a/types_pymysql-1.1.0.20250516-py3-none-any.whl", hash = "sha256:41c87a832e3ff503d5120cc6cebd64f6dcb3c407d9580a98b2cb3e3bcd109aa6", size = 20328, upload-time = "2025-05-16T03:06:53.681Z" },
{ url = "https://files.pythonhosted.org/packages/88/e5/7f72c520f527175b6455e955426fd4f971128b4fa2f8ab2f505f254a1ddc/types_pymysql-1.1.0.20250708-py3-none-any.whl", hash = "sha256:9252966d2795945b2a7a53d5cdc49fe8e4e2f3dde4c104ed7fc782a83114e365", size = 22860, upload-time = "2025-07-08T03:13:55.367Z" },
]

[[package]]
@@ -5878,20 +5909,20 @@ wheels = [

[[package]]
name = "types-python-dateutil"
version = "2.9.0.20250516"
version = "2.9.0.20250708"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/ef/88/d65ed807393285204ab6e2801e5d11fbbea811adcaa979a2ed3b67a5ef41/types_python_dateutil-2.9.0.20250516.tar.gz", hash = "sha256:13e80d6c9c47df23ad773d54b2826bd52dbbb41be87c3f339381c1700ad21ee5", size = 13943, upload-time = "2025-05-16T03:06:58.385Z" }
sdist = { url = "https://files.pythonhosted.org/packages/c9/95/6bdde7607da2e1e99ec1c1672a759d42f26644bbacf939916e086db34870/types_python_dateutil-2.9.0.20250708.tar.gz", hash = "sha256:ccdbd75dab2d6c9696c350579f34cffe2c281e4c5f27a585b2a2438dd1d5c8ab", size = 15834, upload-time = "2025-07-08T03:14:03.382Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/c5/3f/b0e8db149896005adc938a1e7f371d6d7e9eca4053a29b108978ed15e0c2/types_python_dateutil-2.9.0.20250516-py3-none-any.whl", hash = "sha256:2b2b3f57f9c6a61fba26a9c0ffb9ea5681c9b83e69cd897c6b5f668d9c0cab93", size = 14356, upload-time = "2025-05-16T03:06:57.249Z" },
{ url = "https://files.pythonhosted.org/packages/72/52/43e70a8e57fefb172c22a21000b03ebcc15e47e97f5cb8495b9c2832efb4/types_python_dateutil-2.9.0.20250708-py3-none-any.whl", hash = "sha256:4d6d0cc1cc4d24a2dc3816024e502564094497b713f7befda4d5bc7a8e3fd21f", size = 17724, upload-time = "2025-07-08T03:14:02.593Z" },
]

[[package]]
name = "types-python-http-client"
version = "3.3.7.20240910"
version = "3.3.7.20250708"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/e1/d7/bb2754c2d1b20c1890593ec89799c99e8875b04f474197c41354f41e9d31/types-python-http-client-3.3.7.20240910.tar.gz", hash = "sha256:8a6ebd30ad4b90a329ace69c240291a6176388624693bc971a5ecaa7e9b05074", size = 2804, upload-time = "2024-09-10T02:38:31.608Z" }
sdist = { url = "https://files.pythonhosted.org/packages/55/a0/0ad93698a3ebc6846ca23aca20ff6f6f8ebe7b4f0c1de7f19e87c03dbe8f/types_python_http_client-3.3.7.20250708.tar.gz", hash = "sha256:5f85b32dc64671a4e5e016142169aa187c5abed0b196680944e4efd3d5ce3322", size = 7707, upload-time = "2025-07-08T03:14:36.197Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/64/95/8f492d37d99630e096acbb4071788483282a34a73ae89dd1a5727f4189cc/types_python_http_client-3.3.7.20240910-py3-none-any.whl", hash = "sha256:58941bd986fb8bb0f4f782ef376be145ece8023f391364fbcd22bd26b13a140e", size = 3917, upload-time = "2024-09-10T02:38:30.261Z" },
{ url = "https://files.pythonhosted.org/packages/85/4f/b88274658cf489e35175be8571c970e9a1219713bafd8fc9e166d7351ecb/types_python_http_client-3.3.7.20250708-py3-none-any.whl", hash = "sha256:e2fc253859decab36713d82fc7f205868c3ddeaee79dbb55956ad9ca77abe12b", size = 8890, upload-time = "2025-07-08T03:14:35.506Z" },
]

[[package]]
@@ -6040,11 +6071,11 @@ wheels = [

[[package]]
name = "typing-extensions"
version = "4.14.0"
version = "4.14.1"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/d1/bc/51647cd02527e87d05cb083ccc402f93e441606ff1f01739a62c8ad09ba5/typing_extensions-4.14.0.tar.gz", hash = "sha256:8676b788e32f02ab42d9e7c61324048ae4c6d844a399eebace3d4979d75ceef4", size = 107423, upload-time = "2025-06-02T14:52:11.399Z" }
sdist = { url = "https://files.pythonhosted.org/packages/98/5a/da40306b885cc8c09109dc2e1abd358d5684b1425678151cdaed4731c822/typing_extensions-4.14.1.tar.gz", hash = "sha256:38b39f4aeeab64884ce9f74c94263ef78f3c22467c8724005483154c26648d36", size = 107673, upload-time = "2025-07-04T13:28:34.16Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/69/e0/552843e0d356fbb5256d21449fa957fa4eff3bbc135a74a691ee70c7c5da/typing_extensions-4.14.0-py3-none-any.whl", hash = "sha256:a1514509136dd0b477638fc68d6a91497af5076466ad0fa6c338e44e359944af", size = 43839, upload-time = "2025-06-02T14:52:10.026Z" },
{ url = "https://files.pythonhosted.org/packages/b5/00/d631e67a838026495268c2f6884f3711a15a9a2a96cd244fdaea53b823fb/typing_extensions-4.14.1-py3-none-any.whl", hash = "sha256:d1e1e3b58374dc93031d6eda2420a48ea44a36c2b4766a4fdeb3710755731d76", size = 43906, upload-time = "2025-07-04T13:28:32.743Z" },
]

[[package]]
@@ -6172,7 +6203,7 @@ pptx = [

[[package]]
name = "unstructured-client"
version = "0.37.4"
version = "0.38.1"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "aiofiles" },
@@ -6183,9 +6214,9 @@ dependencies = [
{ name = "pypdf" },
{ name = "requests-toolbelt" },
]
sdist = { url = "https://files.pythonhosted.org/packages/6c/6f/8dd20dab879f25074d6abfbb98f77bb8efeea0ae1bdf9a414b3e73c152b6/unstructured_client-0.37.4.tar.gz", hash = "sha256:5a4029563c2f79de098374fd8a99090719df325b4bdcfa3a87820908f2c83e6c", size = 90481, upload-time = "2025-07-01T16:40:09.877Z" }
sdist = { url = "https://files.pythonhosted.org/packages/85/60/412092671bfc4952640739f2c0c9b2f4c8af26a3c921738fd12621b4ddd8/unstructured_client-0.38.1.tar.gz", hash = "sha256:43ab0670dd8ff53d71e74f9b6dfe490a84a5303dab80a4873e118a840c6d46ca", size = 91781, upload-time = "2025-07-03T15:46:35.054Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/93/09/4399b0c32564b1a19fef943b5acea5a16fa0c6aa7a320065ce726b8245c1/unstructured_client-0.37.4-py3-none-any.whl", hash = "sha256:31975c0ea4408e369e6aad11c9e746d1f3f14013ac5c89f9f8dbada3a21dcec0", size = 211242, upload-time = "2025-07-01T16:40:08.642Z" },
{ url = "https://files.pythonhosted.org/packages/26/e0/8c249f00ba85fb4aba5c541463312befbfbf491105ff5c06e508089467be/unstructured_client-0.38.1-py3-none-any.whl", hash = "sha256:71e5467870d0a0119c788c29ec8baf5c0f7123f424affc9d6682eeeb7b8d45fa", size = 212626, upload-time = "2025-07-03T15:46:33.929Z" },
]

[[package]]
@@ -6220,11 +6251,11 @@ wheels = [

[[package]]
name = "uuid6"
version = "2025.0.0"
version = "2025.0.1"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/3f/49/06a089c184580f510e20226d9a081e4323d13db2fbc92d566697b5395c1e/uuid6-2025.0.0.tar.gz", hash = "sha256:bb78aa300e29db89b00410371d0c1f1824e59e29995a9daa3dedc8033d1d84ec", size = 13941, upload-time = "2025-06-11T20:02:05.324Z" }
sdist = { url = "https://files.pythonhosted.org/packages/ca/b7/4c0f736ca824b3a25b15e8213d1bcfc15f8ac2ae48d1b445b310892dc4da/uuid6-2025.0.1.tar.gz", hash = "sha256:cd0af94fa428675a44e32c5319ec5a3485225ba2179eefcf4c3f205ae30a81bd", size = 13932, upload-time = "2025-07-04T18:30:35.186Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/0a/50/4da47101af45b6cfa291559577993b52ee4399b3cd54ba307574a11e4f3a/uuid6-2025.0.0-py3-none-any.whl", hash = "sha256:2c73405ff5333c7181443958c6865e0d1b9b816bb160549e8d80ba186263cb3a", size = 7001, upload-time = "2025-06-11T20:02:04.521Z" },
{ url = "https://files.pythonhosted.org/packages/3d/b2/93faaab7962e2aa8d6e174afb6f76be2ca0ce89fde14d3af835acebcaa59/uuid6-2025.0.1-py3-none-any.whl", hash = "sha256:80530ce4d02a93cdf82e7122ca0da3ebbbc269790ec1cb902481fa3e9cc9ff99", size = 6979, upload-time = "2025-07-04T18:30:34.001Z" },
]

[[package]]

+ 5
- 0
docker/.env.example ファイルの表示

@@ -47,6 +47,11 @@ APP_WEB_URL=
# ensuring port 5001 is externally accessible (see docker-compose.yaml).
FILES_URL=

# INTERNAL_FILES_URL is used for plugin daemon communication within Docker network.
# Set this to the internal Docker service URL for proper plugin file access.
# Example: INTERNAL_FILES_URL=http://api:5001
INTERNAL_FILES_URL=

# ------------------------------
# Server Configuration
# ------------------------------

+ 3
- 3
docker/docker-compose-template.yaml ファイルの表示

@@ -2,7 +2,7 @@ x-shared-env: &shared-api-worker-env
services:
# API service
api:
image: langgenius/dify-api:1.5.1
image: langgenius/dify-api:1.6.0
restart: always
environment:
# Use the shared environment variables.
@@ -31,7 +31,7 @@ services:
# worker service
# The Celery worker for processing the queue.
worker:
image: langgenius/dify-api:1.5.1
image: langgenius/dify-api:1.6.0
restart: always
environment:
# Use the shared environment variables.
@@ -57,7 +57,7 @@ services:

# Frontend web application.
web:
image: langgenius/dify-web:1.5.1
image: langgenius/dify-web:1.6.0
restart: always
environment:
CONSOLE_API_URL: ${CONSOLE_API_URL:-}

+ 4
- 3
docker/docker-compose.yaml ファイルの表示

@@ -11,6 +11,7 @@ x-shared-env: &shared-api-worker-env
APP_API_URL: ${APP_API_URL:-}
APP_WEB_URL: ${APP_WEB_URL:-}
FILES_URL: ${FILES_URL:-}
INTERNAL_FILES_URL: ${INTERNAL_FILES_URL:-}
LOG_LEVEL: ${LOG_LEVEL:-INFO}
LOG_FILE: ${LOG_FILE:-/app/logs/server.log}
LOG_FILE_MAX_SIZE: ${LOG_FILE_MAX_SIZE:-20}
@@ -518,7 +519,7 @@ x-shared-env: &shared-api-worker-env
services:
# API service
api:
image: langgenius/dify-api:1.5.1
image: langgenius/dify-api:1.6.0
restart: always
environment:
# Use the shared environment variables.
@@ -547,7 +548,7 @@ services:
# worker service
# The Celery worker for processing the queue.
worker:
image: langgenius/dify-api:1.5.1
image: langgenius/dify-api:1.6.0
restart: always
environment:
# Use the shared environment variables.
@@ -573,7 +574,7 @@ services:

# Frontend web application.
web:
image: langgenius/dify-web:1.5.1
image: langgenius/dify-web:1.6.0
restart: always
environment:
CONSOLE_API_URL: ${CONSOLE_API_URL:-}

+ 4
- 1
docker/nginx/conf.d/default.conf.template ファイルの表示

@@ -39,7 +39,10 @@ server {
proxy_pass http://web:3000;
include proxy.conf;
}

location /mcp {
proxy_pass http://api:5001;
include proxy.conf;
}
# placeholder for acme challenge location
${ACME_CHALLENGE_LOCATION}


+ 8
- 0
web/app/(commonLayout)/app/(appDetailLayout)/[appId]/overview/cardView.tsx ファイルの表示

@@ -5,6 +5,7 @@ import { useTranslation } from 'react-i18next'
import { useContext } from 'use-context-selector'
import AppCard from '@/app/components/app/overview/appCard'
import Loading from '@/app/components/base/loading'
import MCPServiceCard from '@/app/components/tools/mcp/mcp-service-card'
import { ToastContext } from '@/app/components/base/toast'
import {
fetchAppDetail,
@@ -31,6 +32,8 @@ const CardView: FC<ICardViewProps> = ({ appId, isInPanel, className }) => {
const appDetail = useAppStore(state => state.appDetail)
const setAppDetail = useAppStore(state => state.setAppDetail)

const showMCPCard = isInPanel

const updateAppDetail = async () => {
try {
const res = await fetchAppDetail({ url: '/apps', id: appId })
@@ -117,6 +120,11 @@ const CardView: FC<ICardViewProps> = ({ appId, isInPanel, className }) => {
isInPanel={isInPanel}
onChangeStatus={onChangeApiStatus}
/>
{showMCPCard && (
<MCPServiceCard
appInfo={appDetail}
/>
)}
</div>
)
}

+ 1
- 2
web/app/(shareLayout)/webapp-reset-password/set-password/page.tsx ファイルの表示

@@ -9,8 +9,7 @@ import Button from '@/app/components/base/button'
import { changeWebAppPasswordWithToken } from '@/service/common'
import Toast from '@/app/components/base/toast'
import Input from '@/app/components/base/input'

const validPassword = /^(?=.*[a-zA-Z])(?=.*\d).{8,}$/
import { validPassword } from '@/config'

const ChangePasswordForm = () => {
const { t } = useTranslation()

+ 1
- 2
web/app/account/account-page/index.tsx ファイルの表示

@@ -21,6 +21,7 @@ import { IS_CE_EDITION } from '@/config'
import Input from '@/app/components/base/input'
import PremiumBadge from '@/app/components/base/premium-badge'
import { useGlobalPublicStore } from '@/context/global-public-context'
import { validPassword } from '@/config'

const titleClassName = `
system-sm-semibold text-text-secondary
@@ -29,8 +30,6 @@ const descriptionClassName = `
mt-1 body-xs-regular text-text-tertiary
`

const validPassword = /^(?=.*[a-zA-Z])(?=.*\d).{8,}$/

export default function AccountPage() {
const { t } = useTranslation()
const { systemFeatures } = useGlobalPublicStore()

+ 5
- 7
web/app/components/app-sidebar/app-info.tsx ファイルの表示

@@ -308,13 +308,11 @@ const AppInfo = ({ expand, onlyShowDetail = false, openState = false, onDetailEx
operations={operations}
/>
</div>
<div className='flex flex-1'>
<CardView
appId={appDetail.id}
isInPanel={true}
className='flex grow flex-col gap-2 overflow-auto px-2 py-1'
/>
</div>
<CardView
appId={appDetail.id}
isInPanel={true}
className='flex flex-1 flex-col gap-2 overflow-auto px-2 py-1'
/>
<Divider />
<div className='flex min-h-fit shrink-0 flex-col items-start justify-center gap-3 self-stretch border-t-[0.5px] border-divider-subtle p-2'>
<Button

+ 15
- 18
web/app/components/app-sidebar/basic.tsx ファイルの表示

@@ -2,6 +2,10 @@ import React from 'react'
import { useTranslation } from 'react-i18next'
import AppIcon from '../base/app-icon'
import Tooltip from '@/app/components/base/tooltip'
import {
Code,
WindowCursor,
} from '@/app/components/base/icons/src/vender/workflow'

export type IAppBasicProps = {
iconType?: 'app' | 'api' | 'dataset' | 'webapp' | 'notion'
@@ -14,25 +18,13 @@ export type IAppBasicProps = {
textStyle?: { main?: string; extra?: string }
isExtraInLine?: boolean
mode?: string
hideType?: boolean
}

const ApiSvg = <svg width="18" height="18" viewBox="0 0 18 18" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M8.5 3.5C8.5 4.60457 9.39543 5.5 10.5 5.5C11.6046 5.5 12.5 4.60457 12.5 3.5C12.5 2.39543 11.6046 1.5 10.5 1.5C9.39543 1.5 8.5 2.39543 8.5 3.5Z" stroke="#5850EC" strokeWidth="1.5" strokeLinecap="round" strokeLinejoin="round" />
<path d="M12.5 9C12.5 10.1046 13.3954 11 14.5 11C15.6046 11 16.5 10.1046 16.5 9C16.5 7.89543 15.6046 7 14.5 7C13.3954 7 12.5 7.89543 12.5 9Z" stroke="#5850EC" strokeWidth="1.5" strokeLinecap="round" strokeLinejoin="round" />
<path d="M8.5 3.5H5.5L3.5 6.5" stroke="#5850EC" strokeWidth="1.5" strokeLinecap="round" strokeLinejoin="round" />
<path d="M8.5 14.5C8.5 15.6046 9.39543 16.5 10.5 16.5C11.6046 16.5 12.5 15.6046 12.5 14.5C12.5 13.3954 11.6046 12.5 10.5 12.5C9.39543 12.5 8.5 13.3954 8.5 14.5Z" stroke="#5850EC" strokeWidth="1.5" strokeLinecap="round" strokeLinejoin="round" />
<path d="M8.5 14.5H5.5L3.5 11.5" stroke="#5850EC" strokeWidth="1.5" strokeLinecap="round" strokeLinejoin="round" />
<path d="M12.5 9H1.5" stroke="#5850EC" strokeWidth="1.5" strokeLinecap="round" strokeLinejoin="round" />
</svg>

const DatasetSvg = <svg width="20" height="20" viewBox="0 0 20 20" fill="none" xmlns="http://www.w3.org/2000/svg">
<path fillRule="evenodd" clipRule="evenodd" d="M0.833497 5.13481C0.833483 4.69553 0.83347 4.31654 0.858973 4.0044C0.88589 3.67495 0.94532 3.34727 1.10598 3.03195C1.34567 2.56155 1.72812 2.17909 2.19852 1.93941C2.51384 1.77875 2.84152 1.71932 3.17097 1.6924C3.48312 1.6669 3.86209 1.66691 4.30137 1.66693L7.62238 1.66684C8.11701 1.66618 8.55199 1.66561 8.95195 1.80356C9.30227 1.92439 9.62134 2.12159 9.88607 2.38088C10.1883 2.67692 10.3823 3.06624 10.603 3.50894L11.3484 5.00008H14.3679C15.0387 5.00007 15.5924 5.00006 16.0434 5.03691C16.5118 5.07518 16.9424 5.15732 17.3468 5.36339C17.974 5.68297 18.4839 6.19291 18.8035 6.82011C19.0096 7.22456 19.0917 7.65515 19.13 8.12356C19.1668 8.57455 19.1668 9.12818 19.1668 9.79898V13.5345C19.1668 14.2053 19.1668 14.7589 19.13 15.2099C19.0917 15.6784 19.0096 16.1089 18.8035 16.5134C18.4839 17.1406 17.974 17.6505 17.3468 17.9701C16.9424 18.1762 16.5118 18.2583 16.0434 18.2966C15.5924 18.3334 15.0387 18.3334 14.3679 18.3334H5.63243C4.96163 18.3334 4.40797 18.3334 3.95698 18.2966C3.48856 18.2583 3.05798 18.1762 2.65353 17.9701C2.02632 17.6505 1.51639 17.1406 1.19681 16.5134C0.990734 16.1089 0.908597 15.6784 0.870326 15.2099C0.833478 14.7589 0.833487 14.2053 0.833497 13.5345V5.13481ZM7.51874 3.33359C8.17742 3.33359 8.30798 3.34447 8.4085 3.37914C8.52527 3.41942 8.63163 3.48515 8.71987 3.57158C8.79584 3.64598 8.86396 3.7579 9.15852 4.34704L9.48505 5.00008L2.50023 5.00008C2.50059 4.61259 2.50314 4.34771 2.5201 4.14012C2.5386 3.91374 2.57 3.82981 2.59099 3.7886C2.67089 3.6318 2.79837 3.50432 2.95517 3.42442C2.99638 3.40343 3.08031 3.37203 3.30669 3.35353C3.54281 3.33424 3.85304 3.33359 4.3335 3.33359H7.51874Z" fill="#444CE7" />
</svg>

const WebappSvg = <svg width="16" height="18" viewBox="0 0 16 18" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M14.375 5.45825L7.99998 8.99992M7.99998 8.99992L1.62498 5.45825M7.99998 8.99992L8 16.1249M14.75 12.0439V5.95603C14.75 5.69904 14.75 5.57055 14.7121 5.45595C14.6786 5.35457 14.6239 5.26151 14.5515 5.18299C14.4697 5.09424 14.3574 5.03184 14.1328 4.90704L8.58277 1.8237C8.37007 1.70553 8.26372 1.64645 8.15109 1.62329C8.05141 1.60278 7.9486 1.60278 7.84891 1.62329C7.73628 1.64645 7.62993 1.70553 7.41723 1.8237L1.86723 4.90704C1.64259 5.03184 1.53026 5.09424 1.44847 5.18299C1.37612 5.26151 1.32136 5.35457 1.28786 5.45595C1.25 5.57055 1.25 5.69904 1.25 5.95603V12.0439C1.25 12.3008 1.25 12.4293 1.28786 12.5439C1.32136 12.6453 1.37612 12.7384 1.44847 12.8169C1.53026 12.9056 1.64259 12.968 1.86723 13.0928L7.41723 16.1762C7.62993 16.2943 7.73628 16.3534 7.84891 16.3766C7.9486 16.3971 8.05141 16.3971 8.15109 16.3766C8.26372 16.3534 8.37007 16.2943 8.58277 16.1762L14.1328 13.0928C14.3574 12.968 14.4697 12.9056 14.5515 12.8169C14.6239 12.7384 14.6786 12.6453 14.7121 12.5439C14.75 12.4293 14.75 12.3008 14.75 12.0439Z" stroke="#155EEF" strokeWidth="1.5" strokeLinecap="round" strokeLinejoin="round" />
</svg>

const NotionSvg = <svg width="24" height="24" viewBox="0 0 24 24" fill="none" xmlns="http://www.w3.org/2000/svg">
<g clipPath="url(#clip0_6294_13848)">
<path fill-rule="evenodd" clip-rule="evenodd" d="M4.287 21.9133L1.70748 18.6999C1.08685 17.9267 0.75 16.976 0.75 15.9974V4.36124C0.75 2.89548 1.92269 1.67923 3.43553 1.57594L15.3991 0.759137C16.2682 0.699797 17.1321 0.930818 17.8461 1.41353L22.0494 4.25543C22.8018 4.76414 23.25 5.59574 23.25 6.48319V19.7124C23.25 21.1468 22.0969 22.3345 20.6157 22.4256L7.3375 23.243C6.1555 23.3158 5.01299 22.8178 4.287 21.9133Z" fill="white" />
@@ -48,13 +40,17 @@ const NotionSvg = <svg width="24" height="24" viewBox="0 0 24 24" fill="none" xm

const ICON_MAP = {
app: <AppIcon className='border !border-[rgba(0,0,0,0.05)]' />,
api: <AppIcon innerIcon={ApiSvg} className='border !border-purple-200 !bg-purple-50' />,
api: <div className='rounded-lg border-[0.5px] border-divider-subtle bg-util-colors-violet-violet-500 p-1 shadow-md'>
<Code className='h-4 w-4 text-text-primary-on-surface' />
</div>,
dataset: <AppIcon innerIcon={DatasetSvg} className='!border-[0.5px] !border-indigo-100 !bg-indigo-25' />,
webapp: <AppIcon innerIcon={WebappSvg} className='border !border-primary-200 !bg-primary-100' />,
webapp: <div className='rounded-lg border-[0.5px] border-divider-subtle bg-util-colors-blue-brand-blue-brand-500 p-1 shadow-md'>
<WindowCursor className='h-4 w-4 text-text-primary-on-surface' />
</div>,
notion: <AppIcon innerIcon={NotionSvg} className='!border-[0.5px] !border-indigo-100 !bg-white' />,
}

export default function AppBasic({ icon, icon_background, name, isExternal, type, hoverTip, textStyle, isExtraInLine, mode = 'expand', iconType = 'app' }: IAppBasicProps) {
export default function AppBasic({ icon, icon_background, name, isExternal, type, hoverTip, textStyle, isExtraInLine, mode = 'expand', iconType = 'app', hideType }: IAppBasicProps) {
const { t } = useTranslation()

return (
@@ -88,9 +84,10 @@ export default function AppBasic({ icon, icon_background, name, isExternal, type
/>
}
</div>
{isExtraInLine ? (
{!hideType && isExtraInLine && (
<div className="system-2xs-medium-uppercase flex text-text-tertiary">{type}</div>
) : (
)}
{!hideType && !isExtraInLine && (
<div className='system-2xs-medium-uppercase text-text-tertiary'>{isExternal ? t('dataset.externalTag') : type}</div>
)}
</div>}

+ 50
- 18
web/app/components/app/configuration/config/agent/agent-tools/index.tsx ファイルの表示

@@ -30,15 +30,31 @@ import ConfigCredential from '@/app/components/tools/setting/build-in/config-cre
import { updateBuiltInToolCredential } from '@/service/tools'
import cn from '@/utils/classnames'
import ToolPicker from '@/app/components/workflow/block-selector/tool-picker'
import type { ToolDefaultValue } from '@/app/components/workflow/block-selector/types'
import type { ToolDefaultValue, ToolValue } from '@/app/components/workflow/block-selector/types'
import { canFindTool } from '@/utils'
import { useAllBuiltInTools, useAllCustomTools, useAllMCPTools, useAllWorkflowTools } from '@/service/use-tools'
import type { ToolWithProvider } from '@/app/components/workflow/types'
import { useMittContextSelector } from '@/context/mitt-context'

type AgentToolWithMoreInfo = AgentTool & { icon: any; collection?: Collection } | null
const AgentTools: FC = () => {
const { t } = useTranslation()
const [isShowChooseTool, setIsShowChooseTool] = useState(false)
const { modelConfig, setModelConfig, collectionList } = useContext(ConfigContext)
const { modelConfig, setModelConfig } = useContext(ConfigContext)
const { data: buildInTools } = useAllBuiltInTools()
const { data: customTools } = useAllCustomTools()
const { data: workflowTools } = useAllWorkflowTools()
const { data: mcpTools } = useAllMCPTools()
const collectionList = useMemo(() => {
const allTools = [
...(buildInTools || []),
...(customTools || []),
...(workflowTools || []),
...(mcpTools || []),
]
return allTools
}, [buildInTools, customTools, workflowTools, mcpTools])

const formattingChangedDispatcher = useFormattingChangedDispatcher()
const [currentTool, setCurrentTool] = useState<AgentToolWithMoreInfo>(null)
const currentCollection = useMemo(() => {
@@ -96,23 +112,38 @@ const AgentTools: FC = () => {
}

const [isDeleting, setIsDeleting] = useState<number>(-1)

const getToolValue = (tool: ToolDefaultValue) => {
return {
provider_id: tool.provider_id,
provider_type: tool.provider_type as CollectionType,
provider_name: tool.provider_name,
tool_name: tool.tool_name,
tool_label: tool.tool_label,
tool_parameters: tool.params,
notAuthor: !tool.is_team_authorization,
enabled: true,
}
}
const handleSelectTool = (tool: ToolDefaultValue) => {
const newModelConfig = produce(modelConfig, (draft) => {
draft.agentConfig.tools.push({
provider_id: tool.provider_id,
provider_type: tool.provider_type as CollectionType,
provider_name: tool.provider_name,
tool_name: tool.tool_name,
tool_label: tool.tool_label,
tool_parameters: tool.params,
notAuthor: !tool.is_team_authorization,
enabled: true,
})
draft.agentConfig.tools.push(getToolValue(tool))
})
setModelConfig(newModelConfig)
}

const handleSelectMultipleTool = (tool: ToolDefaultValue[]) => {
const newModelConfig = produce(modelConfig, (draft) => {
draft.agentConfig.tools.push(...tool.map(getToolValue))
})
setModelConfig(newModelConfig)
}
const getProviderShowName = (item: AgentTool) => {
const type = item.provider_type
if(type === CollectionType.builtIn)
return item.provider_name.split('/').pop()
return item.provider_name
}

return (
<>
<Panel
@@ -143,7 +174,9 @@ const AgentTools: FC = () => {
disabled={false}
supportAddCustomTool
onSelect={handleSelectTool}
selectedTools={tools as any}
onSelectMultiple={handleSelectMultipleTool}
selectedTools={tools as unknown as ToolValue[]}
canChooseMCPTool
/>
</>
)}
@@ -161,7 +194,7 @@ const AgentTools: FC = () => {
<div className='flex w-0 grow items-center'>
{item.isDeleted && <DefaultToolIcon className='h-5 w-5' />}
{!item.isDeleted && (
<div className={cn((item.notAuthor || !item.enabled) && 'opacity-50')}>
<div className={cn((item.notAuthor || !item.enabled) && 'shrink-0 opacity-50')}>
{typeof item.icon === 'string' && <div className='h-5 w-5 rounded-md bg-cover bg-center' style={{ backgroundImage: `url(${item.icon})` }} />}
{typeof item.icon !== 'string' && <AppIcon className='rounded-md' size='xs' icon={item.icon?.content} background={item.icon?.background} />}
</div>
@@ -172,7 +205,7 @@ const AgentTools: FC = () => {
(item.isDeleted || item.notAuthor || !item.enabled) ? 'opacity-50' : '',
)}
>
<span className='system-xs-medium pr-1.5 text-text-secondary'>{item.provider_type === CollectionType.builtIn ? item.provider_name.split('/').pop() : item.tool_label}</span>
<span className='system-xs-medium pr-1.5 text-text-secondary'>{getProviderShowName(item)}</span>
<span className='text-text-tertiary'>{item.tool_label}</span>
{!item.isDeleted && (
<Tooltip
@@ -285,8 +318,7 @@ const AgentTools: FC = () => {
<SettingBuiltInTool
toolName={currentTool?.tool_name as string}
setting={currentTool?.tool_parameters}
collection={currentTool?.collection as Collection}
isBuiltIn={currentTool?.collection?.type === CollectionType.builtIn}
collection={currentTool?.collection as ToolWithProvider}
isModel={currentTool?.collection?.type === CollectionType.model}
onSave={handleToolSettingChange}
onHide={() => setIsShowSettingTool(false)}

+ 7
- 5
web/app/components/app/configuration/config/agent/agent-tools/setting-built-in-tool.tsx ファイルの表示

@@ -24,10 +24,11 @@ import { fetchBuiltInToolList, fetchCustomToolList, fetchModelToolList, fetchWor
import I18n from '@/context/i18n'
import { getLanguage } from '@/i18n/language'
import cn from '@/utils/classnames'
import type { ToolWithProvider } from '@/app/components/workflow/types'

type Props = {
showBackButton?: boolean
collection: Collection
collection: Collection | ToolWithProvider
isBuiltIn?: boolean
isModel?: boolean
toolName: string
@@ -51,9 +52,10 @@ const SettingBuiltInTool: FC<Props> = ({
const { locale } = useContext(I18n)
const language = getLanguage(locale)
const { t } = useTranslation()

const [isLoading, setIsLoading] = useState(true)
const [tools, setTools] = useState<Tool[]>([])
const passedTools = (collection as ToolWithProvider).tools
const hasPassedTools = passedTools?.length > 0
const [isLoading, setIsLoading] = useState(!hasPassedTools)
const [tools, setTools] = useState<Tool[]>(hasPassedTools ? passedTools : [])
const currTool = tools.find(tool => tool.name === toolName)
const formSchemas = currTool ? toolParametersToFormSchemas(currTool.parameters) : []
const infoSchemas = formSchemas.filter(item => item.form === 'llm')
@@ -63,7 +65,7 @@ const SettingBuiltInTool: FC<Props> = ({
const [currType, setCurrType] = useState('info')
const isInfoActive = currType === 'info'
useEffect(() => {
if (!collection)
if (!collection || hasPassedTools)
return

(async () => {

+ 78
- 0
web/app/components/app/configuration/config/config-audio.tsx ファイルの表示

@@ -0,0 +1,78 @@
'use client'
import type { FC } from 'react'
import React, { useCallback } from 'react'
import { useTranslation } from 'react-i18next'
import produce from 'immer'
import { useContext } from 'use-context-selector'

import { Microphone01 } from '@/app/components/base/icons/src/vender/features'
import Tooltip from '@/app/components/base/tooltip'
import ConfigContext from '@/context/debug-configuration'
import { SupportUploadFileTypes } from '@/app/components/workflow/types'
import { useFeatures, useFeaturesStore } from '@/app/components/base/features/hooks'
import Switch from '@/app/components/base/switch'

// Debug-panel toggle card for the "audio upload" model feature.
// Enabling it adds SupportUploadFileTypes.audio to the allowed file types of
// the shared `file` feature; disabling removes it. The `file` feature itself
// is switched on/off depending on whether any allowed type remains.
// Renders nothing unless the selected model supports audio (isShowAudioConfig).
const ConfigAudio: FC = () => {
  const { t } = useTranslation()
  const file = useFeatures(s => s.features.file)
  const featuresStore = useFeaturesStore()
  const { isShowAudioConfig } = useContext(ConfigContext)

  // Audio is considered enabled iff the audio type is currently allowed.
  const isAudioEnabled = file?.allowed_file_types?.includes(SupportUploadFileTypes.audio) ?? false

  const handleChange = useCallback((value: boolean) => {
    const {
      features,
      setFeatures,
    } = featuresStore!.getState()

    const newFeatures = produce(features, (draft) => {
      // Fix: the original asserted `draft.file!` in both branches but then
      // guarded `if (draft.file)` on the last statement — a missing `file`
      // feature would throw. Guard once; with no file feature there is
      // nothing to toggle.
      if (!draft.file)
        return
      if (value) {
        // Set semantics: avoid duplicating the audio entry on repeated toggles.
        draft.file.allowed_file_types = Array.from(new Set([
          ...(draft.file.allowed_file_types || []),
          SupportUploadFileTypes.audio,
        ]))
      }
      else {
        draft.file.allowed_file_types = draft.file.allowed_file_types?.filter(
          type => type !== SupportUploadFileTypes.audio,
        )
      }
      // The file feature stays enabled only while some type is still allowed.
      draft.file.enabled = (draft.file.allowed_file_types?.length ?? 0) > 0
    })
    setFeatures(newFeatures)
  }, [featuresStore])

  if (!isShowAudioConfig)
    return null

  return (
    <div className='mt-2 flex items-center gap-2 rounded-xl border-l-[0.5px] border-t-[0.5px] bg-background-section-burn p-2'>
      <div className='shrink-0 p-1'>
        <div className='rounded-lg border-[0.5px] border-divider-subtle bg-util-colors-violet-violet-600 p-1 shadow-xs'>
          <Microphone01 className='h-4 w-4 text-text-primary-on-surface' />
        </div>
      </div>
      <div className='flex grow items-center'>
        <div className='system-sm-semibold mr-1 text-text-secondary'>{t('appDebug.feature.audioUpload.title')}</div>
        <Tooltip
          popupContent={
            <div className='w-[180px]' >
              {t('appDebug.feature.audioUpload.description')}
            </div>
          }
        />
      </div>
      <div className='flex shrink-0 items-center'>
        <div className='ml-1 mr-3 h-3.5 w-[1px] bg-divider-subtle'></div>
        <Switch
          defaultValue={isAudioEnabled}
          onChange={handleChange}
          size='md'
        />
      </div>
    </div>
  )
}
export default React.memo(ConfigAudio)

+ 3
- 0
web/app/components/app/configuration/config/index.tsx ファイルの表示

@@ -8,6 +8,7 @@ import DatasetConfig from '../dataset-config'
import HistoryPanel from '../config-prompt/conversation-history/history-panel'
import ConfigVision from '../config-vision'
import ConfigDocument from './config-document'
import ConfigAudio from './config-audio'
import AgentTools from './agent/agent-tools'
import ConfigContext from '@/context/debug-configuration'
import ConfigPrompt from '@/app/components/app/configuration/config-prompt'
@@ -85,6 +86,8 @@ const Config: FC = () => {

<ConfigDocument />

<ConfigAudio />

{/* Chat History */}
{isAdvancedMode && isChatApp && modelModeType === ModelModeType.completion && (
<HistoryPanel

+ 2
- 0
web/app/components/app/configuration/index.tsx ファイルの表示

@@ -474,6 +474,7 @@ const Configuration: FC = () => {

const isShowVisionConfig = !!currModel?.features?.includes(ModelFeatureEnum.vision)
const isShowDocumentConfig = !!currModel?.features?.includes(ModelFeatureEnum.document)
const isShowAudioConfig = !!currModel?.features?.includes(ModelFeatureEnum.audio)
const isAllowVideoUpload = !!currModel?.features?.includes(ModelFeatureEnum.video)
// *** web app features ***
const featuresData: FeaturesData = useMemo(() => {
@@ -920,6 +921,7 @@ const Configuration: FC = () => {
setVisionConfig: handleSetVisionConfig,
isAllowVideoUpload,
isShowDocumentConfig,
isShowAudioConfig,
rerankSettingModalOpen,
setRerankSettingModalOpen,
}}

+ 0
- 0
web/app/components/app/overview/appCard.tsx ファイルの表示


変更されたファイルが多すぎるため、一部のファイルは表示されません

読み込み中…
キャンセル
保存