Co-authored-by: QuantumGhost <obelisk.reg+git@gmail.com>tags/1.6.0
| conversation, | conversation, | ||||
| conversation_variables, | conversation_variables, | ||||
| generator, | generator, | ||||
| mcp_server, | |||||
| message, | message, | ||||
| model_config, | model_config, | ||||
| ops_trace, | ops_trace, | 
| import json | |||||
| from enum import StrEnum | |||||
| from flask_login import current_user | |||||
| from flask_restful import Resource, marshal_with, reqparse | |||||
| from werkzeug.exceptions import NotFound | |||||
| from controllers.console import api | |||||
| from controllers.console.app.wraps import get_app_model | |||||
| from controllers.console.wraps import account_initialization_required, setup_required | |||||
| from extensions.ext_database import db | |||||
| from fields.app_fields import app_server_fields | |||||
| from libs.login import login_required | |||||
| from models.model import AppMCPServer | |||||
| class AppMCPServerStatus(StrEnum): | |||||
| ACTIVE = "active" | |||||
| INACTIVE = "inactive" | |||||
class AppMCPServerController(Resource):
    """Console CRUD endpoints for an app's MCP server configuration."""

    @setup_required
    @login_required
    @account_initialization_required
    @get_app_model
    @marshal_with(app_server_fields)
    def get(self, app_model):
        """Return the MCP server bound to this app (None if not configured)."""
        server = db.session.query(AppMCPServer).filter(AppMCPServer.app_id == app_model.id).first()
        return server

    @setup_required
    @login_required
    @account_initialization_required
    @get_app_model
    @marshal_with(app_server_fields)
    def post(self, app_model):
        """Create an MCP server record for the app.

        Only editors (editor/admin/owner) may create; non-editors get 404 so
        the endpoint's existence is not leaked.
        """
        if not current_user.is_editor:
            raise NotFound()
        parser = reqparse.RequestParser()
        parser.add_argument("description", type=str, required=True, location="json")
        parser.add_argument("parameters", type=dict, required=True, location="json")
        args = parser.parse_args()
        server = AppMCPServer(
            name=app_model.name,
            description=args["description"],
            # Keep non-ASCII characters readable in the stored JSON.
            parameters=json.dumps(args["parameters"], ensure_ascii=False),
            status=AppMCPServerStatus.ACTIVE,
            app_id=app_model.id,
            tenant_id=current_user.current_tenant_id,
            server_code=AppMCPServer.generate_server_code(16),
        )
        db.session.add(server)
        db.session.commit()
        return server

    @setup_required
    @login_required
    @account_initialization_required
    @get_app_model
    @marshal_with(app_server_fields)
    def put(self, app_model):
        """Update description/parameters/status of an existing MCP server."""
        if not current_user.is_editor:
            raise NotFound()
        parser = reqparse.RequestParser()
        parser.add_argument("id", type=str, required=True, location="json")
        parser.add_argument("description", type=str, required=True, location="json")
        parser.add_argument("parameters", type=dict, required=True, location="json")
        parser.add_argument("status", type=str, required=False, location="json")
        args = parser.parse_args()
        # Scope the lookup to the current tenant so a server id belonging to
        # another tenant cannot be read or modified (IDOR hardening).
        server = (
            db.session.query(AppMCPServer)
            .filter(
                AppMCPServer.id == args["id"],
                AppMCPServer.tenant_id == current_user.current_tenant_id,
            )
            .first()
        )
        if not server:
            raise NotFound()
        server.description = args["description"]
        server.parameters = json.dumps(args["parameters"], ensure_ascii=False)
        if args["status"]:
            # Reject anything outside the declared enum before persisting.
            if args["status"] not in [status.value for status in AppMCPServerStatus]:
                raise ValueError("Invalid status")
            server.status = args["status"]
        db.session.commit()
        return server
class AppMCPServerRefreshController(Resource):
    """Regenerates the public server_code of an MCP server (invalidates old URLs)."""

    @setup_required
    @login_required
    @account_initialization_required
    @marshal_with(app_server_fields)
    def get(self, server_id):
        """Rotate the server_code for *server_id* within the caller's tenant."""
        if not current_user.is_editor:
            raise NotFound()
        # Tenant-scoped lookup: without it, any editor could rotate (and thereby
        # break) another tenant's server code just by guessing the id.
        server = (
            db.session.query(AppMCPServer)
            .filter(
                AppMCPServer.id == server_id,
                AppMCPServer.tenant_id == current_user.current_tenant_id,
            )
            .first()
        )
        if not server:
            raise NotFound()
        server.server_code = AppMCPServer.generate_server_code(16)
        db.session.commit()
        return server
# App-scoped MCP server management endpoints.
api.add_resource(AppMCPServerController, "/apps/<uuid:app_id>/server")
api.add_resource(AppMCPServerRefreshController, "/apps/<uuid:server_id>/server/refresh")
| import io | import io | ||||
| from urllib.parse import urlparse | |||||
| from flask import send_file | |||||
| from flask import redirect, send_file | |||||
| from flask_login import current_user | from flask_login import current_user | ||||
| from flask_restful import Resource, reqparse | from flask_restful import Resource, reqparse | ||||
| from sqlalchemy.orm import Session | from sqlalchemy.orm import Session | ||||
| from configs import dify_config | from configs import dify_config | ||||
| from controllers.console import api | from controllers.console import api | ||||
| from controllers.console.wraps import account_initialization_required, enterprise_license_required, setup_required | from controllers.console.wraps import account_initialization_required, enterprise_license_required, setup_required | ||||
| from core.mcp.auth.auth_flow import auth, handle_callback | |||||
| from core.mcp.auth.auth_provider import OAuthClientProvider | |||||
| from core.mcp.error import MCPAuthError, MCPError | |||||
| from core.mcp.mcp_client import MCPClient | |||||
| from core.model_runtime.utils.encoders import jsonable_encoder | from core.model_runtime.utils.encoders import jsonable_encoder | ||||
| from extensions.ext_database import db | from extensions.ext_database import db | ||||
| from libs.helper import alphanumeric, uuid_value | from libs.helper import alphanumeric, uuid_value | ||||
| from libs.login import login_required | from libs.login import login_required | ||||
| from services.tools.api_tools_manage_service import ApiToolManageService | from services.tools.api_tools_manage_service import ApiToolManageService | ||||
| from services.tools.builtin_tools_manage_service import BuiltinToolManageService | from services.tools.builtin_tools_manage_service import BuiltinToolManageService | ||||
| from services.tools.mcp_tools_mange_service import MCPToolManageService | |||||
| from services.tools.tool_labels_service import ToolLabelsService | from services.tools.tool_labels_service import ToolLabelsService | ||||
| from services.tools.tools_manage_service import ToolCommonService | from services.tools.tools_manage_service import ToolCommonService | ||||
| from services.tools.tools_transform_service import ToolTransformService | |||||
| from services.tools.workflow_tools_manage_service import WorkflowToolManageService | from services.tools.workflow_tools_manage_service import WorkflowToolManageService | ||||
def is_valid_url(url: str) -> bool:
    """Return True when *url* parses as an absolute http(s) URL."""
    if not url:
        return False
    try:
        parts = urlparse(url)
    except Exception:
        return False
    if parts.scheme not in ("http", "https"):
        return False
    return bool(parts.netloc)
| class ToolProviderListApi(Resource): | class ToolProviderListApi(Resource): | ||||
| @setup_required | @setup_required | ||||
| @login_required | @login_required | ||||
| req.add_argument( | req.add_argument( | ||||
| "type", | "type", | ||||
| type=str, | type=str, | ||||
| choices=["builtin", "model", "api", "workflow"], | |||||
| choices=["builtin", "model", "api", "workflow", "mcp"], | |||||
| required=False, | required=False, | ||||
| nullable=True, | nullable=True, | ||||
| location="args", | location="args", | ||||
| return jsonable_encoder(ToolLabelsService.list_tool_labels()) | return jsonable_encoder(ToolLabelsService.list_tool_labels()) | ||||
class ToolProviderMCPApi(Resource):
    """Create/update/delete MCP tool providers for the current workspace."""

    @setup_required
    @login_required
    @account_initialization_required
    def post(self):
        """Register a new MCP provider for the current tenant."""
        parser = reqparse.RequestParser()
        parser.add_argument("server_url", type=str, required=True, nullable=False, location="json")
        parser.add_argument("name", type=str, required=True, nullable=False, location="json")
        parser.add_argument("icon", type=str, required=True, nullable=False, location="json")
        parser.add_argument("icon_type", type=str, required=True, nullable=False, location="json")
        parser.add_argument("icon_background", type=str, required=False, nullable=True, location="json", default="")
        parser.add_argument("server_identifier", type=str, required=True, nullable=False, location="json")
        args = parser.parse_args()
        user = current_user
        if not is_valid_url(args["server_url"]):
            raise ValueError("Server URL is not valid.")
        return jsonable_encoder(
            MCPToolManageService.create_mcp_provider(
                tenant_id=user.current_tenant_id,
                server_url=args["server_url"],
                name=args["name"],
                icon=args["icon"],
                icon_type=args["icon_type"],
                icon_background=args["icon_background"],
                user_id=user.id,
                server_identifier=args["server_identifier"],
            )
        )

    @setup_required
    @login_required
    @account_initialization_required
    def put(self):
        """Update an existing MCP provider."""
        parser = reqparse.RequestParser()
        parser.add_argument("server_url", type=str, required=True, nullable=False, location="json")
        parser.add_argument("name", type=str, required=True, nullable=False, location="json")
        parser.add_argument("icon", type=str, required=True, nullable=False, location="json")
        parser.add_argument("icon_type", type=str, required=True, nullable=False, location="json")
        parser.add_argument("icon_background", type=str, required=False, nullable=True, location="json")
        parser.add_argument("provider_id", type=str, required=True, nullable=False, location="json")
        parser.add_argument("server_identifier", type=str, required=True, nullable=False, location="json")
        args = parser.parse_args()
        # The client echoes back a masked URL containing "[__HIDDEN__]" when the
        # URL is unchanged; that legitimately fails validation, so accept it.
        if not is_valid_url(args["server_url"]) and "[__HIDDEN__]" not in args["server_url"]:
            raise ValueError("Server URL is not valid.")
        MCPToolManageService.update_mcp_provider(
            tenant_id=current_user.current_tenant_id,
            provider_id=args["provider_id"],
            server_url=args["server_url"],
            name=args["name"],
            icon=args["icon"],
            icon_type=args["icon_type"],
            icon_background=args["icon_background"],
            server_identifier=args["server_identifier"],
        )
        return {"result": "success"}

    @setup_required
    @login_required
    @account_initialization_required
    def delete(self):
        """Remove an MCP provider from the current tenant."""
        parser = reqparse.RequestParser()
        parser.add_argument("provider_id", type=str, required=True, nullable=False, location="json")
        args = parser.parse_args()
        MCPToolManageService.delete_mcp_tool(tenant_id=current_user.current_tenant_id, provider_id=args["provider_id"])
        return {"result": "success"}
class ToolMCPAuthApi(Resource):
    """Connect/authorize an MCP provider, driving the OAuth flow when required."""

    @setup_required
    @login_required
    @account_initialization_required
    def post(self):
        """Attempt a connection to the MCP server and persist the outcome.

        Success marks the stored credentials as authorized; an auth challenge
        starts the OAuth flow; any other MCP failure wipes credentials.
        """
        parser = reqparse.RequestParser()
        parser.add_argument("provider_id", type=str, required=True, nullable=False, location="json")
        parser.add_argument("authorization_code", type=str, required=False, nullable=True, location="json")
        args = parser.parse_args()
        provider_id = args["provider_id"]
        tenant_id = current_user.current_tenant_id
        provider = MCPToolManageService.get_mcp_provider_by_provider_id(provider_id, tenant_id)
        if not provider:
            raise ValueError("provider not found")
        try:
            # Opening the client verifies connectivity; authed=False because we
            # are (re)establishing authorization in this request.
            with MCPClient(
                provider.decrypted_server_url,
                provider_id,
                tenant_id,
                authed=False,
                authorization_code=args["authorization_code"],
                for_list=True,
            ):
                # Connection succeeded: persist credentials as authorized.
                MCPToolManageService.update_mcp_provider_credentials(
                    mcp_provider=provider,
                    credentials=provider.decrypted_credentials,
                    authed=True,
                )
                return {"result": "success"}
        except MCPAuthError:
            # Server demands OAuth: kick off (or continue) the auth flow.
            auth_provider = OAuthClientProvider(provider_id, tenant_id, for_list=True)
            return auth(auth_provider, provider.decrypted_server_url, args["authorization_code"])
        except MCPError as e:
            # Any other MCP failure invalidates whatever credentials we held.
            MCPToolManageService.update_mcp_provider_credentials(
                mcp_provider=provider,
                credentials={},
                authed=False,
            )
            raise ValueError(f"Failed to connect to MCP server: {e}") from e
class ToolMCPDetailApi(Resource):
    """Detail view for a single MCP tool provider in the current workspace."""

    @setup_required
    @login_required
    @account_initialization_required
    def get(self, provider_id):
        tenant_id = current_user.current_tenant_id
        provider = MCPToolManageService.get_mcp_provider_by_provider_id(provider_id, tenant_id)
        user_provider = ToolTransformService.mcp_provider_to_user_provider(provider, for_list=True)
        return jsonable_encoder(user_provider)
class ToolMCPListAllApi(Resource):
    """List every MCP tool available to the current tenant."""

    @setup_required
    @login_required
    @account_initialization_required
    def get(self):
        mcp_tools = MCPToolManageService.retrieve_mcp_tools(tenant_id=current_user.current_tenant_id)
        return [mcp_tool.to_dict() for mcp_tool in mcp_tools]
class ToolMCPUpdateApi(Resource):
    """Re-fetch an MCP provider's tool list from its remote server."""

    @setup_required
    @login_required
    @account_initialization_required
    def get(self, provider_id):
        remote_tools = MCPToolManageService.list_mcp_tool_from_remote_server(
            tenant_id=current_user.current_tenant_id,
            provider_id=provider_id,
        )
        return jsonable_encoder(remote_tools)
class ToolMCPCallbackApi(Resource):
    """OAuth redirect endpoint for MCP providers (unauthenticated: the IdP calls it)."""

    def get(self):
        parser = reqparse.RequestParser()
        parser.add_argument("code", type=str, required=True, nullable=False, location="args")
        parser.add_argument("state", type=str, required=True, nullable=False, location="args")
        args = parser.parse_args()
        # The state key identifies the pending OAuth session stored server-side.
        handle_callback(args["state"], args["code"])
        return redirect(f"{dify_config.CONSOLE_WEB_URL}/oauth-callback")
# tool provider
api.add_resource(ToolProviderListApi, "/workspaces/current/tool-providers")
api.add_resource(ToolWorkflowProviderGetApi, "/workspaces/current/tool-provider/workflow/get")
api.add_resource(ToolWorkflowProviderListToolApi, "/workspaces/current/tool-provider/workflow/tools")
# mcp tool provider
api.add_resource(ToolMCPDetailApi, "/workspaces/current/tool-provider/mcp/tools/<path:provider_id>")
api.add_resource(ToolProviderMCPApi, "/workspaces/current/tool-provider/mcp")
api.add_resource(ToolMCPUpdateApi, "/workspaces/current/tool-provider/mcp/update/<path:provider_id>")
api.add_resource(ToolMCPAuthApi, "/workspaces/current/tool-provider/mcp/auth")
# OAuth redirect target; outside the workspace prefix because the IdP calls it directly.
api.add_resource(ToolMCPCallbackApi, "/mcp/oauth/callback")
api.add_resource(ToolBuiltinListApi, "/workspaces/current/tools/builtin")
api.add_resource(ToolApiListApi, "/workspaces/current/tools/api")
api.add_resource(ToolMCPListAllApi, "/workspaces/current/tools/mcp")
api.add_resource(ToolWorkflowListApi, "/workspaces/current/tools/workflow")
api.add_resource(ToolLabelsApi, "/workspaces/current/tool-labels")
from flask import Blueprint

from libs.external_api import ExternalApi

# Blueprint serving the public MCP endpoints under the /mcp prefix.
bp = Blueprint("mcp", __name__, url_prefix="/mcp")
api = ExternalApi(bp)

# Imported for side effects: registers the MCP resources on `api`.
from . import mcp
| from flask_restful import Resource, reqparse | |||||
| from pydantic import ValidationError | |||||
| from controllers.console.app.mcp_server import AppMCPServerStatus | |||||
| from controllers.mcp import api | |||||
| from core.app.app_config.entities import VariableEntity | |||||
| from core.mcp import types | |||||
| from core.mcp.server.streamable_http import MCPServerStreamableHTTPRequestHandler | |||||
| from core.mcp.types import ClientNotification, ClientRequest | |||||
| from core.mcp.utils import create_mcp_error_response | |||||
| from extensions.ext_database import db | |||||
| from libs import helper | |||||
| from models.model import App, AppMCPServer, AppMode | |||||
class MCPAppApi(Resource):
    """Public JSON-RPC endpoint exposing a Dify app as an MCP server."""

    def post(self, server_code):
        """Handle one MCP JSON-RPC message addressed to *server_code*.

        Validates the server and app, reconstructs the app's user input form,
        then delegates to the streamable-HTTP MCP handler.
        """

        def int_or_str(value):
            # JSON-RPC ids may be numbers or strings; anything else becomes None.
            if isinstance(value, (int, str)):
                return value
            else:
                return None

        parser = reqparse.RequestParser()
        parser.add_argument("jsonrpc", type=str, required=True, location="json")
        parser.add_argument("method", type=str, required=True, location="json")
        parser.add_argument("params", type=dict, required=False, location="json")
        parser.add_argument("id", type=int_or_str, required=False, location="json")
        args = parser.parse_args()
        request_id = args.get("id")
        server = db.session.query(AppMCPServer).filter(AppMCPServer.server_code == server_code).first()
        if not server:
            return helper.compact_generate_response(
                create_mcp_error_response(request_id, types.INVALID_REQUEST, "Server Not Found")
            )
        if server.status != AppMCPServerStatus.ACTIVE:
            return helper.compact_generate_response(
                create_mcp_error_response(request_id, types.INVALID_REQUEST, "Server is not active")
            )
        app = db.session.query(App).filter(App.id == server.app_id).first()
        if not app:
            return helper.compact_generate_response(
                create_mcp_error_response(request_id, types.INVALID_REQUEST, "App Not Found")
            )
        # Workflow-style apps keep the input form on the workflow; other app
        # modes keep it in the app model config.
        if app.mode in {AppMode.ADVANCED_CHAT.value, AppMode.WORKFLOW.value}:
            workflow = app.workflow
            if workflow is None:
                return helper.compact_generate_response(
                    create_mcp_error_response(request_id, types.INVALID_REQUEST, "App is unavailable")
                )
            user_input_form = workflow.user_input_form(to_old_structure=True)
        else:
            app_model_config = app.app_model_config
            if app_model_config is None:
                return helper.compact_generate_response(
                    create_mcp_error_response(request_id, types.INVALID_REQUEST, "App is unavailable")
                )
            features_dict = app_model_config.to_dict()
            user_input_form = features_dict.get("user_input_form", [])
        # Normalize the legacy form items (either {"type": ..., ...} or
        # {variable_type: {...}}) into VariableEntity objects.
        converted_user_input_form: list[VariableEntity] = []
        try:
            for item in user_input_form:
                variable_type = item.get("type", "") or list(item.keys())[0]
                variable = item[variable_type]
                converted_user_input_form.append(
                    VariableEntity(
                        type=variable_type,
                        variable=variable.get("variable"),
                        description=variable.get("description") or "",
                        label=variable.get("label"),
                        required=variable.get("required", False),
                        max_length=variable.get("max_length"),
                        options=variable.get("options") or [],
                    )
                )
        except ValidationError as e:
            return helper.compact_generate_response(
                create_mcp_error_response(request_id, types.INVALID_PARAMS, f"Invalid user_input_form: {str(e)}")
            )
        # An MCP message is either a request (expects a response) or a
        # notification; try request first, then fall back to notification.
        try:
            request: ClientRequest | ClientNotification = ClientRequest.model_validate(args)
        except ValidationError as e:
            try:
                notification = ClientNotification.model_validate(args)
                request = notification
            except ValidationError as e:
                return helper.compact_generate_response(
                    create_mcp_error_response(request_id, types.INVALID_PARAMS, f"Invalid MCP request: {str(e)}")
                )
        mcp_server_handler = MCPServerStreamableHTTPRequestHandler(app, request, converted_user_input_form)
        response = mcp_server_handler.handle()
        return helper.compact_generate_response(response)
# Public MCP endpoint keyed by the per-app server code.
api.add_resource(MCPAppApi, "/server/<string:server_code>/mcp")
| if parameter.type == ToolParameter.ToolParameterType.SELECT: | if parameter.type == ToolParameter.ToolParameterType.SELECT: | ||||
| enum = [option.value for option in parameter.options] if parameter.options else [] | enum = [option.value for option in parameter.options] if parameter.options else [] | ||||
| message_tool.parameters["properties"][parameter.name] = { | |||||
| "type": parameter_type, | |||||
| "description": parameter.llm_description or "", | |||||
| } | |||||
| message_tool.parameters["properties"][parameter.name] = ( | |||||
| { | |||||
| "type": parameter_type, | |||||
| "description": parameter.llm_description or "", | |||||
| } | |||||
| if parameter.input_schema is None | |||||
| else parameter.input_schema | |||||
| ) | |||||
| if len(enum) > 0: | if len(enum) > 0: | ||||
| message_tool.parameters["properties"][parameter.name]["enum"] = enum | message_tool.parameters["properties"][parameter.name]["enum"] = enum | ||||
| if parameter.type == ToolParameter.ToolParameterType.SELECT: | if parameter.type == ToolParameter.ToolParameterType.SELECT: | ||||
| enum = [option.value for option in parameter.options] if parameter.options else [] | enum = [option.value for option in parameter.options] if parameter.options else [] | ||||
| prompt_tool.parameters["properties"][parameter.name] = { | |||||
| "type": parameter_type, | |||||
| "description": parameter.llm_description or "", | |||||
| } | |||||
| prompt_tool.parameters["properties"][parameter.name] = ( | |||||
| { | |||||
| "type": parameter_type, | |||||
| "description": parameter.llm_description or "", | |||||
| } | |||||
| if parameter.input_schema is None | |||||
| else parameter.input_schema | |||||
| ) | |||||
| if len(enum) > 0: | if len(enum) > 0: | ||||
| prompt_tool.parameters["properties"][parameter.name]["enum"] = enum | prompt_tool.parameters["properties"][parameter.name]["enum"] = enum | 
| description: I18nObject = Field(..., description="The description of the agent strategy") | description: I18nObject = Field(..., description="The description of the agent strategy") | ||||
| output_schema: Optional[dict] = None | output_schema: Optional[dict] = None | ||||
| features: Optional[list[AgentFeature]] = None | features: Optional[list[AgentFeature]] = None | ||||
| meta_version: Optional[str] = None | |||||
| # pydantic configs | # pydantic configs | ||||
| model_config = ConfigDict(protected_namespaces=()) | model_config = ConfigDict(protected_namespaces=()) | ||||
| tenant_id: str | tenant_id: str | ||||
| declaration: AgentStrategyEntity | declaration: AgentStrategyEntity | ||||
| meta_version: str | None = None | |||||
| def __init__(self, tenant_id: str, declaration: AgentStrategyEntity): | |||||
| def __init__(self, tenant_id: str, declaration: AgentStrategyEntity, meta_version: str | None): | |||||
| self.tenant_id = tenant_id | self.tenant_id = tenant_id | ||||
| self.declaration = declaration | self.declaration = declaration | ||||
| self.meta_version = meta_version | |||||
| def get_parameters(self) -> Sequence[AgentStrategyParameter]: | def get_parameters(self) -> Sequence[AgentStrategyParameter]: | ||||
| return self.declaration.parameters | return self.declaration.parameters | 
| DYNAMIC_SELECT = "dynamic-select" | DYNAMIC_SELECT = "dynamic-select" | ||||
| # TOOL_SELECTOR = "tool-selector" | # TOOL_SELECTOR = "tool-selector" | ||||
| # MCP object and array type parameters | |||||
| ARRAY = "array" | |||||
| OBJECT = "object" | |||||
| class AppSelectorScope(StrEnum): | class AppSelectorScope(StrEnum): | 
| import base64 | |||||
| import hashlib | |||||
| import json | |||||
| import os | |||||
| import secrets | |||||
| import urllib.parse | |||||
| from typing import Optional | |||||
| from urllib.parse import urljoin | |||||
| import requests | |||||
| from pydantic import BaseModel, ValidationError | |||||
| from core.mcp.auth.auth_provider import OAuthClientProvider | |||||
| from core.mcp.types import ( | |||||
| OAuthClientInformation, | |||||
| OAuthClientInformationFull, | |||||
| OAuthClientMetadata, | |||||
| OAuthMetadata, | |||||
| OAuthTokens, | |||||
| ) | |||||
| from extensions.ext_redis import redis_client | |||||
| LATEST_PROTOCOL_VERSION = "1.0" | |||||
| OAUTH_STATE_EXPIRY_SECONDS = 5 * 60 # 5 minutes expiry | |||||
| OAUTH_STATE_REDIS_KEY_PREFIX = "oauth_state:" | |||||
class OAuthCallbackState(BaseModel):
    """Everything needed to finish an OAuth flow after the provider redirects back.

    Serialized to JSON and parked in Redis under a random state key; retrieved
    (and deleted) once when the callback arrives.
    """

    provider_id: str
    tenant_id: str
    server_url: str
    # Discovered RFC 8414 metadata; None when the server publishes none.
    metadata: OAuthMetadata | None = None
    client_information: OAuthClientInformation
    # PKCE verifier matching the challenge sent in the authorization request.
    code_verifier: str
    redirect_uri: str
def generate_pkce_challenge() -> tuple[str, str]:
    """Generate a PKCE (RFC 7636) code verifier and S256 code challenge.

    Returns:
        (code_verifier, code_challenge), both base64url-encoded without padding.
    """
    # secrets is the correct primitive for security-sensitive randomness.
    # 40 random bytes -> 54 base64url chars, within RFC 7636's 43-128 range.
    # urlsafe_b64encode already emits '-'/'_' for '+'/'/', so only the '='
    # padding needs stripping (the original's extra replaces were redundant).
    code_verifier = base64.urlsafe_b64encode(secrets.token_bytes(40)).decode("utf-8").rstrip("=")
    digest = hashlib.sha256(code_verifier.encode("utf-8")).digest()
    code_challenge = base64.urlsafe_b64encode(digest).decode("utf-8").rstrip("=")
    return code_verifier, code_challenge
def _create_secure_redis_state(state_data: OAuthCallbackState) -> str:
    """Park *state_data* in Redis and return the random key identifying it."""
    # An unguessable key doubles as the OAuth `state` parameter.
    state_key = secrets.token_urlsafe(32)
    redis_client.setex(
        f"{OAUTH_STATE_REDIS_KEY_PREFIX}{state_key}",
        OAUTH_STATE_EXPIRY_SECONDS,
        state_data.model_dump_json(),
    )
    return state_key
def _retrieve_redis_state(state_key: str) -> OAuthCallbackState:
    """Fetch, delete, and validate the OAuth state stored under *state_key*.

    Raises:
        ValueError: if the state expired/never existed or fails validation.
    """
    redis_key = f"{OAUTH_STATE_REDIS_KEY_PREFIX}{state_key}"
    state_data = redis_client.get(redis_key)
    if not state_data:
        raise ValueError("State parameter has expired or does not exist")
    # Single-use: delete immediately so a replayed callback cannot reuse it.
    redis_client.delete(redis_key)
    try:
        return OAuthCallbackState.model_validate_json(state_data)
    except ValidationError as e:
        # Chain the cause so the pydantic error details stay attached (B904).
        raise ValueError(f"Invalid state parameter: {str(e)}") from e
def handle_callback(state_key: str, authorization_code: str) -> OAuthCallbackState:
    """Handle the callback from the OAuth provider.

    Looks up the single-use state, exchanges the authorization code for
    tokens, and persists them on the provider.
    """
    # Retrieve state data from Redis (state is automatically deleted after retrieval)
    full_state_data = _retrieve_redis_state(state_key)
    tokens = exchange_authorization(
        full_state_data.server_url,
        full_state_data.metadata,
        full_state_data.client_information,
        authorization_code,
        full_state_data.code_verifier,
        full_state_data.redirect_uri,
    )
    provider = OAuthClientProvider(full_state_data.provider_id, full_state_data.tenant_id, for_list=True)
    provider.save_tokens(tokens)
    return full_state_data
def discover_oauth_metadata(server_url: str, protocol_version: Optional[str] = None) -> Optional[OAuthMetadata]:
    """Looks up RFC 8414 OAuth 2.0 Authorization Server Metadata.

    Returns:
        Parsed metadata, or None when the server publishes none (HTTP 404).

    Raises:
        ValueError: for non-404 HTTP failures.
    """
    url = urljoin(server_url, "/.well-known/oauth-authorization-server")
    try:
        headers = {"MCP-Protocol-Version": protocol_version or LATEST_PROTOCOL_VERSION}
        # Bounded timeout so a stalled metadata endpoint cannot hang the request.
        response = requests.get(url, headers=headers, timeout=10)
        return _parse_oauth_metadata_response(response)
    except requests.RequestException as e:
        if isinstance(e, requests.ConnectionError):
            # Some servers reject the custom MCP-Protocol-Version header
            # (strict CORS/proxy setups); retry once without custom headers.
            response = requests.get(url, timeout=10)
            return _parse_oauth_metadata_response(response)
        raise


def _parse_oauth_metadata_response(response: requests.Response) -> Optional[OAuthMetadata]:
    """Translate a well-known metadata HTTP response into OAuthMetadata (None on 404)."""
    if response.status_code == 404:
        return None
    if not response.ok:
        raise ValueError(f"HTTP {response.status_code} trying to load well-known OAuth metadata")
    return OAuthMetadata.model_validate(response.json())
def start_authorization(
    server_url: str,
    metadata: Optional[OAuthMetadata],
    client_information: OAuthClientInformation,
    redirect_url: str,
    provider_id: str,
    tenant_id: str,
) -> tuple[str, str]:
    """Begins the authorization flow with secure Redis state storage.

    Returns:
        (authorization_url, code_verifier) — redirect the user to the URL; the
        verifier is also embedded in the Redis-stored callback state.

    Raises:
        ValueError: if the server's metadata rules out the code/S256 PKCE flow.
    """
    response_type = "code"
    code_challenge_method = "S256"
    if metadata:
        authorization_url = metadata.authorization_endpoint
        if response_type not in metadata.response_types_supported:
            raise ValueError(f"Incompatible auth server: does not support response type {response_type}")
        if (
            not metadata.code_challenge_methods_supported
            or code_challenge_method not in metadata.code_challenge_methods_supported
        ):
            raise ValueError(
                f"Incompatible auth server: does not support code challenge method {code_challenge_method}"
            )
    else:
        # No discovered metadata: fall back to the conventional /authorize path.
        authorization_url = urljoin(server_url, "/authorize")
    code_verifier, code_challenge = generate_pkce_challenge()
    # Prepare state data with all necessary information
    state_data = OAuthCallbackState(
        provider_id=provider_id,
        tenant_id=tenant_id,
        server_url=server_url,
        metadata=metadata,
        client_information=client_information,
        code_verifier=code_verifier,
        redirect_uri=redirect_url,
    )
    # Store state data in Redis and generate secure state key
    state_key = _create_secure_redis_state(state_data)
    params = {
        "response_type": response_type,
        "client_id": client_information.client_id,
        "code_challenge": code_challenge,
        "code_challenge_method": code_challenge_method,
        "redirect_uri": redirect_url,
        "state": state_key,
    }
    authorization_url = f"{authorization_url}?{urllib.parse.urlencode(params)}"
    return authorization_url, code_verifier
def exchange_authorization(
    server_url: str,
    metadata: Optional[OAuthMetadata],
    client_information: OAuthClientInformation,
    authorization_code: str,
    code_verifier: str,
    redirect_uri: str,
) -> OAuthTokens:
    """Exchanges an authorization code for an access token.

    Raises:
        ValueError: when metadata rules out the authorization_code grant or
            the token endpoint returns a non-2xx status.
    """
    grant_type = "authorization_code"
    if metadata:
        token_url = metadata.token_endpoint
        if metadata.grant_types_supported and grant_type not in metadata.grant_types_supported:
            raise ValueError(f"Incompatible auth server: does not support grant type {grant_type}")
    else:
        # No discovered metadata: assume the conventional /token path.
        token_url = urljoin(server_url, "/token")
    params = {
        "grant_type": grant_type,
        "client_id": client_information.client_id,
        "code": authorization_code,
        "code_verifier": code_verifier,
        "redirect_uri": redirect_uri,
    }
    # Confidential clients must also present their secret.
    if client_information.client_secret:
        params["client_secret"] = client_information.client_secret
    # Bounded timeout so a stalled token endpoint cannot hang the callback.
    response = requests.post(token_url, data=params, timeout=10)
    if not response.ok:
        raise ValueError(f"Token exchange failed: HTTP {response.status_code}")
    return OAuthTokens.model_validate(response.json())
def refresh_authorization(
    server_url: str,
    metadata: Optional[OAuthMetadata],
    client_information: OAuthClientInformation,
    refresh_token: str,
) -> OAuthTokens:
    """Exchange a refresh token for an updated access token.

    Args:
        server_url: Base URL of the authorization server (used only when no
            discovery metadata is available).
        metadata: Discovered OAuth server metadata, if any.
        client_information: Registered client credentials.
        refresh_token: The refresh token previously issued by the server.

    Returns:
        The new token set returned by the server.

    Raises:
        ValueError: If the server does not support the refresh grant, or the
            token request fails.
    """
    grant_type = "refresh_token"
    if metadata:
        token_url = metadata.token_endpoint
        if metadata.grant_types_supported and grant_type not in metadata.grant_types_supported:
            raise ValueError(f"Incompatible auth server: does not support grant type {grant_type}")
    else:
        # No discovery document: fall back to the conventional token path.
        token_url = urljoin(server_url, "/token")
    params = {
        "grant_type": grant_type,
        "client_id": client_information.client_id,
        "refresh_token": refresh_token,
    }
    if client_information.client_secret:
        params["client_secret"] = client_information.client_secret
    response = requests.post(token_url, data=params)
    if not response.ok:
        raise ValueError(f"Token refresh failed: HTTP {response.status_code}")
    # Pydantic v2 API, consistent with exchange_authorization; parse_obj is
    # deprecated in Pydantic v2.
    return OAuthTokens.model_validate(response.json())
def register_client(
    server_url: str,
    metadata: Optional[OAuthMetadata],
    client_metadata: OAuthClientMetadata,
) -> OAuthClientInformationFull:
    """Performs OAuth 2.0 Dynamic Client Registration.

    Raises:
        ValueError: If the server metadata lacks a registration endpoint.
        requests.HTTPError: If the registration request fails.
    """
    if not metadata:
        # No discovery document: assume the conventional registration path.
        registration_url = urljoin(server_url, "/register")
    else:
        if not metadata.registration_endpoint:
            raise ValueError("Incompatible auth server: does not support dynamic client registration")
        registration_url = metadata.registration_endpoint
    response = requests.post(
        registration_url,
        json=client_metadata.model_dump(),
        headers={"Content-Type": "application/json"},
    )
    # raise_for_status is a no-op on success, so the explicit ok-check is unneeded.
    response.raise_for_status()
    return OAuthClientInformationFull.model_validate(response.json())
def auth(
    provider: OAuthClientProvider,
    server_url: str,
    authorization_code: Optional[str] = None,
    state_param: Optional[str] = None,
    for_list: bool = False,
) -> dict[str, str]:
    """Orchestrates the full auth flow with a server using secure Redis state storage.

    Three mutually exclusive paths, checked in order:
    1. Exchange ``authorization_code`` (requires ``state_param``) for tokens.
    2. Refresh existing tokens when a refresh token is already stored.
    3. Start a brand-new authorization flow and return the redirect URL.

    Args:
        provider: Wrapper around the persisted MCP provider credentials.
        server_url: Base URL of the authorization server.
        authorization_code: Code returned by the authorization server, if any.
        state_param: Redis state key received via the OAuth callback.
        for_list: NOTE(review) accepted but never used in this function;
            the lookup mode is chosen when constructing ``provider`` — confirm intent.

    Returns:
        ``{"result": "success"}`` when tokens were stored, or
        ``{"authorization_url": ...}`` when the caller must redirect the user.

    Raises:
        ValueError: On registration, state-validation, exchange or refresh failure.
    """
    metadata = discover_oauth_metadata(server_url)
    # Handle client registration if needed
    client_information = provider.client_information()
    if not client_information:
        # A code exchange can only be completed by the client that started it.
        if authorization_code is not None:
            raise ValueError("Existing OAuth client information is required when exchanging an authorization code")
        try:
            full_information = register_client(server_url, metadata, provider.client_metadata)
        except requests.RequestException as e:
            raise ValueError(f"Could not register OAuth client: {e}")
        provider.save_client_information(full_information)
        client_information = full_information
    # Exchange authorization code for tokens
    if authorization_code is not None:
        if not state_param:
            raise ValueError("State parameter is required when exchanging authorization code")
        try:
            # Retrieve state data from Redis using state key
            full_state_data = _retrieve_redis_state(state_param)
            code_verifier = full_state_data.code_verifier
            redirect_uri = full_state_data.redirect_uri
            if not code_verifier or not redirect_uri:
                raise ValueError("Missing code_verifier or redirect_uri in state data")
        except (json.JSONDecodeError, ValueError) as e:
            raise ValueError(f"Invalid state parameter: {e}")
        tokens = exchange_authorization(
            server_url,
            metadata,
            client_information,
            authorization_code,
            code_verifier,
            redirect_uri,
        )
        provider.save_tokens(tokens)
        return {"result": "success"}
    provider_tokens = provider.tokens()
    # Handle token refresh or new authorization
    if provider_tokens and provider_tokens.refresh_token:
        try:
            new_tokens = refresh_authorization(server_url, metadata, client_information, provider_tokens.refresh_token)
            provider.save_tokens(new_tokens)
            return {"result": "success"}
        except Exception as e:
            raise ValueError(f"Could not refresh OAuth tokens: {e}")
    # Start new authorization flow
    authorization_url, code_verifier = start_authorization(
        server_url,
        metadata,
        client_information,
        provider.redirect_url,
        provider.mcp_provider.id,
        provider.mcp_provider.tenant_id,
    )
    # Persist the verifier so the callback leg can complete the PKCE exchange.
    provider.save_code_verifier(code_verifier)
    return {"authorization_url": authorization_url}
| from typing import Optional | |||||
| from configs import dify_config | |||||
| from core.mcp.types import ( | |||||
| OAuthClientInformation, | |||||
| OAuthClientInformationFull, | |||||
| OAuthClientMetadata, | |||||
| OAuthTokens, | |||||
| ) | |||||
| from models.tools import MCPToolProvider | |||||
| from services.tools.mcp_tools_mange_service import MCPToolManageService | |||||
| LATEST_PROTOCOL_VERSION = "1.0" | |||||
class OAuthClientProvider:
    """Bridges the generic OAuth flow helpers with a persisted MCP tool provider."""

    mcp_provider: MCPToolProvider

    def __init__(self, provider_id: str, tenant_id: str, for_list: bool = False):
        """Resolve the MCP provider record this OAuth client acts on behalf of.

        ``for_list`` selects lookup by provider id (list view) instead of by
        server identifier.
        """
        lookup = (
            MCPToolManageService.get_mcp_provider_by_provider_id
            if for_list
            else MCPToolManageService.get_mcp_provider_by_server_identifier
        )
        self.mcp_provider = lookup(provider_id, tenant_id)

    @property
    def redirect_url(self) -> str:
        """The URL the authorization server sends the user agent back to."""
        return dify_config.CONSOLE_API_URL + "/console/api/mcp/oauth/callback"

    @property
    def client_metadata(self) -> OAuthClientMetadata:
        """Registration metadata describing this OAuth client."""
        return OAuthClientMetadata(
            client_name="Dify",
            client_uri="https://github.com/langgenius/dify",
            redirect_uris=[self.redirect_url],
            token_endpoint_auth_method="none",
            grant_types=["authorization_code", "refresh_token"],
            response_types=["code"],
        )

    def client_information(self) -> Optional[OAuthClientInformation]:
        """Return stored OAuth client registration info, or None when absent."""
        stored = self.mcp_provider.decrypted_credentials.get("client_information", {})
        return OAuthClientInformation.model_validate(stored) if stored else None

    def save_client_information(self, client_information: OAuthClientInformationFull) -> None:
        """Persist client information obtained via dynamic registration."""
        MCPToolManageService.update_mcp_provider_credentials(
            self.mcp_provider,
            {"client_information": client_information.model_dump()},
        )

    def tokens(self) -> Optional[OAuthTokens]:
        """Return previously persisted OAuth tokens, or None when none exist."""
        creds = self.mcp_provider.decrypted_credentials
        if not creds:
            return None
        return OAuthTokens(
            access_token=creds.get("access_token", ""),
            token_type=creds.get("token_type", "Bearer"),
            expires_in=int(creds.get("expires_in", "3600") or 3600),
            refresh_token=creds.get("refresh_token", ""),
        )

    def save_tokens(self, tokens: OAuthTokens) -> None:
        """Persist a fresh token set and mark the provider as authorized."""
        MCPToolManageService.update_mcp_provider_credentials(self.mcp_provider, tokens.model_dump(), authed=True)

    def save_code_verifier(self, code_verifier: str) -> None:
        """Persist the PKCE code verifier for the in-flight authorization."""
        MCPToolManageService.update_mcp_provider_credentials(self.mcp_provider, {"code_verifier": code_verifier})

    def code_verifier(self) -> str:
        """Return the stored PKCE code verifier (empty string when unset)."""
        creds = self.mcp_provider.decrypted_credentials
        return str(creds.get("code_verifier", ""))
| import logging | |||||
| import queue | |||||
| from collections.abc import Generator | |||||
| from concurrent.futures import ThreadPoolExecutor | |||||
| from contextlib import contextmanager | |||||
| from typing import Any, TypeAlias, final | |||||
| from urllib.parse import urljoin, urlparse | |||||
| import httpx | |||||
| from sseclient import SSEClient | |||||
| from core.mcp import types | |||||
| from core.mcp.error import MCPAuthError, MCPConnectionError | |||||
| from core.mcp.types import SessionMessage | |||||
| from core.mcp.utils import create_ssrf_proxy_mcp_http_client, ssrf_proxy_sse_connect | |||||
| logger = logging.getLogger(__name__) | |||||
| DEFAULT_QUEUE_READ_TIMEOUT = 3 | |||||
@final
class _StatusReady:
    """Internal status signal: the server announced its message endpoint."""

    def __init__(self, endpoint_url: str):
        # Read directly by SSETransport._wait_for_endpoint.
        self._endpoint_url = endpoint_url
@final
class _StatusError:
    """Internal status signal: connection setup failed with an exception."""

    def __init__(self, exc: Exception):
        # Re-raised to the caller by SSETransport._wait_for_endpoint.
        self._exc = exc
# Type aliases for better readability
# Server -> client messages; Exception entries carry errors, None is the shutdown sentinel.
ReadQueue: TypeAlias = queue.Queue[SessionMessage | Exception | None]
# Client -> server messages; same Exception/None conventions as ReadQueue.
WriteQueue: TypeAlias = queue.Queue[SessionMessage | Exception | None]
# One-shot channel reporting connection setup success or failure.
StatusQueue: TypeAlias = queue.Queue[_StatusReady | _StatusError]
def remove_request_params(url: str) -> str:
    """Strip the query string from *url*, keeping only scheme, host and path."""
    path_only = urlparse(url).path
    return urljoin(url, path_only)
| class SSETransport: | |||||
| """SSE client transport implementation.""" | |||||
| def __init__( | |||||
| self, | |||||
| url: str, | |||||
| headers: dict[str, Any] | None = None, | |||||
| timeout: float = 5.0, | |||||
| sse_read_timeout: float = 5 * 60, | |||||
| ) -> None: | |||||
| """Initialize the SSE transport. | |||||
| Args: | |||||
| url: The SSE endpoint URL. | |||||
| headers: Optional headers to include in requests. | |||||
| timeout: HTTP timeout for regular operations. | |||||
| sse_read_timeout: Timeout for SSE read operations. | |||||
| """ | |||||
| self.url = url | |||||
| self.headers = headers or {} | |||||
| self.timeout = timeout | |||||
| self.sse_read_timeout = sse_read_timeout | |||||
| self.endpoint_url: str | None = None | |||||
| def _validate_endpoint_url(self, endpoint_url: str) -> bool: | |||||
| """Validate that the endpoint URL matches the connection origin. | |||||
| Args: | |||||
| endpoint_url: The endpoint URL to validate. | |||||
| Returns: | |||||
| True if valid, False otherwise. | |||||
| """ | |||||
| url_parsed = urlparse(self.url) | |||||
| endpoint_parsed = urlparse(endpoint_url) | |||||
| return url_parsed.netloc == endpoint_parsed.netloc and url_parsed.scheme == endpoint_parsed.scheme | |||||
| def _handle_endpoint_event(self, sse_data: str, status_queue: StatusQueue) -> None: | |||||
| """Handle an 'endpoint' SSE event. | |||||
| Args: | |||||
| sse_data: The SSE event data. | |||||
| status_queue: Queue to put status updates. | |||||
| """ | |||||
| endpoint_url = urljoin(self.url, sse_data) | |||||
| logger.info(f"Received endpoint URL: {endpoint_url}") | |||||
| if not self._validate_endpoint_url(endpoint_url): | |||||
| error_msg = f"Endpoint origin does not match connection origin: {endpoint_url}" | |||||
| logger.error(error_msg) | |||||
| status_queue.put(_StatusError(ValueError(error_msg))) | |||||
| return | |||||
| status_queue.put(_StatusReady(endpoint_url)) | |||||
| def _handle_message_event(self, sse_data: str, read_queue: ReadQueue) -> None: | |||||
| """Handle a 'message' SSE event. | |||||
| Args: | |||||
| sse_data: The SSE event data. | |||||
| read_queue: Queue to put parsed messages. | |||||
| """ | |||||
| try: | |||||
| message = types.JSONRPCMessage.model_validate_json(sse_data) | |||||
| logger.debug(f"Received server message: {message}") | |||||
| session_message = SessionMessage(message) | |||||
| read_queue.put(session_message) | |||||
| except Exception as exc: | |||||
| logger.exception("Error parsing server message") | |||||
| read_queue.put(exc) | |||||
| def _handle_sse_event(self, sse, read_queue: ReadQueue, status_queue: StatusQueue) -> None: | |||||
| """Handle a single SSE event. | |||||
| Args: | |||||
| sse: The SSE event object. | |||||
| read_queue: Queue for message events. | |||||
| status_queue: Queue for status events. | |||||
| """ | |||||
| match sse.event: | |||||
| case "endpoint": | |||||
| self._handle_endpoint_event(sse.data, status_queue) | |||||
| case "message": | |||||
| self._handle_message_event(sse.data, read_queue) | |||||
| case _: | |||||
| logger.warning(f"Unknown SSE event: {sse.event}") | |||||
| def sse_reader(self, event_source, read_queue: ReadQueue, status_queue: StatusQueue) -> None: | |||||
| """Read and process SSE events. | |||||
| Args: | |||||
| event_source: The SSE event source. | |||||
| read_queue: Queue to put received messages. | |||||
| status_queue: Queue to put status updates. | |||||
| """ | |||||
| try: | |||||
| for sse in event_source.iter_sse(): | |||||
| self._handle_sse_event(sse, read_queue, status_queue) | |||||
| except httpx.ReadError as exc: | |||||
| logger.debug(f"SSE reader shutting down normally: {exc}") | |||||
| except Exception as exc: | |||||
| read_queue.put(exc) | |||||
| finally: | |||||
| read_queue.put(None) | |||||
| def _send_message(self, client: httpx.Client, endpoint_url: str, message: SessionMessage) -> None: | |||||
| """Send a single message to the server. | |||||
| Args: | |||||
| client: HTTP client to use. | |||||
| endpoint_url: The endpoint URL to send to. | |||||
| message: The message to send. | |||||
| """ | |||||
| response = client.post( | |||||
| endpoint_url, | |||||
| json=message.message.model_dump( | |||||
| by_alias=True, | |||||
| mode="json", | |||||
| exclude_none=True, | |||||
| ), | |||||
| ) | |||||
| response.raise_for_status() | |||||
| logger.debug(f"Client message sent successfully: {response.status_code}") | |||||
| def post_writer(self, client: httpx.Client, endpoint_url: str, write_queue: WriteQueue) -> None: | |||||
| """Handle writing messages to the server. | |||||
| Args: | |||||
| client: HTTP client to use. | |||||
| endpoint_url: The endpoint URL to send messages to. | |||||
| write_queue: Queue to read messages from. | |||||
| """ | |||||
| try: | |||||
| while True: | |||||
| try: | |||||
| message = write_queue.get(timeout=DEFAULT_QUEUE_READ_TIMEOUT) | |||||
| if message is None: | |||||
| break | |||||
| if isinstance(message, Exception): | |||||
| write_queue.put(message) | |||||
| continue | |||||
| self._send_message(client, endpoint_url, message) | |||||
| except queue.Empty: | |||||
| continue | |||||
| except httpx.ReadError as exc: | |||||
| logger.debug(f"Post writer shutting down normally: {exc}") | |||||
| except Exception as exc: | |||||
| logger.exception("Error writing messages") | |||||
| write_queue.put(exc) | |||||
| finally: | |||||
| write_queue.put(None) | |||||
| def _wait_for_endpoint(self, status_queue: StatusQueue) -> str: | |||||
| """Wait for the endpoint URL from the status queue. | |||||
| Args: | |||||
| status_queue: Queue to read status from. | |||||
| Returns: | |||||
| The endpoint URL. | |||||
| Raises: | |||||
| ValueError: If endpoint URL is not received or there's an error. | |||||
| """ | |||||
| try: | |||||
| status = status_queue.get(timeout=1) | |||||
| except queue.Empty: | |||||
| raise ValueError("failed to get endpoint URL") | |||||
| if isinstance(status, _StatusReady): | |||||
| return status._endpoint_url | |||||
| elif isinstance(status, _StatusError): | |||||
| raise status._exc | |||||
| else: | |||||
| raise ValueError("failed to get endpoint URL") | |||||
| def connect( | |||||
| self, | |||||
| executor: ThreadPoolExecutor, | |||||
| client: httpx.Client, | |||||
| event_source, | |||||
| ) -> tuple[ReadQueue, WriteQueue]: | |||||
| """Establish connection and start worker threads. | |||||
| Args: | |||||
| executor: Thread pool executor. | |||||
| client: HTTP client. | |||||
| event_source: SSE event source. | |||||
| Returns: | |||||
| Tuple of (read_queue, write_queue). | |||||
| """ | |||||
| read_queue: ReadQueue = queue.Queue() | |||||
| write_queue: WriteQueue = queue.Queue() | |||||
| status_queue: StatusQueue = queue.Queue() | |||||
| # Start SSE reader thread | |||||
| executor.submit(self.sse_reader, event_source, read_queue, status_queue) | |||||
| # Wait for endpoint URL | |||||
| endpoint_url = self._wait_for_endpoint(status_queue) | |||||
| self.endpoint_url = endpoint_url | |||||
| # Start post writer thread | |||||
| executor.submit(self.post_writer, client, endpoint_url, write_queue) | |||||
| return read_queue, write_queue | |||||
@contextmanager
def sse_client(
    url: str,
    headers: dict[str, Any] | None = None,
    timeout: float = 5.0,
    sse_read_timeout: float = 5 * 60,
) -> Generator[tuple[ReadQueue, WriteQueue], None, None]:
    """
    Client transport for SSE.

    `sse_read_timeout` determines how long (in seconds) the client will wait for a new
    event before disconnecting. All other HTTP operations are controlled by `timeout`.

    Args:
        url: The SSE endpoint URL.
        headers: Optional headers to include in requests.
        timeout: HTTP timeout for regular operations.
        sse_read_timeout: Timeout for SSE read operations.

    Yields:
        Tuple of (read_queue, write_queue) for message communication.

    Raises:
        MCPAuthError: If the server answers 401 during connection setup.
        MCPConnectionError: On any other HTTP error status during setup.
    """
    transport = SSETransport(url, headers, timeout, sse_read_timeout)

    read_queue: ReadQueue | None = None
    write_queue: WriteQueue | None = None

    with ThreadPoolExecutor() as executor:
        try:
            with create_ssrf_proxy_mcp_http_client(headers=transport.headers) as client:
                with ssrf_proxy_sse_connect(
                    url, timeout=httpx.Timeout(timeout, read=sse_read_timeout), client=client
                ) as event_source:
                    event_source.response.raise_for_status()
                    read_queue, write_queue = transport.connect(executor, client, event_source)
                    yield read_queue, write_queue
        except httpx.HTTPStatusError as exc:
            # Fix: chain the original HTTPStatusError so callers and logs keep
            # the underlying status/response context.
            if exc.response.status_code == 401:
                raise MCPAuthError() from exc
            raise MCPConnectionError() from exc
        except Exception:
            logger.exception("Error connecting to SSE endpoint")
            raise
        finally:
            # Unblock the worker threads with the shutdown sentinel.
            if read_queue is not None:
                read_queue.put(None)
            if write_queue is not None:
                write_queue.put(None)
def send_message(http_client: httpx.Client, endpoint_url: str, session_message: SessionMessage) -> None:
    """
    POST a single session message to the server endpoint.

    Args:
        http_client: The HTTP client to use for sending
        endpoint_url: The endpoint URL to send the message to
        session_message: The message to send

    Raises:
        Exception: Re-raises any failure after logging it.
    """
    try:
        response = http_client.post(
            endpoint_url,
            json=session_message.message.model_dump(by_alias=True, mode="json", exclude_none=True),
        )
        response.raise_for_status()
        logger.debug(f"Client message sent successfully: {response.status_code}")
    except Exception:
        logger.exception("Error sending message")
        raise
def read_messages(
    sse_client: SSEClient,
) -> Generator[SessionMessage | Exception, None, None]:
    """
    Iterate the SSE stream, yielding parsed messages.

    Args:
        sse_client: The SSE client to read from

    Yields:
        SessionMessage for each parsed 'message' event; an Exception object
        whenever parsing or reading fails (the stream keeps going on parse
        errors, stops after a read error).
    """
    try:
        for event in sse_client.events():
            if event.event != "message":
                logger.warning(f"Unknown SSE event: {event.event}")
                continue
            try:
                parsed = types.JSONRPCMessage.model_validate_json(event.data)
                logger.debug(f"Received server message: {parsed}")
                yield SessionMessage(parsed)
            except Exception as exc:
                logger.exception("Error parsing server message")
                yield exc
    except Exception as exc:
        logger.exception("Error reading SSE messages")
        yield exc
| """ | |||||
| StreamableHTTP Client Transport Module | |||||
| This module implements the StreamableHTTP transport for MCP clients, | |||||
| providing support for HTTP POST requests with optional SSE streaming responses | |||||
| and session management. | |||||
| """ | |||||
| import logging | |||||
| import queue | |||||
| from collections.abc import Callable, Generator | |||||
| from concurrent.futures import ThreadPoolExecutor | |||||
| from contextlib import contextmanager | |||||
| from dataclasses import dataclass | |||||
| from datetime import timedelta | |||||
| from typing import Any, cast | |||||
| import httpx | |||||
| from httpx_sse import EventSource, ServerSentEvent | |||||
| from core.mcp.types import ( | |||||
| ClientMessageMetadata, | |||||
| ErrorData, | |||||
| JSONRPCError, | |||||
| JSONRPCMessage, | |||||
| JSONRPCNotification, | |||||
| JSONRPCRequest, | |||||
| JSONRPCResponse, | |||||
| RequestId, | |||||
| SessionMessage, | |||||
| ) | |||||
| from core.mcp.utils import create_ssrf_proxy_mcp_http_client, ssrf_proxy_sse_connect | |||||
| logger = logging.getLogger(__name__) | |||||
| SessionMessageOrError = SessionMessage | Exception | None | |||||
| # Queue types with clearer names for their roles | |||||
| ServerToClientQueue = queue.Queue[SessionMessageOrError] # Server to client messages | |||||
| ClientToServerQueue = queue.Queue[SessionMessage | None] # Client to server messages | |||||
| GetSessionIdCallback = Callable[[], str | None] | |||||
| MCP_SESSION_ID = "mcp-session-id" | |||||
| LAST_EVENT_ID = "last-event-id" | |||||
| CONTENT_TYPE = "content-type" | |||||
| ACCEPT = "Accept" | |||||
| JSON = "application/json" | |||||
| SSE = "text/event-stream" | |||||
| DEFAULT_QUEUE_READ_TIMEOUT = 3 | |||||
class StreamableHTTPError(Exception):
    """Base exception for StreamableHTTP transport errors."""
class ResumptionError(StreamableHTTPError):
    """Raised when resumption request is invalid."""
@dataclass
class RequestContext:
    """Context for a request operation."""

    # HTTP client used to issue the POST / SSE requests.
    client: httpx.Client
    # Base headers for the request (session ID is merged in separately).
    headers: dict[str, str]
    # Current MCP session ID, if one has been established.
    session_id: str | None
    # The outbound message this request delivers.
    session_message: SessionMessage
    # Optional per-message metadata (e.g. resumption token + callback).
    metadata: ClientMessageMetadata | None
    server_to_client_queue: ServerToClientQueue  # Renamed for clarity
    # How long to wait on SSE reads for this request.
    sse_read_timeout: timedelta
| class StreamableHTTPTransport: | |||||
| """StreamableHTTP client transport implementation.""" | |||||
| def __init__( | |||||
| self, | |||||
| url: str, | |||||
| headers: dict[str, Any] | None = None, | |||||
| timeout: timedelta = timedelta(seconds=30), | |||||
| sse_read_timeout: timedelta = timedelta(seconds=60 * 5), | |||||
| ) -> None: | |||||
| """Initialize the StreamableHTTP transport. | |||||
| Args: | |||||
| url: The endpoint URL. | |||||
| headers: Optional headers to include in requests. | |||||
| timeout: HTTP timeout for regular operations. | |||||
| sse_read_timeout: Timeout for SSE read operations. | |||||
| """ | |||||
| self.url = url | |||||
| self.headers = headers or {} | |||||
| self.timeout = timeout | |||||
| self.sse_read_timeout = sse_read_timeout | |||||
| self.session_id: str | None = None | |||||
| self.request_headers = { | |||||
| ACCEPT: f"{JSON}, {SSE}", | |||||
| CONTENT_TYPE: JSON, | |||||
| **self.headers, | |||||
| } | |||||
| def _update_headers_with_session(self, base_headers: dict[str, str]) -> dict[str, str]: | |||||
| """Update headers with session ID if available.""" | |||||
| headers = base_headers.copy() | |||||
| if self.session_id: | |||||
| headers[MCP_SESSION_ID] = self.session_id | |||||
| return headers | |||||
| def _is_initialization_request(self, message: JSONRPCMessage) -> bool: | |||||
| """Check if the message is an initialization request.""" | |||||
| return isinstance(message.root, JSONRPCRequest) and message.root.method == "initialize" | |||||
| def _is_initialized_notification(self, message: JSONRPCMessage) -> bool: | |||||
| """Check if the message is an initialized notification.""" | |||||
| return isinstance(message.root, JSONRPCNotification) and message.root.method == "notifications/initialized" | |||||
| def _maybe_extract_session_id_from_response( | |||||
| self, | |||||
| response: httpx.Response, | |||||
| ) -> None: | |||||
| """Extract and store session ID from response headers.""" | |||||
| new_session_id = response.headers.get(MCP_SESSION_ID) | |||||
| if new_session_id: | |||||
| self.session_id = new_session_id | |||||
| logger.info(f"Received session ID: {self.session_id}") | |||||
| def _handle_sse_event( | |||||
| self, | |||||
| sse: ServerSentEvent, | |||||
| server_to_client_queue: ServerToClientQueue, | |||||
| original_request_id: RequestId | None = None, | |||||
| resumption_callback: Callable[[str], None] | None = None, | |||||
| ) -> bool: | |||||
| """Handle an SSE event, returning True if the response is complete.""" | |||||
| if sse.event == "message": | |||||
| try: | |||||
| message = JSONRPCMessage.model_validate_json(sse.data) | |||||
| logger.debug(f"SSE message: {message}") | |||||
| # If this is a response and we have original_request_id, replace it | |||||
| if original_request_id is not None and isinstance(message.root, JSONRPCResponse | JSONRPCError): | |||||
| message.root.id = original_request_id | |||||
| session_message = SessionMessage(message) | |||||
| # Put message in queue that goes to client | |||||
| server_to_client_queue.put(session_message) | |||||
| # Call resumption token callback if we have an ID | |||||
| if sse.id and resumption_callback: | |||||
| resumption_callback(sse.id) | |||||
| # If this is a response or error return True indicating completion | |||||
| # Otherwise, return False to continue listening | |||||
| return isinstance(message.root, JSONRPCResponse | JSONRPCError) | |||||
| except Exception as exc: | |||||
| # Put exception in queue that goes to client | |||||
| server_to_client_queue.put(exc) | |||||
| return False | |||||
| elif sse.event == "ping": | |||||
| logger.debug("Received ping event") | |||||
| return False | |||||
| else: | |||||
| logger.warning(f"Unknown SSE event: {sse.event}") | |||||
| return False | |||||
    def handle_get_stream(
        self,
        client: httpx.Client,
        server_to_client_queue: ServerToClientQueue,
    ) -> None:
        """Handle GET stream for server-initiated messages.

        Opens a long-lived GET SSE connection so the server can push
        messages that are not direct responses to a client request.
        Errors are logged and swallowed: this stream is best-effort.
        """
        try:
            # A GET stream only makes sense once a session has been established.
            if not self.session_id:
                return

            headers = self._update_headers_with_session(self.request_headers)

            # NOTE(review): timedelta.seconds drops any whole-day component;
            # total_seconds() would be safer if timeouts can exceed a day — confirm.
            with ssrf_proxy_sse_connect(
                self.url,
                headers=headers,
                timeout=httpx.Timeout(self.timeout.seconds, read=self.sse_read_timeout.seconds),
                client=client,
                method="GET",
            ) as event_source:
                event_source.response.raise_for_status()
                logger.debug("GET SSE connection established")

                for sse in event_source.iter_sse():
                    self._handle_sse_event(sse, server_to_client_queue)
        except Exception as exc:
            # Server-initiated streams are optional; never crash the client.
            logger.debug(f"GET stream error (non-fatal): {exc}")
    def _handle_resumption_request(self, ctx: RequestContext) -> None:
        """Handle a resumption request using GET with SSE.

        Re-attaches to a previously interrupted stream by sending the stored
        resumption token in the Last-Event-ID header.

        Raises:
            ResumptionError: If ctx.metadata carries no resumption token.
        """
        headers = self._update_headers_with_session(ctx.headers)
        if ctx.metadata and ctx.metadata.resumption_token:
            headers[LAST_EVENT_ID] = ctx.metadata.resumption_token
        else:
            raise ResumptionError("Resumption request requires a resumption token")

        # Extract original request ID to map responses
        original_request_id = None
        if isinstance(ctx.session_message.message.root, JSONRPCRequest):
            original_request_id = ctx.session_message.message.root.id

        # NOTE(review): timedelta.seconds drops any whole-day component;
        # total_seconds() would be safer if timeouts can exceed a day — confirm.
        with ssrf_proxy_sse_connect(
            self.url,
            headers=headers,
            timeout=httpx.Timeout(self.timeout.seconds, read=ctx.sse_read_timeout.seconds),
            client=ctx.client,
            method="GET",
        ) as event_source:
            event_source.response.raise_for_status()
            logger.debug("Resumption GET SSE connection established")

            for sse in event_source.iter_sse():
                is_complete = self._handle_sse_event(
                    sse,
                    ctx.server_to_client_queue,
                    original_request_id,
                    ctx.metadata.on_resumption_token_update if ctx.metadata else None,
                )
                # Stop once the final response/error for the request arrived.
                if is_complete:
                    break
    def _handle_post_request(self, ctx: RequestContext) -> None:
        """Handle a POST request with response processing.

        Sends the message, then dispatches the reply based on status code and
        content type (plain JSON body, SSE stream, or error).
        """
        headers = self._update_headers_with_session(ctx.headers)
        message = ctx.session_message.message
        is_initialization = self._is_initialization_request(message)

        with ctx.client.stream(
            "POST",
            self.url,
            json=message.model_dump(by_alias=True, mode="json", exclude_none=True),
            headers=headers,
        ) as response:
            # 202 Accepted: delivered, no body to read (e.g. notifications).
            if response.status_code == 202:
                logger.debug("Received 202 Accepted")
                return

            # 404: the server no longer recognizes our session.
            if response.status_code == 404:
                if isinstance(message.root, JSONRPCRequest):
                    self._send_session_terminated_error(
                        ctx.server_to_client_queue,
                        message.root.id,
                    )
                return

            response.raise_for_status()
            # The initialize response carries the session ID header.
            if is_initialization:
                self._maybe_extract_session_id_from_response(response)

            content_type = cast(str, response.headers.get(CONTENT_TYPE, "").lower())

            if content_type.startswith(JSON):
                self._handle_json_response(response, ctx.server_to_client_queue)
            elif content_type.startswith(SSE):
                self._handle_sse_response(response, ctx)
            else:
                self._handle_unexpected_content_type(
                    content_type,
                    ctx.server_to_client_queue,
                )
| def _handle_json_response( | |||||
| self, | |||||
| response: httpx.Response, | |||||
| server_to_client_queue: ServerToClientQueue, | |||||
| ) -> None: | |||||
| """Handle JSON response from the server.""" | |||||
| try: | |||||
| content = response.read() | |||||
| message = JSONRPCMessage.model_validate_json(content) | |||||
| session_message = SessionMessage(message) | |||||
| server_to_client_queue.put(session_message) | |||||
| except Exception as exc: | |||||
| server_to_client_queue.put(exc) | |||||
| def _handle_sse_response(self, response: httpx.Response, ctx: RequestContext) -> None: | |||||
| """Handle SSE response from the server.""" | |||||
| try: | |||||
| event_source = EventSource(response) | |||||
| for sse in event_source.iter_sse(): | |||||
| is_complete = self._handle_sse_event( | |||||
| sse, | |||||
| ctx.server_to_client_queue, | |||||
| resumption_callback=(ctx.metadata.on_resumption_token_update if ctx.metadata else None), | |||||
| ) | |||||
| if is_complete: | |||||
| break | |||||
| except Exception as e: | |||||
| ctx.server_to_client_queue.put(e) | |||||
| def _handle_unexpected_content_type( | |||||
| self, | |||||
| content_type: str, | |||||
| server_to_client_queue: ServerToClientQueue, | |||||
| ) -> None: | |||||
| """Handle unexpected content type in response.""" | |||||
| error_msg = f"Unexpected content type: {content_type}" | |||||
| logger.error(error_msg) | |||||
| server_to_client_queue.put(ValueError(error_msg)) | |||||
    def _send_session_terminated_error(
        self,
        server_to_client_queue: ServerToClientQueue,
        request_id: RequestId,
    ) -> None:
        """Tell the client its pending request died because the session ended.

        Sent when the server answers 404 for a request on a known session.
        """
        # NOTE(review): the code is positive 32600, mirroring the reference MCP
        # Python SDK; the JSON-RPC spec's "Invalid Request" code is -32600 —
        # confirm which is intended before changing.
        jsonrpc_error = JSONRPCError(
            jsonrpc="2.0",
            id=request_id,
            error=ErrorData(code=32600, message="Session terminated by server"),
        )
        session_message = SessionMessage(JSONRPCMessage(jsonrpc_error))
        server_to_client_queue.put(session_message)
    def post_writer(
        self,
        client: httpx.Client,
        client_to_server_queue: ClientToServerQueue,
        server_to_client_queue: ServerToClientQueue,
        start_get_stream: Callable[[], None],
    ) -> None:
        """Worker loop: drain client messages and POST each one to the server.

        Runs in its own thread (submitted by ``streamablehttp_client``).
        Reads from ``client_to_server_queue`` until a ``None`` sentinel
        arrives; replies and transport errors are pushed onto
        ``server_to_client_queue``.
        """
        while True:
            try:
                # Read with a timeout so the loop can notice the None shutdown
                # sentinel promptly instead of blocking forever.
                session_message = client_to_server_queue.get(timeout=DEFAULT_QUEUE_READ_TIMEOUT)

                # None is the shutdown signal posted by streamablehttp_client().
                if session_message is None:
                    break

                message = session_message.message
                metadata = (
                    session_message.metadata if isinstance(session_message.metadata, ClientMessageMetadata) else None
                )

                # Check if this is a resumption request
                is_resumption = bool(metadata and metadata.resumption_token)

                logger.debug(f"Sending client message: {message}")

                # Once the handshake has finished, open the standalone GET
                # stream that carries server-initiated messages.
                if self._is_initialized_notification(message):
                    start_get_stream()

                ctx = RequestContext(
                    client=client,
                    headers=self.request_headers,
                    session_id=self.session_id,
                    session_message=session_message,
                    metadata=metadata,
                    server_to_client_queue=server_to_client_queue,  # Queue to write responses to client
                    sse_read_timeout=self.sse_read_timeout,
                )

                if is_resumption:
                    self._handle_resumption_request(ctx)
                else:
                    self._handle_post_request(ctx)
            except queue.Empty:
                # No message yet; poll again.
                continue
            except Exception as exc:
                # Surface transport errors to the client-side reader.
                server_to_client_queue.put(exc)
| def terminate_session(self, client: httpx.Client) -> None: | |||||
| """Terminate the session by sending a DELETE request.""" | |||||
| if not self.session_id: | |||||
| return | |||||
| try: | |||||
| headers = self._update_headers_with_session(self.request_headers) | |||||
| response = client.delete(self.url, headers=headers) | |||||
| if response.status_code == 405: | |||||
| logger.debug("Server does not allow session termination") | |||||
| elif response.status_code != 200: | |||||
| logger.warning(f"Session termination failed: {response.status_code}") | |||||
| except Exception as exc: | |||||
| logger.warning(f"Session termination failed: {exc}") | |||||
    def get_session_id(self) -> str | None:
        """Return the session id negotiated with the server, or None if absent."""
        return self.session_id
@contextmanager
def streamablehttp_client(
    url: str,
    headers: dict[str, Any] | None = None,
    timeout: timedelta = timedelta(seconds=30),
    sse_read_timeout: timedelta = timedelta(seconds=60 * 5),
    terminate_on_close: bool = True,
) -> Generator[
    tuple[
        ServerToClientQueue,  # Queue for receiving messages FROM server
        ClientToServerQueue,  # Queue for sending messages TO server
        GetSessionIdCallback,
    ],
    None,
    None,
]:
    """
    Client transport for StreamableHTTP.

    `sse_read_timeout` determines how long (in seconds) the client will wait for a new
    event before disconnecting. All other HTTP operations are controlled by `timeout`.

    Yields:
        Tuple containing:
            - server_to_client_queue: Queue for reading messages FROM the server
            - client_to_server_queue: Queue for sending messages TO the server
            - get_session_id_callback: Function to retrieve the current session ID
    """
    transport = StreamableHTTPTransport(url, headers, timeout, sse_read_timeout)

    # Create queues with clear directional meaning
    server_to_client_queue: ServerToClientQueue = queue.Queue()  # For messages FROM server TO client
    client_to_server_queue: ClientToServerQueue = queue.Queue()  # For messages FROM client TO server

    # Two workers at most: the POST writer and the optional GET-stream reader.
    with ThreadPoolExecutor(max_workers=2) as executor:
        try:
            # NOTE(review): timedelta.seconds drops the days component;
            # total_seconds() would be safer for very long timeouts — confirm.
            with create_ssrf_proxy_mcp_http_client(
                headers=transport.request_headers,
                timeout=httpx.Timeout(transport.timeout.seconds, read=transport.sse_read_timeout.seconds),
            ) as client:
                # Define callbacks that need access to thread pool

                def start_get_stream() -> None:
                    """Start a worker thread to handle server-initiated messages."""
                    executor.submit(transport.handle_get_stream, client, server_to_client_queue)

                # Start the post_writer worker thread
                executor.submit(
                    transport.post_writer,
                    client,
                    client_to_server_queue,  # Queue for messages FROM client TO server
                    server_to_client_queue,  # Queue for messages FROM server TO client
                    start_get_stream,
                )

                try:
                    yield (
                        server_to_client_queue,  # Queue for receiving messages FROM server
                        client_to_server_queue,  # Queue for sending messages TO server
                        transport.get_session_id,
                    )
                finally:
                    # Politely end the server-side session before closing.
                    if transport.session_id and terminate_on_close:
                        transport.terminate_session(client)

                    # Signal threads to stop
                    client_to_server_queue.put(None)
        finally:
            # Clear any remaining items and add None sentinel to unblock any waiting threads
            # NOTE(review): empty()/get_nowait() is racy if a producer were
            # still alive; harmless here because writers were signalled above.
            try:
                while not client_to_server_queue.empty():
                    client_to_server_queue.get_nowait()
            except queue.Empty:
                pass
            client_to_server_queue.put(None)
            server_to_client_queue.put(None)
| from dataclasses import dataclass | |||||
| from typing import Any, Generic, TypeVar | |||||
| from core.mcp.session.base_session import BaseSession | |||||
| from core.mcp.types import LATEST_PROTOCOL_VERSION, RequestId, RequestParams | |||||
# Protocol revisions this implementation can negotiate during initialize.
SUPPORTED_PROTOCOL_VERSIONS: list[str] = ["2024-11-05", LATEST_PROTOCOL_VERSION]

# Concrete BaseSession specialization a request context is bound to.
SessionT = TypeVar("SessionT", bound=BaseSession[Any, Any, Any, Any, Any])
# Arbitrary user state carried for the lifetime of the server.
LifespanContextT = TypeVar("LifespanContextT")
@dataclass
class RequestContext(Generic[SessionT, LifespanContextT]):
    """Per-request context handed to MCP request handlers."""

    # JSON-RPC id of the request being served.
    request_id: RequestId
    # Optional request metadata (e.g. progress token) sent by the peer.
    meta: RequestParams.Meta | None
    # The session the request arrived on.
    session: SessionT
    # Lifespan state shared across requests on this server.
    lifespan_context: LifespanContextT
class MCPError(Exception):
    """Base class for all MCP client errors."""

    pass


class MCPConnectionError(MCPError):
    """Raised when the transport-level connection to an MCP server fails."""

    pass


class MCPAuthError(MCPConnectionError):
    """Raised when the server rejects a request for authentication reasons (e.g. HTTP 401)."""

    pass
| import logging | |||||
| from collections.abc import Callable | |||||
| from contextlib import AbstractContextManager, ExitStack | |||||
| from types import TracebackType | |||||
| from typing import Any, Optional, cast | |||||
| from urllib.parse import urlparse | |||||
| from core.mcp.client.sse_client import sse_client | |||||
| from core.mcp.client.streamable_client import streamablehttp_client | |||||
| from core.mcp.error import MCPAuthError, MCPConnectionError | |||||
| from core.mcp.session.client_session import ClientSession | |||||
| from core.mcp.types import Tool | |||||
| logger = logging.getLogger(__name__) | |||||
class MCPClient:
    """Synchronous client for one MCP server, with transport auto-detection.

    The URL's trailing path segment ("mcp" or "sse") selects the preferred
    transport; unknown suffixes probe SSE first, then streamable HTTP.
    Intended to be used as a context manager.
    """

    def __init__(
        self,
        server_url: str,
        provider_id: str,
        tenant_id: str,
        authed: bool = True,
        authorization_code: Optional[str] = None,
        for_list: bool = False,
    ):
        """Store connection/auth settings; no network I/O happens here.

        Args:
            server_url: Base URL of the MCP server.
            provider_id: Tool-provider identifier used to scope OAuth state.
            tenant_id: Tenant that owns the provider.
            authed: Whether OAuth authentication should be used.
            authorization_code: Optional OAuth code for the token exchange.
            for_list: Passed through to the OAuth provider (listing context).
        """
        # Initialize info
        self.provider_id = provider_id
        self.tenant_id = tenant_id
        self.client_type = "streamable"
        self.server_url = server_url

        # Authentication info
        self.authed = authed
        self.authorization_code = authorization_code
        if authed:
            # Imported lazily to avoid a circular import at module load time.
            from core.mcp.auth.auth_provider import OAuthClientProvider

            self.provider = OAuthClientProvider(self.provider_id, self.tenant_id, for_list=for_list)
            self.token = self.provider.tokens()

        # Initialize session and client objects
        self._session: Optional[ClientSession] = None
        self._streams_context: Optional[AbstractContextManager[Any]] = None
        self._session_context: Optional[ClientSession] = None
        self.exit_stack = ExitStack()

        # Whether the client has been initialized
        self._initialized = False

    def __enter__(self):
        self._initialize()
        self._initialized = True
        return self

    def __exit__(
        self, exc_type: Optional[type], exc_value: Optional[BaseException], traceback: Optional[TracebackType]
    ):
        self.cleanup()

    def _initialize(
        self,
    ):
        """Initialize the client with fallback to SSE if streamable connection fails"""
        connection_methods: dict[str, Callable[..., AbstractContextManager[Any]]] = {
            "mcp": streamablehttp_client,
            "sse": sse_client,
        }

        parsed_url = urlparse(self.server_url)
        path = parsed_url.path
        method_name = path.rstrip("/").split("/")[-1] if path else ""
        # Only the dictionary lookup decides the transport. (Previously the
        # connect_server() call sat inside a try/except KeyError, so a KeyError
        # raised anywhere in the connection stack silently triggered fallback.)
        client_factory = connection_methods.get(method_name)
        if client_factory is not None:
            self.connect_server(client_factory, method_name)
        else:
            # Unknown path suffix: probe SSE first, then streamable HTTP.
            try:
                self.connect_server(sse_client, "sse")
            except MCPConnectionError:
                self.connect_server(streamablehttp_client, "mcp")

    def connect_server(
        self, client_factory: Callable[..., AbstractContextManager[Any]], method_name: str, first_try: bool = True
    ):
        """Open streams via *client_factory*, start a ClientSession, and run the
        MCP initialize handshake. On an auth failure, refresh tokens once and retry.

        Raises:
            MCPConnectionError: if the transport context cannot be created.
            MCPAuthError: if authentication still fails after a token refresh.
            ValueError: if the OAuth flow itself fails.
        """
        from core.mcp.auth.auth_flow import auth

        try:
            headers = (
                {"Authorization": f"{self.token.token_type.capitalize()} {self.token.access_token}"}
                if self.authed and self.token
                else {}
            )
            self._streams_context = client_factory(url=self.server_url, headers=headers)
            if self._streams_context is None:
                raise MCPConnectionError("Failed to create connection context")

            # Use exit_stack to manage context managers properly
            if method_name == "mcp":
                # streamablehttp_client also yields a session-id callback; drop it.
                read_stream, write_stream, _ = self.exit_stack.enter_context(self._streams_context)
                streams = (read_stream, write_stream)
            else:  # sse_client
                streams = self.exit_stack.enter_context(self._streams_context)

            self._session_context = ClientSession(*streams)
            self._session = self.exit_stack.enter_context(self._session_context)
            session = cast(ClientSession, self._session)
            session.initialize()
        except MCPAuthError:
            if not self.authed:
                raise
            try:
                auth(self.provider, self.server_url, self.authorization_code)
            except Exception as e:
                raise ValueError(f"Failed to authenticate: {e}") from e
            self.token = self.provider.tokens()
            if first_try:
                self.connect_server(client_factory, method_name, first_try=False)
            else:
                # Re-auth did not help; surface the failure instead of silently
                # returning without a connection (previous behaviour).
                raise

    def list_tools(self) -> list[Tool]:
        """Return the tools advertised by the connected MCP server.

        Raises:
            ValueError: if called before the client was entered/initialized.
        """
        if not self._initialized or not self._session:
            raise ValueError("Session not initialized.")
        response = self._session.list_tools()
        return response.tools

    def invoke_tool(self, tool_name: str, tool_args: dict):
        """Call a tool by name with the given arguments.

        Raises:
            ValueError: if called before the client was entered/initialized.
        """
        if not self._initialized or not self._session:
            raise ValueError("Session not initialized.")
        return self._session.call_tool(tool_name, tool_args)

    def cleanup(self):
        """Close all managed contexts and reset connection state.

        State is reset even if closing fails, so the client never keeps
        references to half-torn-down contexts.
        """
        try:
            # ExitStack unwinds session and stream contexts in reverse order.
            self.exit_stack.close()
        except Exception as e:
            logger.exception("Error during cleanup")
            raise ValueError(f"Error during cleanup: {e}") from e
        finally:
            self._session = None
            self._session_context = None
            self._streams_context = None
            self._initialized = False
| import json | |||||
| import logging | |||||
| from collections.abc import Mapping | |||||
| from typing import Any, cast | |||||
| from configs import dify_config | |||||
| from controllers.web.passport import generate_session_id | |||||
| from core.app.app_config.entities import VariableEntity, VariableEntityType | |||||
| from core.app.entities.app_invoke_entities import InvokeFrom | |||||
| from core.app.features.rate_limiting.rate_limit import RateLimitGenerator | |||||
| from core.mcp import types | |||||
| from core.mcp.types import INTERNAL_ERROR, INVALID_PARAMS, METHOD_NOT_FOUND | |||||
| from core.mcp.utils import create_mcp_error_response | |||||
| from core.model_runtime.utils.encoders import jsonable_encoder | |||||
| from extensions.ext_database import db | |||||
| from models.model import App, AppMCPServer, AppMode, EndUser | |||||
| from services.app_generate_service import AppGenerateService | |||||
| """ | |||||
| Apply to MCP HTTP streamable server with stateless http | |||||
| """ | |||||
| logger = logging.getLogger(__name__) | |||||
class MCPServerStreamableHTTPRequestHandler:
    """Serve a single stateless MCP request (streamable HTTP) for a Dify app.

    Dispatches initialize / tools-list / tools-call requests plus the
    "initialized" notification, and renders every reply as SSE byte chunks.
    One handler instance is built per incoming HTTP request.
    """

    def __init__(
        self, app: App, request: types.ClientRequest | types.ClientNotification, user_input_form: list[VariableEntity]
    ):
        """Bind the handler to *app* and its MCP server record.

        Raises:
            ValueError: if the app has no MCP server configured.
        """
        self.app = app
        self.request = request
        mcp_server = db.session.query(AppMCPServer).filter(AppMCPServer.app_id == self.app.id).first()
        if not mcp_server:
            raise ValueError("MCP server not found")
        self.mcp_server: AppMCPServer = mcp_server
        self.end_user = self.retrieve_end_user()
        self.user_input_form = user_input_form

    @property
    def request_type(self):
        """Concrete type of the incoming request/notification payload."""
        return type(self.request.root)

    @property
    def parameter_schema(self):
        """Build the JSON Schema for the tool input from the app's input form."""
        parameters, required = self._convert_input_form_to_parameters(self.user_input_form)
        if self.app.mode in {AppMode.COMPLETION.value, AppMode.WORKFLOW.value}:
            return {
                "type": "object",
                "properties": parameters,
                "required": required,
            }
        # Chat-style apps additionally expect the user's question as "query".
        return {
            "type": "object",
            "properties": {
                "query": {"type": "string", "description": "User Input/Question content"},
                **parameters,
            },
            "required": ["query", *required],
        }

    @property
    def capabilities(self):
        """Only the tools capability is advertised; list updates are not pushed."""
        return types.ServerCapabilities(
            tools=types.ToolsCapability(listChanged=False),
        )

    def response(self, response: types.Result | str):
        """Yield the reply as SSE bytes; plain strings become ping events."""
        if isinstance(response, str):
            sse_content = f"event: ping\ndata: {response}\n\n".encode()
            yield sse_content
            return
        json_response = types.JSONRPCResponse(
            jsonrpc="2.0",
            id=(self.request.root.model_extra or {}).get("id", 1),
            result=response.model_dump(by_alias=True, mode="json", exclude_none=True),
        )
        json_data = json.dumps(jsonable_encoder(json_response))

        sse_content = f"event: message\ndata: {json_data}\n\n".encode()

        yield sse_content

    def error_response(self, code: int, message: str, data=None):
        """Build a JSON-RPC error reply, echoing the request id when present."""
        request_id = (self.request.root.model_extra or {}).get("id", 1) or 1
        return create_mcp_error_response(request_id, code, message, data)

    def handle(self):
        """Dispatch the request to its handler and map errors to JSON-RPC codes."""
        handle_map = {
            types.InitializeRequest: self.initialize,
            types.ListToolsRequest: self.list_tools,
            types.CallToolRequest: self.invoke_tool,
            types.InitializedNotification: self.handle_notification,
        }
        try:
            if self.request_type in handle_map:
                return self.response(handle_map[self.request_type]())
            else:
                return self.error_response(METHOD_NOT_FOUND, f"Method not found: {self.request_type}")
        except ValueError as e:
            logger.exception("Invalid params")
            return self.error_response(INVALID_PARAMS, str(e))
        except Exception as e:
            logger.exception("Internal server error")
            return self.error_response(INTERNAL_ERROR, f"Internal server error: {str(e)}")

    def handle_notification(self):
        """Acknowledge the initialized notification with a ping event."""
        return "ping"

    def initialize(self):
        """Answer the initialize handshake, creating an end user on first contact."""
        request = cast(types.InitializeRequest, self.request.root)
        client_info = request.params.clientInfo
        client_name = f"{client_info.name}@{client_info.version}"  # fixed "clinet_name" typo
        if not self.end_user:
            end_user = EndUser(
                tenant_id=self.app.tenant_id,
                app_id=self.app.id,
                type="mcp",
                name=client_name,
                session_id=generate_session_id(),
                external_user_id=self.mcp_server.id,
            )
            db.session.add(end_user)
            db.session.commit()
            # Remember the freshly created user so later calls in this request see it.
            self.end_user = end_user
        return types.InitializeResult(
            protocolVersion=types.SERVER_LATEST_PROTOCOL_VERSION,
            capabilities=self.capabilities,
            serverInfo=types.Implementation(name="Dify", version=dify_config.project.version),
            instructions=self.mcp_server.description,
        )

    def list_tools(self):
        """Expose the app as a single MCP tool."""
        if not self.end_user:
            raise ValueError("User not found")
        return types.ListToolsResult(
            tools=[
                types.Tool(
                    name=self.app.name,
                    description=self.mcp_server.description,
                    inputSchema=self.parameter_schema,
                )
            ],
        )

    def invoke_tool(self):
        """Run the app with the tool arguments and return its answer as text."""
        if not self.end_user:
            raise ValueError("User not found")
        request = cast(types.CallToolRequest, self.request.root)
        args = request.params.arguments
        if not args:
            raise ValueError("No arguments provided")
        if self.app.mode in {AppMode.WORKFLOW.value}:
            args = {"inputs": args}
        elif self.app.mode in {AppMode.COMPLETION.value}:
            args = {"query": "", "inputs": args}
        else:
            # Chat-style apps require "query"; report a missing key as invalid
            # params instead of letting a KeyError surface as a 500.
            if "query" not in args:
                raise ValueError("Missing required argument: query")
            args = {"query": args["query"], "inputs": {k: v for k, v in args.items() if k != "query"}}
        response = AppGenerateService.generate(
            self.app,
            self.end_user,
            args,
            InvokeFrom.SERVICE_API,
            streaming=self.app.mode == AppMode.AGENT_CHAT.value,
        )
        answer = ""
        if isinstance(response, RateLimitGenerator):
            # Streaming (agent-chat): accumulate the agent's thoughts from SSE lines.
            for item in response.generator:
                data = item
                if isinstance(data, str) and data.startswith("data: "):
                    try:
                        json_str = data[6:].strip()
                        parsed_data = json.loads(json_str)
                        if parsed_data.get("event") == "agent_thought":
                            answer += parsed_data.get("thought", "")
                    except json.JSONDecodeError:
                        continue
        if isinstance(response, Mapping):
            if self.app.mode in {
                AppMode.ADVANCED_CHAT.value,
                AppMode.COMPLETION.value,
                AppMode.CHAT.value,
                AppMode.AGENT_CHAT.value,
            }:
                answer = response["answer"]
            elif self.app.mode in {AppMode.WORKFLOW.value}:
                answer = json.dumps(response["data"]["outputs"], ensure_ascii=False)
            else:
                raise ValueError("Invalid app mode")
        # Not support image yet
        return types.CallToolResult(content=[types.TextContent(text=answer, type="text")])

    def retrieve_end_user(self):
        """Look up the MCP end user previously created for this server."""
        return (
            db.session.query(EndUser)
            .filter(EndUser.external_user_id == self.mcp_server.id, EndUser.type == "mcp")
            .first()
        )

    def _convert_input_form_to_parameters(self, user_input_form: list[VariableEntity]):
        """Translate the app's input form into (properties, required) for JSON Schema."""
        parameters: dict[str, dict[str, Any]] = {}
        required = []
        for item in user_input_form:
            parameters[item.variable] = {}
            # File-typed inputs cannot be expressed in the tool schema yet.
            if item.type in (
                VariableEntityType.FILE,
                VariableEntityType.FILE_LIST,
                VariableEntityType.EXTERNAL_DATA_TOOL,
            ):
                continue
            if item.required:
                required.append(item.variable)
            # if the workflow republished, the parameters not changed
            # we should not raise error here
            try:
                description = self.mcp_server.parameters_dict[item.variable]
            except KeyError:
                description = ""
            parameters[item.variable]["description"] = description
            if item.type in (VariableEntityType.TEXT_INPUT, VariableEntityType.PARAGRAPH):
                parameters[item.variable]["type"] = "string"
            elif item.type == VariableEntityType.SELECT:
                parameters[item.variable]["type"] = "string"
                parameters[item.variable]["enum"] = item.options
            elif item.type == VariableEntityType.NUMBER:
                # JSON Schema has no "float" type; numeric inputs are "number".
                parameters[item.variable]["type"] = "number"
        return parameters, required
| import logging | |||||
| import queue | |||||
| from collections.abc import Callable | |||||
| from concurrent.futures import ThreadPoolExecutor | |||||
| from contextlib import ExitStack | |||||
| from datetime import timedelta | |||||
| from types import TracebackType | |||||
| from typing import Any, Generic, Self, TypeVar | |||||
| from httpx import HTTPStatusError | |||||
| from pydantic import BaseModel | |||||
| from core.mcp.error import MCPAuthError, MCPConnectionError | |||||
| from core.mcp.types import ( | |||||
| CancelledNotification, | |||||
| ClientNotification, | |||||
| ClientRequest, | |||||
| ClientResult, | |||||
| ErrorData, | |||||
| JSONRPCError, | |||||
| JSONRPCMessage, | |||||
| JSONRPCNotification, | |||||
| JSONRPCRequest, | |||||
| JSONRPCResponse, | |||||
| MessageMetadata, | |||||
| RequestId, | |||||
| RequestParams, | |||||
| ServerMessageMetadata, | |||||
| ServerNotification, | |||||
| ServerRequest, | |||||
| ServerResult, | |||||
| SessionMessage, | |||||
| ) | |||||
# Request/result/notification TypeVars fix the direction of a session: a
# client session sends Client* types and receives Server* types, and vice versa.
SendRequestT = TypeVar("SendRequestT", ClientRequest, ServerRequest)
SendResultT = TypeVar("SendResultT", ClientResult, ServerResult)
SendNotificationT = TypeVar("SendNotificationT", ClientNotification, ServerNotification)
ReceiveRequestT = TypeVar("ReceiveRequestT", ClientRequest, ServerRequest)
ReceiveResultT = TypeVar("ReceiveResultT", bound=BaseModel)
ReceiveNotificationT = TypeVar("ReceiveNotificationT", ClientNotification, ServerNotification)

# Seconds a response-queue poll waits before re-checking the receiver thread.
DEFAULT_RESPONSE_READ_TIMEOUT = 1.0
class RequestResponder(Generic[ReceiveRequestT, SendResultT]):
    """Handles responding to MCP requests and manages request lifecycle.

    This class MUST be used as a context manager to ensure proper cleanup and
    cancellation handling:

    Example:
        with request_responder as resp:
            resp.respond(result)

    The context manager ensures:
    1. Proper cancellation scope setup and cleanup
    2. Request completion tracking
    3. Cleanup of in-flight requests
    """

    request: ReceiveRequestT
    _session: Any
    _on_complete: Callable[["RequestResponder[ReceiveRequestT, SendResultT]"], Any]

    def __init__(
        self,
        request_id: RequestId,
        request_meta: RequestParams.Meta | None,
        request: ReceiveRequestT,
        session: """BaseSession[
            SendRequestT,
            SendNotificationT,
            SendResultT,
            ReceiveRequestT,
            ReceiveNotificationT
        ]""",
        on_complete: Callable[["RequestResponder[ReceiveRequestT, SendResultT]"], Any],
    ) -> None:
        """Bind the responder to one request; *on_complete* is invoked on exit
        if the request was responded to or cancelled."""
        self.request_id = request_id
        self.request_meta = request_meta
        self.request = request
        self._session = session
        self._completed = False
        self._on_complete = on_complete
        self._entered = False  # Track if we're in a context manager

    def __enter__(self) -> "RequestResponder[ReceiveRequestT, SendResultT]":
        """Enter the context manager, enabling request cancellation tracking."""
        self._entered = True
        return self

    def __exit__(
        self,
        exc_type: type[BaseException] | None,
        exc_val: BaseException | None,
        exc_tb: TracebackType | None,
    ) -> None:
        """Exit the context manager, performing cleanup and notifying completion."""
        try:
            # Only completed (responded/cancelled) requests are reported, so
            # the session can drop them from its in-flight table.
            if self._completed:
                self._on_complete(self)
        finally:
            self._entered = False

    def respond(self, response: SendResultT | ErrorData) -> None:
        """Send a response for this request.

        Must be called within a context manager block.

        Raises:
            RuntimeError: If not used within a context manager
            AssertionError: If request was already responded to
        """
        if not self._entered:
            raise RuntimeError("RequestResponder must be used as a context manager")
        assert not self._completed, "Request already responded to"

        self._completed = True

        self._session._send_response(request_id=self.request_id, response=response)

    def cancel(self) -> None:
        """Cancel this request and mark it as completed."""
        if not self._entered:
            raise RuntimeError("RequestResponder must be used as a context manager")

        self._completed = True  # Mark as completed so it's removed from in_flight

        # Send an error response to indicate cancellation
        # NOTE(review): code 0 is not a standard JSON-RPC error code — confirm
        # that peers treat it as cancellation.
        self._session._send_response(
            request_id=self.request_id,
            response=ErrorData(code=0, message="Request cancelled", data=None),
        )
| class BaseSession( | |||||
| Generic[ | |||||
| SendRequestT, | |||||
| SendNotificationT, | |||||
| SendResultT, | |||||
| ReceiveRequestT, | |||||
| ReceiveNotificationT, | |||||
| ], | |||||
| ): | |||||
| """ | |||||
| Implements an MCP "session" on top of read/write streams, including features | |||||
| like request/response linking, notifications, and progress. | |||||
| This class is a context manager that automatically starts processing | |||||
| messages when entered. | |||||
| """ | |||||
| _response_streams: dict[RequestId, queue.Queue[JSONRPCResponse | JSONRPCError]] | |||||
| _request_id: int | |||||
| _in_flight: dict[RequestId, RequestResponder[ReceiveRequestT, SendResultT]] | |||||
| _receive_request_type: type[ReceiveRequestT] | |||||
| _receive_notification_type: type[ReceiveNotificationT] | |||||
    def __init__(
        self,
        read_stream: queue.Queue,
        write_stream: queue.Queue,
        receive_request_type: type[ReceiveRequestT],
        receive_notification_type: type[ReceiveNotificationT],
        # If none, reading will never time out
        read_timeout_seconds: timedelta | None = None,
    ) -> None:
        """Wire the session to its transport queues.

        Args:
            read_stream: Queue of incoming messages from the transport.
            write_stream: Queue of outgoing messages to the transport.
            receive_request_type: Model used to validate incoming requests.
            receive_notification_type: Model used to validate incoming notifications.
            read_timeout_seconds: Default per-poll response wait; None falls
                back to DEFAULT_RESPONSE_READ_TIMEOUT.
        """
        self._read_stream = read_stream
        self._write_stream = write_stream
        self._response_streams = {}
        self._request_id = 0
        self._receive_request_type = receive_request_type
        self._receive_notification_type = receive_notification_type
        self._session_read_timeout_seconds = read_timeout_seconds
        self._in_flight = {}
        self._exit_stack = ExitStack()
    def __enter__(self) -> Self:
        """Start the background receive loop in a dedicated thread pool."""
        self._executor = ThreadPoolExecutor()
        self._receiver_future = self._executor.submit(self._receive_loop)
        return self
| def check_receiver_status(self) -> None: | |||||
| if self._receiver_future.done(): | |||||
| self._receiver_future.result() | |||||
| def __exit__( | |||||
| self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None | |||||
| ) -> None: | |||||
| self._exit_stack.close() | |||||
| self._read_stream.put(None) | |||||
| self._write_stream.put(None) | |||||
    def send_request(
        self,
        request: SendRequestT,
        result_type: type[ReceiveResultT],
        request_read_timeout_seconds: timedelta | None = None,
        metadata: MessageMetadata = None,
    ) -> ReceiveResultT:
        """
        Sends a request and wait for a response. Raises an McpError if the
        response contains an error. If a request read timeout is provided, it
        will take precedence over the session read timeout.

        Do not use this method to emit notifications! Use send_notification()
        instead.
        """
        self.check_receiver_status()

        # Allocate a fresh request id and a private queue for its response.
        request_id = self._request_id
        self._request_id = request_id + 1

        response_queue: queue.Queue[JSONRPCResponse | JSONRPCError] = queue.Queue()
        self._response_streams[request_id] = response_queue

        try:
            jsonrpc_request = JSONRPCRequest(
                jsonrpc="2.0",
                id=request_id,
                **request.model_dump(by_alias=True, mode="json", exclude_none=True),
            )

            self._write_stream.put(SessionMessage(message=JSONRPCMessage(jsonrpc_request), metadata=metadata))

            # Per-poll timeout: the explicit request timeout wins, then the
            # session default, then DEFAULT_RESPONSE_READ_TIMEOUT.
            timeout = DEFAULT_RESPONSE_READ_TIMEOUT
            if request_read_timeout_seconds is not None:
                timeout = float(request_read_timeout_seconds.total_seconds())
            elif self._session_read_timeout_seconds is not None:
                timeout = float(self._session_read_timeout_seconds.total_seconds())

            # NOTE(review): the timeout bounds a single poll, not the total
            # wait — the loop retries until a response arrives or the receiver
            # thread dies; confirm unbounded total waiting is intended.
            while True:
                try:
                    response_or_error = response_queue.get(timeout=timeout)
                    break
                except queue.Empty:
                    # Surface receiver-thread failures instead of spinning.
                    self.check_receiver_status()
                    continue

            if response_or_error is None:
                raise MCPConnectionError(
                    ErrorData(
                        code=500,
                        message="No response received",
                    )
                )
            elif isinstance(response_or_error, JSONRPCError):
                # 401 maps to the auth-specific error so callers can re-auth.
                if response_or_error.error.code == 401:
                    raise MCPAuthError(
                        ErrorData(code=response_or_error.error.code, message=response_or_error.error.message)
                    )
                else:
                    raise MCPConnectionError(
                        ErrorData(code=response_or_error.error.code, message=response_or_error.error.message)
                    )
            else:
                return result_type.model_validate(response_or_error.result)
        finally:
            # Always drop the per-request queue, even on timeout/error paths.
            self._response_streams.pop(request_id, None)
| def send_notification( | |||||
| self, | |||||
| notification: SendNotificationT, | |||||
| related_request_id: RequestId | None = None, | |||||
| ) -> None: | |||||
| """ | |||||
| Emits a notification, which is a one-way message that does not expect | |||||
| a response. | |||||
| """ | |||||
| self.check_receiver_status() | |||||
| # Some transport implementations may need to set the related_request_id | |||||
| # to attribute to the notifications to the request that triggered them. | |||||
| jsonrpc_notification = JSONRPCNotification( | |||||
| jsonrpc="2.0", | |||||
| **notification.model_dump(by_alias=True, mode="json", exclude_none=True), | |||||
| ) | |||||
| session_message = SessionMessage( | |||||
| message=JSONRPCMessage(jsonrpc_notification), | |||||
| metadata=ServerMessageMetadata(related_request_id=related_request_id) if related_request_id else None, | |||||
| ) | |||||
| self._write_stream.put(session_message) | |||||
| def _send_response(self, request_id: RequestId, response: SendResultT | ErrorData) -> None: | |||||
| if isinstance(response, ErrorData): | |||||
| jsonrpc_error = JSONRPCError(jsonrpc="2.0", id=request_id, error=response) | |||||
| session_message = SessionMessage(message=JSONRPCMessage(jsonrpc_error)) | |||||
| self._write_stream.put(session_message) | |||||
| else: | |||||
| jsonrpc_response = JSONRPCResponse( | |||||
| jsonrpc="2.0", | |||||
| id=request_id, | |||||
| result=response.model_dump(by_alias=True, mode="json", exclude_none=True), | |||||
| ) | |||||
| session_message = SessionMessage(message=JSONRPCMessage(jsonrpc_response)) | |||||
| self._write_stream.put(session_message) | |||||
    def _receive_loop(self) -> None:
        """
        Main message processing loop.
        In a real synchronous implementation, this would likely run in a separate thread.
        """
        while True:
            try:
                # Attempt to receive a message (this would be blocking in a synchronous context)
                message = self._read_stream.get(timeout=DEFAULT_RESPONSE_READ_TIMEOUT)
                if message is None:
                    # ``None`` is the shutdown sentinel put on the stream by __exit__.
                    break
                if isinstance(message, HTTPStatusError):
                    # Transport-level HTTP failure.
                    # NOTE(review): this routes the error to the most recently
                    # issued request (self._request_id - 1) — confirm this
                    # attribution holds if requests can overlap.
                    response_queue = self._response_streams.get(self._request_id - 1)
                    if response_queue is not None:
                        response_queue.put(
                            JSONRPCError(
                                jsonrpc="2.0",
                                id=self._request_id - 1,
                                error=ErrorData(code=message.response.status_code, message=message.args[0]),
                            )
                        )
                    else:
                        self._handle_incoming(RuntimeError(f"Received response with an unknown request ID: {message}"))
                elif isinstance(message, Exception):
                    # Any other transport error goes to the generic handler.
                    self._handle_incoming(message)
                elif isinstance(message.message.root, JSONRPCRequest):
                    # Server-initiated request: validate, track as in-flight, and
                    # give the subclass hook a chance to answer it directly.
                    validated_request = self._receive_request_type.model_validate(
                        message.message.root.model_dump(by_alias=True, mode="json", exclude_none=True)
                    )
                    responder = RequestResponder(
                        request_id=message.message.root.id,
                        request_meta=validated_request.root.params.meta if validated_request.root.params else None,
                        request=validated_request,
                        session=self,
                        on_complete=lambda r: self._in_flight.pop(r.request_id, None),
                    )
                    self._in_flight[responder.request_id] = responder
                    self._received_request(responder)
                    # If the hook did not respond, forward the responder downstream.
                    if not responder._completed:
                        self._handle_incoming(responder)
                elif isinstance(message.message.root, JSONRPCNotification):
                    try:
                        notification = self._receive_notification_type.model_validate(
                            message.message.root.model_dump(by_alias=True, mode="json", exclude_none=True)
                        )
                        # Handle cancellation notifications
                        if isinstance(notification.root, CancelledNotification):
                            cancelled_id = notification.root.params.requestId
                            if cancelled_id in self._in_flight:
                                self._in_flight[cancelled_id].cancel()
                        else:
                            self._received_notification(notification)
                            self._handle_incoming(notification)
                    except Exception as e:
                        # For other validation errors, log and continue
                        logging.warning(f"Failed to validate notification: {e}. Message was: {message.message.root}")
                else:  # Response or error
                    # Route the response to the queue registered by send_request().
                    response_queue = self._response_streams.get(message.message.root.id)
                    if response_queue is not None:
                        response_queue.put(message.message.root)
                    else:
                        self._handle_incoming(RuntimeError(f"Server Error: {message}"))
            except queue.Empty:
                # Idle timeout on the read stream — keep polling.
                continue
            except Exception as e:
                # Re-raise so the receiver future records the failure and
                # check_receiver_status() can surface it to request senders.
                logging.exception("Error in message processing loop")
                raise
| def _received_request(self, responder: RequestResponder[ReceiveRequestT, SendResultT]) -> None: | |||||
| """ | |||||
| Can be overridden by subclasses to handle a request without needing to | |||||
| listen on the message stream. | |||||
| If the request is responded to within this method, it will not be | |||||
| forwarded on to the message stream. | |||||
| """ | |||||
| pass | |||||
| def _received_notification(self, notification: ReceiveNotificationT) -> None: | |||||
| """ | |||||
| Can be overridden by subclasses to handle a notification without needing | |||||
| to listen on the message stream. | |||||
| """ | |||||
| pass | |||||
| def send_progress_notification( | |||||
| self, progress_token: str | int, progress: float, total: float | None = None | |||||
| ) -> None: | |||||
| """ | |||||
| Sends a progress notification for a request that is currently being | |||||
| processed. | |||||
| """ | |||||
| pass | |||||
| def _handle_incoming( | |||||
| self, | |||||
| req: RequestResponder[ReceiveRequestT, SendResultT] | ReceiveNotificationT | Exception, | |||||
| ) -> None: | |||||
| """A generic handler for incoming messages. Overwritten by subclasses.""" | |||||
| pass | 
| from datetime import timedelta | |||||
| from typing import Any, Protocol | |||||
| from pydantic import AnyUrl, TypeAdapter | |||||
| from configs import dify_config | |||||
| from core.mcp import types | |||||
| from core.mcp.entities import SUPPORTED_PROTOCOL_VERSIONS, RequestContext | |||||
| from core.mcp.session.base_session import BaseSession, RequestResponder | |||||
| DEFAULT_CLIENT_INFO = types.Implementation(name="Dify", version=dify_config.project.version) | |||||
class SamplingFnT(Protocol):
    """Callback signature for handling server-initiated sampling requests."""

    def __call__(
        self,
        context: RequestContext["ClientSession", Any],
        params: types.CreateMessageRequestParams,
    ) -> types.CreateMessageResult | types.ErrorData: ...
class ListRootsFnT(Protocol):
    """Callback signature for answering the server's roots/list request."""

    def __call__(self, context: RequestContext["ClientSession", Any]) -> types.ListRootsResult | types.ErrorData: ...
class LoggingFnT(Protocol):
    """Callback signature for consuming server log-message notifications."""

    def __call__(
        self,
        params: types.LoggingMessageNotificationParams,
    ) -> None: ...
class MessageHandlerFnT(Protocol):
    """Callback signature for the catch-all incoming-message handler."""

    def __call__(
        self,
        message: RequestResponder[types.ServerRequest, types.ClientResult] | types.ServerNotification | Exception,
    ) -> None: ...
def _default_message_handler(
    message: RequestResponder[types.ServerRequest, types.ClientResult] | types.ServerNotification | Exception,
) -> None:
    """Default catch-all handler: surface transport errors, ignore the rest.

    Raises:
        ValueError: when the receive loop forwarded an Exception, so failures
            are not silently swallowed.
    """
    # Original code wrote `isinstance(message, (types.ServerNotification | RequestResponder))`
    # — a one-element tuple wrapping a PEP-604 union guarding a dead `pass`
    # branch. The branch was a no-op, so it is removed; notifications and
    # request responders are intentionally ignored by default.
    if isinstance(message, Exception):
        raise ValueError(str(message))
def _default_sampling_callback(
    context: RequestContext["ClientSession", Any],
    params: types.CreateMessageRequestParams,
) -> types.CreateMessageResult | types.ErrorData:
    """Reject sampling requests: this client does not support sampling."""
    rejection = types.ErrorData(code=types.INVALID_REQUEST, message="Sampling not supported")
    return rejection
def _default_list_roots_callback(
    context: RequestContext["ClientSession", Any],
) -> types.ListRootsResult | types.ErrorData:
    """Reject roots listing: this client does not expose any roots."""
    rejection = types.ErrorData(code=types.INVALID_REQUEST, message="List roots not supported")
    return rejection
def _default_logging_callback(
    params: types.LoggingMessageNotificationParams,
) -> None:
    """Default logging callback: silently drop server log messages."""
| ClientResponse: TypeAdapter[types.ClientResult | types.ErrorData] = TypeAdapter(types.ClientResult | types.ErrorData) | |||||
class ClientSession(
    BaseSession[
        types.ClientRequest,
        types.ClientNotification,
        types.ClientResult,
        types.ServerRequest,
        types.ServerNotification,
    ]
):
    """Client side of a synchronous MCP session.

    Runs the initialize/initialized handshake and exposes one typed helper
    per MCP client request (ping, resources, tools, prompts, completion).
    Server-initiated requests and notifications are dispatched to the
    callbacks supplied at construction time.
    """

    def __init__(
        self,
        read_stream,
        write_stream,
        read_timeout_seconds: timedelta | None = None,
        sampling_callback: SamplingFnT | None = None,
        list_roots_callback: ListRootsFnT | None = None,
        logging_callback: LoggingFnT | None = None,
        message_handler: MessageHandlerFnT | None = None,
        client_info: types.Implementation | None = None,
    ) -> None:
        super().__init__(
            read_stream,
            write_stream,
            types.ServerRequest,
            types.ServerNotification,
            read_timeout_seconds=read_timeout_seconds,
        )
        # Every callback falls back to a module-level default so handlers are
        # always callable without None checks.
        self._client_info = client_info or DEFAULT_CLIENT_INFO
        self._sampling_callback = sampling_callback or _default_sampling_callback
        self._list_roots_callback = list_roots_callback or _default_list_roots_callback
        self._logging_callback = logging_callback or _default_logging_callback
        self._message_handler = message_handler or _default_message_handler

    def initialize(self) -> types.InitializeResult:
        """Perform the MCP handshake.

        Sends ``initialize``, verifies the server's protocol version against
        the supported set, then emits ``notifications/initialized``.

        Raises:
            RuntimeError: if the server answers with an unsupported protocol version.
        """
        sampling = types.SamplingCapability()
        roots = types.RootsCapability(
            # TODO: Should this be based on whether we
            # _will_ send notifications, or only whether
            # they're supported?
            listChanged=True,
        )
        result = self.send_request(
            types.ClientRequest(
                types.InitializeRequest(
                    method="initialize",
                    params=types.InitializeRequestParams(
                        protocolVersion=types.LATEST_PROTOCOL_VERSION,
                        capabilities=types.ClientCapabilities(
                            sampling=sampling,
                            experimental=None,
                            roots=roots,
                        ),
                        clientInfo=self._client_info,
                    ),
                )
            ),
            types.InitializeResult,
        )
        if result.protocolVersion not in SUPPORTED_PROTOCOL_VERSIONS:
            raise RuntimeError(f"Unsupported protocol version from the server: {result.protocolVersion}")
        self.send_notification(
            types.ClientNotification(types.InitializedNotification(method="notifications/initialized"))
        )
        return result

    def send_ping(self) -> types.EmptyResult:
        """Send a ping request."""
        return self.send_request(
            types.ClientRequest(
                types.PingRequest(
                    method="ping",
                )
            ),
            types.EmptyResult,
        )

    def send_progress_notification(
        self, progress_token: str | int, progress: float, total: float | None = None
    ) -> None:
        """Send a progress notification."""
        self.send_notification(
            types.ClientNotification(
                types.ProgressNotification(
                    method="notifications/progress",
                    params=types.ProgressNotificationParams(
                        progressToken=progress_token,
                        progress=progress,
                        total=total,
                    ),
                ),
            )
        )

    def set_logging_level(self, level: types.LoggingLevel) -> types.EmptyResult:
        """Send a logging/setLevel request."""
        return self.send_request(
            types.ClientRequest(
                types.SetLevelRequest(
                    method="logging/setLevel",
                    params=types.SetLevelRequestParams(level=level),
                )
            ),
            types.EmptyResult,
        )

    def list_resources(self) -> types.ListResourcesResult:
        """Send a resources/list request."""
        return self.send_request(
            types.ClientRequest(
                types.ListResourcesRequest(
                    method="resources/list",
                )
            ),
            types.ListResourcesResult,
        )

    def list_resource_templates(self) -> types.ListResourceTemplatesResult:
        """Send a resources/templates/list request."""
        return self.send_request(
            types.ClientRequest(
                types.ListResourceTemplatesRequest(
                    method="resources/templates/list",
                )
            ),
            types.ListResourceTemplatesResult,
        )

    def read_resource(self, uri: AnyUrl) -> types.ReadResourceResult:
        """Send a resources/read request."""
        return self.send_request(
            types.ClientRequest(
                types.ReadResourceRequest(
                    method="resources/read",
                    params=types.ReadResourceRequestParams(uri=uri),
                )
            ),
            types.ReadResourceResult,
        )

    def subscribe_resource(self, uri: AnyUrl) -> types.EmptyResult:
        """Send a resources/subscribe request."""
        return self.send_request(
            types.ClientRequest(
                types.SubscribeRequest(
                    method="resources/subscribe",
                    params=types.SubscribeRequestParams(uri=uri),
                )
            ),
            types.EmptyResult,
        )

    def unsubscribe_resource(self, uri: AnyUrl) -> types.EmptyResult:
        """Send a resources/unsubscribe request."""
        return self.send_request(
            types.ClientRequest(
                types.UnsubscribeRequest(
                    method="resources/unsubscribe",
                    params=types.UnsubscribeRequestParams(uri=uri),
                )
            ),
            types.EmptyResult,
        )

    def call_tool(
        self,
        name: str,
        arguments: dict[str, Any] | None = None,
        read_timeout_seconds: timedelta | None = None,
    ) -> types.CallToolResult:
        """Send a tools/call request."""
        # The per-call timeout overrides the session timeout for this request only.
        return self.send_request(
            types.ClientRequest(
                types.CallToolRequest(
                    method="tools/call",
                    params=types.CallToolRequestParams(name=name, arguments=arguments),
                )
            ),
            types.CallToolResult,
            request_read_timeout_seconds=read_timeout_seconds,
        )

    def list_prompts(self) -> types.ListPromptsResult:
        """Send a prompts/list request."""
        return self.send_request(
            types.ClientRequest(
                types.ListPromptsRequest(
                    method="prompts/list",
                )
            ),
            types.ListPromptsResult,
        )

    def get_prompt(self, name: str, arguments: dict[str, str] | None = None) -> types.GetPromptResult:
        """Send a prompts/get request."""
        return self.send_request(
            types.ClientRequest(
                types.GetPromptRequest(
                    method="prompts/get",
                    params=types.GetPromptRequestParams(name=name, arguments=arguments),
                )
            ),
            types.GetPromptResult,
        )

    def complete(
        self,
        ref: types.ResourceReference | types.PromptReference,
        argument: dict[str, str],
    ) -> types.CompleteResult:
        """Send a completion/complete request."""
        return self.send_request(
            types.ClientRequest(
                types.CompleteRequest(
                    method="completion/complete",
                    params=types.CompleteRequestParams(
                        ref=ref,
                        argument=types.CompletionArgument(**argument),
                    ),
                )
            ),
            types.CompleteResult,
        )

    def list_tools(self) -> types.ListToolsResult:
        """Send a tools/list request."""
        return self.send_request(
            types.ClientRequest(
                types.ListToolsRequest(
                    method="tools/list",
                )
            ),
            types.ListToolsResult,
        )

    def send_roots_list_changed(self) -> None:
        """Send a roots/list_changed notification."""
        self.send_notification(
            types.ClientNotification(
                types.RootsListChangedNotification(
                    method="notifications/roots/list_changed",
                )
            )
        )

    def _received_request(self, responder: RequestResponder[types.ServerRequest, types.ClientResult]) -> None:
        # Answer server-initiated requests inline: sampling and roots/list go
        # through the configured callbacks; ping is answered with EmptyResult.
        # Any other request type falls through unanswered and is forwarded to
        # the message stream by the base receive loop.
        ctx = RequestContext[ClientSession, Any](
            request_id=responder.request_id,
            meta=responder.request_meta,
            session=self,
            lifespan_context=None,
        )
        match responder.request.root:
            case types.CreateMessageRequest(params=params):
                with responder:
                    response = self._sampling_callback(ctx, params)
                    client_response = ClientResponse.validate_python(response)
                    responder.respond(client_response)
            case types.ListRootsRequest():
                with responder:
                    list_roots_response = self._list_roots_callback(ctx)
                    client_response = ClientResponse.validate_python(list_roots_response)
                    responder.respond(client_response)
            case types.PingRequest():
                with responder:
                    return responder.respond(types.ClientResult(root=types.EmptyResult()))

    def _handle_incoming(
        self,
        req: RequestResponder[types.ServerRequest, types.ClientResult] | types.ServerNotification | Exception,
    ) -> None:
        """Handle incoming messages by forwarding to the message handler."""
        self._message_handler(req)

    def _received_notification(self, notification: types.ServerNotification) -> None:
        """Handle notifications from the server."""
        # Process specific notification types
        match notification.root:
            case types.LoggingMessageNotification(params=params):
                self._logging_callback(params)
            case _:
                # All other notification types are ignored here.
                pass
| import json | |||||
| import httpx | |||||
| from configs import dify_config | |||||
| from core.mcp.types import ErrorData, JSONRPCError | |||||
| from core.model_runtime.utils.encoders import jsonable_encoder | |||||
# TLS verification flag for outbound MCP HTTP clients, taken from dify_config.
HTTP_REQUEST_NODE_SSL_VERIFY = dify_config.HTTP_REQUEST_NODE_SSL_VERIFY
# HTTP status codes conventionally treated as retryable.
# NOTE(review): not referenced in this chunk — confirm callers elsewhere use it.
STATUS_FORCELIST = [429, 500, 502, 503, 504]
def create_ssrf_proxy_mcp_http_client(
    headers: dict[str, str] | None = None,
    timeout: httpx.Timeout | None = None,
) -> httpx.Client:
    """Create an HTTPX client with SSRF proxy configuration for MCP connections.

    Args:
        headers: Optional headers to include in the client
        timeout: Optional timeout configuration

    Returns:
        Configured httpx.Client with proxy settings
    """
    # Options common to every configuration branch.
    shared_options: dict[str, Any] = {
        "verify": HTTP_REQUEST_NODE_SSL_VERIFY,
        "headers": headers or {},
        "timeout": timeout,
        "follow_redirects": True,
    }
    if dify_config.SSRF_PROXY_ALL_URL:
        # One proxy URL covers all schemes.
        return httpx.Client(proxy=dify_config.SSRF_PROXY_ALL_URL, **shared_options)
    if dify_config.SSRF_PROXY_HTTP_URL and dify_config.SSRF_PROXY_HTTPS_URL:
        # Scheme-specific proxies, each mounted on its own transport.
        proxy_mounts = {
            "http://": httpx.HTTPTransport(proxy=dify_config.SSRF_PROXY_HTTP_URL, verify=HTTP_REQUEST_NODE_SSL_VERIFY),
            "https://": httpx.HTTPTransport(
                proxy=dify_config.SSRF_PROXY_HTTPS_URL, verify=HTTP_REQUEST_NODE_SSL_VERIFY
            ),
        }
        return httpx.Client(mounts=proxy_mounts, **shared_options)
    # No proxy configured: direct connection.
    return httpx.Client(**shared_options)
def ssrf_proxy_sse_connect(url, **kwargs):
    """Connect to SSE endpoint with SSRF proxy protection.

    This function creates an SSE connection using the configured proxy settings
    to prevent SSRF attacks when connecting to external endpoints.

    Args:
        url: The SSE endpoint URL
        **kwargs: Additional arguments passed to the SSE connection

    Returns:
        EventSource object for SSE streaming

    NOTE(review): when the client is created here and connect_sse() succeeds,
    nothing in this function closes that client afterwards — confirm the
    caller (or the returned context manager) owns its lifetime.
    """
    from httpx_sse import connect_sse

    # Extract client if provided, otherwise create one
    client = kwargs.pop("client", None)
    if client is None:
        # Create client with SSRF proxy configuration
        timeout = kwargs.pop(
            "timeout",
            httpx.Timeout(
                timeout=dify_config.SSRF_DEFAULT_TIME_OUT,
                connect=dify_config.SSRF_DEFAULT_CONNECT_TIME_OUT,
                read=dify_config.SSRF_DEFAULT_READ_TIME_OUT,
                write=dify_config.SSRF_DEFAULT_WRITE_TIME_OUT,
            ),
        )
        headers = kwargs.pop("headers", {})
        client = create_ssrf_proxy_mcp_http_client(headers=headers, timeout=timeout)
        client_provided = False
    else:
        client_provided = True
    # Extract method if provided, default to GET
    method = kwargs.pop("method", "GET")
    try:
        return connect_sse(client, method, url, **kwargs)
    except Exception:
        # If we created the client, we need to clean it up on error
        if not client_provided:
            client.close()
        raise
def create_mcp_error_response(request_id: int | str | None, code: int, message: str, data=None):
    """Create MCP error response.

    Yields a single SSE ``message`` event whose data is a JSON-RPC error object.

    Args:
        request_id: id of the request this error answers; ``None`` falls back to 1.
        code: JSON-RPC error code.
        message: human-readable error message.
        data: optional additional error payload.

    Yields:
        bytes: the UTF-8 encoded SSE frame.
    """
    error_data = ErrorData(code=code, message=message, data=data)
    # Explicit None check instead of `request_id or 1`: JSON-RPC ids of 0 or ""
    # are falsy but legitimate and must not be rewritten to 1.
    json_response = JSONRPCError(
        jsonrpc="2.0",
        id=request_id if request_id is not None else 1,
        error=error_data,
    )
    json_data = json.dumps(jsonable_encoder(json_response))
    sse_content = f"event: message\ndata: {json_data}\n\n".encode()
    yield sse_content
| # deprecated, should not use. | # deprecated, should not use. | ||||
| SYSTEM_FILES = CommonParameterType.SYSTEM_FILES.value | SYSTEM_FILES = CommonParameterType.SYSTEM_FILES.value | ||||
| # MCP object and array type parameters | |||||
| ARRAY = CommonParameterType.ARRAY.value | |||||
| OBJECT = CommonParameterType.OBJECT.value | |||||
| class MCPServerParameterType(enum.StrEnum): | |||||
| """ | |||||
| MCP server got complex parameter types | |||||
| """ | |||||
| ARRAY = "array" | |||||
| OBJECT = "object" | |||||
| class PluginParameterAutoGenerate(BaseModel): | class PluginParameterAutoGenerate(BaseModel): | ||||
| class Type(enum.StrEnum): | class Type(enum.StrEnum): | ||||
| if value and not isinstance(value, list): | if value and not isinstance(value, list): | ||||
| raise ValueError("The tools selector must be a list.") | raise ValueError("The tools selector must be a list.") | ||||
| return value | return value | ||||
| case PluginParameterType.ARRAY: | |||||
| if not isinstance(value, list): | |||||
| # Try to parse JSON string for arrays | |||||
| if isinstance(value, str): | |||||
| try: | |||||
| import json | |||||
| parsed_value = json.loads(value) | |||||
| if isinstance(parsed_value, list): | |||||
| return parsed_value | |||||
| except (json.JSONDecodeError, ValueError): | |||||
| pass | |||||
| return [value] | |||||
| return value | |||||
| case PluginParameterType.OBJECT: | |||||
| if not isinstance(value, dict): | |||||
| # Try to parse JSON string for objects | |||||
| if isinstance(value, str): | |||||
| try: | |||||
| import json | |||||
| parsed_value = json.loads(value) | |||||
| if isinstance(parsed_value, dict): | |||||
| return parsed_value | |||||
| except (json.JSONDecodeError, ValueError): | |||||
| pass | |||||
| return {} | |||||
| return value | |||||
| case _: | case _: | ||||
| return str(value) | return str(value) | ||||
| except ValueError: | except ValueError: | 
| class Meta(BaseModel): | class Meta(BaseModel): | ||||
| minimum_dify_version: Optional[str] = Field(default=None, pattern=r"^\d{1,4}(\.\d{1,4}){1,3}(-\w{1,16})?$") | minimum_dify_version: Optional[str] = Field(default=None, pattern=r"^\d{1,4}(\.\d{1,4}){1,3}(-\w{1,16})?$") | ||||
| version: Optional[str] = Field(default=None) | |||||
| version: str = Field(..., pattern=r"^\d{1,4}(\.\d{1,4}){1,3}(-\w{1,16})?$") | version: str = Field(..., pattern=r"^\d{1,4}(\.\d{1,4}){1,3}(-\w{1,16})?$") | ||||
| author: Optional[str] = Field(..., pattern=r"^[a-zA-Z0-9_-]{1,64}$") | author: Optional[str] = Field(..., pattern=r"^[a-zA-Z0-9_-]{1,64}$") | 
| plugin_unique_identifier: str | plugin_unique_identifier: str | ||||
| plugin_id: str | plugin_id: str | ||||
| declaration: AgentProviderEntityWithPlugin | declaration: AgentProviderEntityWithPlugin | ||||
| meta: PluginDeclaration.Meta | |||||
| class PluginBasicBooleanResponse(BaseModel): | class PluginBasicBooleanResponse(BaseModel): | 
| Request to invoke a tool | Request to invoke a tool | ||||
| """ | """ | ||||
| tool_type: Literal["builtin", "workflow", "api"] | |||||
| tool_type: Literal["builtin", "workflow", "api", "mcp"] | |||||
| provider: str | provider: str | ||||
| tool: str | tool: str | ||||
| tool_parameters: dict | tool_parameters: dict | 
| from typing import Literal, Optional | |||||
| from datetime import datetime | |||||
| from typing import Any, Literal, Optional | |||||
| from pydantic import BaseModel, Field, field_validator | from pydantic import BaseModel, Field, field_validator | ||||
| output_schema: Optional[dict] = None | output_schema: Optional[dict] = None | ||||
| ToolProviderTypeApiLiteral = Optional[Literal["builtin", "api", "workflow"]] | |||||
| ToolProviderTypeApiLiteral = Optional[Literal["builtin", "api", "workflow", "mcp"]] | |||||
| class ToolProviderApiEntity(BaseModel): | class ToolProviderApiEntity(BaseModel): | ||||
| plugin_unique_identifier: Optional[str] = Field(default="", description="The unique identifier of the tool") | plugin_unique_identifier: Optional[str] = Field(default="", description="The unique identifier of the tool") | ||||
| tools: list[ToolApiEntity] = Field(default_factory=list) | tools: list[ToolApiEntity] = Field(default_factory=list) | ||||
| labels: list[str] = Field(default_factory=list) | labels: list[str] = Field(default_factory=list) | ||||
| # MCP | |||||
| server_url: Optional[str] = Field(default="", description="The server url of the tool") | |||||
| updated_at: int = Field(default_factory=lambda: int(datetime.now().timestamp())) | |||||
| server_identifier: Optional[str] = Field(default="", description="The server identifier of the MCP tool") | |||||
| @field_validator("tools", mode="before") | @field_validator("tools", mode="before") | ||||
| @classmethod | @classmethod | ||||
| for parameter in tool.get("parameters"): | for parameter in tool.get("parameters"): | ||||
| if parameter.get("type") == ToolParameter.ToolParameterType.SYSTEM_FILES.value: | if parameter.get("type") == ToolParameter.ToolParameterType.SYSTEM_FILES.value: | ||||
| parameter["type"] = "files" | parameter["type"] = "files" | ||||
| if parameter.get("input_schema") is None: | |||||
| parameter.pop("input_schema", None) | |||||
| # ------------- | # ------------- | ||||
| optional_fields = self.optional_field("server_url", self.server_url) | |||||
| if self.type == ToolProviderType.MCP.value: | |||||
| optional_fields.update(self.optional_field("updated_at", self.updated_at)) | |||||
| optional_fields.update(self.optional_field("server_identifier", self.server_identifier)) | |||||
| return { | return { | ||||
| "id": self.id, | "id": self.id, | ||||
| "author": self.author, | "author": self.author, | ||||
| "allow_delete": self.allow_delete, | "allow_delete": self.allow_delete, | ||||
| "tools": tools, | "tools": tools, | ||||
| "labels": self.labels, | "labels": self.labels, | ||||
| **optional_fields, | |||||
| } | } | ||||
| def optional_field(self, key: str, value: Any) -> dict: | |||||
| """Return dict with key-value if value is truthy, empty dict otherwise.""" | |||||
| return {key: value} if value else {} | 
| from core.entities.provider_entities import ProviderConfig | from core.entities.provider_entities import ProviderConfig | ||||
| from core.plugin.entities.parameters import ( | from core.plugin.entities.parameters import ( | ||||
| MCPServerParameterType, | |||||
| PluginParameter, | PluginParameter, | ||||
| PluginParameterOption, | PluginParameterOption, | ||||
| PluginParameterType, | PluginParameterType, | ||||
| API = "api" | API = "api" | ||||
| APP = "app" | APP = "app" | ||||
| DATASET_RETRIEVAL = "dataset-retrieval" | DATASET_RETRIEVAL = "dataset-retrieval" | ||||
| MCP = "mcp" | |||||
| @classmethod | @classmethod | ||||
| def value_of(cls, value: str) -> "ToolProviderType": | def value_of(cls, value: str) -> "ToolProviderType": | ||||
| MODEL_SELECTOR = PluginParameterType.MODEL_SELECTOR.value | MODEL_SELECTOR = PluginParameterType.MODEL_SELECTOR.value | ||||
| DYNAMIC_SELECT = PluginParameterType.DYNAMIC_SELECT.value | DYNAMIC_SELECT = PluginParameterType.DYNAMIC_SELECT.value | ||||
| # MCP object and array type parameters | |||||
| ARRAY = MCPServerParameterType.ARRAY.value | |||||
| OBJECT = MCPServerParameterType.OBJECT.value | |||||
| # deprecated, should not use. | # deprecated, should not use. | ||||
| SYSTEM_FILES = PluginParameterType.SYSTEM_FILES.value | SYSTEM_FILES = PluginParameterType.SYSTEM_FILES.value | ||||
| human_description: Optional[I18nObject] = Field(default=None, description="The description presented to the user") | human_description: Optional[I18nObject] = Field(default=None, description="The description presented to the user") | ||||
| form: ToolParameterForm = Field(..., description="The form of the parameter, schema/form/llm") | form: ToolParameterForm = Field(..., description="The form of the parameter, schema/form/llm") | ||||
| llm_description: Optional[str] = None | llm_description: Optional[str] = None | ||||
| # MCP object and array type parameters use this field to store the schema | |||||
| input_schema: Optional[dict] = None | |||||
| @classmethod | @classmethod | ||||
| def get_simple_instance( | def get_simple_instance( | 
| import json | |||||
| from typing import Any | |||||
| from core.mcp.types import Tool as RemoteMCPTool | |||||
| from core.tools.__base.tool_provider import ToolProviderController | |||||
| from core.tools.__base.tool_runtime import ToolRuntime | |||||
| from core.tools.entities.common_entities import I18nObject | |||||
| from core.tools.entities.tool_entities import ( | |||||
| ToolDescription, | |||||
| ToolEntity, | |||||
| ToolIdentity, | |||||
| ToolProviderEntityWithPlugin, | |||||
| ToolProviderIdentity, | |||||
| ToolProviderType, | |||||
| ) | |||||
| from core.tools.mcp_tool.tool import MCPTool | |||||
| from models.tools import MCPToolProvider | |||||
| from services.tools.tools_transform_service import ToolTransformService | |||||
class MCPToolProviderController(ToolProviderController):
    """Tool provider controller backed by a remote MCP (Model Context Protocol) server.

    Instances are normally built from a persisted ``MCPToolProvider`` row via
    ``_from_db``; the JSON tool declarations stored on the row are converted
    into ``ToolEntity`` objects once, at construction time.
    """

    provider_id: str
    entity: ToolProviderEntityWithPlugin

    def __init__(self, entity: ToolProviderEntityWithPlugin, provider_id: str, tenant_id: str, server_url: str) -> None:
        super().__init__(entity)
        self.entity = entity
        self.tenant_id = tenant_id
        self.provider_id = provider_id
        self.server_url = server_url

    @property
    def provider_type(self) -> ToolProviderType:
        """
        returns the type of the provider

        :return: type of the provider
        """
        return ToolProviderType.MCP

    @classmethod
    def _from_db(cls, db_provider: MCPToolProvider) -> "MCPToolProviderController":
        """
        Build a controller from a persisted MCP provider record.

        :param db_provider: provider row; ``tools`` holds a JSON array of
            remote MCP tool declarations
        :return: a fully populated controller
        """
        tools_data = json.loads(db_provider.tools)
        remote_mcp_tools = [RemoteMCPTool(**tool) for tool in tools_data]
        user = db_provider.load_user()
        tools = [
            ToolEntity(
                identity=ToolIdentity(
                    author=user.name if user else "Anonymous",
                    name=remote_mcp_tool.name,
                    label=I18nObject(en_US=remote_mcp_tool.name, zh_Hans=remote_mcp_tool.name),
                    provider=db_provider.server_identifier,
                    icon=db_provider.icon,
                ),
                parameters=ToolTransformService.convert_mcp_schema_to_parameter(remote_mcp_tool.inputSchema),
                description=ToolDescription(
                    human=I18nObject(
                        en_US=remote_mcp_tool.description or "", zh_Hans=remote_mcp_tool.description or ""
                    ),
                    llm=remote_mcp_tool.description or "",
                ),
                output_schema=None,
                has_runtime_parameters=len(remote_mcp_tool.inputSchema) > 0,
            )
            for remote_mcp_tool in remote_mcp_tools
        ]
        return cls(
            entity=ToolProviderEntityWithPlugin(
                identity=ToolProviderIdentity(
                    author=user.name if user else "Anonymous",
                    name=db_provider.name,
                    label=I18nObject(en_US=db_provider.name, zh_Hans=db_provider.name),
                    description=I18nObject(en_US="", zh_Hans=""),
                    icon=db_provider.icon,
                ),
                plugin_id=None,
                credentials_schema=[],
                tools=tools,
            ),
            provider_id=db_provider.server_identifier or "",
            tenant_id=db_provider.tenant_id or "",
            server_url=db_provider.decrypted_server_url,
        )

    def _validate_credentials(self, user_id: str, credentials: dict[str, Any]) -> None:
        """
        validate the credentials of the provider

        MCP providers declare no credential schema, so there is nothing to check.
        """
        pass

    def get_tool(self, tool_name: str) -> MCPTool:  # type: ignore
        """
        Return the tool with the given name.

        :raises ValueError: if the server declares no tool with that name
        """
        tool_entity = next(
            (tool_entity for tool_entity in self.entity.tools if tool_entity.identity.name == tool_name), None
        )
        if not tool_entity:
            raise ValueError(f"Tool with name {tool_name} not found")
        return MCPTool(
            entity=tool_entity,
            runtime=ToolRuntime(tenant_id=self.tenant_id),
            tenant_id=self.tenant_id,
            icon=self.entity.identity.icon,
            server_url=self.server_url,
            provider_id=self.provider_id,
        )

    def get_tools(self) -> list[MCPTool]:  # type: ignore
        """Return an MCPTool instance for every tool declared by the server."""
        return [
            MCPTool(
                entity=tool_entity,
                runtime=ToolRuntime(tenant_id=self.tenant_id),
                tenant_id=self.tenant_id,
                icon=self.entity.identity.icon,
                server_url=self.server_url,
                provider_id=self.provider_id,
            )
            for tool_entity in self.entity.tools
        ]
| import base64 | |||||
| import json | |||||
| from collections.abc import Generator | |||||
| from typing import Any, Optional | |||||
| from core.mcp.error import MCPAuthError, MCPConnectionError | |||||
| from core.mcp.mcp_client import MCPClient | |||||
| from core.mcp.types import ImageContent, TextContent | |||||
| from core.tools.__base.tool import Tool | |||||
| from core.tools.__base.tool_runtime import ToolRuntime | |||||
| from core.tools.entities.tool_entities import ToolEntity, ToolInvokeMessage, ToolParameter, ToolProviderType | |||||
class MCPTool(Tool):
    """Tool implementation that proxies invocations to a remote MCP server."""

    tenant_id: str
    icon: str
    runtime_parameters: Optional[list[ToolParameter]]
    server_url: str
    provider_id: str

    def __init__(
        self, entity: ToolEntity, runtime: ToolRuntime, tenant_id: str, icon: str, server_url: str, provider_id: str
    ) -> None:
        super().__init__(entity, runtime)
        self.tenant_id = tenant_id
        self.icon = icon
        # Not yet fetched from the server; populated lazily elsewhere.
        self.runtime_parameters = None
        self.server_url = server_url
        self.provider_id = provider_id

    def tool_provider_type(self) -> ToolProviderType:
        return ToolProviderType.MCP

    def _invoke(
        self,
        user_id: str,
        tool_parameters: dict[str, Any],
        conversation_id: Optional[str] = None,
        app_id: Optional[str] = None,
        message_id: Optional[str] = None,
    ) -> Generator[ToolInvokeMessage, None, None]:
        """
        Invoke the remote MCP tool and stream its content back as messages.

        Text content that parses as a JSON object/array is emitted as JSON
        message(s); any other text is emitted verbatim; image content is
        emitted as a binary blob.

        :raises ToolInvokeError: on auth failure, connection failure, or any
            other error raised while calling the server
        """
        from core.tools.errors import ToolInvokeError

        try:
            with MCPClient(self.server_url, self.provider_id, self.tenant_id, authed=True) as mcp_client:
                # Drop None / blank-string parameters before sending.
                tool_parameters = self._handle_none_parameter(tool_parameters)
                result = mcp_client.invoke_tool(tool_name=self.entity.identity.name, tool_args=tool_parameters)
        except MCPAuthError as e:
            raise ToolInvokeError("Please auth the tool first") from e
        except MCPConnectionError as e:
            raise ToolInvokeError(f"Failed to connect to MCP server: {e}") from e
        except Exception as e:
            raise ToolInvokeError(f"Failed to invoke tool: {e}") from e

        for content in result.content:
            if isinstance(content, TextContent):
                try:
                    content_json = json.loads(content.text)
                    if isinstance(content_json, dict):
                        yield self.create_json_message(content_json)
                    elif isinstance(content_json, list):
                        for item in content_json:
                            yield self.create_json_message(item)
                    else:
                        # Valid JSON but a scalar (number/string/bool/null):
                        # treat it as plain text.
                        yield self.create_text_message(content.text)
                except json.JSONDecodeError:
                    yield self.create_text_message(content.text)
            elif isinstance(content, ImageContent):
                yield self.create_blob_message(
                    blob=base64.b64decode(content.data), meta={"mime_type": content.mimeType}
                )

    def fork_tool_runtime(self, runtime: ToolRuntime) -> "MCPTool":
        """Return a copy of this tool bound to a different runtime."""
        return MCPTool(
            entity=self.entity,
            runtime=runtime,
            tenant_id=self.tenant_id,
            icon=self.icon,
            server_url=self.server_url,
            provider_id=self.provider_id,
        )

    def _handle_none_parameter(self, parameter: dict[str, Any]) -> dict[str, Any]:
        """
        Return a copy of *parameter* with None values and blank/whitespace-only
        strings removed, so they are omitted from the MCP invocation entirely.
        """
        return {
            key: value
            for key, value in parameter.items()
            if value is not None and not (isinstance(value, str) and value.strip() == "")
        }
| from collections.abc import Generator | from collections.abc import Generator | ||||
| from os import listdir, path | from os import listdir, path | ||||
| from threading import Lock | from threading import Lock | ||||
| from typing import TYPE_CHECKING, Any, Union, cast | |||||
| from typing import TYPE_CHECKING, Any, Literal, Optional, Union, cast | |||||
| from yarl import URL | from yarl import URL | ||||
| from core.plugin.impl.tool import PluginToolManager | from core.plugin.impl.tool import PluginToolManager | ||||
| from core.tools.__base.tool_provider import ToolProviderController | from core.tools.__base.tool_provider import ToolProviderController | ||||
| from core.tools.__base.tool_runtime import ToolRuntime | from core.tools.__base.tool_runtime import ToolRuntime | ||||
| from core.tools.mcp_tool.provider import MCPToolProviderController | |||||
| from core.tools.mcp_tool.tool import MCPTool | |||||
| from core.tools.plugin_tool.provider import PluginToolProviderController | from core.tools.plugin_tool.provider import PluginToolProviderController | ||||
| from core.tools.plugin_tool.tool import PluginTool | from core.tools.plugin_tool.tool import PluginTool | ||||
| from core.tools.workflow_as_tool.provider import WorkflowToolProviderController | from core.tools.workflow_as_tool.provider import WorkflowToolProviderController | ||||
| from core.workflow.entities.variable_pool import VariablePool | |||||
| from services.tools.mcp_tools_mange_service import MCPToolManageService | |||||
| if TYPE_CHECKING: | if TYPE_CHECKING: | ||||
| from core.workflow.nodes.tool.entities import ToolEntity | from core.workflow.nodes.tool.entities import ToolEntity | ||||
| ) | ) | ||||
| from core.tools.workflow_as_tool.tool import WorkflowTool | from core.tools.workflow_as_tool.tool import WorkflowTool | ||||
| from extensions.ext_database import db | from extensions.ext_database import db | ||||
| from models.tools import ApiToolProvider, BuiltinToolProvider, WorkflowToolProvider | |||||
| from models.tools import ApiToolProvider, BuiltinToolProvider, MCPToolProvider, WorkflowToolProvider | |||||
| from services.tools.tools_transform_service import ToolTransformService | from services.tools.tools_transform_service import ToolTransformService | ||||
| logger = logging.getLogger(__name__) | logger = logging.getLogger(__name__) | ||||
| tenant_id: str, | tenant_id: str, | ||||
| invoke_from: InvokeFrom = InvokeFrom.DEBUGGER, | invoke_from: InvokeFrom = InvokeFrom.DEBUGGER, | ||||
| tool_invoke_from: ToolInvokeFrom = ToolInvokeFrom.AGENT, | tool_invoke_from: ToolInvokeFrom = ToolInvokeFrom.AGENT, | ||||
| ) -> Union[BuiltinTool, PluginTool, ApiTool, WorkflowTool]: | |||||
| ) -> Union[BuiltinTool, PluginTool, ApiTool, WorkflowTool, MCPTool]: | |||||
| """ | """ | ||||
| get the tool runtime | get the tool runtime | ||||
| raise NotImplementedError("app provider not implemented") | raise NotImplementedError("app provider not implemented") | ||||
| elif provider_type == ToolProviderType.PLUGIN: | elif provider_type == ToolProviderType.PLUGIN: | ||||
| return cls.get_plugin_provider(provider_id, tenant_id).get_tool(tool_name) | return cls.get_plugin_provider(provider_id, tenant_id).get_tool(tool_name) | ||||
| elif provider_type == ToolProviderType.MCP: | |||||
| return cls.get_mcp_provider_controller(tenant_id, provider_id).get_tool(tool_name) | |||||
| else: | else: | ||||
| raise ToolProviderNotFoundError(f"provider type {provider_type.value} not found") | raise ToolProviderNotFoundError(f"provider type {provider_type.value} not found") | ||||
| app_id: str, | app_id: str, | ||||
| agent_tool: AgentToolEntity, | agent_tool: AgentToolEntity, | ||||
| invoke_from: InvokeFrom = InvokeFrom.DEBUGGER, | invoke_from: InvokeFrom = InvokeFrom.DEBUGGER, | ||||
| variable_pool: Optional[VariablePool] = None, | |||||
| ) -> Tool: | ) -> Tool: | ||||
| """ | """ | ||||
| get the agent tool runtime | get the agent tool runtime | ||||
| ) | ) | ||||
| runtime_parameters = {} | runtime_parameters = {} | ||||
| parameters = tool_entity.get_merged_runtime_parameters() | parameters = tool_entity.get_merged_runtime_parameters() | ||||
| for parameter in parameters: | |||||
| # check file types | |||||
| if ( | |||||
| parameter.type | |||||
| in { | |||||
| ToolParameter.ToolParameterType.SYSTEM_FILES, | |||||
| ToolParameter.ToolParameterType.FILE, | |||||
| ToolParameter.ToolParameterType.FILES, | |||||
| } | |||||
| and parameter.required | |||||
| ): | |||||
| raise ValueError(f"file type parameter {parameter.name} not supported in agent") | |||||
| if parameter.form == ToolParameter.ToolParameterForm.FORM: | |||||
| # save tool parameter to tool entity memory | |||||
| value = parameter.init_frontend_parameter(agent_tool.tool_parameters.get(parameter.name)) | |||||
| runtime_parameters[parameter.name] = value | |||||
| runtime_parameters = cls._convert_tool_parameters_type( | |||||
| parameters, variable_pool, agent_tool.tool_parameters, typ="agent" | |||||
| ) | |||||
| # decrypt runtime parameters | # decrypt runtime parameters | ||||
| encryption_manager = ToolParameterConfigurationManager( | encryption_manager = ToolParameterConfigurationManager( | ||||
| tenant_id=tenant_id, | tenant_id=tenant_id, | ||||
| node_id: str, | node_id: str, | ||||
| workflow_tool: "ToolEntity", | workflow_tool: "ToolEntity", | ||||
| invoke_from: InvokeFrom = InvokeFrom.DEBUGGER, | invoke_from: InvokeFrom = InvokeFrom.DEBUGGER, | ||||
| variable_pool: Optional[VariablePool] = None, | |||||
| ) -> Tool: | ) -> Tool: | ||||
| """ | """ | ||||
| get the workflow tool runtime | get the workflow tool runtime | ||||
| """ | """ | ||||
| tool_runtime = cls.get_tool_runtime( | tool_runtime = cls.get_tool_runtime( | ||||
| provider_type=workflow_tool.provider_type, | provider_type=workflow_tool.provider_type, | ||||
| provider_id=workflow_tool.provider_id, | provider_id=workflow_tool.provider_id, | ||||
| invoke_from=invoke_from, | invoke_from=invoke_from, | ||||
| tool_invoke_from=ToolInvokeFrom.WORKFLOW, | tool_invoke_from=ToolInvokeFrom.WORKFLOW, | ||||
| ) | ) | ||||
| runtime_parameters = {} | |||||
| parameters = tool_runtime.get_merged_runtime_parameters() | |||||
| for parameter in parameters: | |||||
| # save tool parameter to tool entity memory | |||||
| if parameter.form == ToolParameter.ToolParameterForm.FORM: | |||||
| value = parameter.init_frontend_parameter(workflow_tool.tool_configurations.get(parameter.name)) | |||||
| runtime_parameters[parameter.name] = value | |||||
| parameters = tool_runtime.get_merged_runtime_parameters() | |||||
| runtime_parameters = cls._convert_tool_parameters_type( | |||||
| parameters, variable_pool, workflow_tool.tool_configurations, typ="workflow" | |||||
| ) | |||||
| # decrypt runtime parameters | # decrypt runtime parameters | ||||
| encryption_manager = ToolParameterConfigurationManager( | encryption_manager = ToolParameterConfigurationManager( | ||||
| tenant_id=tenant_id, | tenant_id=tenant_id, | ||||
| filters = [] | filters = [] | ||||
| if not typ: | if not typ: | ||||
| filters.extend(["builtin", "api", "workflow"]) | |||||
| filters.extend(["builtin", "api", "workflow", "mcp"]) | |||||
| else: | else: | ||||
| filters.append(typ) | filters.append(typ) | ||||
| labels=labels.get(provider_controller.provider_id, []), | labels=labels.get(provider_controller.provider_id, []), | ||||
| ) | ) | ||||
| result_providers[f"workflow_provider.{user_provider.name}"] = user_provider | result_providers[f"workflow_provider.{user_provider.name}"] = user_provider | ||||
| if "mcp" in filters: | |||||
| mcp_providers = MCPToolManageService.retrieve_mcp_tools(tenant_id, for_list=True) | |||||
| for mcp_provider in mcp_providers: | |||||
| result_providers[f"mcp_provider.{mcp_provider.name}"] = mcp_provider | |||||
| return BuiltinToolProviderSort.sort(list(result_providers.values())) | return BuiltinToolProviderSort.sort(list(result_providers.values())) | ||||
| return controller, provider.credentials | return controller, provider.credentials | ||||
| @classmethod | |||||
| def get_mcp_provider_controller(cls, tenant_id: str, provider_id: str) -> MCPToolProviderController: | |||||
| """ | |||||
| get the api provider | |||||
| :param tenant_id: the id of the tenant | |||||
| :param provider_id: the id of the provider | |||||
| :return: the provider controller, the credentials | |||||
| """ | |||||
| provider: MCPToolProvider | None = ( | |||||
| db.session.query(MCPToolProvider) | |||||
| .filter( | |||||
| MCPToolProvider.server_identifier == provider_id, | |||||
| MCPToolProvider.tenant_id == tenant_id, | |||||
| ) | |||||
| .first() | |||||
| ) | |||||
| if provider is None: | |||||
| raise ToolProviderNotFoundError(f"mcp provider {provider_id} not found") | |||||
| controller = MCPToolProviderController._from_db(provider) | |||||
| return controller | |||||
| @classmethod | @classmethod | ||||
| def user_get_api_provider(cls, provider: str, tenant_id: str) -> dict: | def user_get_api_provider(cls, provider: str, tenant_id: str) -> dict: | ||||
| """ | """ | ||||
| except Exception: | except Exception: | ||||
| return {"background": "#252525", "content": "\ud83d\ude01"} | return {"background": "#252525", "content": "\ud83d\ude01"} | ||||
| @classmethod | |||||
| def generate_mcp_tool_icon_url(cls, tenant_id: str, provider_id: str) -> dict[str, str] | str: | |||||
| try: | |||||
| mcp_provider: MCPToolProvider | None = ( | |||||
| db.session.query(MCPToolProvider) | |||||
| .filter(MCPToolProvider.tenant_id == tenant_id, MCPToolProvider.server_identifier == provider_id) | |||||
| .first() | |||||
| ) | |||||
| if mcp_provider is None: | |||||
| raise ToolProviderNotFoundError(f"mcp provider {provider_id} not found") | |||||
| return mcp_provider.provider_icon | |||||
| except Exception: | |||||
| return {"background": "#252525", "content": "\ud83d\ude01"} | |||||
| @classmethod | @classmethod | ||||
| def get_tool_icon( | def get_tool_icon( | ||||
| cls, | cls, | ||||
| except Exception: | except Exception: | ||||
| return {"background": "#252525", "content": "\ud83d\ude01"} | return {"background": "#252525", "content": "\ud83d\ude01"} | ||||
| raise ValueError(f"plugin provider {provider_id} not found") | raise ValueError(f"plugin provider {provider_id} not found") | ||||
| elif provider_type == ToolProviderType.MCP: | |||||
| return cls.generate_mcp_tool_icon_url(tenant_id, provider_id) | |||||
| else: | else: | ||||
| raise ValueError(f"provider type {provider_type} not found") | raise ValueError(f"provider type {provider_type} not found") | ||||
| @classmethod | |||||
| def _convert_tool_parameters_type( | |||||
| cls, | |||||
| parameters: list[ToolParameter], | |||||
| variable_pool: Optional[VariablePool], | |||||
| tool_configurations: dict[str, Any], | |||||
| typ: Literal["agent", "workflow", "tool"] = "workflow", | |||||
| ) -> dict[str, Any]: | |||||
| """ | |||||
| Convert tool parameters type | |||||
| """ | |||||
| from core.workflow.nodes.tool.entities import ToolNodeData | |||||
| from core.workflow.nodes.tool.exc import ToolParameterError | |||||
| runtime_parameters = {} | |||||
| for parameter in parameters: | |||||
| if ( | |||||
| parameter.type | |||||
| in { | |||||
| ToolParameter.ToolParameterType.SYSTEM_FILES, | |||||
| ToolParameter.ToolParameterType.FILE, | |||||
| ToolParameter.ToolParameterType.FILES, | |||||
| } | |||||
| and parameter.required | |||||
| and typ == "agent" | |||||
| ): | |||||
| raise ValueError(f"file type parameter {parameter.name} not supported in agent") | |||||
| # save tool parameter to tool entity memory | |||||
| if parameter.form == ToolParameter.ToolParameterForm.FORM: | |||||
| if variable_pool: | |||||
| config = tool_configurations.get(parameter.name, {}) | |||||
| if not (config and isinstance(config, dict) and config.get("value") is not None): | |||||
| continue | |||||
| tool_input = ToolNodeData.ToolInput(**tool_configurations.get(parameter.name, {})) | |||||
| if tool_input.type == "variable": | |||||
| variable = variable_pool.get(tool_input.value) | |||||
| if variable is None: | |||||
| raise ToolParameterError(f"Variable {tool_input.value} does not exist") | |||||
| parameter_value = variable.value | |||||
| elif tool_input.type in {"mixed", "constant"}: | |||||
| segment_group = variable_pool.convert_template(str(tool_input.value)) | |||||
| parameter_value = segment_group.text | |||||
| else: | |||||
| raise ToolParameterError(f"Unknown tool input type '{tool_input.type}'") | |||||
| runtime_parameters[parameter.name] = parameter_value | |||||
| else: | |||||
| value = parameter.init_frontend_parameter(tool_configurations.get(parameter.name)) | |||||
| runtime_parameters[parameter.name] = value | |||||
| return runtime_parameters | |||||
| ToolManager.load_hardcoded_providers_cache() | ToolManager.load_hardcoded_providers_cache() | 
| return data | return data | ||||
| def decrypt(self, data: dict[str, str]) -> dict[str, str]: | |||||
| def decrypt(self, data: dict[str, str], use_cache: bool = True) -> dict[str, str]: | |||||
| """ | """ | ||||
| decrypt tool credentials with tenant id | decrypt tool credentials with tenant id | ||||
| return a deep copy of credentials with decrypted values | return a deep copy of credentials with decrypted values | ||||
| """ | """ | ||||
| cache = ToolProviderCredentialsCache( | |||||
| tenant_id=self.tenant_id, | |||||
| identity_id=f"{self.provider_type}.{self.provider_identity}", | |||||
| cache_type=ToolProviderCredentialsCacheType.PROVIDER, | |||||
| ) | |||||
| cached_credentials = cache.get() | |||||
| if cached_credentials: | |||||
| return cached_credentials | |||||
| if use_cache: | |||||
| cache = ToolProviderCredentialsCache( | |||||
| tenant_id=self.tenant_id, | |||||
| identity_id=f"{self.provider_type}.{self.provider_identity}", | |||||
| cache_type=ToolProviderCredentialsCacheType.PROVIDER, | |||||
| ) | |||||
| cached_credentials = cache.get() | |||||
| if cached_credentials: | |||||
| return cached_credentials | |||||
| data = self._deep_copy(data) | data = self._deep_copy(data) | ||||
| # get fields need to be decrypted | # get fields need to be decrypted | ||||
| fields = dict[str, BasicProviderConfig]() | fields = dict[str, BasicProviderConfig]() | ||||
| except Exception: | except Exception: | ||||
| pass | pass | ||||
| cache.set(data) | |||||
| if use_cache: | |||||
| cache.set(data) | |||||
| return data | return data | ||||
| def delete_tool_credentials_cache(self): | def delete_tool_credentials_cache(self): | 
| from core.file import FILE_MODEL_IDENTITY, File, FileTransferMethod | from core.file import FILE_MODEL_IDENTITY, File, FileTransferMethod | ||||
| from core.tools.__base.tool import Tool | from core.tools.__base.tool import Tool | ||||
| from core.tools.__base.tool_runtime import ToolRuntime | from core.tools.__base.tool_runtime import ToolRuntime | ||||
| from core.tools.entities.tool_entities import ToolEntity, ToolInvokeMessage, ToolParameter, ToolProviderType | |||||
| from core.tools.entities.tool_entities import ( | |||||
| ToolEntity, | |||||
| ToolInvokeMessage, | |||||
| ToolParameter, | |||||
| ToolProviderType, | |||||
| ) | |||||
| from core.tools.errors import ToolInvokeError | from core.tools.errors import ToolInvokeError | ||||
| from extensions.ext_database import db | from extensions.ext_database import db | ||||
| from factories.file_factory import build_from_mapping | from factories.file_factory import build_from_mapping | 
| from collections.abc import Generator, Mapping, Sequence | from collections.abc import Generator, Mapping, Sequence | ||||
| from typing import Any, Optional, cast | from typing import Any, Optional, cast | ||||
| from packaging.version import Version | |||||
| from sqlalchemy import select | from sqlalchemy import select | ||||
| from sqlalchemy.orm import Session | from sqlalchemy.orm import Session | ||||
| from core.agent.entities import AgentToolEntity | from core.agent.entities import AgentToolEntity | ||||
| from core.agent.plugin_entities import AgentStrategyParameter | from core.agent.plugin_entities import AgentStrategyParameter | ||||
| from core.agent.strategy.plugin import PluginAgentStrategy | |||||
| from core.memory.token_buffer_memory import TokenBufferMemory | from core.memory.token_buffer_memory import TokenBufferMemory | ||||
| from core.model_manager import ModelInstance, ModelManager | from core.model_manager import ModelInstance, ModelManager | ||||
| from core.model_runtime.entities.model_entities import AIModelEntity, ModelType | from core.model_runtime.entities.model_entities import AIModelEntity, ModelType | ||||
| agent_parameters=agent_parameters, | agent_parameters=agent_parameters, | ||||
| variable_pool=self.graph_runtime_state.variable_pool, | variable_pool=self.graph_runtime_state.variable_pool, | ||||
| node_data=node_data, | node_data=node_data, | ||||
| strategy=strategy, | |||||
| ) | ) | ||||
| parameters_for_log = self._generate_agent_parameters( | parameters_for_log = self._generate_agent_parameters( | ||||
| agent_parameters=agent_parameters, | agent_parameters=agent_parameters, | ||||
| variable_pool=self.graph_runtime_state.variable_pool, | variable_pool=self.graph_runtime_state.variable_pool, | ||||
| node_data=node_data, | node_data=node_data, | ||||
| for_log=True, | for_log=True, | ||||
| strategy=strategy, | |||||
| ) | ) | ||||
| # get conversation id | # get conversation id | ||||
| variable_pool: VariablePool, | variable_pool: VariablePool, | ||||
| node_data: AgentNodeData, | node_data: AgentNodeData, | ||||
| for_log: bool = False, | for_log: bool = False, | ||||
| strategy: PluginAgentStrategy, | |||||
| ) -> dict[str, Any]: | ) -> dict[str, Any]: | ||||
| """ | """ | ||||
| Generate parameters based on the given tool parameters, variable pool, and node data. | Generate parameters based on the given tool parameters, variable pool, and node data. | ||||
| if parameter.type == "array[tools]": | if parameter.type == "array[tools]": | ||||
| value = cast(list[dict[str, Any]], value) | value = cast(list[dict[str, Any]], value) | ||||
| value = [tool for tool in value if tool.get("enabled", False)] | value = [tool for tool in value if tool.get("enabled", False)] | ||||
| value = self._filter_mcp_type_tool(strategy, value) | |||||
| for tool in value: | for tool in value: | ||||
| if "schemas" in tool: | if "schemas" in tool: | ||||
| tool.pop("schemas") | tool.pop("schemas") | ||||
| ) | ) | ||||
| extra = tool.get("extra", {}) | extra = tool.get("extra", {}) | ||||
| runtime_variable_pool = variable_pool if self.node_data.version != "1" else None | |||||
| tool_runtime = ToolManager.get_agent_tool_runtime( | tool_runtime = ToolManager.get_agent_tool_runtime( | ||||
| self.tenant_id, self.app_id, entity, self.invoke_from | |||||
| self.tenant_id, self.app_id, entity, self.invoke_from, runtime_variable_pool | |||||
| ) | ) | ||||
| if tool_runtime.entity.description: | if tool_runtime.entity.description: | ||||
| tool_runtime.entity.description.llm = ( | tool_runtime.entity.description.llm = ( | ||||
| except ValueError: | except ValueError: | ||||
| model_schema.features.remove(feature) | model_schema.features.remove(feature) | ||||
| return model_schema | return model_schema | ||||
| def _filter_mcp_type_tool(self, strategy: PluginAgentStrategy, tools: list[dict[str, Any]]) -> list[dict[str, Any]]: | |||||
| """ | |||||
| Filter MCP type tool | |||||
| :param strategy: plugin agent strategy | |||||
| :param tool: tool | |||||
| :return: filtered tool dict | |||||
| """ | |||||
| meta_version = strategy.meta_version | |||||
| if meta_version and Version(meta_version) > Version("0.0.1"): | |||||
| return tools | |||||
| else: | |||||
| return [tool for tool in tools if tool.get("type") != ToolProviderType.MCP.value] | 
| }, | }, | ||||
| NodeType.TOOL: { | NodeType.TOOL: { | ||||
| LATEST_VERSION: ToolNode, | LATEST_VERSION: ToolNode, | ||||
| "2": ToolNode, | |||||
| "1": ToolNode, | "1": ToolNode, | ||||
| }, | }, | ||||
| NodeType.VARIABLE_AGGREGATOR: { | NodeType.VARIABLE_AGGREGATOR: { | ||||
| }, | }, | ||||
| NodeType.AGENT: { | NodeType.AGENT: { | ||||
| LATEST_VERSION: AgentNode, | LATEST_VERSION: AgentNode, | ||||
| "2": AgentNode, | |||||
| "1": AgentNode, | "1": AgentNode, | ||||
| }, | }, | ||||
| } | } | 
| def check_type(cls, value, validation_info: ValidationInfo): | def check_type(cls, value, validation_info: ValidationInfo): | ||||
| typ = value | typ = value | ||||
| value = validation_info.data.get("value") | value = validation_info.data.get("value") | ||||
| if value is None: | |||||
| return typ | |||||
| if typ == "mixed" and not isinstance(value, str): | if typ == "mixed" and not isinstance(value, str): | ||||
| raise ValueError("value must be a string") | raise ValueError("value must be a string") | ||||
| elif typ == "variable": | elif typ == "variable": | ||||
| return typ | return typ | ||||
| tool_parameters: dict[str, ToolInput] | tool_parameters: dict[str, ToolInput] | ||||
| @field_validator("tool_parameters", mode="before") | |||||
| @classmethod | |||||
| def filter_none_tool_inputs(cls, value): | |||||
| if not isinstance(value, dict): | |||||
| return value | |||||
| return { | |||||
| key: tool_input | |||||
| for key, tool_input in value.items() | |||||
| if tool_input is not None and cls._has_valid_value(tool_input) | |||||
| } | |||||
| @staticmethod | |||||
| def _has_valid_value(tool_input): | |||||
| """Check if the value is valid""" | |||||
| if isinstance(tool_input, dict): | |||||
| return tool_input.get("value") is not None | |||||
| return getattr(tool_input, "value", None) is not None | 
| try: | try: | ||||
| from core.tools.tool_manager import ToolManager | from core.tools.tool_manager import ToolManager | ||||
| variable_pool = self.graph_runtime_state.variable_pool if self.node_data.version != "1" else None | |||||
| tool_runtime = ToolManager.get_workflow_tool_runtime( | tool_runtime = ToolManager.get_workflow_tool_runtime( | ||||
| self.tenant_id, self.app_id, self.node_id, self.node_data, self.invoke_from | |||||
| self.tenant_id, self.app_id, self.node_id, self.node_data, self.invoke_from, variable_pool | |||||
| ) | ) | ||||
| except ToolNodeError as e: | except ToolNodeError as e: | ||||
| yield RunCompletedEvent( | yield RunCompletedEvent( | ||||
| node_data=self.node_data, | node_data=self.node_data, | ||||
| for_log=True, | for_log=True, | ||||
| ) | ) | ||||
| # get conversation id | # get conversation id | ||||
| conversation_id = self.graph_runtime_state.variable_pool.get(["sys", SystemVariableKey.CONVERSATION_ID]) | conversation_id = self.graph_runtime_state.variable_pool.get(["sys", SystemVariableKey.CONVERSATION_ID]) | ||||
| from controllers.console import bp as console_app_bp | from controllers.console import bp as console_app_bp | ||||
| from controllers.files import bp as files_bp | from controllers.files import bp as files_bp | ||||
| from controllers.inner_api import bp as inner_api_bp | from controllers.inner_api import bp as inner_api_bp | ||||
| from controllers.mcp import bp as mcp_bp | |||||
| from controllers.service_api import bp as service_api_bp | from controllers.service_api import bp as service_api_bp | ||||
| from controllers.web import bp as web_bp | from controllers.web import bp as web_bp | ||||
| app.register_blueprint(files_bp) | app.register_blueprint(files_bp) | ||||
| app.register_blueprint(inner_api_bp) | app.register_blueprint(inner_api_bp) | ||||
| app.register_blueprint(mcp_bp) | 
| from extensions.ext_database import db | from extensions.ext_database import db | ||||
| from libs.passport import PassportService | from libs.passport import PassportService | ||||
| from models.account import Account, Tenant, TenantAccountJoin | from models.account import Account, Tenant, TenantAccountJoin | ||||
| from models.model import EndUser | |||||
| from models.model import AppMCPServer, EndUser | |||||
| from services.account_service import AccountService | from services.account_service import AccountService | ||||
| login_manager = flask_login.LoginManager() | login_manager = flask_login.LoginManager() | ||||
| if not end_user: | if not end_user: | ||||
| raise NotFound("End user not found.") | raise NotFound("End user not found.") | ||||
| return end_user | return end_user | ||||
| elif request.blueprint == "mcp": | |||||
| server_code = request.view_args.get("server_code") if request.view_args else None | |||||
| if not server_code: | |||||
| raise Unauthorized("Invalid Authorization token.") | |||||
| app_mcp_server = db.session.query(AppMCPServer).filter(AppMCPServer.server_code == server_code).first() | |||||
| if not app_mcp_server: | |||||
| raise NotFound("App MCP server not found.") | |||||
| end_user = ( | |||||
| db.session.query(EndUser) | |||||
| .filter(EndUser.external_user_id == app_mcp_server.id, EndUser.type == "mcp") | |||||
| .first() | |||||
| ) | |||||
| if not end_user: | |||||
| raise NotFound("End user not found.") | |||||
| return end_user | |||||
| @user_logged_in.connect | @user_logged_in.connect | 
| agent_provider = manager.fetch_agent_strategy_provider(tenant_id, agent_strategy_provider_name) | agent_provider = manager.fetch_agent_strategy_provider(tenant_id, agent_strategy_provider_name) | ||||
| for agent_strategy in agent_provider.declaration.strategies: | for agent_strategy in agent_provider.declaration.strategies: | ||||
| if agent_strategy.identity.name == agent_strategy_name: | if agent_strategy.identity.name == agent_strategy_name: | ||||
| return PluginAgentStrategy(tenant_id, agent_strategy) | |||||
| return PluginAgentStrategy(tenant_id, agent_strategy, agent_provider.meta.version) | |||||
| raise ValueError(f"Agent strategy {agent_strategy_name} not found") | raise ValueError(f"Agent strategy {agent_strategy_name} not found") | 
| import json | |||||
| from flask_restful import fields | from flask_restful import fields | ||||
| from fields.workflow_fields import workflow_partial_fields | from fields.workflow_fields import workflow_partial_fields | ||||
| from libs.helper import AppIconUrlField, TimestampField | from libs.helper import AppIconUrlField, TimestampField | ||||
class JsonStringField(fields.Raw):
    """Marshal field that decodes a JSON-encoded string into its parsed value.

    Non-string values, and strings that are not valid JSON, are passed
    through unchanged.
    """

    def format(self, value):
        # Only strings can carry encoded JSON; everything else passes through.
        if not isinstance(value, str):
            return value
        try:
            decoded = json.loads(value)
        except (json.JSONDecodeError, TypeError):
            # Not valid JSON — hand back the raw string untouched.
            return value
        return decoded
| app_detail_kernel_fields = { | app_detail_kernel_fields = { | ||||
| "id": fields.String, | "id": fields.String, | ||||
| "name": fields.String, | "name": fields.String, | ||||
| app_import_check_dependencies_fields = { | app_import_check_dependencies_fields = { | ||||
| "leaked_dependencies": fields.List(fields.Nested(leaked_dependency_fields)), | "leaked_dependencies": fields.List(fields.Nested(leaked_dependency_fields)), | ||||
| } | } | ||||
# Marshal definition for an app's MCP server record (AppMCPServer rows).
app_server_fields = {
    "id": fields.String,
    "name": fields.String,
    "server_code": fields.String,
    "description": fields.String,
    "status": fields.String,
    # Stored as a JSON TEXT column; JsonStringField decodes it to a dict
    # when the stored text is valid JSON.
    "parameters": JsonStringField,
    "created_at": TimestampField,
    "updated_at": TimestampField,
}
| """add mcp server tool and app server | |||||
| Revision ID: 58eb7bdb93fe | |||||
| Revises: 0ab65e1cc7fa | |||||
| Create Date: 2025-06-25 09:36:07.510570 | |||||
| """ | |||||
| from alembic import op | |||||
| import models as models | |||||
| import sqlalchemy as sa | |||||
| # revision identifiers, used by Alembic. | |||||
| revision = '58eb7bdb93fe' | |||||
| down_revision = '0ab65e1cc7fa' | |||||
| branch_labels = None | |||||
| depends_on = None | |||||
def upgrade():
    """Create the MCP tables: per-app `app_mcp_servers` and tenant-level
    `tool_mcp_providers`."""
    # ### commands auto generated by Alembic - please adjust! ###
    # One MCP server per (tenant, app); server_code is the globally unique
    # public handle used to address the server.
    op.create_table('app_mcp_servers',
    sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
    sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
    sa.Column('app_id', models.types.StringUUID(), nullable=False),
    sa.Column('name', sa.String(length=255), nullable=False),
    sa.Column('description', sa.String(length=255), nullable=False),
    sa.Column('server_code', sa.String(length=255), nullable=False),
    sa.Column('status', sa.String(length=255), server_default=sa.text("'normal'::character varying"), nullable=False),
    sa.Column('parameters', sa.Text(), nullable=False),
    sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
    sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
    sa.PrimaryKeyConstraint('id', name='app_mcp_server_pkey'),
    sa.UniqueConstraint('tenant_id', 'app_id', name='unique_app_mcp_server_tenant_app_id'),
    sa.UniqueConstraint('server_code', name='unique_app_mcp_server_server_code')
    )
    # Tenant-scoped registry of external MCP servers usable as tool providers.
    # server_url is stored encrypted; server_url_hash backs the uniqueness check.
    op.create_table('tool_mcp_providers',
    sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
    sa.Column('name', sa.String(length=40), nullable=False),
    sa.Column('server_identifier', sa.String(length=24), nullable=False),
    sa.Column('server_url', sa.Text(), nullable=False),
    sa.Column('server_url_hash', sa.String(length=64), nullable=False),
    sa.Column('icon', sa.String(length=255), nullable=True),
    sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
    sa.Column('user_id', models.types.StringUUID(), nullable=False),
    sa.Column('encrypted_credentials', sa.Text(), nullable=True),
    sa.Column('authed', sa.Boolean(), nullable=False),
    sa.Column('tools', sa.Text(), nullable=False),
    sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
    sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False),
    sa.PrimaryKeyConstraint('id', name='tool_mcp_provider_pkey'),
    sa.UniqueConstraint('tenant_id', 'name', name='unique_mcp_provider_name'),
    sa.UniqueConstraint('tenant_id', 'server_identifier', name='unique_mcp_provider_server_identifier'),
    sa.UniqueConstraint('tenant_id', 'server_url_hash', name='unique_mcp_provider_server_url')
    )
    # ### end Alembic commands ###
def downgrade():
    """Drop the MCP tables created in upgrade()."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_table('tool_mcp_providers')
    op.drop_table('app_mcp_servers')
    # ### end Alembic commands ###
| App, | App, | ||||
| AppAnnotationHitHistory, | AppAnnotationHitHistory, | ||||
| AppAnnotationSetting, | AppAnnotationSetting, | ||||
| AppMCPServer, | |||||
| AppMode, | AppMode, | ||||
| AppModelConfig, | AppModelConfig, | ||||
| Conversation, | Conversation, | ||||
| "AppAnnotationHitHistory", | "AppAnnotationHitHistory", | ||||
| "AppAnnotationSetting", | "AppAnnotationSetting", | ||||
| "AppDatasetJoin", | "AppDatasetJoin", | ||||
| "AppMCPServer", # Added | |||||
| "AppMode", | "AppMode", | ||||
| "AppModelConfig", | "AppModelConfig", | ||||
| "BuiltinToolProvider", | "BuiltinToolProvider", | 
| updated_at = db.Column(db.DateTime, nullable=False, server_default=func.current_timestamp()) | updated_at = db.Column(db.DateTime, nullable=False, server_default=func.current_timestamp()) | ||||
class AppMCPServer(Base):
    """Per-app MCP server configuration.

    Each app exposes at most one MCP server (enforced by the
    tenant_id/app_id unique constraint); `server_code` is the globally
    unique token used to address the server.
    """

    __tablename__ = "app_mcp_servers"
    __table_args__ = (
        db.PrimaryKeyConstraint("id", name="app_mcp_server_pkey"),
        db.UniqueConstraint("tenant_id", "app_id", name="unique_app_mcp_server_tenant_app_id"),
        db.UniqueConstraint("server_code", name="unique_app_mcp_server_server_code"),
    )

    id = db.Column(StringUUID, server_default=db.text("uuid_generate_v4()"))
    tenant_id = db.Column(StringUUID, nullable=False)
    app_id = db.Column(StringUUID, nullable=False)
    name = db.Column(db.String(255), nullable=False)
    description = db.Column(db.String(255), nullable=False)
    # Globally unique public code (see unique constraint above).
    server_code = db.Column(db.String(255), nullable=False)
    status = db.Column(db.String(255), nullable=False, server_default=db.text("'normal'::character varying"))
    # JSON-encoded parameter schema; use `parameters_dict` for decoded access.
    parameters = db.Column(db.Text, nullable=False)
    created_at = db.Column(db.DateTime, nullable=False, server_default=func.current_timestamp())
    updated_at = db.Column(db.DateTime, nullable=False, server_default=func.current_timestamp())

    @staticmethod
    def generate_server_code(n: int) -> str:
        """Generate a random server code of length *n* that is not already in use.

        Re-draws until the candidate does not collide with an existing
        `server_code`; the DB unique constraint remains the final backstop
        against races. (The previous implementation wrapped this in a dead
        ``while True`` loop that always returned on its first iteration.)
        """
        result = generate_string(n)
        while db.session.query(AppMCPServer).filter(AppMCPServer.server_code == result).count() > 0:
            result = generate_string(n)
        return result

    @property
    def parameters_dict(self) -> dict[str, Any]:
        """Decoded `parameters` column; raises if the stored text is not valid JSON."""
        return cast(dict[str, Any], json.loads(self.parameters))
| class Site(Base): | class Site(Base): | ||||
| __tablename__ = "sites" | __tablename__ = "sites" | ||||
| __table_args__ = ( | __table_args__ = ( | 
| import json | import json | ||||
| from datetime import datetime | from datetime import datetime | ||||
| from typing import Any, cast | from typing import Any, cast | ||||
| from urllib.parse import urlparse | |||||
| import sqlalchemy as sa | import sqlalchemy as sa | ||||
| from deprecated import deprecated | from deprecated import deprecated | ||||
| from sqlalchemy import ForeignKey, func | from sqlalchemy import ForeignKey, func | ||||
| from sqlalchemy.orm import Mapped, mapped_column | from sqlalchemy.orm import Mapped, mapped_column | ||||
| from core.file import helpers as file_helpers | |||||
| from core.helper import encrypter | |||||
| from core.mcp.types import Tool | |||||
| from core.tools.entities.common_entities import I18nObject | from core.tools.entities.common_entities import I18nObject | ||||
| from core.tools.entities.tool_bundle import ApiToolBundle | from core.tools.entities.tool_bundle import ApiToolBundle | ||||
| from core.tools.entities.tool_entities import ApiProviderSchemaType, WorkflowToolParameterConfiguration | from core.tools.entities.tool_entities import ApiProviderSchemaType, WorkflowToolParameterConfiguration | ||||
| return db.session.query(App).filter(App.id == self.app_id).first() | return db.session.query(App).filter(App.id == self.app_id).first() | ||||
class MCPToolProvider(Base):
    """
    The table stores the mcp providers.

    One row per external MCP server registered by a tenant. The server URL
    and credentials are stored encrypted; the cached tool list is stored as
    a JSON-encoded string in `tools`.
    """

    __tablename__ = "tool_mcp_providers"
    __table_args__ = (
        db.PrimaryKeyConstraint("id", name="tool_mcp_provider_pkey"),
        db.UniqueConstraint("tenant_id", "server_url_hash", name="unique_mcp_provider_server_url"),
        db.UniqueConstraint("tenant_id", "name", name="unique_mcp_provider_name"),
        db.UniqueConstraint("tenant_id", "server_identifier", name="unique_mcp_provider_server_identifier"),
    )

    id: Mapped[str] = mapped_column(StringUUID, server_default=db.text("uuid_generate_v4()"))
    # display name of the mcp provider (unique per tenant)
    name: Mapped[str] = mapped_column(db.String(40), nullable=False)
    # short identifier of the mcp provider (unique per tenant)
    server_identifier: Mapped[str] = mapped_column(db.String(24), nullable=False)
    # encrypted url of the mcp provider; decrypt via `decrypted_server_url`
    server_url: Mapped[str] = mapped_column(db.Text, nullable=False)
    # sha-256 hash of the plaintext server_url, used for the uniqueness check
    server_url_hash: Mapped[str] = mapped_column(db.String(64), nullable=False)
    # icon: either a JSON emoji blob or a file path (see `provider_icon`)
    icon: Mapped[str] = mapped_column(db.String(255), nullable=True)
    # owning tenant
    tenant_id: Mapped[str] = mapped_column(StringUUID, nullable=False)
    # account that registered this provider
    user_id: Mapped[str] = mapped_column(StringUUID, nullable=False)
    # JSON-encoded encrypted credentials; decoded via `credentials`
    encrypted_credentials: Mapped[str] = mapped_column(db.Text, nullable=True)
    # whether the provider has completed authentication with the server
    authed: Mapped[bool] = mapped_column(db.Boolean, nullable=False, default=False)
    # JSON-encoded list of the server's tool declarations (see `mcp_tools`)
    tools: Mapped[str] = mapped_column(db.Text, nullable=False, default="[]")
    created_at: Mapped[datetime] = mapped_column(
        db.DateTime, nullable=False, server_default=db.text("CURRENT_TIMESTAMP(0)")
    )
    updated_at: Mapped[datetime] = mapped_column(
        db.DateTime, nullable=False, server_default=db.text("CURRENT_TIMESTAMP(0)")
    )

    def load_user(self) -> Account | None:
        """Return the Account that registered this provider, or None if gone."""
        return db.session.query(Account).filter(Account.id == self.user_id).first()

    @property
    def tenant(self) -> Tenant | None:
        """Return the owning Tenant, or None if it no longer exists."""
        return db.session.query(Tenant).filter(Tenant.id == self.tenant_id).first()

    @property
    def credentials(self) -> dict:
        """Decoded credentials blob; any decode failure (including a NULL
        column) deliberately degrades to an empty dict."""
        try:
            return cast(dict, json.loads(self.encrypted_credentials)) or {}
        except Exception:
            return {}

    @property
    def mcp_tools(self) -> list[Tool]:
        """Cached tool declarations parsed into core.mcp Tool models."""
        return [Tool(**tool) for tool in json.loads(self.tools)]

    @property
    def provider_icon(self) -> dict[str, str] | str:
        """Emoji icons (stored as JSON) come back as a dict; otherwise the
        icon is treated as a file path and a signed URL is returned.

        NOTE(review): a NULL icon would raise TypeError here (only
        JSONDecodeError is caught) — confirm icon is always populated.
        """
        try:
            return cast(dict[str, str], json.loads(self.icon))
        except json.JSONDecodeError:
            return file_helpers.get_signed_file_url(self.icon)

    @property
    def decrypted_server_url(self) -> str:
        """Plaintext server URL, decrypted with the tenant's key."""
        return cast(str, encrypter.decrypt_token(self.tenant_id, self.server_url))

    @property
    def masked_server_url(self) -> str:
        """Server URL with its path hidden, safe to show in UIs."""

        def mask_url(url: str, mask_char: str = "*") -> str:
            """
            mask the url to a simple string
            """
            parsed = urlparse(url)
            base_url = f"{parsed.scheme}://{parsed.netloc}"
            # Keep scheme+host; replace any non-trivial path with a mask.
            if parsed.path and parsed.path != "/":
                return f"{base_url}/{mask_char * 6}"
            else:
                return base_url

        return mask_url(self.decrypted_server_url)

    @property
    def decrypted_credentials(self) -> dict:
        """Credentials decrypted with the provider's config schema."""
        # Local imports — presumably to avoid a circular dependency between
        # the model and the tool-provider controller; verify before moving.
        from core.tools.mcp_tool.provider import MCPToolProviderController
        from core.tools.utils.configuration import ProviderConfigEncrypter

        provider_controller = MCPToolProviderController._from_db(self)
        tool_configuration = ProviderConfigEncrypter(
            tenant_id=self.tenant_id,
            config=list(provider_controller.get_credentials_schema()),
            provider_type=provider_controller.provider_type.value,
            provider_identity=provider_controller.provider_id,
        )
        return tool_configuration.decrypt(self.credentials, use_cache=False)
| class ToolModelInvoke(Base): | class ToolModelInvoke(Base): | ||||
| """ | """ | ||||
| store the invoke logs from tool invoke | store the invoke logs from tool invoke | 
| "weave~=0.51.0", | "weave~=0.51.0", | ||||
| "yarl~=1.18.3", | "yarl~=1.18.3", | ||||
| "webvtt-py~=0.5.1", | "webvtt-py~=0.5.1", | ||||
| "sseclient-py>=1.8.0", | |||||
| "httpx-sse>=0.4.0", | |||||
| "sendgrid~=6.12.3", | "sendgrid~=6.12.3", | ||||
| ] | ] | ||||
| # Before adding new dependency, consider place it in | # Before adding new dependency, consider place it in | 
| import hashlib | |||||
| import json | |||||
| from datetime import datetime | |||||
| from typing import Any | |||||
| from sqlalchemy import or_ | |||||
| from sqlalchemy.exc import IntegrityError | |||||
| from core.helper import encrypter | |||||
| from core.mcp.error import MCPAuthError, MCPError | |||||
| from core.mcp.mcp_client import MCPClient | |||||
| from core.tools.entities.api_entities import ToolProviderApiEntity | |||||
| from core.tools.entities.common_entities import I18nObject | |||||
| from core.tools.entities.tool_entities import ToolProviderType | |||||
| from core.tools.mcp_tool.provider import MCPToolProviderController | |||||
| from core.tools.utils.configuration import ProviderConfigEncrypter | |||||
| from extensions.ext_database import db | |||||
| from models.tools import MCPToolProvider | |||||
| from services.tools.tools_transform_service import ToolTransformService | |||||
# Sentinel sent by the client when the (masked) server URL was not edited.
UNCHANGED_SERVER_URL_PLACEHOLDER = "[__HIDDEN__]"


class MCPToolManageService:
    """
    Service class for managing mcp tools.
    """

    @staticmethod
    def get_mcp_provider_by_provider_id(provider_id: str, tenant_id: str) -> MCPToolProvider:
        """Fetch a tenant's MCP provider by row id.

        :raises ValueError: if no matching provider exists for the tenant.
        """
        res = (
            db.session.query(MCPToolProvider)
            .filter(MCPToolProvider.tenant_id == tenant_id, MCPToolProvider.id == provider_id)
            .first()
        )
        if not res:
            raise ValueError("MCP tool not found")
        return res

    @staticmethod
    def get_mcp_provider_by_server_identifier(server_identifier: str, tenant_id: str) -> MCPToolProvider:
        """Fetch a tenant's MCP provider by its server identifier.

        :raises ValueError: if no matching provider exists for the tenant.
        """
        res = (
            db.session.query(MCPToolProvider)
            .filter(MCPToolProvider.tenant_id == tenant_id, MCPToolProvider.server_identifier == server_identifier)
            .first()
        )
        if not res:
            raise ValueError("MCP tool not found")
        return res

    @staticmethod
    def create_mcp_provider(
        tenant_id: str,
        name: str,
        server_url: str,
        user_id: str,
        icon: str,
        icon_type: str,
        icon_background: str,
        server_identifier: str,
    ) -> ToolProviderApiEntity:
        """Register a new MCP provider for the tenant.

        The server URL is stored encrypted; a SHA-256 hash of the plaintext
        URL is kept alongside for uniqueness checks.

        :raises ValueError: when name, URL, or identifier collides with an
            existing provider of the same tenant.
        """
        server_url_hash = hashlib.sha256(server_url.encode()).hexdigest()
        # One query covering all three uniqueness dimensions. (The original
        # repeated the tenant_id predicate twice; the duplicate is removed.)
        existing_provider = (
            db.session.query(MCPToolProvider)
            .filter(
                MCPToolProvider.tenant_id == tenant_id,
                or_(
                    MCPToolProvider.name == name,
                    MCPToolProvider.server_url_hash == server_url_hash,
                    MCPToolProvider.server_identifier == server_identifier,
                ),
            )
            .first()
        )
        if existing_provider:
            # Report whichever dimension actually collided.
            if existing_provider.name == name:
                raise ValueError(f"MCP tool {name} already exists")
            elif existing_provider.server_url_hash == server_url_hash:
                raise ValueError(f"MCP tool {server_url} already exists")
            elif existing_provider.server_identifier == server_identifier:
                raise ValueError(f"MCP tool {server_identifier} already exists")
        encrypted_server_url = encrypter.encrypt_token(tenant_id, server_url)
        mcp_tool = MCPToolProvider(
            tenant_id=tenant_id,
            name=name,
            server_url=encrypted_server_url,
            server_url_hash=server_url_hash,
            user_id=user_id,
            authed=False,
            tools="[]",
            # Emoji icons are stored as a JSON blob, image icons as a path.
            icon=json.dumps({"content": icon, "background": icon_background}) if icon_type == "emoji" else icon,
            server_identifier=server_identifier,
        )
        db.session.add(mcp_tool)
        db.session.commit()
        return ToolTransformService.mcp_provider_to_user_provider(mcp_tool, for_list=True)

    @staticmethod
    def retrieve_mcp_tools(tenant_id: str, for_list: bool = False) -> list[ToolProviderApiEntity]:
        """List all of the tenant's MCP providers, ordered by name."""
        mcp_providers = (
            db.session.query(MCPToolProvider)
            .filter(MCPToolProvider.tenant_id == tenant_id)
            .order_by(MCPToolProvider.name)
            .all()
        )
        return [
            ToolTransformService.mcp_provider_to_user_provider(mcp_provider, for_list=for_list)
            for mcp_provider in mcp_providers
        ]

    @classmethod
    def list_mcp_tool_from_remote_server(cls, tenant_id: str, provider_id: str):
        """Refresh the provider's tool list from the remote MCP server.

        Marks the provider as authed and persists the fetched tools on
        success.

        :raises ValueError: when authentication is required or the server is
            unreachable.
        """
        mcp_provider = cls.get_mcp_provider_by_provider_id(provider_id, tenant_id)
        try:
            with MCPClient(
                mcp_provider.decrypted_server_url, provider_id, tenant_id, authed=mcp_provider.authed, for_list=True
            ) as mcp_client:
                tools = mcp_client.list_tools()
        except MCPAuthError as e:
            # Chain the cause so the underlying auth failure is preserved.
            raise ValueError("Please auth the tool first") from e
        except MCPError as e:
            raise ValueError(f"Failed to connect to MCP server: {e}") from e
        mcp_provider.tools = json.dumps([tool.model_dump() for tool in tools])
        mcp_provider.authed = True
        # NOTE(review): naive local time, consistent with the rest of this
        # service — consider timezone-aware timestamps project-wide.
        mcp_provider.updated_at = datetime.now()
        db.session.commit()
        user = mcp_provider.load_user()
        return ToolProviderApiEntity(
            id=mcp_provider.id,
            name=mcp_provider.name,
            tools=ToolTransformService.mcp_tool_to_user_tool(mcp_provider, tools),
            type=ToolProviderType.MCP,
            icon=mcp_provider.icon,
            author=user.name if user else "Anonymous",
            server_url=mcp_provider.masked_server_url,
            updated_at=int(mcp_provider.updated_at.timestamp()),
            description=I18nObject(en_US="", zh_Hans=""),
            label=I18nObject(en_US=mcp_provider.name, zh_Hans=mcp_provider.name),
            plugin_unique_identifier=mcp_provider.server_identifier,
        )

    @classmethod
    def delete_mcp_tool(cls, tenant_id: str, provider_id: str):
        """Delete the tenant's MCP provider row."""
        mcp_tool = cls.get_mcp_provider_by_provider_id(provider_id, tenant_id)
        db.session.delete(mcp_tool)
        db.session.commit()

    @classmethod
    def update_mcp_provider(
        cls,
        tenant_id: str,
        provider_id: str,
        name: str,
        server_url: str,
        icon: str,
        icon_type: str,
        icon_background: str,
        server_identifier: str,
    ):
        """Update provider metadata; when the URL actually changed, re-encrypt
        it and re-connect to the new server.

        The client sends UNCHANGED_SERVER_URL_PLACEHOLDER when the user left
        the (masked) URL untouched; in that case the stored URL is kept.

        :raises ValueError: on unique-constraint collisions or connection
            failure to the new server.
        """
        mcp_provider = cls.get_mcp_provider_by_provider_id(provider_id, tenant_id)
        mcp_provider.updated_at = datetime.now()
        mcp_provider.name = name
        mcp_provider.icon = (
            json.dumps({"content": icon, "background": icon_background}) if icon_type == "emoji" else icon
        )
        mcp_provider.server_identifier = server_identifier
        if UNCHANGED_SERVER_URL_PLACEHOLDER not in server_url:
            encrypted_server_url = encrypter.encrypt_token(tenant_id, server_url)
            mcp_provider.server_url = encrypted_server_url
            server_url_hash = hashlib.sha256(server_url.encode()).hexdigest()
            if server_url_hash != mcp_provider.server_url_hash:
                # URL really changed: validate connectivity, reset credentials.
                cls._re_connect_mcp_provider(mcp_provider, provider_id, tenant_id)
                mcp_provider.server_url_hash = server_url_hash
        try:
            db.session.commit()
        except IntegrityError as e:
            db.session.rollback()
            # Translate DB unique-constraint violations into user-facing errors.
            error_msg = str(e.orig)
            if "unique_mcp_provider_name" in error_msg:
                raise ValueError(f"MCP tool {name} already exists") from e
            elif "unique_mcp_provider_server_url" in error_msg:
                raise ValueError(f"MCP tool {server_url} already exists") from e
            elif "unique_mcp_provider_server_identifier" in error_msg:
                raise ValueError(f"MCP tool {server_identifier} already exists") from e
            else:
                raise

    @classmethod
    def update_mcp_provider_credentials(
        cls, mcp_provider: MCPToolProvider, credentials: dict[str, Any], authed: bool = False
    ):
        """Merge new credentials (encrypted) into the provider record.

        When *authed* is False the cached tool list is cleared as well.
        """
        provider_controller = MCPToolProviderController._from_db(mcp_provider)
        tool_configuration = ProviderConfigEncrypter(
            tenant_id=mcp_provider.tenant_id,
            config=list(provider_controller.get_credentials_schema()),
            provider_type=provider_controller.provider_type.value,
            provider_identity=provider_controller.provider_id,
        )
        credentials = tool_configuration.encrypt(credentials)
        mcp_provider.updated_at = datetime.now()
        mcp_provider.encrypted_credentials = json.dumps({**mcp_provider.credentials, **credentials})
        mcp_provider.authed = authed
        if not authed:
            mcp_provider.tools = "[]"
        db.session.commit()

    @classmethod
    def _re_connect_mcp_provider(cls, mcp_provider: MCPToolProvider, provider_id: str, tenant_id: str):
        """re-connect mcp provider"""
        # Probe the (new) server unauthenticated; on success cache its tools.
        try:
            with MCPClient(
                mcp_provider.decrypted_server_url,
                provider_id,
                tenant_id,
                authed=False,
                for_list=True,
            ) as mcp_client:
                tools = mcp_client.list_tools()
                mcp_provider.authed = True
                mcp_provider.tools = json.dumps([tool.model_dump() for tool in tools])
        except MCPAuthError:
            # Server requires auth: keep the row but mark it unauthenticated.
            mcp_provider.authed = False
            mcp_provider.tools = "[]"
        except MCPError as e:
            raise ValueError(f"Failed to re-connect MCP server: {e}") from e
        # reset credentials
        mcp_provider.encrypted_credentials = "{}"
| import json | import json | ||||
| import logging | import logging | ||||
| from typing import Optional, Union, cast | |||||
| from typing import Any, Optional, Union, cast | |||||
| from yarl import URL | from yarl import URL | ||||
| from configs import dify_config | from configs import dify_config | ||||
| from core.mcp.types import Tool as MCPTool | |||||
| from core.tools.__base.tool import Tool | from core.tools.__base.tool import Tool | ||||
| from core.tools.__base.tool_runtime import ToolRuntime | from core.tools.__base.tool_runtime import ToolRuntime | ||||
| from core.tools.builtin_tool.provider import BuiltinToolProviderController | from core.tools.builtin_tool.provider import BuiltinToolProviderController | ||||
| from core.tools.utils.configuration import ProviderConfigEncrypter | from core.tools.utils.configuration import ProviderConfigEncrypter | ||||
| from core.tools.workflow_as_tool.provider import WorkflowToolProviderController | from core.tools.workflow_as_tool.provider import WorkflowToolProviderController | ||||
| from core.tools.workflow_as_tool.tool import WorkflowTool | from core.tools.workflow_as_tool.tool import WorkflowTool | ||||
| from models.tools import ApiToolProvider, BuiltinToolProvider, WorkflowToolProvider | |||||
| from models.tools import ApiToolProvider, BuiltinToolProvider, MCPToolProvider, WorkflowToolProvider | |||||
| logger = logging.getLogger(__name__) | logger = logging.getLogger(__name__) | ||||
| return icon | return icon | ||||
| except Exception: | except Exception: | ||||
| return {"background": "#252525", "content": "\ud83d\ude01"} | return {"background": "#252525", "content": "\ud83d\ude01"} | ||||
| elif provider_type == ToolProviderType.MCP.value: | |||||
| return icon | |||||
| return "" | return "" | ||||
| @staticmethod | @staticmethod | ||||
| labels=labels or [], | labels=labels or [], | ||||
| ) | ) | ||||
| @staticmethod | |||||
| def mcp_provider_to_user_provider(db_provider: MCPToolProvider, for_list: bool = False) -> ToolProviderApiEntity: | |||||
| user = db_provider.load_user() | |||||
| return ToolProviderApiEntity( | |||||
| id=db_provider.server_identifier if not for_list else db_provider.id, | |||||
| author=user.name if user else "Anonymous", | |||||
| name=db_provider.name, | |||||
| icon=db_provider.provider_icon, | |||||
| type=ToolProviderType.MCP, | |||||
| is_team_authorization=db_provider.authed, | |||||
| server_url=db_provider.masked_server_url, | |||||
| tools=ToolTransformService.mcp_tool_to_user_tool( | |||||
| db_provider, [MCPTool(**tool) for tool in json.loads(db_provider.tools)] | |||||
| ), | |||||
| updated_at=int(db_provider.updated_at.timestamp()), | |||||
| label=I18nObject(en_US=db_provider.name, zh_Hans=db_provider.name), | |||||
| description=I18nObject(en_US="", zh_Hans=""), | |||||
| server_identifier=db_provider.server_identifier, | |||||
| ) | |||||
| @staticmethod | |||||
| def mcp_tool_to_user_tool(mcp_provider: MCPToolProvider, tools: list[MCPTool]) -> list[ToolApiEntity]: | |||||
| user = mcp_provider.load_user() | |||||
| return [ | |||||
| ToolApiEntity( | |||||
| author=user.name if user else "Anonymous", | |||||
| name=tool.name, | |||||
| label=I18nObject(en_US=tool.name, zh_Hans=tool.name), | |||||
| description=I18nObject(en_US=tool.description, zh_Hans=tool.description), | |||||
| parameters=ToolTransformService.convert_mcp_schema_to_parameter(tool.inputSchema), | |||||
| labels=[], | |||||
| ) | |||||
| for tool in tools | |||||
| ] | |||||
| @classmethod | @classmethod | ||||
| def api_provider_to_user_provider( | def api_provider_to_user_provider( | ||||
| cls, | cls, | ||||
| parameters=tool.parameters, | parameters=tool.parameters, | ||||
| labels=labels or [], | labels=labels or [], | ||||
| ) | ) | ||||
| @staticmethod | |||||
| def convert_mcp_schema_to_parameter(schema: dict) -> list["ToolParameter"]: | |||||
| """ | |||||
| Convert MCP JSON schema to tool parameters | |||||
| :param schema: JSON schema dictionary | |||||
| :return: list of ToolParameter instances | |||||
| """ | |||||
| def create_parameter( | |||||
| name: str, description: str, param_type: str, required: bool, input_schema: dict | None = None | |||||
| ) -> ToolParameter: | |||||
| """Create a ToolParameter instance with given attributes""" | |||||
| input_schema_dict: dict[str, Any] = {"input_schema": input_schema} if input_schema else {} | |||||
| return ToolParameter( | |||||
| name=name, | |||||
| llm_description=description, | |||||
| label=I18nObject(en_US=name), | |||||
| form=ToolParameter.ToolParameterForm.LLM, | |||||
| required=required, | |||||
| type=ToolParameter.ToolParameterType(param_type), | |||||
| human_description=I18nObject(en_US=description), | |||||
| **input_schema_dict, | |||||
| ) | |||||
| def process_properties(props: dict, required: list, prefix: str = "") -> list[ToolParameter]: | |||||
| """Process properties recursively""" | |||||
| TYPE_MAPPING = {"integer": "number", "float": "number"} | |||||
| COMPLEX_TYPES = ["array", "object"] | |||||
| parameters = [] | |||||
| for name, prop in props.items(): | |||||
| current_description = prop.get("description", "") | |||||
| prop_type = prop.get("type", "string") | |||||
| if isinstance(prop_type, list): | |||||
| prop_type = prop_type[0] | |||||
| if prop_type in TYPE_MAPPING: | |||||
| prop_type = TYPE_MAPPING[prop_type] | |||||
| input_schema = prop if prop_type in COMPLEX_TYPES else None | |||||
| parameters.append( | |||||
| create_parameter(name, current_description, prop_type, name in required, input_schema) | |||||
| ) | |||||
| return parameters | |||||
| if schema.get("type") == "object" and "properties" in schema: | |||||
| return process_properties(schema["properties"], schema.get("required", [])) | |||||
| return [] | 
| AppAnnotationHitHistory, | AppAnnotationHitHistory, | ||||
| AppAnnotationSetting, | AppAnnotationSetting, | ||||
| AppDatasetJoin, | AppDatasetJoin, | ||||
| AppMCPServer, | |||||
| AppModelConfig, | AppModelConfig, | ||||
| Conversation, | Conversation, | ||||
| EndUser, | EndUser, | ||||
| # Delete related data | # Delete related data | ||||
| _delete_app_model_configs(tenant_id, app_id) | _delete_app_model_configs(tenant_id, app_id) | ||||
| _delete_app_site(tenant_id, app_id) | _delete_app_site(tenant_id, app_id) | ||||
| _delete_app_mcp_servers(tenant_id, app_id) | |||||
| _delete_app_api_tokens(tenant_id, app_id) | _delete_app_api_tokens(tenant_id, app_id) | ||||
| _delete_installed_apps(tenant_id, app_id) | _delete_installed_apps(tenant_id, app_id) | ||||
| _delete_recommended_apps(tenant_id, app_id) | _delete_recommended_apps(tenant_id, app_id) | ||||
| _delete_records("""select id from sites where app_id=:app_id limit 1000""", {"app_id": app_id}, del_site, "site") | _delete_records("""select id from sites where app_id=:app_id limit 1000""", {"app_id": app_id}, del_site, "site") | ||||
def _delete_app_mcp_servers(tenant_id: str, app_id: str):
    """Batch-delete all MCP server rows belonging to the given app."""

    def _remove_one(mcp_server_id: str):
        # Bulk delete by id; synchronize_session=False skips in-session sync.
        db.session.query(AppMCPServer).filter(AppMCPServer.id == mcp_server_id).delete(synchronize_session=False)

    _delete_records(
        """select id from app_mcp_servers where app_id=:app_id limit 1000""",
        {"app_id": app_id},
        _remove_one,
        "app mcp server",
    )
| def _delete_app_api_tokens(tenant_id: str, app_id: str): | def _delete_app_api_tokens(tenant_id: str, app_id: str): | ||||
| def del_api_token(api_token_id: str): | def del_api_token(api_token_id: str): | ||||
| db.session.query(ApiToken).filter(ApiToken.id == api_token_id).delete(synchronize_session=False) | db.session.query(ApiToken).filter(ApiToken.id == api_token_id).delete(synchronize_session=False) | 
| import queue | |||||
| import threading | |||||
| from typing import Any | |||||
| from core.mcp import types | |||||
| from core.mcp.entities import RequestContext | |||||
| from core.mcp.session.base_session import RequestResponder | |||||
| from core.mcp.session.client_session import DEFAULT_CLIENT_INFO, ClientSession | |||||
| from core.mcp.types import ( | |||||
| LATEST_PROTOCOL_VERSION, | |||||
| ClientNotification, | |||||
| ClientRequest, | |||||
| Implementation, | |||||
| InitializedNotification, | |||||
| InitializeRequest, | |||||
| InitializeResult, | |||||
| JSONRPCMessage, | |||||
| JSONRPCNotification, | |||||
| JSONRPCRequest, | |||||
| JSONRPCResponse, | |||||
| ServerCapabilities, | |||||
| ServerResult, | |||||
| SessionMessage, | |||||
| ) | |||||
def test_client_session_initialize():
    """Happy-path handshake: initialize request -> server result -> initialized notification."""
    # Create synchronous queues to replace async streams
    client_to_server: queue.Queue[SessionMessage] = queue.Queue()
    server_to_client: queue.Queue[SessionMessage] = queue.Queue()
    initialized_notification = None

    def mock_server():
        # Runs in a background thread and plays the server side of the handshake.
        nonlocal initialized_notification
        # Receive initialization request
        session_message = client_to_server.get(timeout=5.0)
        jsonrpc_request = session_message.message
        assert isinstance(jsonrpc_request.root, JSONRPCRequest)
        request = ClientRequest.model_validate(
            jsonrpc_request.root.model_dump(by_alias=True, mode="json", exclude_none=True)
        )
        assert isinstance(request.root, InitializeRequest)
        # Create response
        result = ServerResult(
            InitializeResult(
                protocolVersion=LATEST_PROTOCOL_VERSION,
                capabilities=ServerCapabilities(
                    logging=None,
                    resources=None,
                    tools=None,
                    experimental=None,
                    prompts=None,
                ),
                serverInfo=Implementation(name="mock-server", version="0.1.0"),
                instructions="The server instructions.",
            )
        )
        # Send response
        server_to_client.put(
            SessionMessage(
                message=JSONRPCMessage(
                    JSONRPCResponse(
                        jsonrpc="2.0",
                        id=jsonrpc_request.root.id,
                        result=result.model_dump(by_alias=True, mode="json", exclude_none=True),
                    )
                )
            )
        )
        # Receive initialized notification
        session_notification = client_to_server.get(timeout=5.0)
        jsonrpc_notification = session_notification.message
        assert isinstance(jsonrpc_notification.root, JSONRPCNotification)
        initialized_notification = ClientNotification.model_validate(
            jsonrpc_notification.root.model_dump(by_alias=True, mode="json", exclude_none=True)
        )

    # Create message handler
    def message_handler(
        message: RequestResponder[types.ServerRequest, types.ClientResult] | types.ServerNotification | Exception,
    ) -> None:
        # Surface transport-level exceptions in the test thread.
        if isinstance(message, Exception):
            raise message

    # Start mock server thread
    server_thread = threading.Thread(target=mock_server, daemon=True)
    server_thread.start()
    # Create and use client session
    with ClientSession(
        server_to_client,
        client_to_server,
        message_handler=message_handler,
    ) as session:
        result = session.initialize()
    # Wait for server thread to complete
    server_thread.join(timeout=10.0)
    # Assert results
    assert isinstance(result, InitializeResult)
    assert result.protocolVersion == LATEST_PROTOCOL_VERSION
    assert isinstance(result.capabilities, ServerCapabilities)
    assert result.serverInfo == Implementation(name="mock-server", version="0.1.0")
    assert result.instructions == "The server instructions."
    # Check that client sent initialized notification
    assert initialized_notification
    assert isinstance(initialized_notification.root, InitializedNotification)
def test_client_session_custom_client_info():
    """A custom client_info passed to ClientSession is sent in the initialize request."""
    # Create synchronous queues to replace async streams
    client_to_server: queue.Queue[SessionMessage] = queue.Queue()
    server_to_client: queue.Queue[SessionMessage] = queue.Queue()
    custom_client_info = Implementation(name="test-client", version="1.2.3")
    received_client_info = None

    def mock_server():
        # Captures the clientInfo the client advertises during initialize.
        nonlocal received_client_info
        session_message = client_to_server.get(timeout=5.0)
        jsonrpc_request = session_message.message
        assert isinstance(jsonrpc_request.root, JSONRPCRequest)
        request = ClientRequest.model_validate(
            jsonrpc_request.root.model_dump(by_alias=True, mode="json", exclude_none=True)
        )
        assert isinstance(request.root, InitializeRequest)
        received_client_info = request.root.params.clientInfo
        result = ServerResult(
            InitializeResult(
                protocolVersion=LATEST_PROTOCOL_VERSION,
                capabilities=ServerCapabilities(),
                serverInfo=Implementation(name="mock-server", version="0.1.0"),
            )
        )
        server_to_client.put(
            SessionMessage(
                message=JSONRPCMessage(
                    JSONRPCResponse(
                        jsonrpc="2.0",
                        id=jsonrpc_request.root.id,
                        result=result.model_dump(by_alias=True, mode="json", exclude_none=True),
                    )
                )
            )
        )
        # Receive initialized notification
        client_to_server.get(timeout=5.0)

    # Start mock server thread
    server_thread = threading.Thread(target=mock_server, daemon=True)
    server_thread.start()
    with ClientSession(
        server_to_client,
        client_to_server,
        client_info=custom_client_info,
    ) as session:
        session.initialize()
    # Wait for server thread to complete
    server_thread.join(timeout=10.0)
    # Assert that custom client info was sent
    assert received_client_info == custom_client_info
def test_client_session_default_client_info():
    """When no client_info is given, DEFAULT_CLIENT_INFO is sent in the initialize request."""
    # Create synchronous queues to replace async streams
    client_to_server: queue.Queue[SessionMessage] = queue.Queue()
    server_to_client: queue.Queue[SessionMessage] = queue.Queue()
    received_client_info = None

    def mock_server():
        # Captures the clientInfo the client advertises during initialize.
        nonlocal received_client_info
        session_message = client_to_server.get(timeout=5.0)
        jsonrpc_request = session_message.message
        assert isinstance(jsonrpc_request.root, JSONRPCRequest)
        request = ClientRequest.model_validate(
            jsonrpc_request.root.model_dump(by_alias=True, mode="json", exclude_none=True)
        )
        assert isinstance(request.root, InitializeRequest)
        received_client_info = request.root.params.clientInfo
        result = ServerResult(
            InitializeResult(
                protocolVersion=LATEST_PROTOCOL_VERSION,
                capabilities=ServerCapabilities(),
                serverInfo=Implementation(name="mock-server", version="0.1.0"),
            )
        )
        server_to_client.put(
            SessionMessage(
                message=JSONRPCMessage(
                    JSONRPCResponse(
                        jsonrpc="2.0",
                        id=jsonrpc_request.root.id,
                        result=result.model_dump(by_alias=True, mode="json", exclude_none=True),
                    )
                )
            )
        )
        # Receive initialized notification
        client_to_server.get(timeout=5.0)

    # Start mock server thread
    server_thread = threading.Thread(target=mock_server, daemon=True)
    server_thread.start()
    with ClientSession(
        server_to_client,
        client_to_server,
    ) as session:
        session.initialize()
    # Wait for server thread to complete
    server_thread.join(timeout=10.0)
    # Assert that default client info was used
    assert received_client_info == DEFAULT_CLIENT_INFO
def test_client_session_version_negotiation_success():
    """Initialize succeeds when the server replies with a supported protocol version."""
    # Create synchronous queues to replace async streams
    client_to_server: queue.Queue[SessionMessage] = queue.Queue()
    server_to_client: queue.Queue[SessionMessage] = queue.Queue()

    def mock_server():
        session_message = client_to_server.get(timeout=5.0)
        jsonrpc_request = session_message.message
        assert isinstance(jsonrpc_request.root, JSONRPCRequest)
        request = ClientRequest.model_validate(
            jsonrpc_request.root.model_dump(by_alias=True, mode="json", exclude_none=True)
        )
        assert isinstance(request.root, InitializeRequest)
        # Send supported protocol version
        result = ServerResult(
            InitializeResult(
                protocolVersion=LATEST_PROTOCOL_VERSION,
                capabilities=ServerCapabilities(),
                serverInfo=Implementation(name="mock-server", version="0.1.0"),
            )
        )
        server_to_client.put(
            SessionMessage(
                message=JSONRPCMessage(
                    JSONRPCResponse(
                        jsonrpc="2.0",
                        id=jsonrpc_request.root.id,
                        result=result.model_dump(by_alias=True, mode="json", exclude_none=True),
                    )
                )
            )
        )
        # Receive initialized notification
        client_to_server.get(timeout=5.0)

    # Start mock server thread
    server_thread = threading.Thread(target=mock_server, daemon=True)
    server_thread.start()
    with ClientSession(
        server_to_client,
        client_to_server,
    ) as session:
        result = session.initialize()
    # Wait for server thread to complete
    server_thread.join(timeout=10.0)
    # Should successfully initialize
    assert isinstance(result, InitializeResult)
    assert result.protocolVersion == LATEST_PROTOCOL_VERSION
def test_client_session_version_negotiation_failure():
    """Initialize raises RuntimeError when the server advertises an unsupported protocol version."""
    # Fix: `import pytest` was buried inside the `with ClientSession(...)` block;
    # hoisted to the top of the function where imports belong.
    import pytest

    # Create synchronous queues to replace async streams
    client_to_server: queue.Queue[SessionMessage] = queue.Queue()
    server_to_client: queue.Queue[SessionMessage] = queue.Queue()

    def mock_server():
        session_message = client_to_server.get(timeout=5.0)
        jsonrpc_request = session_message.message
        assert isinstance(jsonrpc_request.root, JSONRPCRequest)
        request = ClientRequest.model_validate(
            jsonrpc_request.root.model_dump(by_alias=True, mode="json", exclude_none=True)
        )
        assert isinstance(request.root, InitializeRequest)
        # Send unsupported protocol version
        result = ServerResult(
            InitializeResult(
                protocolVersion="99.99.99",  # Unsupported version
                capabilities=ServerCapabilities(),
                serverInfo=Implementation(name="mock-server", version="0.1.0"),
            )
        )
        server_to_client.put(
            SessionMessage(
                message=JSONRPCMessage(
                    JSONRPCResponse(
                        jsonrpc="2.0",
                        id=jsonrpc_request.root.id,
                        result=result.model_dump(by_alias=True, mode="json", exclude_none=True),
                    )
                )
            )
        )

    # Start mock server thread
    server_thread = threading.Thread(target=mock_server, daemon=True)
    server_thread.start()
    with ClientSession(
        server_to_client,
        client_to_server,
    ) as session:
        with pytest.raises(RuntimeError, match="Unsupported protocol version"):
            session.initialize()
    # Wait for server thread to complete
    server_thread.join(timeout=10.0)
def test_client_capabilities_default():
    """Default client capabilities (sampling and roots.listChanged) are advertised on initialize."""
    # Create synchronous queues to replace async streams
    client_to_server: queue.Queue[SessionMessage] = queue.Queue()
    server_to_client: queue.Queue[SessionMessage] = queue.Queue()
    received_capabilities = None

    def mock_server():
        # Captures the capabilities the client advertises during initialize.
        nonlocal received_capabilities
        session_message = client_to_server.get(timeout=5.0)
        jsonrpc_request = session_message.message
        assert isinstance(jsonrpc_request.root, JSONRPCRequest)
        request = ClientRequest.model_validate(
            jsonrpc_request.root.model_dump(by_alias=True, mode="json", exclude_none=True)
        )
        assert isinstance(request.root, InitializeRequest)
        received_capabilities = request.root.params.capabilities
        result = ServerResult(
            InitializeResult(
                protocolVersion=LATEST_PROTOCOL_VERSION,
                capabilities=ServerCapabilities(),
                serverInfo=Implementation(name="mock-server", version="0.1.0"),
            )
        )
        server_to_client.put(
            SessionMessage(
                message=JSONRPCMessage(
                    JSONRPCResponse(
                        jsonrpc="2.0",
                        id=jsonrpc_request.root.id,
                        result=result.model_dump(by_alias=True, mode="json", exclude_none=True),
                    )
                )
            )
        )
        # Receive initialized notification
        client_to_server.get(timeout=5.0)

    # Start mock server thread
    server_thread = threading.Thread(target=mock_server, daemon=True)
    server_thread.start()
    with ClientSession(
        server_to_client,
        client_to_server,
    ) as session:
        session.initialize()
    # Wait for server thread to complete
    server_thread.join(timeout=10.0)
    # Assert default capabilities
    assert received_capabilities is not None
    assert received_capabilities.sampling is not None
    assert received_capabilities.roots is not None
    assert received_capabilities.roots.listChanged is True
def test_client_capabilities_with_custom_callbacks():
    """Session initializes cleanly when custom sampling and list-roots callbacks are supplied."""
    # Create synchronous queues to replace async streams
    client_to_server: queue.Queue[SessionMessage] = queue.Queue()
    server_to_client: queue.Queue[SessionMessage] = queue.Queue()

    def custom_sampling_callback(
        context: RequestContext["ClientSession", Any],
        params: types.CreateMessageRequestParams,
    ) -> types.CreateMessageResult | types.ErrorData:
        # Canned sampling response; never invoked by this test's server.
        return types.CreateMessageResult(
            model="test-model",
            role="assistant",
            content=types.TextContent(type="text", text="Custom response"),
        )

    def custom_list_roots_callback(
        context: RequestContext["ClientSession", Any],
    ) -> types.ListRootsResult | types.ErrorData:
        # Canned empty roots list; never invoked by this test's server.
        return types.ListRootsResult(roots=[])

    def mock_server():
        session_message = client_to_server.get(timeout=5.0)
        jsonrpc_request = session_message.message
        assert isinstance(jsonrpc_request.root, JSONRPCRequest)
        request = ClientRequest.model_validate(
            jsonrpc_request.root.model_dump(by_alias=True, mode="json", exclude_none=True)
        )
        assert isinstance(request.root, InitializeRequest)
        result = ServerResult(
            InitializeResult(
                protocolVersion=LATEST_PROTOCOL_VERSION,
                capabilities=ServerCapabilities(),
                serverInfo=Implementation(name="mock-server", version="0.1.0"),
            )
        )
        server_to_client.put(
            SessionMessage(
                message=JSONRPCMessage(
                    JSONRPCResponse(
                        jsonrpc="2.0",
                        id=jsonrpc_request.root.id,
                        result=result.model_dump(by_alias=True, mode="json", exclude_none=True),
                    )
                )
            )
        )
        # Receive initialized notification
        client_to_server.get(timeout=5.0)

    # Start mock server thread
    server_thread = threading.Thread(target=mock_server, daemon=True)
    server_thread.start()
    with ClientSession(
        server_to_client,
        client_to_server,
        sampling_callback=custom_sampling_callback,
        list_roots_callback=custom_list_roots_callback,
    ) as session:
        result = session.initialize()
    # Wait for server thread to complete
    server_thread.join(timeout=10.0)
    # Verify initialization succeeded
    assert isinstance(result, InitializeResult)
    assert result.protocolVersion == LATEST_PROTOCOL_VERSION
| import json | |||||
| import queue | |||||
| import threading | |||||
| import time | |||||
| from typing import Any | |||||
| from unittest.mock import Mock, patch | |||||
| import httpx | |||||
| import pytest | |||||
| from core.mcp import types | |||||
| from core.mcp.client.sse_client import sse_client | |||||
| from core.mcp.error import MCPAuthError, MCPConnectionError | |||||
| SERVER_NAME = "test_server_for_SSE" | |||||
def test_sse_message_id_coercion():
    """Test that string message IDs that look like integers are parsed as integers.

    See <https://github.com/modelcontextprotocol/python-sdk/pull/851> for more details.
    """
    json_message = '{"jsonrpc": "2.0", "id": "123", "method": "ping", "params": null}'
    msg = types.JSONRPCMessage.model_validate_json(json_message)
    # Expected form uses an int id; validation should coerce "123" -> 123.
    expected = types.JSONRPCMessage(root=types.JSONRPCRequest(method="ping", jsonrpc="2.0", id=123))
    # Check if both are JSONRPCRequest instances
    assert isinstance(msg.root, types.JSONRPCRequest)
    assert isinstance(expected.root, types.JSONRPCRequest)
    assert msg.root.id == expected.root.id
    assert msg.root.method == expected.root.method
    assert msg.root.jsonrpc == expected.root.jsonrpc
class MockSSEClient:
    """Mock SSE client for testing."""

    def __init__(self, url: str, headers: dict[str, Any] | None = None):
        self.url = url
        self.headers = headers or {}
        self.connected = False
        self.read_queue: queue.Queue = queue.Queue()  # (event_type, data) tuples read by the client
        self.write_queue: queue.Queue = queue.Queue()  # messages written by the client

    def connect(self):
        """Simulate connection establishment."""
        self.connected = True
        # Send endpoint event
        endpoint_data = "/messages/?session_id=test-session-123"
        self.read_queue.put(("endpoint", endpoint_data))
        return self.read_queue, self.write_queue

    def send_initialize_response(self):
        """Send a mock initialize response."""
        response = {
            "jsonrpc": "2.0",
            "id": 1,
            "result": {
                "protocolVersion": types.LATEST_PROTOCOL_VERSION,
                "capabilities": {
                    "logging": None,
                    "resources": None,
                    "tools": None,
                    "experimental": None,
                    "prompts": None,
                },
                "serverInfo": {"name": SERVER_NAME, "version": "0.1.0"},
                "instructions": "Test server instructions.",
            },
        }
        self.read_queue.put(("message", json.dumps(response)))
def test_sse_client_message_id_handling():
    """Test SSE client properly handles message ID coercion."""
    mock_client = MockSSEClient("http://test.example/sse")
    read_queue, write_queue = mock_client.connect()
    # Send a message with string ID that should be coerced to int
    message_data = {
        "jsonrpc": "2.0",
        "id": "456",  # String ID
        "result": {"test": "data"},
    }
    read_queue.put(("message", json.dumps(message_data)))
    # First queue item is the endpoint event enqueued by connect(); discard it.
    read_queue.get(timeout=1.0)
    # Get the message from queue
    event_type, data = read_queue.get(timeout=1.0)
    assert event_type == "message"
    # Parse the message
    parsed_message = types.JSONRPCMessage.model_validate_json(data)
    # Check that it's a JSONRPCResponse and verify the ID
    assert isinstance(parsed_message.root, types.JSONRPCResponse)
    assert parsed_message.root.id == 456  # Should be converted to int
def test_sse_client_connection_validation():
    """Test SSE client validates endpoint URLs properly.

    The transport layer is fully mocked; the test only checks that a mocked
    connection yields usable read/write queues.
    """
    test_url = "http://test.example/sse"
    with patch("core.mcp.client.sse_client.create_ssrf_proxy_mcp_http_client") as mock_client_factory:
        with patch("core.mcp.client.sse_client.ssrf_proxy_sse_connect") as mock_sse_connect:
            # Mock the HTTP client
            mock_client = Mock()
            mock_client_factory.return_value.__enter__.return_value = mock_client
            # Mock the SSE connection
            mock_event_source = Mock()
            mock_event_source.response.raise_for_status.return_value = None
            mock_sse_connect.return_value.__enter__.return_value = mock_event_source

            # Mock SSE events
            class MockSSEEvent:
                def __init__(self, event_type: str, data: str):
                    self.event = event_type
                    self.data = data

            # Simulate endpoint event
            endpoint_event = MockSSEEvent("endpoint", "/messages/?session_id=test-123")
            mock_event_source.iter_sse.return_value = [endpoint_event]
            # Test connection
            try:
                with sse_client(test_url) as (read_queue, write_queue):
                    assert read_queue is not None
                    assert write_queue is not None
            # Fix: dropped the unused `as e` binding (flake8 F841).
            except Exception:
                # Connection might fail due to mocking, but we're testing the validation logic
                pass
def test_sse_client_error_handling():
    """Test SSE client properly handles various error conditions."""
    test_url = "http://test.example/sse"
    # Test 401 error handling
    with patch("core.mcp.client.sse_client.create_ssrf_proxy_mcp_http_client") as mock_client_factory:
        with patch("core.mcp.client.sse_client.ssrf_proxy_sse_connect") as mock_sse_connect:
            # Mock 401 HTTP error
            mock_error = httpx.HTTPStatusError("Unauthorized", request=Mock(), response=Mock(status_code=401))
            mock_sse_connect.side_effect = mock_error
            # 401 must be surfaced as the auth-specific error type.
            with pytest.raises(MCPAuthError):
                with sse_client(test_url):
                    pass
    # Test other HTTP errors
    with patch("core.mcp.client.sse_client.create_ssrf_proxy_mcp_http_client") as mock_client_factory:
        with patch("core.mcp.client.sse_client.ssrf_proxy_sse_connect") as mock_sse_connect:
            # Mock other HTTP error
            mock_error = httpx.HTTPStatusError("Server Error", request=Mock(), response=Mock(status_code=500))
            mock_sse_connect.side_effect = mock_error
            # Non-auth HTTP failures map to the generic connection error.
            with pytest.raises(MCPConnectionError):
                with sse_client(test_url):
                    pass
def test_sse_client_timeout_configuration():
    """Test SSE client timeout configuration."""
    test_url = "http://test.example/sse"
    custom_timeout = 10.0
    custom_sse_timeout = 300.0
    custom_headers = {"Authorization": "Bearer test-token"}
    with patch("core.mcp.client.sse_client.create_ssrf_proxy_mcp_http_client") as mock_client_factory:
        with patch("core.mcp.client.sse_client.ssrf_proxy_sse_connect") as mock_sse_connect:
            # Mock successful connection
            mock_client = Mock()
            mock_client_factory.return_value.__enter__.return_value = mock_client
            mock_event_source = Mock()
            mock_event_source.response.raise_for_status.return_value = None
            mock_event_source.iter_sse.return_value = []
            mock_sse_connect.return_value.__enter__.return_value = mock_event_source
            try:
                with sse_client(
                    test_url, headers=custom_headers, timeout=custom_timeout, sse_read_timeout=custom_sse_timeout
                ) as (read_queue, write_queue):
                    # Verify the configuration was passed correctly
                    mock_client_factory.assert_called_with(headers=custom_headers)
                    # Check that timeout was configured
                    call_args = mock_sse_connect.call_args
                    assert call_args is not None
                    # sse_read_timeout must land on the read component of the timeout kwarg.
                    timeout_arg = call_args[1]["timeout"]
                    assert timeout_arg.read == custom_sse_timeout
            except Exception:
                # Connection might fail due to mocking, but we tested the configuration
                pass
def test_sse_transport_endpoint_validation():
    """Test SSE transport validates endpoint URLs correctly.

    Only same-origin (scheme + host) endpoints may be accepted.
    """
    from core.mcp.client.sse_client import SSETransport

    transport = SSETransport("http://example.com/sse")
    # Valid endpoint (same origin)
    valid_endpoint = "http://example.com/messages/session123"
    # Fix: replaced `== True` / `== False` comparisons (flake8/ruff E712)
    # with plain truth-value assertions.
    assert transport._validate_endpoint_url(valid_endpoint)
    # Invalid endpoint (different origin)
    invalid_endpoint = "http://malicious.com/messages/session123"
    assert not transport._validate_endpoint_url(invalid_endpoint)
    # Invalid endpoint (different scheme)
    invalid_scheme = "https://example.com/messages/session123"
    assert not transport._validate_endpoint_url(invalid_scheme)
def test_sse_transport_message_parsing():
    """Test SSE transport properly parses different message types."""
    from core.mcp.client.sse_client import SSETransport

    transport = SSETransport("http://example.com/sse")
    read_queue: queue.Queue = queue.Queue()
    # Test valid JSON-RPC message
    valid_message = '{"jsonrpc": "2.0", "id": 1, "method": "ping"}'
    transport._handle_message_event(valid_message, read_queue)
    # Should have a SessionMessage in the queue
    message = read_queue.get(timeout=1.0)
    assert message is not None
    assert hasattr(message, "message")
    # Test invalid JSON
    invalid_json = '{"invalid": json}'
    transport._handle_message_event(invalid_json, read_queue)
    # Should have an exception in the queue
    error = read_queue.get(timeout=1.0)
    assert isinstance(error, Exception)
def test_sse_client_queue_cleanup():
    """Test that SSE client properly cleans up queues on exit."""
    test_url = "http://test.example/sse"
    read_queue = None
    write_queue = None
    with patch("core.mcp.client.sse_client.create_ssrf_proxy_mcp_http_client") as mock_client_factory:
        with patch("core.mcp.client.sse_client.ssrf_proxy_sse_connect") as mock_sse_connect:
            # Mock connection that raises an exception
            mock_sse_connect.side_effect = Exception("Connection failed")
            try:
                with sse_client(test_url) as (rq, wq):
                    read_queue = rq
                    write_queue = wq
            except Exception:
                pass  # Expected to fail
    # Queues should be cleaned up even on exception
    # Note: In real implementation, cleanup should put None to signal shutdown
def test_sse_client_url_processing():
    """Test SSE client URL processing functions."""
    from core.mcp.client.sse_client import remove_request_params

    # Test URL with parameters
    # Fix: the second query parameter was mojibake ("¶m2" — an HTML-entity
    # corruption of "&param2"); restored the intended two-parameter query string.
    url_with_params = "http://example.com/sse?param1=value1&param2=value2"
    cleaned_url = remove_request_params(url_with_params)
    assert cleaned_url == "http://example.com/sse"
    # Test URL without parameters
    url_without_params = "http://example.com/sse"
    cleaned_url = remove_request_params(url_without_params)
    assert cleaned_url == "http://example.com/sse"
    # Test URL with path and parameters
    complex_url = "http://example.com/path/to/sse?session=123&token=abc"
    cleaned_url = remove_request_params(complex_url)
    assert cleaned_url == "http://example.com/path/to/sse"
def test_sse_client_headers_propagation():
    """Test that custom headers are properly propagated in SSE client."""
    test_url = "http://test.example/sse"
    custom_headers = {
        "Authorization": "Bearer test-token",
        "X-Custom-Header": "test-value",
        "User-Agent": "test-client/1.0",
    }
    with patch("core.mcp.client.sse_client.create_ssrf_proxy_mcp_http_client") as mock_client_factory:
        with patch("core.mcp.client.sse_client.ssrf_proxy_sse_connect") as mock_sse_connect:
            # Mock the client factory to capture headers
            mock_client = Mock()
            mock_client_factory.return_value.__enter__.return_value = mock_client
            # Mock the SSE connection
            mock_event_source = Mock()
            mock_event_source.response.raise_for_status.return_value = None
            mock_event_source.iter_sse.return_value = []
            mock_sse_connect.return_value.__enter__.return_value = mock_event_source
            try:
                with sse_client(test_url, headers=custom_headers):
                    pass
            except Exception:
                pass  # Expected due to mocking
            # Verify headers were passed to client factory
            mock_client_factory.assert_called_with(headers=custom_headers)
def test_sse_client_concurrent_access():
    """An SSE-style queue stays consistent when one thread produces while another consumes."""
    shared_queue: queue.Queue = queue.Queue()
    expected_count = 10

    def feed_messages():
        # Emit numbered messages with a tiny pause to mimic a live event stream.
        for idx in range(expected_count):
            shared_queue.put(f"message_{idx}")
            time.sleep(0.01)

    def drain_messages():
        collected = []
        while len(collected) < expected_count:
            try:
                collected.append(shared_queue.get(timeout=2.0))
            except queue.Empty:
                break
        return collected

    # Produce from a background thread while the test thread consumes.
    feeder = threading.Thread(target=feed_messages, daemon=True)
    feeder.start()
    received_messages = drain_messages()
    feeder.join(timeout=5.0)

    # Every produced message must have arrived exactly once.
    assert len(received_messages) == expected_count
    for idx in range(expected_count):
        assert f"message_{idx}" in received_messages
| """ | |||||
| Tests for the StreamableHTTP client transport. | |||||
| Contains tests for only the client side of the StreamableHTTP transport. | |||||
| """ | |||||
| import queue | |||||
| import threading | |||||
| import time | |||||
| from typing import Any | |||||
| from unittest.mock import Mock, patch | |||||
| from core.mcp import types | |||||
| from core.mcp.client.streamable_client import streamablehttp_client | |||||
# Test constants
SERVER_NAME = "test_streamable_http_server"
TEST_SESSION_ID = "test-session-id-12345"
# Canned JSON-RPC initialize request payload shared by the tests below.
INIT_REQUEST = {
    "jsonrpc": "2.0",
    "method": "initialize",
    "params": {
        "clientInfo": {"name": "test-client", "version": "1.0"},
        "protocolVersion": "2025-03-26",
        "capabilities": {},
    },
    "id": "init-1",
}
class MockStreamableHTTPClient:
    """Mock StreamableHTTP client for testing."""

    def __init__(self, url: str, headers: dict[str, Any] | None = None):
        self.url = url
        self.headers = headers or {}
        self.connected = False
        self.read_queue: queue.Queue = queue.Queue()
        self.write_queue: queue.Queue = queue.Queue()
        self.session_id = TEST_SESSION_ID

    def connect(self):
        """Simulate connection establishment."""
        self.connected = True
        # Mirrors the real transport's (read, write, get_session_id) triple.
        return self.read_queue, self.write_queue, lambda: self.session_id

    def _enqueue_response(self, request_id: str, result: dict[str, Any]) -> None:
        # Wrap a raw result dict into the SessionMessage envelope the client expects.
        rpc_response = types.JSONRPCResponse(jsonrpc="2.0", id=request_id, result=result)
        self.read_queue.put(types.SessionMessage(message=types.JSONRPCMessage(root=rpc_response)))

    def send_initialize_response(self):
        """Send a mock initialize response."""
        self._enqueue_response(
            "init-1",
            {
                "protocolVersion": types.LATEST_PROTOCOL_VERSION,
                "capabilities": {
                    "logging": None,
                    "resources": None,
                    "tools": None,
                    "experimental": None,
                    "prompts": None,
                },
                "serverInfo": {"name": SERVER_NAME, "version": "0.1.0"},
                "instructions": "Test server instructions.",
            },
        )

    def send_tools_response(self):
        """Send a mock tools list response."""
        self._enqueue_response(
            "tools-1",
            {
                "tools": [
                    {
                        "name": "test_tool",
                        "description": "A test tool",
                        "inputSchema": {"type": "object", "properties": {}},
                    }
                ],
            },
        )
def test_streamablehttp_client_message_id_handling():
    """Test StreamableHTTP client properly handles message ID coercion."""
    client = MockStreamableHTTPClient("http://test.example/mcp")
    read_queue, write_queue, get_session_id = client.connect()

    # Enqueue a response whose ID arrives as a numeric string.
    read_queue.put(
        types.SessionMessage(
            message=types.JSONRPCMessage(
                root=types.JSONRPCResponse(jsonrpc="2.0", id="789", result={"test": "data"})
            )
        )
    )

    received = read_queue.get(timeout=1.0)
    assert received is not None
    assert isinstance(received, types.SessionMessage)
    assert isinstance(received.message.root, types.JSONRPCResponse)
    # ID should be coerced to int due to union_mode="left_to_right"
    assert received.message.root.id == 789
def test_streamablehttp_client_connection_validation():
    """Test StreamableHTTP client validates connections properly."""
    test_url = "http://test.example/mcp"

    with patch("core.mcp.client.streamable_client.create_ssrf_proxy_mcp_http_client") as factory:
        # Stand in for the SSRF-proxied HTTP client.
        http_client = Mock()
        factory.return_value.__enter__.return_value = http_client

        ok_response = Mock()
        ok_response.status_code = 200
        ok_response.headers = {"content-type": "application/json"}
        ok_response.raise_for_status.return_value = None
        http_client.post.return_value = ok_response

        try:
            with streamablehttp_client(test_url) as (read_queue, write_queue, get_session_id):
                assert read_queue is not None
                assert write_queue is not None
                assert get_session_id is not None
        except Exception:
            # Connection might fail due to mocking, but we're testing the validation logic
            pass
def test_streamablehttp_client_timeout_configuration():
    """Test StreamableHTTP client timeout configuration."""
    test_url = "http://test.example/mcp"
    custom_headers = {"Authorization": "Bearer test-token"}

    with patch("core.mcp.client.streamable_client.create_ssrf_proxy_mcp_http_client") as factory:
        # Stand in for the SSRF-proxied HTTP client.
        http_client = Mock()
        factory.return_value.__enter__.return_value = http_client

        ok_response = Mock()
        ok_response.status_code = 200
        ok_response.headers = {"content-type": "application/json"}
        ok_response.raise_for_status.return_value = None
        http_client.post.return_value = ok_response

        try:
            with streamablehttp_client(test_url, headers=custom_headers) as (read_queue, write_queue, get_session_id):
                # Verify the configuration was passed correctly
                factory.assert_called_with(headers=custom_headers)
        except Exception:
            # Connection might fail due to mocking, but we tested the configuration
            pass
def test_streamablehttp_client_session_id_handling():
    """Test StreamableHTTP client properly handles session IDs."""
    client = MockStreamableHTTPClient("http://test.example/mcp")
    _read_queue, _write_queue, get_session_id = client.connect()

    # The mock transport always reports the fixed test session id.
    current_session = get_session_id()
    assert current_session == TEST_SESSION_ID

    # The id must be usable in subsequent requests.
    assert current_session is not None
    assert len(current_session) > 0
def test_streamablehttp_client_message_parsing():
    """Test StreamableHTTP client properly parses different message types."""
    client = MockStreamableHTTPClient("http://test.example/mcp")
    read_queue, _write_queue, _get_session_id = client.connect()

    # A queued initialize response must surface as a SessionMessage.
    client.send_initialize_response()
    init_message = read_queue.get(timeout=1.0)
    assert init_message is not None
    assert isinstance(init_message, types.SessionMessage)
    assert isinstance(init_message.message.root, types.JSONRPCResponse)

    # A tools/list response is parsed the same way.
    client.send_tools_response()
    tools_message = read_queue.get(timeout=1.0)
    assert tools_message is not None
    assert isinstance(tools_message, types.SessionMessage)
def test_streamablehttp_client_queue_cleanup():
    """Test that StreamableHTTP client properly cleans up queues on exit."""
    test_url = "http://test.example/mcp"
    read_queue = None
    write_queue = None

    with patch("core.mcp.client.streamable_client.create_ssrf_proxy_mcp_http_client") as factory:
        # The factory itself blows up, so the transport never comes up.
        factory.side_effect = Exception("Connection failed")

        try:
            with streamablehttp_client(test_url) as (rq, wq, get_session_id):
                read_queue = rq
                write_queue = wq
        except Exception:
            pass  # Expected to fail

    # Queues should be cleaned up even on exception
    # Note: In real implementation, cleanup should put None to signal shutdown
def test_streamablehttp_client_headers_propagation():
    """Test that custom headers are properly propagated in StreamableHTTP client."""
    test_url = "http://test.example/mcp"
    custom_headers = {
        "Authorization": "Bearer test-token",
        "X-Custom-Header": "test-value",
        "User-Agent": "test-client/1.0",
    }

    with patch("core.mcp.client.streamable_client.create_ssrf_proxy_mcp_http_client") as factory:
        # Stand in for the SSRF-proxied HTTP client so we can capture headers.
        http_client = Mock()
        factory.return_value.__enter__.return_value = http_client

        ok_response = Mock()
        ok_response.status_code = 200
        ok_response.headers = {"content-type": "application/json"}
        ok_response.raise_for_status.return_value = None
        http_client.post.return_value = ok_response

        try:
            with streamablehttp_client(test_url, headers=custom_headers):
                pass
        except Exception:
            pass  # Expected due to mocking

        # The factory must have been invoked exactly once, with every custom
        # header present in its keyword arguments.
        factory.assert_called_once()
        call_kwargs = factory.call_args.kwargs
        assert "headers" in call_kwargs
        passed_headers = call_kwargs["headers"]
        for key, value in custom_headers.items():
            assert key in passed_headers
            assert passed_headers[key] == value
def test_streamablehttp_client_concurrent_access():
    """Test StreamableHTTP client behavior with concurrent queue access.

    A background producer feeds ten messages into the read queue while the
    main thread consumes them, verifying no message is lost or duplicated.
    """
    # Only the read queue participates; the previously-declared write queue
    # was never used and has been removed.
    test_read_queue: queue.Queue = queue.Queue()

    def producer():
        for i in range(10):
            test_read_queue.put(f"message_{i}")
            time.sleep(0.01)  # Small delay to simulate real conditions

    def consumer():
        received = []
        for _ in range(10):
            try:
                received.append(test_read_queue.get(timeout=2.0))
            except queue.Empty:
                break
        return received

    # Start producer in separate thread
    producer_thread = threading.Thread(target=producer, daemon=True)
    producer_thread.start()

    # Consume messages, then wait for the producer to finish
    received_messages = consumer()
    producer_thread.join(timeout=5.0)

    # Verify all messages were received
    assert len(received_messages) == 10
    for i in range(10):
        assert f"message_{i}" in received_messages
def test_streamablehttp_client_json_vs_sse_mode():
    """Test StreamableHTTP client handling of JSON vs SSE response modes."""
    test_url = "http://test.example/mcp"

    with patch("core.mcp.client.streamable_client.create_ssrf_proxy_mcp_http_client") as factory:
        http_client = Mock()
        factory.return_value.__enter__.return_value = http_client

        def make_response(content_type):
            # Build a 200 response advertising the given content type.
            response = Mock()
            response.status_code = 200
            response.headers = {"content-type": content_type}
            response.raise_for_status.return_value = None
            return response

        json_response = make_response("application/json")
        json_response.json.return_value = {"result": "json_mode"}
        sse_response = make_response("text/event-stream")

        # Exercise both content types; queues must exist in each mode.
        for mocked in (json_response, sse_response):
            http_client.post.return_value = mocked
            try:
                with streamablehttp_client(test_url) as (read_queue, write_queue, get_session_id):
                    assert read_queue is not None
                    assert write_queue is not None
            except Exception:
                pass  # Expected due to mocking
def test_streamablehttp_client_terminate_on_close():
    """Test StreamableHTTP client terminate_on_close parameter."""
    test_url = "http://test.example/mcp"

    with patch("core.mcp.client.streamable_client.create_ssrf_proxy_mcp_http_client") as factory:
        http_client = Mock()
        factory.return_value.__enter__.return_value = http_client

        ok_response = Mock()
        ok_response.status_code = 200
        ok_response.headers = {"content-type": "application/json"}
        ok_response.raise_for_status.return_value = None
        # DELETE is what session termination issues on close.
        http_client.post.return_value = ok_response
        http_client.delete.return_value = ok_response

        # Both flag values (True is the default) must be accepted.
        for terminate in (True, False):
            try:
                with streamablehttp_client(test_url, terminate_on_close=terminate) as (
                    read_queue,
                    write_queue,
                    get_session_id,
                ):
                    pass
            except Exception:
                pass  # Expected due to mocking
def test_streamablehttp_client_protocol_version_handling():
    """Test StreamableHTTP client protocol version handling."""
    client = MockStreamableHTTPClient("http://test.example/mcp")
    read_queue, _write_queue, _get_session_id = client.connect()

    # Queue an initialize response pinned to an older protocol revision.
    init_result = {
        "protocolVersion": "2024-11-05",
        "capabilities": {},
        "serverInfo": {"name": SERVER_NAME, "version": "0.1.0"},
    }
    read_queue.put(
        types.SessionMessage(
            message=types.JSONRPCMessage(
                root=types.JSONRPCResponse(jsonrpc="2.0", id="init-1", result=init_result)
            )
        )
    )

    # The advertised protocol version must round-trip unchanged.
    received = read_queue.get(timeout=1.0)
    assert received is not None
    assert isinstance(received.message.root, types.JSONRPCResponse)
    assert received.message.root.result["protocolVersion"] == "2024-11-05"
def test_streamablehttp_client_error_response_handling():
    """Test StreamableHTTP client handling of error responses."""
    client = MockStreamableHTTPClient("http://test.example/mcp")
    read_queue, _write_queue, _get_session_id = client.connect()

    # Queue a JSON-RPC error (-32601: method not found).
    error_payload = types.ErrorData(code=-32601, message="Method not found", data=None)
    read_queue.put(
        types.SessionMessage(
            message=types.JSONRPCMessage(
                root=types.JSONRPCError(jsonrpc="2.0", id="test-1", error=error_payload)
            )
        )
    )

    # The error envelope must survive the queue round-trip intact.
    received = read_queue.get(timeout=1.0)
    assert received is not None
    assert isinstance(received.message.root, types.JSONRPCError)
    assert received.message.root.error.code == -32601
    assert received.message.root.error.message == "Method not found"
def test_streamablehttp_client_resumption_token_handling():
    """Test StreamableHTTP client resumption token functionality."""
    test_url = "http://test.example/mcp"
    test_resumption_token = "resume-token-123"

    with patch("core.mcp.client.streamable_client.create_ssrf_proxy_mcp_http_client") as factory:
        http_client = Mock()
        factory.return_value.__enter__.return_value = http_client

        ok_response = Mock()
        ok_response.status_code = 200
        # The server advertises a resumption token via the last-event-id header.
        ok_response.headers = {"content-type": "application/json", "last-event-id": test_resumption_token}
        ok_response.raise_for_status.return_value = None
        http_client.post.return_value = ok_response

        try:
            with streamablehttp_client(test_url) as (read_queue, write_queue, get_session_id):
                # Test that resumption token can be captured from headers
                assert read_queue is not None
                assert write_queue is not None
        except Exception:
            pass  # Expected due to mocking
| [[package]] | [[package]] | ||||
| name = "aiosignal" | name = "aiosignal" | ||||
| version = "1.3.2" | |||||
| version = "1.4.0" | |||||
| source = { registry = "https://pypi.org/simple" } | source = { registry = "https://pypi.org/simple" } | ||||
| dependencies = [ | dependencies = [ | ||||
| { name = "frozenlist" }, | { name = "frozenlist" }, | ||||
| { name = "typing-extensions" }, | |||||
| ] | ] | ||||
| sdist = { url = "https://files.pythonhosted.org/packages/ba/b5/6d55e80f6d8a08ce22b982eafa278d823b541c925f11ee774b0b9c43473d/aiosignal-1.3.2.tar.gz", hash = "sha256:a8c255c66fafb1e499c9351d0bf32ff2d8a0321595ebac3b93713656d2436f54", size = 19424, upload-time = "2024-12-13T17:10:40.86Z" } | |||||
| sdist = { url = "https://files.pythonhosted.org/packages/61/62/06741b579156360248d1ec624842ad0edf697050bbaf7c3e46394e106ad1/aiosignal-1.4.0.tar.gz", hash = "sha256:f47eecd9468083c2029cc99945502cb7708b082c232f9aca65da147157b251c7", size = 25007, upload-time = "2025-07-03T22:54:43.528Z" } | |||||
| wheels = [ | wheels = [ | ||||
| { url = "https://files.pythonhosted.org/packages/ec/6a/bc7e17a3e87a2985d3e8f4da4cd0f481060eb78fb08596c42be62c90a4d9/aiosignal-1.3.2-py2.py3-none-any.whl", hash = "sha256:45cde58e409a301715980c2b01d0c28bdde3770d8290b5eb2173759d9acb31a5", size = 7597, upload-time = "2024-12-13T17:10:38.469Z" }, | |||||
| { url = "https://files.pythonhosted.org/packages/fb/76/641ae371508676492379f16e2fa48f4e2c11741bd63c48be4b12a6b09cba/aiosignal-1.4.0-py3-none-any.whl", hash = "sha256:053243f8b92b990551949e63930a839ff0cf0b0ebbe0597b0f3fb19e1a0fe82e", size = 7490, upload-time = "2025-07-03T22:54:42.156Z" }, | |||||
| ] | ] | ||||
| [[package]] | [[package]] | ||||
| name = "alembic" | name = "alembic" | ||||
| version = "1.16.2" | |||||
| version = "1.16.3" | |||||
| source = { registry = "https://pypi.org/simple" } | source = { registry = "https://pypi.org/simple" } | ||||
| dependencies = [ | dependencies = [ | ||||
| { name = "mako" }, | { name = "mako" }, | ||||
| { name = "sqlalchemy" }, | { name = "sqlalchemy" }, | ||||
| { name = "typing-extensions" }, | { name = "typing-extensions" }, | ||||
| ] | ] | ||||
| sdist = { url = "https://files.pythonhosted.org/packages/9c/35/116797ff14635e496bbda0c168987f5326a6555b09312e9b817e360d1f56/alembic-1.16.2.tar.gz", hash = "sha256:e53c38ff88dadb92eb22f8b150708367db731d58ad7e9d417c9168ab516cbed8", size = 1963563, upload-time = "2025-06-16T18:05:08.566Z" } | |||||
| sdist = { url = "https://files.pythonhosted.org/packages/b9/40/28683414cc8711035a65256ca689e159471aa9ef08e8741ad1605bc01066/alembic-1.16.3.tar.gz", hash = "sha256:18ad13c1f40a5796deee4b2346d1a9c382f44b8af98053897484fa6cf88025e4", size = 1967462, upload-time = "2025-07-08T18:57:50.991Z" } | |||||
| wheels = [ | wheels = [ | ||||
| { url = "https://files.pythonhosted.org/packages/dd/e2/88e425adac5ad887a087c38d04fe2030010572a3e0e627f8a6e8c33eeda8/alembic-1.16.2-py3-none-any.whl", hash = "sha256:5f42e9bd0afdbd1d5e3ad856c01754530367debdebf21ed6894e34af52b3bb03", size = 242717, upload-time = "2025-06-16T18:05:10.27Z" }, | |||||
| { url = "https://files.pythonhosted.org/packages/e6/68/1dea77887af7304528ea944c355d769a7ccc4599d3a23bd39182486deb42/alembic-1.16.3-py3-none-any.whl", hash = "sha256:70a7c7829b792de52d08ca0e3aefaf060687cb8ed6bebfa557e597a1a5e5a481", size = 246933, upload-time = "2025-07-08T18:57:52.793Z" }, | |||||
| ] | ] | ||||
| [[package]] | [[package]] | ||||
| [[package]] | [[package]] | ||||
| name = "alibabacloud-tea-openapi" | name = "alibabacloud-tea-openapi" | ||||
| version = "0.3.15" | |||||
| version = "0.3.16" | |||||
| source = { registry = "https://pypi.org/simple" } | source = { registry = "https://pypi.org/simple" } | ||||
| dependencies = [ | dependencies = [ | ||||
| { name = "alibabacloud-credentials" }, | { name = "alibabacloud-credentials" }, | ||||
| { name = "alibabacloud-tea-util" }, | { name = "alibabacloud-tea-util" }, | ||||
| { name = "alibabacloud-tea-xml" }, | { name = "alibabacloud-tea-xml" }, | ||||
| ] | ] | ||||
| sdist = { url = "https://files.pythonhosted.org/packages/be/cb/f1b10b1da37e4c0de2aa9ca1e7153a6960a7f2dc496664e85fdc8b621f84/alibabacloud_tea_openapi-0.3.15.tar.gz", hash = "sha256:56a0aa6d51d8cf18c0cf3d219d861f4697f59d3e17fa6726b1101826d93988a2", size = 13021, upload-time = "2025-05-06T12:56:29.402Z" } | |||||
| sdist = { url = "https://files.pythonhosted.org/packages/09/be/f594e79625e5ccfcfe7f12d7d70709a3c59e920878469c998886211c850d/alibabacloud_tea_openapi-0.3.16.tar.gz", hash = "sha256:6bffed8278597592e67860156f424bde4173a6599d7b6039fb640a3612bae292", size = 13087, upload-time = "2025-07-04T09:30:10.689Z" } | |||||
| [[package]] | [[package]] | ||||
| name = "alibabacloud-tea-util" | name = "alibabacloud-tea-util" | ||||
| [[package]] | [[package]] | ||||
| name = "asgiref" | name = "asgiref" | ||||
| version = "3.8.1" | |||||
| version = "3.9.1" | |||||
| source = { registry = "https://pypi.org/simple" } | source = { registry = "https://pypi.org/simple" } | ||||
| sdist = { url = "https://files.pythonhosted.org/packages/29/38/b3395cc9ad1b56d2ddac9970bc8f4141312dbaec28bc7c218b0dfafd0f42/asgiref-3.8.1.tar.gz", hash = "sha256:c343bd80a0bec947a9860adb4c432ffa7db769836c64238fc34bdc3fec84d590", size = 35186, upload-time = "2024-03-22T14:39:36.863Z" } | |||||
| sdist = { url = "https://files.pythonhosted.org/packages/90/61/0aa957eec22ff70b830b22ff91f825e70e1ef732c06666a805730f28b36b/asgiref-3.9.1.tar.gz", hash = "sha256:a5ab6582236218e5ef1648f242fd9f10626cfd4de8dc377db215d5d5098e3142", size = 36870, upload-time = "2025-07-08T09:07:43.344Z" } | |||||
| wheels = [ | wheels = [ | ||||
| { url = "https://files.pythonhosted.org/packages/39/e3/893e8757be2612e6c266d9bb58ad2e3651524b5b40cf56761e985a28b13e/asgiref-3.8.1-py3-none-any.whl", hash = "sha256:3e1e3ecc849832fe52ccf2cb6686b7a55f82bb1d6aee72a58826471390335e47", size = 23828, upload-time = "2024-03-22T14:39:34.521Z" }, | |||||
| { url = "https://files.pythonhosted.org/packages/7c/3c/0464dcada90d5da0e71018c04a140ad6349558afb30b3051b4264cc5b965/asgiref-3.9.1-py3-none-any.whl", hash = "sha256:f3bba7092a48005b5f5bacd747d36ee4a5a61f4a269a6df590b43144355ebd2c", size = 23790, upload-time = "2025-07-08T09:07:41.548Z" }, | |||||
| ] | ] | ||||
| [[package]] | [[package]] | ||||
| [[package]] | [[package]] | ||||
| name = "boto3-stubs" | name = "boto3-stubs" | ||||
| version = "1.39.2" | |||||
| version = "1.39.3" | |||||
| source = { registry = "https://pypi.org/simple" } | source = { registry = "https://pypi.org/simple" } | ||||
| dependencies = [ | dependencies = [ | ||||
| { name = "botocore-stubs" }, | { name = "botocore-stubs" }, | ||||
| { name = "types-s3transfer" }, | { name = "types-s3transfer" }, | ||||
| { name = "typing-extensions", marker = "python_full_version < '3.12'" }, | { name = "typing-extensions", marker = "python_full_version < '3.12'" }, | ||||
| ] | ] | ||||
| sdist = { url = "https://files.pythonhosted.org/packages/06/09/206a17938bfc7ec6e7c0b13ed58ad78146e46c29436d324ed55ceb5136ed/boto3_stubs-1.39.2.tar.gz", hash = "sha256:b1f1baef1658bd575a29ca85cc0877dbb3adeb376ffa8cbf242b876719ae0f95", size = 99939, upload-time = "2025-07-02T19:28:20.423Z" } | |||||
| sdist = { url = "https://files.pythonhosted.org/packages/f0/ea/85b9940d6eedc04d0c6febf24d27311b6ee54f85ccc37192eb4db0dff5d6/boto3_stubs-1.39.3.tar.gz", hash = "sha256:9aad443b1d690951fd9ccb6fa20ad387bd0b1054c704566ff65dd0043a63fc26", size = 99947, upload-time = "2025-07-03T19:28:15.602Z" } | |||||
| wheels = [ | wheels = [ | ||||
| { url = "https://files.pythonhosted.org/packages/39/be/9c65f2bfc6df27ec5f16d28c454e2e3cb9a7af3ef8588440658334325a85/boto3_stubs-1.39.2-py3-none-any.whl", hash = "sha256:ce98d96fe1a7177b05067be3cd933277c88f745de836752f9ef8b4286dbfa53b", size = 69196, upload-time = "2025-07-02T19:28:07.025Z" }, | |||||
| { url = "https://files.pythonhosted.org/packages/be/b8/0c56297e5f290de17e838c7e4ff338f5b94351c6566aed70ee197a671dc5/boto3_stubs-1.39.3-py3-none-any.whl", hash = "sha256:4daddb19374efa6d1bef7aded9cede0075f380722a9e60ab129ebba14ae66b69", size = 69196, upload-time = "2025-07-03T19:28:09.4Z" }, | |||||
| ] | ] | ||||
| [package.optional-dependencies] | [package.optional-dependencies] | ||||
| { name = "googleapis-common-protos" }, | { name = "googleapis-common-protos" }, | ||||
| { name = "gunicorn" }, | { name = "gunicorn" }, | ||||
| { name = "httpx", extra = ["socks"] }, | { name = "httpx", extra = ["socks"] }, | ||||
| { name = "httpx-sse" }, | |||||
| { name = "jieba" }, | { name = "jieba" }, | ||||
| { name = "json-repair" }, | { name = "json-repair" }, | ||||
| { name = "langfuse" }, | { name = "langfuse" }, | ||||
| { name = "sendgrid" }, | { name = "sendgrid" }, | ||||
| { name = "sentry-sdk", extra = ["flask"] }, | { name = "sentry-sdk", extra = ["flask"] }, | ||||
| { name = "sqlalchemy" }, | { name = "sqlalchemy" }, | ||||
| { name = "sseclient-py" }, | |||||
| { name = "starlette" }, | { name = "starlette" }, | ||||
| { name = "tiktoken" }, | { name = "tiktoken" }, | ||||
| { name = "transformers" }, | { name = "transformers" }, | ||||
| { name = "googleapis-common-protos", specifier = "==1.63.0" }, | { name = "googleapis-common-protos", specifier = "==1.63.0" }, | ||||
| { name = "gunicorn", specifier = "~=23.0.0" }, | { name = "gunicorn", specifier = "~=23.0.0" }, | ||||
| { name = "httpx", extras = ["socks"], specifier = "~=0.27.0" }, | { name = "httpx", extras = ["socks"], specifier = "~=0.27.0" }, | ||||
| { name = "httpx-sse", specifier = ">=0.4.0" }, | |||||
| { name = "jieba", specifier = "==0.42.1" }, | { name = "jieba", specifier = "==0.42.1" }, | ||||
| { name = "json-repair", specifier = ">=0.41.1" }, | { name = "json-repair", specifier = ">=0.41.1" }, | ||||
| { name = "langfuse", specifier = "~=2.51.3" }, | { name = "langfuse", specifier = "~=2.51.3" }, | ||||
| { name = "sendgrid", specifier = "~=6.12.3" }, | { name = "sendgrid", specifier = "~=6.12.3" }, | ||||
| { name = "sentry-sdk", extras = ["flask"], specifier = "~=2.28.0" }, | { name = "sentry-sdk", extras = ["flask"], specifier = "~=2.28.0" }, | ||||
| { name = "sqlalchemy", specifier = "~=2.0.29" }, | { name = "sqlalchemy", specifier = "~=2.0.29" }, | ||||
| { name = "sseclient-py", specifier = ">=1.8.0" }, | |||||
| { name = "starlette", specifier = "==0.41.0" }, | { name = "starlette", specifier = "==0.41.0" }, | ||||
| { name = "tiktoken", specifier = "~=0.9.0" }, | { name = "tiktoken", specifier = "~=0.9.0" }, | ||||
| { name = "transformers", specifier = "~=4.51.0" }, | { name = "transformers", specifier = "~=4.51.0" }, | ||||
| [[package]] | [[package]] | ||||
| name = "fastapi" | name = "fastapi" | ||||
| version = "0.115.14" | |||||
| version = "0.116.0" | |||||
| source = { registry = "https://pypi.org/simple" } | source = { registry = "https://pypi.org/simple" } | ||||
| dependencies = [ | dependencies = [ | ||||
| { name = "pydantic" }, | { name = "pydantic" }, | ||||
| { name = "starlette" }, | { name = "starlette" }, | ||||
| { name = "typing-extensions" }, | { name = "typing-extensions" }, | ||||
| ] | ] | ||||
| sdist = { url = "https://files.pythonhosted.org/packages/ca/53/8c38a874844a8b0fa10dd8adf3836ac154082cf88d3f22b544e9ceea0a15/fastapi-0.115.14.tar.gz", hash = "sha256:b1de15cdc1c499a4da47914db35d0e4ef8f1ce62b624e94e0e5824421df99739", size = 296263, upload-time = "2025-06-26T15:29:08.21Z" } | |||||
| sdist = { url = "https://files.pythonhosted.org/packages/20/38/e1da78736143fd885c36213a3ccc493c384ae8fea6a0f0bc272ef42ebea8/fastapi-0.116.0.tar.gz", hash = "sha256:80dc0794627af0390353a6d1171618276616310d37d24faba6648398e57d687a", size = 296518, upload-time = "2025-07-07T15:09:27.82Z" } | |||||
| wheels = [ | wheels = [ | ||||
| { url = "https://files.pythonhosted.org/packages/53/50/b1222562c6d270fea83e9c9075b8e8600b8479150a18e4516a6138b980d1/fastapi-0.115.14-py3-none-any.whl", hash = "sha256:6c0c8bf9420bd58f565e585036d971872472b4f7d3f6c73b698e10cffdefb3ca", size = 95514, upload-time = "2025-06-26T15:29:06.49Z" }, | |||||
| { url = "https://files.pythonhosted.org/packages/2f/68/d80347fe2360445b5f58cf290e588a4729746e7501080947e6cdae114b1f/fastapi-0.116.0-py3-none-any.whl", hash = "sha256:fdcc9ed272eaef038952923bef2b735c02372402d1203ee1210af4eea7a78d2b", size = 95625, upload-time = "2025-07-07T15:09:26.348Z" }, | |||||
| ] | ] | ||||
| [[package]] | [[package]] | ||||
| { name = "socksio" }, | { name = "socksio" }, | ||||
| ] | ] | ||||
| [[package]] | |||||
| name = "httpx-sse" | |||||
| version = "0.4.1" | |||||
| source = { registry = "https://pypi.org/simple" } | |||||
| sdist = { url = "https://files.pythonhosted.org/packages/6e/fa/66bd985dd0b7c109a3bcb89272ee0bfb7e2b4d06309ad7b38ff866734b2a/httpx_sse-0.4.1.tar.gz", hash = "sha256:8f44d34414bc7b21bf3602713005c5df4917884f76072479b21f68befa4ea26e", size = 12998, upload-time = "2025-06-24T13:21:05.71Z" } | |||||
| wheels = [ | |||||
| { url = "https://files.pythonhosted.org/packages/25/0a/6269e3473b09aed2dab8aa1a600c70f31f00ae1349bee30658f7e358a159/httpx_sse-0.4.1-py3-none-any.whl", hash = "sha256:cba42174344c3a5b06f255ce65b350880f962d99ead85e776f23c6618a377a37", size = 8054, upload-time = "2025-06-24T13:21:04.772Z" }, | |||||
| ] | |||||
| [[package]] | [[package]] | ||||
| name = "huggingface-hub" | name = "huggingface-hub" | ||||
| version = "0.33.2" | version = "0.33.2" | ||||
| [[package]] | [[package]] | ||||
| name = "hypothesis" | name = "hypothesis" | ||||
| version = "6.135.24" | |||||
| version = "6.135.26" | |||||
| source = { registry = "https://pypi.org/simple" } | source = { registry = "https://pypi.org/simple" } | ||||
| dependencies = [ | dependencies = [ | ||||
| { name = "attrs" }, | { name = "attrs" }, | ||||
| { name = "sortedcontainers" }, | { name = "sortedcontainers" }, | ||||
| ] | ] | ||||
| sdist = { url = "https://files.pythonhosted.org/packages/cf/ae/f846b67ce9fc80cf51cece6b7adaa3fe2de4251242d142e241ce5d4aa26f/hypothesis-6.135.24.tar.gz", hash = "sha256:e301aeb2691ec0a1f62bfc405eaa966055d603e328cd854c1ed59e1728e35ab6", size = 454011, upload-time = "2025-07-03T02:46:51.776Z" } | |||||
| sdist = { url = "https://files.pythonhosted.org/packages/da/83/15c4e30561a0d8c8d076c88cb159187823d877118f34c851ada3b9b02a7b/hypothesis-6.135.26.tar.gz", hash = "sha256:73af0e46cd5039c6806f514fed6a3c185d91ef88b5a1577477099ddbd1a2e300", size = 454523, upload-time = "2025-07-05T04:59:45.443Z" } | |||||
| wheels = [ | wheels = [ | ||||
| { url = "https://files.pythonhosted.org/packages/ed/cb/c38acf27826a96712302229622f32dd356b9c4fbe52a3e9f615706027af8/hypothesis-6.135.24-py3-none-any.whl", hash = "sha256:88ed21fbfa481ca9851a9080841b3caca14cd4ed51a165dfae8006325775ee72", size = 520920, upload-time = "2025-07-03T02:46:48.286Z" }, | |||||
| { url = "https://files.pythonhosted.org/packages/3c/78/db4fdc464219455f8dde90074660c3faf8429101b2d1299cac7d219e3176/hypothesis-6.135.26-py3-none-any.whl", hash = "sha256:fa237cbe2ae2c31d65f7230dcb866139ace635dcfec6c30dddf25974dd8ff4b9", size = 521517, upload-time = "2025-07-05T04:59:42.061Z" }, | |||||
| ] | ] | ||||
| [[package]] | [[package]] | ||||
| { url = "https://files.pythonhosted.org/packages/9a/55/2cb24ea48aa30c99f805921c1c7860c1f45c0e811e44ee4e6a155668de06/lxml-6.0.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:219e0431ea8006e15005767f0351e3f7f9143e793e58519dc97fe9e07fae5563", size = 4952289, upload-time = "2025-06-28T18:47:25.602Z" }, | { url = "https://files.pythonhosted.org/packages/9a/55/2cb24ea48aa30c99f805921c1c7860c1f45c0e811e44ee4e6a155668de06/lxml-6.0.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:219e0431ea8006e15005767f0351e3f7f9143e793e58519dc97fe9e07fae5563", size = 4952289, upload-time = "2025-06-28T18:47:25.602Z" }, | ||||
| { url = "https://files.pythonhosted.org/packages/31/c0/b25d9528df296b9a3306ba21ff982fc5b698c45ab78b94d18c2d6ae71fd9/lxml-6.0.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:bd5913b4972681ffc9718bc2d4c53cde39ef81415e1671ff93e9aa30b46595e7", size = 5111310, upload-time = "2025-06-28T18:47:28.136Z" }, | { url = "https://files.pythonhosted.org/packages/31/c0/b25d9528df296b9a3306ba21ff982fc5b698c45ab78b94d18c2d6ae71fd9/lxml-6.0.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:bd5913b4972681ffc9718bc2d4c53cde39ef81415e1671ff93e9aa30b46595e7", size = 5111310, upload-time = "2025-06-28T18:47:28.136Z" }, | ||||
| { url = "https://files.pythonhosted.org/packages/e9/af/681a8b3e4f668bea6e6514cbcb297beb6de2b641e70f09d3d78655f4f44c/lxml-6.0.0-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:390240baeb9f415a82eefc2e13285016f9c8b5ad71ec80574ae8fa9605093cd7", size = 5025457, upload-time = "2025-06-26T16:26:15.068Z" }, | { url = "https://files.pythonhosted.org/packages/e9/af/681a8b3e4f668bea6e6514cbcb297beb6de2b641e70f09d3d78655f4f44c/lxml-6.0.0-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:390240baeb9f415a82eefc2e13285016f9c8b5ad71ec80574ae8fa9605093cd7", size = 5025457, upload-time = "2025-06-26T16:26:15.068Z" }, | ||||
| { url = "https://files.pythonhosted.org/packages/99/b6/3a7971aa05b7be7dfebc7ab57262ec527775c2c3c5b2f43675cac0458cad/lxml-6.0.0-cp312-cp312-manylinux_2_27_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:d6e200909a119626744dd81bae409fc44134389e03fbf1d68ed2a55a2fb10991", size = 5657016, upload-time = "2025-07-03T19:19:06.008Z" }, | |||||
| { url = "https://files.pythonhosted.org/packages/69/f8/693b1a10a891197143c0673fcce5b75fc69132afa81a36e4568c12c8faba/lxml-6.0.0-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ca50bd612438258a91b5b3788c6621c1f05c8c478e7951899f492be42defc0da", size = 5257565, upload-time = "2025-06-26T16:26:17.906Z" }, | { url = "https://files.pythonhosted.org/packages/69/f8/693b1a10a891197143c0673fcce5b75fc69132afa81a36e4568c12c8faba/lxml-6.0.0-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ca50bd612438258a91b5b3788c6621c1f05c8c478e7951899f492be42defc0da", size = 5257565, upload-time = "2025-06-26T16:26:17.906Z" }, | ||||
| { url = "https://files.pythonhosted.org/packages/a8/96/e08ff98f2c6426c98c8964513c5dab8d6eb81dadcd0af6f0c538ada78d33/lxml-6.0.0-cp312-cp312-manylinux_2_31_armv7l.whl", hash = "sha256:c24b8efd9c0f62bad0439283c2c795ef916c5a6b75f03c17799775c7ae3c0c9e", size = 4713390, upload-time = "2025-06-26T16:26:20.292Z" }, | { url = "https://files.pythonhosted.org/packages/a8/96/e08ff98f2c6426c98c8964513c5dab8d6eb81dadcd0af6f0c538ada78d33/lxml-6.0.0-cp312-cp312-manylinux_2_31_armv7l.whl", hash = "sha256:c24b8efd9c0f62bad0439283c2c795ef916c5a6b75f03c17799775c7ae3c0c9e", size = 4713390, upload-time = "2025-06-26T16:26:20.292Z" }, | ||||
| { url = "https://files.pythonhosted.org/packages/a8/83/6184aba6cc94d7413959f6f8f54807dc318fdcd4985c347fe3ea6937f772/lxml-6.0.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:afd27d8629ae94c5d863e32ab0e1d5590371d296b87dae0a751fb22bf3685741", size = 5066103, upload-time = "2025-06-26T16:26:22.765Z" }, | { url = "https://files.pythonhosted.org/packages/a8/83/6184aba6cc94d7413959f6f8f54807dc318fdcd4985c347fe3ea6937f772/lxml-6.0.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:afd27d8629ae94c5d863e32ab0e1d5590371d296b87dae0a751fb22bf3685741", size = 5066103, upload-time = "2025-06-26T16:26:22.765Z" }, | ||||
| { url = "https://files.pythonhosted.org/packages/ee/01/8bf1f4035852d0ff2e36a4d9aacdbcc57e93a6cd35a54e05fa984cdf73ab/lxml-6.0.0-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:54c4855eabd9fc29707d30141be99e5cd1102e7d2258d2892314cf4c110726c3", size = 4791428, upload-time = "2025-06-26T16:26:26.461Z" }, | { url = "https://files.pythonhosted.org/packages/ee/01/8bf1f4035852d0ff2e36a4d9aacdbcc57e93a6cd35a54e05fa984cdf73ab/lxml-6.0.0-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:54c4855eabd9fc29707d30141be99e5cd1102e7d2258d2892314cf4c110726c3", size = 4791428, upload-time = "2025-06-26T16:26:26.461Z" }, | ||||
| { url = "https://files.pythonhosted.org/packages/29/31/c0267d03b16954a85ed6b065116b621d37f559553d9339c7dcc4943a76f1/lxml-6.0.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:c907516d49f77f6cd8ead1322198bdfd902003c3c330c77a1c5f3cc32a0e4d16", size = 5678523, upload-time = "2025-07-03T19:19:09.837Z" }, | |||||
| { url = "https://files.pythonhosted.org/packages/5c/f7/5495829a864bc5f8b0798d2b52a807c89966523140f3d6fa3a58ab6720ea/lxml-6.0.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:36531f81c8214e293097cd2b7873f178997dae33d3667caaae8bdfb9666b76c0", size = 5281290, upload-time = "2025-06-26T16:26:29.406Z" }, | { url = "https://files.pythonhosted.org/packages/5c/f7/5495829a864bc5f8b0798d2b52a807c89966523140f3d6fa3a58ab6720ea/lxml-6.0.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:36531f81c8214e293097cd2b7873f178997dae33d3667caaae8bdfb9666b76c0", size = 5281290, upload-time = "2025-06-26T16:26:29.406Z" }, | ||||
| { url = "https://files.pythonhosted.org/packages/79/56/6b8edb79d9ed294ccc4e881f4db1023af56ba451909b9ce79f2a2cd7c532/lxml-6.0.0-cp312-cp312-win32.whl", hash = "sha256:690b20e3388a7ec98e899fd54c924e50ba6693874aa65ef9cb53de7f7de9d64a", size = 3613495, upload-time = "2025-06-26T16:26:31.588Z" }, | { url = "https://files.pythonhosted.org/packages/79/56/6b8edb79d9ed294ccc4e881f4db1023af56ba451909b9ce79f2a2cd7c532/lxml-6.0.0-cp312-cp312-win32.whl", hash = "sha256:690b20e3388a7ec98e899fd54c924e50ba6693874aa65ef9cb53de7f7de9d64a", size = 3613495, upload-time = "2025-06-26T16:26:31.588Z" }, | ||||
| { url = "https://files.pythonhosted.org/packages/0b/1e/cc32034b40ad6af80b6fd9b66301fc0f180f300002e5c3eb5a6110a93317/lxml-6.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:310b719b695b3dd442cdfbbe64936b2f2e231bb91d998e99e6f0daf991a3eba3", size = 4014711, upload-time = "2025-06-26T16:26:33.723Z" }, | { url = "https://files.pythonhosted.org/packages/0b/1e/cc32034b40ad6af80b6fd9b66301fc0f180f300002e5c3eb5a6110a93317/lxml-6.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:310b719b695b3dd442cdfbbe64936b2f2e231bb91d998e99e6f0daf991a3eba3", size = 4014711, upload-time = "2025-06-26T16:26:33.723Z" }, | ||||
| [[package]] | [[package]] | ||||
| name = "opik" | name = "opik" | ||||
| version = "1.7.41" | |||||
| version = "1.7.43" | |||||
| source = { registry = "https://pypi.org/simple" } | source = { registry = "https://pypi.org/simple" } | ||||
| dependencies = [ | dependencies = [ | ||||
| { name = "boto3-stubs", extra = ["bedrock-runtime"] }, | { name = "boto3-stubs", extra = ["bedrock-runtime"] }, | ||||
| { name = "tqdm" }, | { name = "tqdm" }, | ||||
| { name = "uuid6" }, | { name = "uuid6" }, | ||||
| ] | ] | ||||
| sdist = { url = "https://files.pythonhosted.org/packages/82/81/6cddb705b3f416cfe4f0507916f51d0886087695f9dab49cfc6b00eb0266/opik-1.7.41.tar.gz", hash = "sha256:6ce2f72c7d23a62e2c13d419ce50754f6e17234825dcf26506e7def34dd38e26", size = 323333, upload-time = "2025-07-02T12:35:31.76Z" } | |||||
| sdist = { url = "https://files.pythonhosted.org/packages/ba/52/cea0317bc3207bc967b48932781995d9cdb2c490e7e05caa00ff660f7205/opik-1.7.43.tar.gz", hash = "sha256:0b02522b0b74d0a67b141939deda01f8bb69690eda6b04a7cecb1c7f0649ccd0", size = 326886, upload-time = "2025-07-07T10:30:07.715Z" } | |||||
| wheels = [ | wheels = [ | ||||
| { url = "https://files.pythonhosted.org/packages/e9/46/ee27d06cc2049619806c992bdaa10e25b93d19ecedbc5c0fa772d8ac9a6d/opik-1.7.41-py3-none-any.whl", hash = "sha256:99df9c7b7b504777a51300b27a72bc646903201629611082b9b1f3c3adfbb3bf", size = 614890, upload-time = "2025-07-02T12:35:29.562Z" }, | |||||
| { url = "https://files.pythonhosted.org/packages/76/ae/f3566bdc3c49a1a8f795b1b6e726ef211c87e31f92d870ca6d63999c9bbf/opik-1.7.43-py3-none-any.whl", hash = "sha256:a66395c8b5ea7c24846f72dafc70c74d5b8f24ffbc4c8a1b3a7f9456e550568d", size = 625356, upload-time = "2025-07-07T10:30:06.389Z" }, | |||||
| ] | ] | ||||
| [[package]] | [[package]] | ||||
| wheels = [ | wheels = [ | ||||
| { url = "https://files.pythonhosted.org/packages/db/26/77f8ed17ca4ffd60e1dcd220a6ec6d71210ba398cfa33a13a1cd614c5613/pillow-11.3.0-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:1cd110edf822773368b396281a2293aeb91c90a2db00d78ea43e7e861631b722", size = 5316531, upload-time = "2025-07-01T09:13:59.203Z" }, | { url = "https://files.pythonhosted.org/packages/db/26/77f8ed17ca4ffd60e1dcd220a6ec6d71210ba398cfa33a13a1cd614c5613/pillow-11.3.0-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:1cd110edf822773368b396281a2293aeb91c90a2db00d78ea43e7e861631b722", size = 5316531, upload-time = "2025-07-01T09:13:59.203Z" }, | ||||
| { url = "https://files.pythonhosted.org/packages/cb/39/ee475903197ce709322a17a866892efb560f57900d9af2e55f86db51b0a5/pillow-11.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:9c412fddd1b77a75aa904615ebaa6001f169b26fd467b4be93aded278266b288", size = 4686560, upload-time = "2025-07-01T09:14:01.101Z" }, | { url = "https://files.pythonhosted.org/packages/cb/39/ee475903197ce709322a17a866892efb560f57900d9af2e55f86db51b0a5/pillow-11.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:9c412fddd1b77a75aa904615ebaa6001f169b26fd467b4be93aded278266b288", size = 4686560, upload-time = "2025-07-01T09:14:01.101Z" }, | ||||
| { url = "https://files.pythonhosted.org/packages/d5/90/442068a160fd179938ba55ec8c97050a612426fae5ec0a764e345839f76d/pillow-11.3.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:7d1aa4de119a0ecac0a34a9c8bde33f34022e2e8f99104e47a3ca392fd60e37d", size = 5870978, upload-time = "2025-07-03T13:09:55.638Z" }, | |||||
| { url = "https://files.pythonhosted.org/packages/13/92/dcdd147ab02daf405387f0218dcf792dc6dd5b14d2573d40b4caeef01059/pillow-11.3.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:91da1d88226663594e3f6b4b8c3c8d85bd504117d043740a8e0ec449087cc494", size = 7641168, upload-time = "2025-07-03T13:10:00.37Z" }, | |||||
| { url = "https://files.pythonhosted.org/packages/6e/db/839d6ba7fd38b51af641aa904e2960e7a5644d60ec754c046b7d2aee00e5/pillow-11.3.0-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:643f189248837533073c405ec2f0bb250ba54598cf80e8c1e043381a60632f58", size = 5973053, upload-time = "2025-07-01T09:14:04.491Z" }, | { url = "https://files.pythonhosted.org/packages/6e/db/839d6ba7fd38b51af641aa904e2960e7a5644d60ec754c046b7d2aee00e5/pillow-11.3.0-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:643f189248837533073c405ec2f0bb250ba54598cf80e8c1e043381a60632f58", size = 5973053, upload-time = "2025-07-01T09:14:04.491Z" }, | ||||
| { url = "https://files.pythonhosted.org/packages/f2/2f/d7675ecae6c43e9f12aa8d58b6012683b20b6edfbdac7abcb4e6af7a3784/pillow-11.3.0-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:106064daa23a745510dabce1d84f29137a37224831d88eb4ce94bb187b1d7e5f", size = 6640273, upload-time = "2025-07-01T09:14:06.235Z" }, | { url = "https://files.pythonhosted.org/packages/f2/2f/d7675ecae6c43e9f12aa8d58b6012683b20b6edfbdac7abcb4e6af7a3784/pillow-11.3.0-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:106064daa23a745510dabce1d84f29137a37224831d88eb4ce94bb187b1d7e5f", size = 6640273, upload-time = "2025-07-01T09:14:06.235Z" }, | ||||
| { url = "https://files.pythonhosted.org/packages/45/ad/931694675ede172e15b2ff03c8144a0ddaea1d87adb72bb07655eaffb654/pillow-11.3.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:cd8ff254faf15591e724dc7c4ddb6bf4793efcbe13802a4ae3e863cd300b493e", size = 6082043, upload-time = "2025-07-01T09:14:07.978Z" }, | { url = "https://files.pythonhosted.org/packages/45/ad/931694675ede172e15b2ff03c8144a0ddaea1d87adb72bb07655eaffb654/pillow-11.3.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:cd8ff254faf15591e724dc7c4ddb6bf4793efcbe13802a4ae3e863cd300b493e", size = 6082043, upload-time = "2025-07-01T09:14:07.978Z" }, | ||||
| { url = "https://files.pythonhosted.org/packages/c6/df/90bd886fabd544c25addd63e5ca6932c86f2b701d5da6c7839387a076b4a/pillow-11.3.0-cp311-cp311-win_arm64.whl", hash = "sha256:30807c931ff7c095620fe04448e2c2fc673fcbb1ffe2a7da3fb39613489b1ddd", size = 2423079, upload-time = "2025-07-01T09:14:15.268Z" }, | { url = "https://files.pythonhosted.org/packages/c6/df/90bd886fabd544c25addd63e5ca6932c86f2b701d5da6c7839387a076b4a/pillow-11.3.0-cp311-cp311-win_arm64.whl", hash = "sha256:30807c931ff7c095620fe04448e2c2fc673fcbb1ffe2a7da3fb39613489b1ddd", size = 2423079, upload-time = "2025-07-01T09:14:15.268Z" }, | ||||
| { url = "https://files.pythonhosted.org/packages/40/fe/1bc9b3ee13f68487a99ac9529968035cca2f0a51ec36892060edcc51d06a/pillow-11.3.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:fdae223722da47b024b867c1ea0be64e0df702c5e0a60e27daad39bf960dd1e4", size = 5278800, upload-time = "2025-07-01T09:14:17.648Z" }, | { url = "https://files.pythonhosted.org/packages/40/fe/1bc9b3ee13f68487a99ac9529968035cca2f0a51ec36892060edcc51d06a/pillow-11.3.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:fdae223722da47b024b867c1ea0be64e0df702c5e0a60e27daad39bf960dd1e4", size = 5278800, upload-time = "2025-07-01T09:14:17.648Z" }, | ||||
| { url = "https://files.pythonhosted.org/packages/2c/32/7e2ac19b5713657384cec55f89065fb306b06af008cfd87e572035b27119/pillow-11.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:921bd305b10e82b4d1f5e802b6850677f965d8394203d182f078873851dada69", size = 4686296, upload-time = "2025-07-01T09:14:19.828Z" }, | { url = "https://files.pythonhosted.org/packages/2c/32/7e2ac19b5713657384cec55f89065fb306b06af008cfd87e572035b27119/pillow-11.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:921bd305b10e82b4d1f5e802b6850677f965d8394203d182f078873851dada69", size = 4686296, upload-time = "2025-07-01T09:14:19.828Z" }, | ||||
| { url = "https://files.pythonhosted.org/packages/8e/1e/b9e12bbe6e4c2220effebc09ea0923a07a6da1e1f1bfbc8d7d29a01ce32b/pillow-11.3.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:eb76541cba2f958032d79d143b98a3a6b3ea87f0959bbe256c0b5e416599fd5d", size = 5871726, upload-time = "2025-07-03T13:10:04.448Z" }, | |||||
| { url = "https://files.pythonhosted.org/packages/8d/33/e9200d2bd7ba00dc3ddb78df1198a6e80d7669cce6c2bdbeb2530a74ec58/pillow-11.3.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:67172f2944ebba3d4a7b54f2e95c786a3a50c21b88456329314caaa28cda70f6", size = 7644652, upload-time = "2025-07-03T13:10:10.391Z" }, | |||||
| { url = "https://files.pythonhosted.org/packages/41/f1/6f2427a26fc683e00d985bc391bdd76d8dd4e92fac33d841127eb8fb2313/pillow-11.3.0-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:97f07ed9f56a3b9b5f49d3661dc9607484e85c67e27f3e8be2c7d28ca032fec7", size = 5977787, upload-time = "2025-07-01T09:14:21.63Z" }, | { url = "https://files.pythonhosted.org/packages/41/f1/6f2427a26fc683e00d985bc391bdd76d8dd4e92fac33d841127eb8fb2313/pillow-11.3.0-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:97f07ed9f56a3b9b5f49d3661dc9607484e85c67e27f3e8be2c7d28ca032fec7", size = 5977787, upload-time = "2025-07-01T09:14:21.63Z" }, | ||||
| { url = "https://files.pythonhosted.org/packages/e4/c9/06dd4a38974e24f932ff5f98ea3c546ce3f8c995d3f0985f8e5ba48bba19/pillow-11.3.0-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:676b2815362456b5b3216b4fd5bd89d362100dc6f4945154ff172e206a22c024", size = 6645236, upload-time = "2025-07-01T09:14:23.321Z" }, | { url = "https://files.pythonhosted.org/packages/e4/c9/06dd4a38974e24f932ff5f98ea3c546ce3f8c995d3f0985f8e5ba48bba19/pillow-11.3.0-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:676b2815362456b5b3216b4fd5bd89d362100dc6f4945154ff172e206a22c024", size = 6645236, upload-time = "2025-07-01T09:14:23.321Z" }, | ||||
| { url = "https://files.pythonhosted.org/packages/40/e7/848f69fb79843b3d91241bad658e9c14f39a32f71a301bcd1d139416d1be/pillow-11.3.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3e184b2f26ff146363dd07bde8b711833d7b0202e27d13540bfe2e35a323a809", size = 6086950, upload-time = "2025-07-01T09:14:25.237Z" }, | { url = "https://files.pythonhosted.org/packages/40/e7/848f69fb79843b3d91241bad658e9c14f39a32f71a301bcd1d139416d1be/pillow-11.3.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3e184b2f26ff146363dd07bde8b711833d7b0202e27d13540bfe2e35a323a809", size = 6086950, upload-time = "2025-07-01T09:14:25.237Z" }, | ||||
| { url = "https://files.pythonhosted.org/packages/16/8f/b13447d1bf0b1f7467ce7d86f6e6edf66c0ad7cf44cf5c87a37f9bed9936/pillow-11.3.0-cp312-cp312-win_arm64.whl", hash = "sha256:2aceea54f957dd4448264f9bf40875da0415c83eb85f55069d89c0ed436e3542", size = 2423067, upload-time = "2025-07-01T09:14:33.709Z" }, | { url = "https://files.pythonhosted.org/packages/16/8f/b13447d1bf0b1f7467ce7d86f6e6edf66c0ad7cf44cf5c87a37f9bed9936/pillow-11.3.0-cp312-cp312-win_arm64.whl", hash = "sha256:2aceea54f957dd4448264f9bf40875da0415c83eb85f55069d89c0ed436e3542", size = 2423067, upload-time = "2025-07-01T09:14:33.709Z" }, | ||||
| { url = "https://files.pythonhosted.org/packages/9e/e3/6fa84033758276fb31da12e5fb66ad747ae83b93c67af17f8c6ff4cc8f34/pillow-11.3.0-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:7c8ec7a017ad1bd562f93dbd8505763e688d388cde6e4a010ae1486916e713e6", size = 5270566, upload-time = "2025-07-01T09:16:19.801Z" }, | { url = "https://files.pythonhosted.org/packages/9e/e3/6fa84033758276fb31da12e5fb66ad747ae83b93c67af17f8c6ff4cc8f34/pillow-11.3.0-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:7c8ec7a017ad1bd562f93dbd8505763e688d388cde6e4a010ae1486916e713e6", size = 5270566, upload-time = "2025-07-01T09:16:19.801Z" }, | ||||
| { url = "https://files.pythonhosted.org/packages/5b/ee/e8d2e1ab4892970b561e1ba96cbd59c0d28cf66737fc44abb2aec3795a4e/pillow-11.3.0-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:9ab6ae226de48019caa8074894544af5b53a117ccb9d3b3dcb2871464c829438", size = 4654618, upload-time = "2025-07-01T09:16:21.818Z" }, | { url = "https://files.pythonhosted.org/packages/5b/ee/e8d2e1ab4892970b561e1ba96cbd59c0d28cf66737fc44abb2aec3795a4e/pillow-11.3.0-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:9ab6ae226de48019caa8074894544af5b53a117ccb9d3b3dcb2871464c829438", size = 4654618, upload-time = "2025-07-01T09:16:21.818Z" }, | ||||
| { url = "https://files.pythonhosted.org/packages/f2/6d/17f80f4e1f0761f02160fc433abd4109fa1548dcfdca46cfdadaf9efa565/pillow-11.3.0-pp311-pypy311_pp73-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:fe27fb049cdcca11f11a7bfda64043c37b30e6b91f10cb5bab275806c32f6ab3", size = 4874248, upload-time = "2025-07-03T13:11:20.738Z" }, | |||||
| { url = "https://files.pythonhosted.org/packages/de/5f/c22340acd61cef960130585bbe2120e2fd8434c214802f07e8c03596b17e/pillow-11.3.0-pp311-pypy311_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:465b9e8844e3c3519a983d58b80be3f668e2a7a5db97f2784e7079fbc9f9822c", size = 6583963, upload-time = "2025-07-03T13:11:26.283Z" }, | |||||
| { url = "https://files.pythonhosted.org/packages/31/5e/03966aedfbfcbb4d5f8aa042452d3361f325b963ebbadddac05b122e47dd/pillow-11.3.0-pp311-pypy311_pp73-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5418b53c0d59b3824d05e029669efa023bbef0f3e92e75ec8428f3799487f361", size = 4957170, upload-time = "2025-07-01T09:16:23.762Z" }, | { url = "https://files.pythonhosted.org/packages/31/5e/03966aedfbfcbb4d5f8aa042452d3361f325b963ebbadddac05b122e47dd/pillow-11.3.0-pp311-pypy311_pp73-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5418b53c0d59b3824d05e029669efa023bbef0f3e92e75ec8428f3799487f361", size = 4957170, upload-time = "2025-07-01T09:16:23.762Z" }, | ||||
| { url = "https://files.pythonhosted.org/packages/cc/2d/e082982aacc927fc2cab48e1e731bdb1643a1406acace8bed0900a61464e/pillow-11.3.0-pp311-pypy311_pp73-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:504b6f59505f08ae014f724b6207ff6222662aab5cc9542577fb084ed0676ac7", size = 5581505, upload-time = "2025-07-01T09:16:25.593Z" }, | { url = "https://files.pythonhosted.org/packages/cc/2d/e082982aacc927fc2cab48e1e731bdb1643a1406acace8bed0900a61464e/pillow-11.3.0-pp311-pypy311_pp73-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:504b6f59505f08ae014f724b6207ff6222662aab5cc9542577fb084ed0676ac7", size = 5581505, upload-time = "2025-07-01T09:16:25.593Z" }, | ||||
| { url = "https://files.pythonhosted.org/packages/34/e7/ae39f538fd6844e982063c3a5e4598b8ced43b9633baa3a85ef33af8c05c/pillow-11.3.0-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:c84d689db21a1c397d001aa08241044aa2069e7587b398c8cc63020390b1c1b8", size = 6984598, upload-time = "2025-07-01T09:16:27.732Z" }, | { url = "https://files.pythonhosted.org/packages/34/e7/ae39f538fd6844e982063c3a5e4598b8ced43b9633baa3a85ef33af8c05c/pillow-11.3.0-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:c84d689db21a1c397d001aa08241044aa2069e7587b398c8cc63020390b1c1b8", size = 6984598, upload-time = "2025-07-01T09:16:27.732Z" }, | ||||
| [[package]] | [[package]] | ||||
| name = "posthog" | name = "posthog" | ||||
| version = "6.0.2" | |||||
| version = "6.0.3" | |||||
| source = { registry = "https://pypi.org/simple" } | source = { registry = "https://pypi.org/simple" } | ||||
| dependencies = [ | dependencies = [ | ||||
| { name = "backoff" }, | { name = "backoff" }, | ||||
| { name = "six" }, | { name = "six" }, | ||||
| { name = "typing-extensions" }, | { name = "typing-extensions" }, | ||||
| ] | ] | ||||
| sdist = { url = "https://files.pythonhosted.org/packages/d9/10/37ea988b3ae73cbfd1f2d5e523cca31cecfcc40cbd0de6511f40462fdb78/posthog-6.0.2.tar.gz", hash = "sha256:94a28e65d7a2d1b2952e53a1b97fa4d6504b8d7e4c197c57f653621e55b549eb", size = 88141, upload-time = "2025-07-02T19:21:50.306Z" } | |||||
| sdist = { url = "https://files.pythonhosted.org/packages/39/a2/1b68562124b0d0e615fa8431cc88c84b3db6526275c2c19a419579a49277/posthog-6.0.3.tar.gz", hash = "sha256:9005abb341af8fedd9d82ca0359b3d35a9537555cdc9881bfb469f7c0b4b0ec5", size = 91861, upload-time = "2025-07-07T07:14:08.21Z" } | |||||
| wheels = [ | wheels = [ | ||||
| { url = "https://files.pythonhosted.org/packages/85/2c/0c5dbbf9bc30401ae2a1b6b52b8abc19e4060cf28c3288ae9d962e65e3ad/posthog-6.0.2-py3-none-any.whl", hash = "sha256:756cc9adad9e42961454f8ac391b92a2f70ebb6607d29b0c568de08e5d8f1b18", size = 104946, upload-time = "2025-07-02T19:21:48.77Z" }, | |||||
| { url = "https://files.pythonhosted.org/packages/ca/f1/a8d86245d41c8686f7d828a4959bdf483e8ac331b249b48b8c61fc884a1c/posthog-6.0.3-py3-none-any.whl", hash = "sha256:4b808c907f3623216a9362d91fdafce8e2f57a8387fb3020475c62ec809be56d", size = 108978, upload-time = "2025-07-07T07:14:06.451Z" }, | |||||
| ] | ] | ||||
| [[package]] | [[package]] | ||||
| [[package]] | [[package]] | ||||
| name = "python-calamine" | name = "python-calamine" | ||||
| version = "0.3.2" | |||||
| version = "0.4.0" | |||||
| source = { registry = "https://pypi.org/simple" } | source = { registry = "https://pypi.org/simple" } | ||||
| dependencies = [ | dependencies = [ | ||||
| { name = "packaging" }, | { name = "packaging" }, | ||||
| ] | ] | ||||
| sdist = { url = "https://files.pythonhosted.org/packages/6b/21/387b92059909e741af7837194d84250335d2a057f614752b6364aaaa2f56/python_calamine-0.3.2.tar.gz", hash = "sha256:5cf12f2086373047cdea681711857b672cba77a34a66dd3755d60686fc974e06", size = 117336, upload-time = "2025-04-02T10:06:23.14Z" } | |||||
| wheels = [ | |||||
| { url = "https://files.pythonhosted.org/packages/ef/b7/d59863ebe319150739d0c352c6dea2710a2f90254ed32304d52e8349edce/python_calamine-0.3.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:5251746816069c38eafdd1e4eb7b83870e1fe0ff6191ce9a809b187ffba8ce93", size = 830854, upload-time = "2025-04-02T10:04:14.673Z" }, | |||||
| { url = "https://files.pythonhosted.org/packages/d3/01/b48c6f2c2e530a1a031199c5c5bf35f7c2cf7f16f3989263e616e3bc86ce/python_calamine-0.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:9775dbc93bc635d48f45433f8869a546cca28c2a86512581a05333f97a18337b", size = 809411, upload-time = "2025-04-02T10:04:16.067Z" }, | |||||
| { url = "https://files.pythonhosted.org/packages/fe/6d/69c53ffb11b3ee1bf5bd945cc2514848adea492c879a50f38e2ed4424727/python_calamine-0.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ff4318b72ba78e8a04fb4c45342cfa23eab6f81ecdb85548cdab9f2db8ac9c7", size = 872905, upload-time = "2025-04-02T10:04:17.487Z" }, | |||||
| { url = "https://files.pythonhosted.org/packages/be/ec/b02c4bc04c426d153af1f5ff07e797dd81ada6f47c170e0207d07c90b53a/python_calamine-0.3.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:0cd8eb1ef8644da71788a33d3de602d1c08ff1c4136942d87e25f09580b512ef", size = 876464, upload-time = "2025-04-02T10:04:19.53Z" }, | |||||
| { url = "https://files.pythonhosted.org/packages/46/ef/8403ee595207de5bd277279b56384b31390987df8a61c280b4176802481a/python_calamine-0.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9dcfd560d8f88f39d23b829f666ebae4bd8daeec7ed57adfb9313543f3c5fa35", size = 942289, upload-time = "2025-04-02T10:04:20.902Z" }, | |||||
| { url = "https://files.pythonhosted.org/packages/89/97/b4e5b77c70b36613c10f2dbeece75b5d43727335a33bf5176792ec83c3fc/python_calamine-0.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e5e79b9eae4b30c82d045f9952314137c7089c88274e1802947f9e3adb778a59", size = 978699, upload-time = "2025-04-02T10:04:22.263Z" }, | |||||
| { url = "https://files.pythonhosted.org/packages/5f/e9/03bbafd6b11cdf70c004f2e856978fc252ec5ea7e77529f14f969134c7a8/python_calamine-0.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ce5e8cc518c8e3e5988c5c658f9dcd8229f5541ca63353175bb15b6ad8c456d0", size = 886008, upload-time = "2025-04-02T10:04:23.754Z" }, | |||||
| { url = "https://files.pythonhosted.org/packages/7b/20/e18f534e49b403ba0b979a4dfead146001d867f5be846b91f81ed5377972/python_calamine-0.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2a0e596b1346c28b2de15c9f86186cceefa4accb8882992aa0b7499c593446ed", size = 925104, upload-time = "2025-04-02T10:04:25.255Z" }, | |||||
| { url = "https://files.pythonhosted.org/packages/54/4c/58933e69a0a7871487d10b958c1f83384bc430d53efbbfbf1dea141a0d85/python_calamine-0.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:f521de16a9f3e951ec2e5e35d76752fe004088dbac4cdbf4dd62d0ad2bbf650f", size = 1050448, upload-time = "2025-04-02T10:04:26.649Z" }, | |||||
| { url = "https://files.pythonhosted.org/packages/83/95/5c96d093eaaa2d15c63b43bcf8c87708eaab8428c72b6ebdcafc2604aa47/python_calamine-0.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:417d6825a36bba526ae17bed1b6ca576fbb54e23dc60c97eeb536c622e77c62f", size = 1056840, upload-time = "2025-04-02T10:04:28.18Z" }, | |||||
| { url = "https://files.pythonhosted.org/packages/23/e0/b03cc3ad4f40fd3be0ebac0b71d273864ddf2bf0e611ec309328fdedded9/python_calamine-0.3.2-cp311-cp311-win32.whl", hash = "sha256:cd3ea1ca768139753633f9f0b16997648db5919894579f363d71f914f85f7ade", size = 663268, upload-time = "2025-04-02T10:04:29.659Z" }, | |||||
| { url = "https://files.pythonhosted.org/packages/6b/bd/550da64770257fc70a185482f6353c0654a11f381227e146bb0170db040f/python_calamine-0.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:4560100412d8727c49048cca102eadeb004f91cfb9c99ae63cd7d4dc0a61333a", size = 692393, upload-time = "2025-04-02T10:04:31.534Z" }, | |||||
| { url = "https://files.pythonhosted.org/packages/be/2e/0b4b7a146c3bb41116fe8e59a2f616340786db12aed51c7a9e75817cfa03/python_calamine-0.3.2-cp311-cp311-win_arm64.whl", hash = "sha256:a2526e6ba79087b1634f49064800339edb7316780dd7e1e86d10a0ca9de4e90f", size = 667312, upload-time = "2025-04-02T10:04:32.911Z" }, | |||||
| { url = "https://files.pythonhosted.org/packages/f2/0f/c2e3e3bae774dae47cba6ffa640ff95525bd6a10a13d3cd998f33aeafc7f/python_calamine-0.3.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:7c063b1f783352d6c6792305b2b0123784882e2436b638a9b9a1e97f6d74fa51", size = 825179, upload-time = "2025-04-02T10:04:34.377Z" }, | |||||
| { url = "https://files.pythonhosted.org/packages/c7/81/a05285f06d71ea38ab99b09f3119f93f575487c9d24d7a1bab65657b258b/python_calamine-0.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:85016728937e8f5d1810ff3c9603ffd2458d66e34d495202d7759fa8219871cd", size = 804036, upload-time = "2025-04-02T10:04:35.938Z" }, | |||||
| { url = "https://files.pythonhosted.org/packages/24/b5/320f366ffd91ee5d5f0f77817d4fb684f62a5a68e438dcdb90e4f5f35137/python_calamine-0.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:81f243323bf712bb0b2baf0b938a2e6d6c9fa3b9902a44c0654474d04f999fac", size = 871527, upload-time = "2025-04-02T10:04:38.272Z" }, | |||||
| { url = "https://files.pythonhosted.org/packages/13/19/063afced19620b829697b90329c62ad73274cc38faaa91d9ee41047f5f8c/python_calamine-0.3.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:0b719dd2b10237b0cfb2062e3eaf199f220918a5623197e8449f37c8de845a7c", size = 875411, upload-time = "2025-04-02T10:04:39.647Z" }, | |||||
| { url = "https://files.pythonhosted.org/packages/d7/6a/c93c52414ec62cc51c4820aff434f03c4a1c69ced15cec3e4b93885e4012/python_calamine-0.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d5158310b9140e8ee8665c9541a11030901e7275eb036988150c93f01c5133bf", size = 943525, upload-time = "2025-04-02T10:04:41.025Z" }, | |||||
| { url = "https://files.pythonhosted.org/packages/0a/0a/5bdecee03d235e8d111b1e8ee3ea0c0ed4ae43a402f75cebbe719930cf04/python_calamine-0.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2c1b248e8bf10194c449cb57e6ccb3f2fe3dc86975a6d746908cf2d37b048cc", size = 976332, upload-time = "2025-04-02T10:04:42.454Z" }, | |||||
| { url = "https://files.pythonhosted.org/packages/05/ad/43ff92366856ee34f958e9cf4f5b98e63b0dc219e06ccba4ad6f63463756/python_calamine-0.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f3a13ad8e5b6843a73933b8d1710bc4df39a9152cb57c11227ad51f47b5838a4", size = 885549, upload-time = "2025-04-02T10:04:43.869Z" }, | |||||
| { url = "https://files.pythonhosted.org/packages/ff/b9/76afb867e2bb4bfc296446b741cee01ae4ce6a094b43f4ed4eaed5189de4/python_calamine-0.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:fe950975a5758423c982ce1e2fdcb5c9c664d1a20b41ea21e619e5003bb4f96b", size = 926005, upload-time = "2025-04-02T10:04:45.884Z" }, | |||||
| { url = "https://files.pythonhosted.org/packages/23/cf/5252b237b0e70c263f86741aea02e8e57aedb2bce9898468be1d9d55b9da/python_calamine-0.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:8707622ba816d6c26e36f1506ecda66a6a6cf43e55a43a8ef4c3bf8a805d3cfb", size = 1049380, upload-time = "2025-04-02T10:04:49.202Z" }, | |||||
| { url = "https://files.pythonhosted.org/packages/1a/4d/f151e8923e53457ca49ceeaa3a34cb23afee7d7b46e6546ab2a29adc9125/python_calamine-0.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:e6eac46475c26e162a037f6711b663767f61f8fca3daffeb35aa3fc7ee6267cc", size = 1056720, upload-time = "2025-04-02T10:04:51.002Z" }, | |||||
| { url = "https://files.pythonhosted.org/packages/f5/cb/1b5db3e4a8bbaaaa7706b270570d4a65133618fa0ca7efafe5ce680f6cee/python_calamine-0.3.2-cp312-cp312-win32.whl", hash = "sha256:0dee82aedef3db27368a388d6741d69334c1d4d7a8087ddd33f1912166e17e37", size = 663502, upload-time = "2025-04-02T10:04:52.402Z" }, | |||||
| { url = "https://files.pythonhosted.org/packages/5a/53/920fa8e7b570647c08da0f1158d781db2e318918b06cb28fe0363c3398ac/python_calamine-0.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:ae09b779718809d31ca5d722464be2776b7d79278b1da56e159bbbe11880eecf", size = 692660, upload-time = "2025-04-02T10:04:53.721Z" }, | |||||
| { url = "https://files.pythonhosted.org/packages/a5/ea/5d0ecf5c345c4d78964a5f97e61848bc912965b276a54fb8ae698a9419a8/python_calamine-0.3.2-cp312-cp312-win_arm64.whl", hash = "sha256:435546e401a5821fa70048b6c03a70db3b27d00037e2c4999c2126d8c40b51df", size = 666205, upload-time = "2025-04-02T10:04:56.377Z" }, | |||||
| sdist = { url = "https://files.pythonhosted.org/packages/cc/03/269f96535705b2f18c8977fa58e76763b4e4727a9b3ae277a9468c8ffe05/python_calamine-0.4.0.tar.gz", hash = "sha256:94afcbae3fec36d2d7475095a59d4dc6fae45829968c743cb799ebae269d7bbf", size = 127737, upload-time = "2025-07-04T06:05:28.626Z" } | |||||
| wheels = [ | |||||
| { url = "https://files.pythonhosted.org/packages/d4/a5/bcd82326d0ff1ab5889e7a5e13c868b483fc56398e143aae8e93149ba43b/python_calamine-0.4.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:d1687f8c4d7852920c7b4e398072f183f88dd273baf5153391edc88b7454b8c0", size = 833019, upload-time = "2025-07-04T06:03:32.214Z" }, | |||||
| { url = "https://files.pythonhosted.org/packages/f6/1a/a681f1d2f28164552e91ef47bcde6708098aa64a5f5fe3952f22362d340a/python_calamine-0.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:258d04230bebbbafa370a15838049d912d6a0a2c4da128943d8160ca4b6db58e", size = 812268, upload-time = "2025-07-04T06:03:33.855Z" }, | |||||
| { url = "https://files.pythonhosted.org/packages/3d/92/2fc911431733739d4e7a633cefa903fa49a6b7a61e8765bad29a4a7c47b1/python_calamine-0.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c686e491634934f059553d55f77ac67ca4c235452d5b444f98fe79b3579f1ea5", size = 875733, upload-time = "2025-07-04T06:03:35.154Z" }, | |||||
| { url = "https://files.pythonhosted.org/packages/f4/f0/48bfae6802eb360028ca6c15e9edf42243aadd0006b6ac3e9edb41a57119/python_calamine-0.4.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4480af7babcc2f919c638a554b06b7b145d9ab3da47fd696d68c2fc6f67f9541", size = 878325, upload-time = "2025-07-04T06:03:36.638Z" }, | |||||
| { url = "https://files.pythonhosted.org/packages/a4/dc/f8c956e15bac9d5d1e05cd1b907ae780e40522d2fd103c8c6e2f21dff4ed/python_calamine-0.4.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e405b87a8cd1e90a994e570705898634f105442029f25bab7da658ee9cbaa771", size = 1015038, upload-time = "2025-07-04T06:03:37.971Z" }, | |||||
| { url = "https://files.pythonhosted.org/packages/54/3f/e69ab97c7734fb850fba2f506b775912fd59f04e17488582c8fbf52dbc72/python_calamine-0.4.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a831345ee42615f0dfcb0ed60a3b1601d2f946d4166edae64fd9a6f9bbd57fc1", size = 924969, upload-time = "2025-07-04T06:03:39.253Z" }, | |||||
| { url = "https://files.pythonhosted.org/packages/79/03/b4c056b468908d87a3de94389166e0f4dba725a70bc39e03bc039ba96f6b/python_calamine-0.4.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9951b8e4cafb3e1623bb5dfc31a18d38ef43589275f9657e99dfcbe4c8c4b33e", size = 888020, upload-time = "2025-07-04T06:03:41.099Z" }, | |||||
| { url = "https://files.pythonhosted.org/packages/86/4f/b9092f7c970894054083656953184e44cb2dadff8852425e950d4ca419af/python_calamine-0.4.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a6619fe3b5c9633ed8b178684605f8076c9d8d85b29ade15f7a7713fcfdee2d0", size = 930337, upload-time = "2025-07-04T06:03:42.89Z" }, | |||||
| { url = "https://files.pythonhosted.org/packages/64/da/137239027bf253aabe7063450950085ec9abd827d0cbc5170f585f38f464/python_calamine-0.4.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:2cc45b8e76ee331f6ea88ca23677be0b7a05b502cd4423ba2c2bc8dad53af1be", size = 1054568, upload-time = "2025-07-04T06:03:44.153Z" }, | |||||
| { url = "https://files.pythonhosted.org/packages/80/96/74c38bcf6b6825d5180c0e147b85be8c52dbfba11848b1e98ba358e32a64/python_calamine-0.4.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1b2cfb7ced1a7c80befa0cfddfe4aae65663eb4d63c4ae484b9b7a80ebe1b528", size = 1058317, upload-time = "2025-07-04T06:03:45.873Z" }, | |||||
| { url = "https://files.pythonhosted.org/packages/33/95/9d7b8fe8b32d99a6c79534df3132cfe40e9df4a0f5204048bf5e66ddbd93/python_calamine-0.4.0-cp311-cp311-win32.whl", hash = "sha256:04f4e32ee16814fc1fafc49300be8eeb280d94878461634768b51497e1444bd6", size = 663934, upload-time = "2025-07-04T06:03:47.407Z" }, | |||||
| { url = "https://files.pythonhosted.org/packages/7c/e3/1c6cd9fd499083bea6ff1c30033ee8215b9f64e862babf5be170cacae190/python_calamine-0.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:a8543f69afac2213c0257bb56215b03dadd11763064a9d6b19786f27d1bef586", size = 692535, upload-time = "2025-07-04T06:03:48.699Z" }, | |||||
| { url = "https://files.pythonhosted.org/packages/94/1c/3105d19fbab6b66874ce8831652caedd73b23b72e88ce18addf8ceca8c12/python_calamine-0.4.0-cp311-cp311-win_arm64.whl", hash = "sha256:54622e35ec7c3b6f07d119da49aa821731c185e951918f152c2dbf3bec1e15d6", size = 671751, upload-time = "2025-07-04T06:03:49.979Z" }, | |||||
| { url = "https://files.pythonhosted.org/packages/63/60/f951513aaaa470b3a38a87d65eca45e0a02bc329b47864f5a17db563f746/python_calamine-0.4.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:74bca5d44a73acf3dcfa5370820797fcfd225c8c71abcddea987c5b4f5077e98", size = 826603, upload-time = "2025-07-04T06:03:51.245Z" }, | |||||
| { url = "https://files.pythonhosted.org/packages/76/3f/789955bbc77831c639890758f945eb2b25d6358065edf00da6751226cf31/python_calamine-0.4.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:cf80178f5d1b0ee2ccfffb8549c50855f6249e930664adc5807f4d0d6c2b269c", size = 805826, upload-time = "2025-07-04T06:03:52.482Z" }, | |||||
| { url = "https://files.pythonhosted.org/packages/00/4c/f87d17d996f647030a40bfd124fe45fe893c002bee35ae6aca9910a923ae/python_calamine-0.4.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:65cfef345386ae86f7720f1be93495a40fd7e7feabb8caa1df5025d7fbc58a1f", size = 874989, upload-time = "2025-07-04T06:03:53.794Z" }, | |||||
| { url = "https://files.pythonhosted.org/packages/47/d2/3269367303f6c0488cf1bfebded3f9fe968d118a988222e04c9b2636bf2e/python_calamine-0.4.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f23e6214dbf9b29065a5dcfd6a6c674dd0e251407298c9138611c907d53423ff", size = 877504, upload-time = "2025-07-04T06:03:55.095Z" }, | |||||
| { url = "https://files.pythonhosted.org/packages/f9/6d/c7ac35f5c7125e8bd07eb36773f300fda20dd2da635eae78a8cebb0b6ab7/python_calamine-0.4.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d792d304ee232ab01598e1d3ab22e074a32c2511476b5fb4f16f4222d9c2a265", size = 1014171, upload-time = "2025-07-04T06:03:56.777Z" }, | |||||
| { url = "https://files.pythonhosted.org/packages/f0/81/5ea8792a2e9ab5e2a05872db3a4d3ed3538ad5af1861282c789e2f13a8cf/python_calamine-0.4.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bf813425918fd68f3e991ef7c4b5015be0a1a95fc4a8ab7e73c016ef1b881bb4", size = 926737, upload-time = "2025-07-04T06:03:58.024Z" }, | |||||
| { url = "https://files.pythonhosted.org/packages/cc/6e/989e56e6f073fc0981a74ba7a393881eb351bb143e5486aa629b5e5d6a8b/python_calamine-0.4.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bbe2a0ccb4d003635888eea83a995ff56b0748c8c76fc71923544f5a4a7d4cd7", size = 887032, upload-time = "2025-07-04T06:03:59.298Z" }, | |||||
| { url = "https://files.pythonhosted.org/packages/5d/92/2c9bd64277c6fe4be695d7d5a803b38d953ec8565037486be7506642c27c/python_calamine-0.4.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a7b3bb5f0d910b9b03c240987560f843256626fd443279759df4e91b717826d2", size = 929700, upload-time = "2025-07-04T06:04:01.388Z" }, | |||||
| { url = "https://files.pythonhosted.org/packages/64/fa/fc758ca37701d354a6bc7d63118699f1c73788a1f2e1b44d720824992764/python_calamine-0.4.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:bd2c0fc2b5eabd08ceac8a2935bffa88dbc6116db971aa8c3f244bad3fd0f644", size = 1053971, upload-time = "2025-07-04T06:04:02.704Z" }, | |||||
| { url = "https://files.pythonhosted.org/packages/65/52/40d7e08ae0ddba331cdc9f7fb3e92972f8f38d7afbd00228158ff6d1fceb/python_calamine-0.4.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:85b547cb1c5b692a0c2406678d666dbc1cec65a714046104683fe4f504a1721d", size = 1057057, upload-time = "2025-07-04T06:04:04.014Z" }, | |||||
| { url = "https://files.pythonhosted.org/packages/16/de/e8a071c0adfda73285d891898a24f6e99338328c404f497ff5b0e6bc3d45/python_calamine-0.4.0-cp312-cp312-win32.whl", hash = "sha256:4c2a1e3a0db4d6de4587999a21cc35845648c84fba81c03dd6f3072c690888e4", size = 665540, upload-time = "2025-07-04T06:04:05.679Z" }, | |||||
| { url = "https://files.pythonhosted.org/packages/5e/f2/7fdfada13f80db12356853cf08697ff4e38800a1809c2bdd26ee60962e7a/python_calamine-0.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:b193c89ffcc146019475cd121c552b23348411e19c04dedf5c766a20db64399a", size = 695366, upload-time = "2025-07-04T06:04:06.977Z" }, | |||||
| { url = "https://files.pythonhosted.org/packages/20/66/d37412ad854480ce32f50d9f74f2a2f88b1b8a6fbc32f70aabf3211ae89e/python_calamine-0.4.0-cp312-cp312-win_arm64.whl", hash = "sha256:43a0f15e0b60c75a71b21a012b911d5d6f5fa052afad2a8edbc728af43af0fcf", size = 670740, upload-time = "2025-07-04T06:04:08.656Z" }, | |||||
| ] | ] | ||||
| [[package]] | [[package]] | ||||
| { url = "https://files.pythonhosted.org/packages/1c/fc/9ba22f01b5cdacc8f5ed0d22304718d2c758fce3fd49a5372b886a86f37c/sqlalchemy-2.0.41-py3-none-any.whl", hash = "sha256:57df5dc6fdb5ed1a88a1ed2195fd31927e705cad62dedd86b46972752a80f576", size = 1911224, upload-time = "2025-05-14T17:39:42.154Z" }, | { url = "https://files.pythonhosted.org/packages/1c/fc/9ba22f01b5cdacc8f5ed0d22304718d2c758fce3fd49a5372b886a86f37c/sqlalchemy-2.0.41-py3-none-any.whl", hash = "sha256:57df5dc6fdb5ed1a88a1ed2195fd31927e705cad62dedd86b46972752a80f576", size = 1911224, upload-time = "2025-05-14T17:39:42.154Z" }, | ||||
| ] | ] | ||||
| [[package]] | |||||
| name = "sseclient-py" | |||||
| version = "1.8.0" | |||||
| source = { registry = "https://pypi.org/simple" } | |||||
| sdist = { url = "https://files.pythonhosted.org/packages/e8/ed/3df5ab8bb0c12f86c28d0cadb11ed1de44a92ed35ce7ff4fd5518a809325/sseclient-py-1.8.0.tar.gz", hash = "sha256:c547c5c1a7633230a38dc599a21a2dc638f9b5c297286b48b46b935c71fac3e8", size = 7791, upload-time = "2023-09-01T19:39:20.45Z" } | |||||
| wheels = [ | |||||
| { url = "https://files.pythonhosted.org/packages/49/58/97655efdfeb5b4eeab85b1fc5d3fa1023661246c2ab2a26ea8e47402d4f2/sseclient_py-1.8.0-py2.py3-none-any.whl", hash = "sha256:4ecca6dc0b9f963f8384e9d7fd529bf93dd7d708144c4fb5da0e0a1a926fee83", size = 8828, upload-time = "2023-09-01T19:39:17.627Z" }, | |||||
| ] | |||||
| [[package]] | [[package]] | ||||
| name = "starlette" | name = "starlette" | ||||
| version = "0.41.0" | version = "0.41.0" | ||||
| [[package]] | [[package]] | ||||
| name = "types-aiofiles" | name = "types-aiofiles" | ||||
| version = "24.1.0.20250606" | |||||
| version = "24.1.0.20250708" | |||||
| source = { registry = "https://pypi.org/simple" } | source = { registry = "https://pypi.org/simple" } | ||||
| sdist = { url = "https://files.pythonhosted.org/packages/64/6e/fac4ffc896cb3faf2ac5d23747b65dd8bae1d9ee23305d1a3b12111c3989/types_aiofiles-24.1.0.20250606.tar.gz", hash = "sha256:48f9e26d2738a21e0b0f19381f713dcdb852a36727da8414b1ada145d40a18fe", size = 14364, upload-time = "2025-06-06T03:09:26.515Z" } | |||||
| sdist = { url = "https://files.pythonhosted.org/packages/4a/d6/5c44761bc11cb5c7505013a39f397a9016bfb3a5c932032b2db16c38b87b/types_aiofiles-24.1.0.20250708.tar.gz", hash = "sha256:c8207ed7385491ce5ba94da02658164ebd66b69a44e892288c9f20cbbf5284ff", size = 14322, upload-time = "2025-07-08T03:14:44.814Z" } | |||||
| wheels = [ | wheels = [ | ||||
| { url = "https://files.pythonhosted.org/packages/71/de/f2fa2ab8a5943898e93d8036941e05bfd1e1f377a675ee52c7c307dccb75/types_aiofiles-24.1.0.20250606-py3-none-any.whl", hash = "sha256:e568c53fb9017c80897a9aa15c74bf43b7ee90e412286ec1e0912b6e79301aee", size = 14276, upload-time = "2025-06-06T03:09:25.662Z" }, | |||||
| { url = "https://files.pythonhosted.org/packages/44/e9/4e0cc79c630040aae0634ac9393341dc2aff1a5be454be9741cc6cc8989f/types_aiofiles-24.1.0.20250708-py3-none-any.whl", hash = "sha256:07f8f06465fd415d9293467d1c66cd074b2c3b62b679e26e353e560a8cf63720", size = 14320, upload-time = "2025-07-08T03:14:44.009Z" }, | |||||
| ] | ] | ||||
| [[package]] | [[package]] | ||||
| [[package]] | [[package]] | ||||
| name = "types-defusedxml" | name = "types-defusedxml" | ||||
| version = "0.7.0.20250516" | |||||
| version = "0.7.0.20250708" | |||||
| source = { registry = "https://pypi.org/simple" } | source = { registry = "https://pypi.org/simple" } | ||||
| sdist = { url = "https://files.pythonhosted.org/packages/55/9d/3ba8b80536402f1a125bc5a44d82ab686aafa55a85f56160e076b2ac30de/types_defusedxml-0.7.0.20250516.tar.gz", hash = "sha256:164c2945077fa450f24ed09633f8b3a80694687fefbbc1cba5f24e4ba570666b", size = 10298, upload-time = "2025-05-16T03:08:18.951Z" } | |||||
| sdist = { url = "https://files.pythonhosted.org/packages/b9/4b/79d046a7211e110afd885be04bb9423546df2a662ed28251512d60e51fb6/types_defusedxml-0.7.0.20250708.tar.gz", hash = "sha256:7b785780cc11c18a1af086308bf94bf53a0907943a1d145dbe00189bef323cb8", size = 10541, upload-time = "2025-07-08T03:14:33.325Z" } | |||||
| wheels = [ | wheels = [ | ||||
| { url = "https://files.pythonhosted.org/packages/2e/7b/567b0978150edccf7fa3aa8f2566ea9c3ffc9481ce7d64428166934d6d7f/types_defusedxml-0.7.0.20250516-py3-none-any.whl", hash = "sha256:00e793e5c385c3e142d7c2acc3b4ccea2fe0828cee11e35501f0ba40386630a0", size = 12576, upload-time = "2025-05-16T03:08:17.892Z" }, | |||||
| { url = "https://files.pythonhosted.org/packages/24/f8/870de7fbd5fee5643f05061db948df6bd574a05a42aee91e37ad47c999ef/types_defusedxml-0.7.0.20250708-py3-none-any.whl", hash = "sha256:cc426cbc31c61a0f1b1c2ad9b9ef9ef846645f28fd708cd7727a6353b5c52e54", size = 13478, upload-time = "2025-07-08T03:14:32.633Z" }, | |||||
| ] | ] | ||||
| [[package]] | [[package]] | ||||
| [[package]] | [[package]] | ||||
| name = "types-docutils" | name = "types-docutils" | ||||
| version = "0.21.0.20250604" | |||||
| version = "0.21.0.20250708" | |||||
| source = { registry = "https://pypi.org/simple" } | source = { registry = "https://pypi.org/simple" } | ||||
| sdist = { url = "https://files.pythonhosted.org/packages/ef/d0/d28035370d669f14d4e23bd63d093207331f361afa24d2686d2c3fe6be8d/types_docutils-0.21.0.20250604.tar.gz", hash = "sha256:5a9cc7f5a4c5ef694aa0abc61111e0b1376a53dee90d65757f77f31acfcca8f2", size = 40953, upload-time = "2025-06-04T03:10:27.439Z" } | |||||
| sdist = { url = "https://files.pythonhosted.org/packages/39/86/24394a71a04f416ca03df51863a3d3e2cd0542fdc40989188dca30ffb5bf/types_docutils-0.21.0.20250708.tar.gz", hash = "sha256:5625a82a9a2f26d8384545607c157e023a48ed60d940dfc738db125282864172", size = 42011, upload-time = "2025-07-08T03:14:24.214Z" } | |||||
| wheels = [ | wheels = [ | ||||
| { url = "https://files.pythonhosted.org/packages/89/91/887e9591c1ee50dfbf7c2fa2f3f51bc6db683013b6d2b0cd3983adf3d502/types_docutils-0.21.0.20250604-py3-none-any.whl", hash = "sha256:bfa8628176c06a80cdd1d6f3fb32e972e042db53538596488dfe0e9c5962b222", size = 65915, upload-time = "2025-06-04T03:10:26.067Z" }, | |||||
| { url = "https://files.pythonhosted.org/packages/bd/17/8c1153fc1576a0dcffdd157c69a12863c3f9485054256f6791ea17d95aed/types_docutils-0.21.0.20250708-py3-none-any.whl", hash = "sha256:166630d1aec18b9ca02547873210e04bf7674ba8f8da9cd9e6a5e77dc99372c2", size = 67953, upload-time = "2025-07-08T03:14:23.057Z" }, | |||||
| ] | ] | ||||
| [[package]] | [[package]] | ||||
| [[package]] | [[package]] | ||||
| name = "types-html5lib" | name = "types-html5lib" | ||||
| version = "1.1.11.20250516" | |||||
| version = "1.1.11.20250708" | |||||
| source = { registry = "https://pypi.org/simple" } | source = { registry = "https://pypi.org/simple" } | ||||
| sdist = { url = "https://files.pythonhosted.org/packages/d0/ed/9f092ff479e2b5598941855f314a22953bb04b5fb38bcba3f880feb833ba/types_html5lib-1.1.11.20250516.tar.gz", hash = "sha256:65043a6718c97f7d52567cc0cdf41efbfc33b1f92c6c0c5e19f60a7ec69ae720", size = 16136, upload-time = "2025-05-16T03:07:12.231Z" } | |||||
| sdist = { url = "https://files.pythonhosted.org/packages/d4/3b/1f5ba4358cfc1421cced5cdb9d2b08b4b99e4f9a41da88ce079f6d1a7bf1/types_html5lib-1.1.11.20250708.tar.gz", hash = "sha256:24321720fdbac71cee50d5a4bec9b7448495b7217974cffe3fcf1ede4eef7afe", size = 16799, upload-time = "2025-07-08T03:13:53.14Z" } | |||||
| wheels = [ | wheels = [ | ||||
| { url = "https://files.pythonhosted.org/packages/cc/3b/cb5b23c7b51bf48b8c9f175abb9dce2f1ecd2d2c25f92ea9f4e3720e9398/types_html5lib-1.1.11.20250516-py3-none-any.whl", hash = "sha256:5e407b14b1bd2b9b1107cbd1e2e19d4a0c46d60febd231c7ab7313d7405663c1", size = 21770, upload-time = "2025-05-16T03:07:11.102Z" }, | |||||
| { url = "https://files.pythonhosted.org/packages/a8/50/5fc23cf647eee23acdd337c8150861d39980cf11f33dd87f78e87d2a4bad/types_html5lib-1.1.11.20250708-py3-none-any.whl", hash = "sha256:bb898066b155de7081cb182179e2ded31b9e0e234605e2cb46536894e68a6954", size = 22913, upload-time = "2025-07-08T03:13:52.098Z" }, | |||||
| ] | ] | ||||
| [[package]] | [[package]] | ||||
| [[package]] | [[package]] | ||||
| name = "types-pymysql" | name = "types-pymysql" | ||||
| version = "1.1.0.20250516" | |||||
| version = "1.1.0.20250708" | |||||
| source = { registry = "https://pypi.org/simple" } | source = { registry = "https://pypi.org/simple" } | ||||
| sdist = { url = "https://files.pythonhosted.org/packages/db/11/cdaa90b82cb25c5e04e75f0b0616872aa5775b001096779375084f8dbbcf/types_pymysql-1.1.0.20250516.tar.gz", hash = "sha256:fea4a9776101cf893dfc868f42ce10d2e46dcc498c792cc7c9c0fe00cb744234", size = 19640, upload-time = "2025-05-16T03:06:54.568Z" } | |||||
| sdist = { url = "https://files.pythonhosted.org/packages/65/a3/db349a06c64b8c041c165fc470b81d37404ec342014625c7a6b7f7a4f680/types_pymysql-1.1.0.20250708.tar.gz", hash = "sha256:2cbd7cfcf9313eda784910578c4f1d06f8cc03a15cd30ce588aa92dd6255011d", size = 21715, upload-time = "2025-07-08T03:13:56.463Z" } | |||||
| wheels = [ | wheels = [ | ||||
| { url = "https://files.pythonhosted.org/packages/ab/64/129656e04ddda35d69faae914ce67cf60d83407ddd7afdef1e7c50bbb74a/types_pymysql-1.1.0.20250516-py3-none-any.whl", hash = "sha256:41c87a832e3ff503d5120cc6cebd64f6dcb3c407d9580a98b2cb3e3bcd109aa6", size = 20328, upload-time = "2025-05-16T03:06:53.681Z" }, | |||||
| { url = "https://files.pythonhosted.org/packages/88/e5/7f72c520f527175b6455e955426fd4f971128b4fa2f8ab2f505f254a1ddc/types_pymysql-1.1.0.20250708-py3-none-any.whl", hash = "sha256:9252966d2795945b2a7a53d5cdc49fe8e4e2f3dde4c104ed7fc782a83114e365", size = 22860, upload-time = "2025-07-08T03:13:55.367Z" }, | |||||
| ] | ] | ||||
| [[package]] | [[package]] | ||||
| [[package]] | [[package]] | ||||
| name = "types-python-dateutil" | name = "types-python-dateutil" | ||||
| version = "2.9.0.20250516" | |||||
| version = "2.9.0.20250708" | |||||
| source = { registry = "https://pypi.org/simple" } | source = { registry = "https://pypi.org/simple" } | ||||
| sdist = { url = "https://files.pythonhosted.org/packages/ef/88/d65ed807393285204ab6e2801e5d11fbbea811adcaa979a2ed3b67a5ef41/types_python_dateutil-2.9.0.20250516.tar.gz", hash = "sha256:13e80d6c9c47df23ad773d54b2826bd52dbbb41be87c3f339381c1700ad21ee5", size = 13943, upload-time = "2025-05-16T03:06:58.385Z" } | |||||
| sdist = { url = "https://files.pythonhosted.org/packages/c9/95/6bdde7607da2e1e99ec1c1672a759d42f26644bbacf939916e086db34870/types_python_dateutil-2.9.0.20250708.tar.gz", hash = "sha256:ccdbd75dab2d6c9696c350579f34cffe2c281e4c5f27a585b2a2438dd1d5c8ab", size = 15834, upload-time = "2025-07-08T03:14:03.382Z" } | |||||
| wheels = [ | wheels = [ | ||||
| { url = "https://files.pythonhosted.org/packages/c5/3f/b0e8db149896005adc938a1e7f371d6d7e9eca4053a29b108978ed15e0c2/types_python_dateutil-2.9.0.20250516-py3-none-any.whl", hash = "sha256:2b2b3f57f9c6a61fba26a9c0ffb9ea5681c9b83e69cd897c6b5f668d9c0cab93", size = 14356, upload-time = "2025-05-16T03:06:57.249Z" }, | |||||
| { url = "https://files.pythonhosted.org/packages/72/52/43e70a8e57fefb172c22a21000b03ebcc15e47e97f5cb8495b9c2832efb4/types_python_dateutil-2.9.0.20250708-py3-none-any.whl", hash = "sha256:4d6d0cc1cc4d24a2dc3816024e502564094497b713f7befda4d5bc7a8e3fd21f", size = 17724, upload-time = "2025-07-08T03:14:02.593Z" }, | |||||
| ] | ] | ||||
| [[package]] | [[package]] | ||||
| name = "types-python-http-client" | name = "types-python-http-client" | ||||
| version = "3.3.7.20240910" | |||||
| version = "3.3.7.20250708" | |||||
| source = { registry = "https://pypi.org/simple" } | source = { registry = "https://pypi.org/simple" } | ||||
| sdist = { url = "https://files.pythonhosted.org/packages/e1/d7/bb2754c2d1b20c1890593ec89799c99e8875b04f474197c41354f41e9d31/types-python-http-client-3.3.7.20240910.tar.gz", hash = "sha256:8a6ebd30ad4b90a329ace69c240291a6176388624693bc971a5ecaa7e9b05074", size = 2804, upload-time = "2024-09-10T02:38:31.608Z" } | |||||
| sdist = { url = "https://files.pythonhosted.org/packages/55/a0/0ad93698a3ebc6846ca23aca20ff6f6f8ebe7b4f0c1de7f19e87c03dbe8f/types_python_http_client-3.3.7.20250708.tar.gz", hash = "sha256:5f85b32dc64671a4e5e016142169aa187c5abed0b196680944e4efd3d5ce3322", size = 7707, upload-time = "2025-07-08T03:14:36.197Z" } | |||||
| wheels = [ | wheels = [ | ||||
| { url = "https://files.pythonhosted.org/packages/64/95/8f492d37d99630e096acbb4071788483282a34a73ae89dd1a5727f4189cc/types_python_http_client-3.3.7.20240910-py3-none-any.whl", hash = "sha256:58941bd986fb8bb0f4f782ef376be145ece8023f391364fbcd22bd26b13a140e", size = 3917, upload-time = "2024-09-10T02:38:30.261Z" }, | |||||
| { url = "https://files.pythonhosted.org/packages/85/4f/b88274658cf489e35175be8571c970e9a1219713bafd8fc9e166d7351ecb/types_python_http_client-3.3.7.20250708-py3-none-any.whl", hash = "sha256:e2fc253859decab36713d82fc7f205868c3ddeaee79dbb55956ad9ca77abe12b", size = 8890, upload-time = "2025-07-08T03:14:35.506Z" }, | |||||
| ] | ] | ||||
| [[package]] | [[package]] | ||||
| [[package]] | [[package]] | ||||
| name = "typing-extensions" | name = "typing-extensions" | ||||
| version = "4.14.0" | |||||
| version = "4.14.1" | |||||
| source = { registry = "https://pypi.org/simple" } | source = { registry = "https://pypi.org/simple" } | ||||
| sdist = { url = "https://files.pythonhosted.org/packages/d1/bc/51647cd02527e87d05cb083ccc402f93e441606ff1f01739a62c8ad09ba5/typing_extensions-4.14.0.tar.gz", hash = "sha256:8676b788e32f02ab42d9e7c61324048ae4c6d844a399eebace3d4979d75ceef4", size = 107423, upload-time = "2025-06-02T14:52:11.399Z" } | |||||
| sdist = { url = "https://files.pythonhosted.org/packages/98/5a/da40306b885cc8c09109dc2e1abd358d5684b1425678151cdaed4731c822/typing_extensions-4.14.1.tar.gz", hash = "sha256:38b39f4aeeab64884ce9f74c94263ef78f3c22467c8724005483154c26648d36", size = 107673, upload-time = "2025-07-04T13:28:34.16Z" } | |||||
| wheels = [ | wheels = [ | ||||
| { url = "https://files.pythonhosted.org/packages/69/e0/552843e0d356fbb5256d21449fa957fa4eff3bbc135a74a691ee70c7c5da/typing_extensions-4.14.0-py3-none-any.whl", hash = "sha256:a1514509136dd0b477638fc68d6a91497af5076466ad0fa6c338e44e359944af", size = 43839, upload-time = "2025-06-02T14:52:10.026Z" }, | |||||
| { url = "https://files.pythonhosted.org/packages/b5/00/d631e67a838026495268c2f6884f3711a15a9a2a96cd244fdaea53b823fb/typing_extensions-4.14.1-py3-none-any.whl", hash = "sha256:d1e1e3b58374dc93031d6eda2420a48ea44a36c2b4766a4fdeb3710755731d76", size = 43906, upload-time = "2025-07-04T13:28:32.743Z" }, | |||||
| ] | ] | ||||
| [[package]] | [[package]] | ||||
| [[package]] | [[package]] | ||||
| name = "unstructured-client" | name = "unstructured-client" | ||||
| version = "0.37.4" | |||||
| version = "0.38.1" | |||||
| source = { registry = "https://pypi.org/simple" } | source = { registry = "https://pypi.org/simple" } | ||||
| dependencies = [ | dependencies = [ | ||||
| { name = "aiofiles" }, | { name = "aiofiles" }, | ||||
| { name = "pypdf" }, | { name = "pypdf" }, | ||||
| { name = "requests-toolbelt" }, | { name = "requests-toolbelt" }, | ||||
| ] | ] | ||||
| sdist = { url = "https://files.pythonhosted.org/packages/6c/6f/8dd20dab879f25074d6abfbb98f77bb8efeea0ae1bdf9a414b3e73c152b6/unstructured_client-0.37.4.tar.gz", hash = "sha256:5a4029563c2f79de098374fd8a99090719df325b4bdcfa3a87820908f2c83e6c", size = 90481, upload-time = "2025-07-01T16:40:09.877Z" } | |||||
| sdist = { url = "https://files.pythonhosted.org/packages/85/60/412092671bfc4952640739f2c0c9b2f4c8af26a3c921738fd12621b4ddd8/unstructured_client-0.38.1.tar.gz", hash = "sha256:43ab0670dd8ff53d71e74f9b6dfe490a84a5303dab80a4873e118a840c6d46ca", size = 91781, upload-time = "2025-07-03T15:46:35.054Z" } | |||||
| wheels = [ | wheels = [ | ||||
| { url = "https://files.pythonhosted.org/packages/93/09/4399b0c32564b1a19fef943b5acea5a16fa0c6aa7a320065ce726b8245c1/unstructured_client-0.37.4-py3-none-any.whl", hash = "sha256:31975c0ea4408e369e6aad11c9e746d1f3f14013ac5c89f9f8dbada3a21dcec0", size = 211242, upload-time = "2025-07-01T16:40:08.642Z" }, | |||||
| { url = "https://files.pythonhosted.org/packages/26/e0/8c249f00ba85fb4aba5c541463312befbfbf491105ff5c06e508089467be/unstructured_client-0.38.1-py3-none-any.whl", hash = "sha256:71e5467870d0a0119c788c29ec8baf5c0f7123f424affc9d6682eeeb7b8d45fa", size = 212626, upload-time = "2025-07-03T15:46:33.929Z" }, | |||||
| ] | ] | ||||
| [[package]] | [[package]] | ||||
| [[package]] | [[package]] | ||||
| name = "uuid6" | name = "uuid6" | ||||
| version = "2025.0.0" | |||||
| version = "2025.0.1" | |||||
| source = { registry = "https://pypi.org/simple" } | source = { registry = "https://pypi.org/simple" } | ||||
| sdist = { url = "https://files.pythonhosted.org/packages/3f/49/06a089c184580f510e20226d9a081e4323d13db2fbc92d566697b5395c1e/uuid6-2025.0.0.tar.gz", hash = "sha256:bb78aa300e29db89b00410371d0c1f1824e59e29995a9daa3dedc8033d1d84ec", size = 13941, upload-time = "2025-06-11T20:02:05.324Z" } | |||||
| sdist = { url = "https://files.pythonhosted.org/packages/ca/b7/4c0f736ca824b3a25b15e8213d1bcfc15f8ac2ae48d1b445b310892dc4da/uuid6-2025.0.1.tar.gz", hash = "sha256:cd0af94fa428675a44e32c5319ec5a3485225ba2179eefcf4c3f205ae30a81bd", size = 13932, upload-time = "2025-07-04T18:30:35.186Z" } | |||||
| wheels = [ | wheels = [ | ||||
| { url = "https://files.pythonhosted.org/packages/0a/50/4da47101af45b6cfa291559577993b52ee4399b3cd54ba307574a11e4f3a/uuid6-2025.0.0-py3-none-any.whl", hash = "sha256:2c73405ff5333c7181443958c6865e0d1b9b816bb160549e8d80ba186263cb3a", size = 7001, upload-time = "2025-06-11T20:02:04.521Z" }, | |||||
| { url = "https://files.pythonhosted.org/packages/3d/b2/93faaab7962e2aa8d6e174afb6f76be2ca0ce89fde14d3af835acebcaa59/uuid6-2025.0.1-py3-none-any.whl", hash = "sha256:80530ce4d02a93cdf82e7122ca0da3ebbbc269790ec1cb902481fa3e9cc9ff99", size = 6979, upload-time = "2025-07-04T18:30:34.001Z" }, | |||||
| ] | ] | ||||
| [[package]] | [[package]] | 
| proxy_pass http://web:3000; | proxy_pass http://web:3000; | ||||
| include proxy.conf; | include proxy.conf; | ||||
| } | } | ||||
| location /mcp { | |||||
| proxy_pass http://api:5001; | |||||
| include proxy.conf; | |||||
| } | |||||
| # placeholder for acme challenge location | # placeholder for acme challenge location | ||||
| ${ACME_CHALLENGE_LOCATION} | ${ACME_CHALLENGE_LOCATION} | ||||