Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
@@ -28,7 +28,27 @@ jobs:
        run: |
          uvx --from ast-grep-cli sg --pattern 'db.session.query($WHATEVER).filter($HERE)' --rewrite 'db.session.query($WHATEVER).where($HERE)' -l py --update-all
          uvx --from ast-grep-cli sg --pattern 'session.query($WHATEVER).filter($HERE)' --rewrite 'session.query($WHATEVER).where($HERE)' -l py --update-all
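          # Illustrative effect of the two rewrites above (a sketch with a hypothetical
          # Account model, not part of the original workflow): legacy-style
          #   db.session.query(Account).filter(Account.id == account_id)
          # becomes the SQLAlchemy 2.0-style
          #   db.session.query(Account).where(Account.id == account_id)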
          # Convert Optional[T] to T | None (ignoring quoted types)
          cat > /tmp/optional-rule.yml << 'EOF'
          id: convert-optional-to-union
          language: python
          rule:
            kind: generic_type
            all:
              - has:
                  kind: identifier
                  pattern: Optional
              - has:
                  kind: type_parameter
                  has:
                    kind: type
                    pattern: $T
          fix: $T | None
          EOF
          uvx --from ast-grep-cli sg scan --inline-rules "$(cat /tmp/optional-rule.yml)" --update-all
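          # Illustrative effect of the rule above (hypothetical field name): an annotation
          #   api_key: Optional[str]
          # is rewritten in place to
          #   api_key: str | None
          # Quoted forward references such as Optional["Account"] appear to be matched as
          # well, yielding invalid "Account" | None - the sed pass below reverts those.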
          # Fix forward references that were incorrectly converted (Python doesn't support "Type" | None syntax)
          find . -name "*.py" -type f -exec sed -i.bak -E 's/"([^"]+)" \| None/Optional["\1"]/g; s/'"'"'([^'"'"']+)'"'"' \| None/Optional['"'"'\1'"'"']/g' {} \;
          find . -name "*.py.bak" -type f -delete
      - name: mdformat
        run: |
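Why the sed pass is needed: PEP 604 unions are evaluated at runtime, and `|` is not defined between a string literal and `None`, so a quoted forward reference cannot be rewritten to `"T" | None`. A minimal runnable sketch (the `Account` class is a hypothetical placeholder):

```python
from typing import Optional

class Account:  # hypothetical placeholder model
    pass

ok: Account | None = None  # PEP 604 union of real types: fine on Python 3.10+

# A union with a *string* forward reference fails when the annotation is evaluated:
try:
    bad = "Account" | None
except TypeError as exc:
    print(exc)  # unsupported operand type(s) for |: 'str' and 'NoneType'

still_ok: Optional["Account"] = None  # quoted references must keep Optional[...]
```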
| @@ -2,7 +2,7 @@ import base64 | |||
| import json | |||
| import logging | |||
| import secrets | |||
| from typing import Any, Optional | |||
| from typing import Any | |||
| import click | |||
| import sqlalchemy as sa | |||
| @@ -639,7 +639,7 @@ def old_metadata_migration(): | |||
| @click.option("--email", prompt=True, help="Tenant account email.") | |||
| @click.option("--name", prompt=True, help="Workspace name.") | |||
| @click.option("--language", prompt=True, help="Account language, default: en-US.") | |||
| def create_tenant(email: str, language: Optional[str] = None, name: Optional[str] = None): | |||
| def create_tenant(email: str, language: str | None = None, name: str | None = None): | |||
| """ | |||
| Create tenant account | |||
| """ | |||
| @@ -1,5 +1,3 @@ | |||
| from typing import Optional | |||
| from pydantic import Field | |||
| from pydantic_settings import BaseSettings | |||
| @@ -9,28 +7,28 @@ class NotionConfig(BaseSettings): | |||
| Configuration settings for Notion integration | |||
| """ | |||
| NOTION_CLIENT_ID: Optional[str] = Field( | |||
| NOTION_CLIENT_ID: str | None = Field( | |||
| description="Client ID for Notion API authentication. Required for OAuth 2.0 flow.", | |||
| default=None, | |||
| ) | |||
| NOTION_CLIENT_SECRET: Optional[str] = Field( | |||
| NOTION_CLIENT_SECRET: str | None = Field( | |||
| description="Client secret for Notion API authentication. Required for OAuth 2.0 flow.", | |||
| default=None, | |||
| ) | |||
| NOTION_INTEGRATION_TYPE: Optional[str] = Field( | |||
| NOTION_INTEGRATION_TYPE: str | None = Field( | |||
| description="Type of Notion integration." | |||
| " Set to 'internal' for internal integrations, or None for public integrations.", | |||
| default=None, | |||
| ) | |||
| NOTION_INTERNAL_SECRET: Optional[str] = Field( | |||
| NOTION_INTERNAL_SECRET: str | None = Field( | |||
| description="Secret key for internal Notion integrations. Required when NOTION_INTEGRATION_TYPE is 'internal'.", | |||
| default=None, | |||
| ) | |||
| NOTION_INTEGRATION_TOKEN: Optional[str] = Field( | |||
| NOTION_INTEGRATION_TOKEN: str | None = Field( | |||
| description="Integration token for Notion API access. Used for direct API calls without OAuth flow.", | |||
| default=None, | |||
| ) | |||
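The converted fields are behaviorally identical: pydantic treats `str | None` exactly like `Optional[str]`. A minimal, self-contained sketch reusing one field from above (description trimmed):

```python
from pydantic import Field
from pydantic_settings import BaseSettings

class NotionConfig(BaseSettings):
    NOTION_CLIENT_ID: str | None = Field(
        description="Client ID for Notion API authentication.",
        default=None,
    )

config = NotionConfig()          # reads NOTION_CLIENT_ID from the environment
print(config.NOTION_CLIENT_ID)   # None unless the variable is set
```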
| @@ -1,5 +1,3 @@ | |||
| from typing import Optional | |||
| from pydantic import Field, NonNegativeFloat | |||
| from pydantic_settings import BaseSettings | |||
| @@ -9,7 +7,7 @@ class SentryConfig(BaseSettings): | |||
| Configuration settings for Sentry error tracking and performance monitoring | |||
| """ | |||
| SENTRY_DSN: Optional[str] = Field( | |||
| SENTRY_DSN: str | None = Field( | |||
| description="Sentry Data Source Name (DSN)." | |||
| " This is the unique identifier of your Sentry project, used to send events to the correct project.", | |||
| default=None, | |||
| @@ -1,4 +1,4 @@ | |||
| from typing import Literal, Optional | |||
| from typing import Literal | |||
| from pydantic import ( | |||
| AliasChoices, | |||
| @@ -57,7 +57,7 @@ class SecurityConfig(BaseSettings): | |||
| default=False, | |||
| ) | |||
| ADMIN_API_KEY: Optional[str] = Field( | |||
| ADMIN_API_KEY: str | None = Field( | |||
| description="admin api key for authentication", | |||
| default=None, | |||
| ) | |||
| @@ -97,17 +97,17 @@ class CodeExecutionSandboxConfig(BaseSettings): | |||
| default="dify-sandbox", | |||
| ) | |||
| CODE_EXECUTION_CONNECT_TIMEOUT: Optional[float] = Field( | |||
| CODE_EXECUTION_CONNECT_TIMEOUT: float | None = Field( | |||
| description="Connection timeout in seconds for code execution requests", | |||
| default=10.0, | |||
| ) | |||
| CODE_EXECUTION_READ_TIMEOUT: Optional[float] = Field( | |||
| CODE_EXECUTION_READ_TIMEOUT: float | None = Field( | |||
| description="Read timeout in seconds for code execution requests", | |||
| default=60.0, | |||
| ) | |||
| CODE_EXECUTION_WRITE_TIMEOUT: Optional[float] = Field( | |||
| CODE_EXECUTION_WRITE_TIMEOUT: float | None = Field( | |||
| description="Write timeout in seconds for code execution request", | |||
| default=10.0, | |||
| ) | |||
| @@ -368,17 +368,17 @@ class HttpConfig(BaseSettings): | |||
| default=3, | |||
| ) | |||
| SSRF_PROXY_ALL_URL: Optional[str] = Field( | |||
| SSRF_PROXY_ALL_URL: str | None = Field( | |||
| description="Proxy URL for HTTP or HTTPS requests to prevent Server-Side Request Forgery (SSRF)", | |||
| default=None, | |||
| ) | |||
| SSRF_PROXY_HTTP_URL: Optional[str] = Field( | |||
| SSRF_PROXY_HTTP_URL: str | None = Field( | |||
| description="Proxy URL for HTTP requests to prevent Server-Side Request Forgery (SSRF)", | |||
| default=None, | |||
| ) | |||
| SSRF_PROXY_HTTPS_URL: Optional[str] = Field( | |||
| SSRF_PROXY_HTTPS_URL: str | None = Field( | |||
| description="Proxy URL for HTTPS requests to prevent Server-Side Request Forgery (SSRF)", | |||
| default=None, | |||
| ) | |||
| @@ -420,7 +420,7 @@ class InnerAPIConfig(BaseSettings): | |||
| default=False, | |||
| ) | |||
| INNER_API_KEY: Optional[str] = Field( | |||
| INNER_API_KEY: str | None = Field( | |||
| description="API key for accessing the internal API", | |||
| default=None, | |||
| ) | |||
| @@ -436,7 +436,7 @@ class LoggingConfig(BaseSettings): | |||
| default="INFO", | |||
| ) | |||
| LOG_FILE: Optional[str] = Field( | |||
| LOG_FILE: str | None = Field( | |||
| description="File path for log output.", | |||
| default=None, | |||
| ) | |||
| @@ -456,12 +456,12 @@ class LoggingConfig(BaseSettings): | |||
| default="%(asctime)s.%(msecs)03d %(levelname)s [%(threadName)s] [%(filename)s:%(lineno)d] - %(message)s", | |||
| ) | |||
| LOG_DATEFORMAT: Optional[str] = Field( | |||
| LOG_DATEFORMAT: str | None = Field( | |||
| description="Date format string for log timestamps", | |||
| default=None, | |||
| ) | |||
| LOG_TZ: Optional[str] = Field( | |||
| LOG_TZ: str | None = Field( | |||
| description="Timezone for log timestamps (e.g., 'America/New_York')", | |||
| default="UTC", | |||
| ) | |||
| @@ -595,22 +595,22 @@ class AuthConfig(BaseSettings): | |||
| default="/console/api/oauth/authorize", | |||
| ) | |||
| GITHUB_CLIENT_ID: Optional[str] = Field( | |||
| GITHUB_CLIENT_ID: str | None = Field( | |||
| description="GitHub OAuth client ID", | |||
| default=None, | |||
| ) | |||
| GITHUB_CLIENT_SECRET: Optional[str] = Field( | |||
| GITHUB_CLIENT_SECRET: str | None = Field( | |||
| description="GitHub OAuth client secret", | |||
| default=None, | |||
| ) | |||
| GOOGLE_CLIENT_ID: Optional[str] = Field( | |||
| GOOGLE_CLIENT_ID: str | None = Field( | |||
| description="Google OAuth client ID", | |||
| default=None, | |||
| ) | |||
| GOOGLE_CLIENT_SECRET: Optional[str] = Field( | |||
| GOOGLE_CLIENT_SECRET: str | None = Field( | |||
| description="Google OAuth client secret", | |||
| default=None, | |||
| ) | |||
| @@ -678,42 +678,42 @@ class MailConfig(BaseSettings): | |||
| Configuration for email services | |||
| """ | |||
| MAIL_TYPE: Optional[str] = Field( | |||
| MAIL_TYPE: str | None = Field( | |||
| description="Email service provider type ('smtp' or 'resend' or 'sendGrid), default to None.", | |||
| default=None, | |||
| ) | |||
| MAIL_DEFAULT_SEND_FROM: Optional[str] = Field( | |||
| MAIL_DEFAULT_SEND_FROM: str | None = Field( | |||
| description="Default email address to use as the sender", | |||
| default=None, | |||
| ) | |||
| RESEND_API_KEY: Optional[str] = Field( | |||
| RESEND_API_KEY: str | None = Field( | |||
| description="API key for Resend email service", | |||
| default=None, | |||
| ) | |||
| RESEND_API_URL: Optional[str] = Field( | |||
| RESEND_API_URL: str | None = Field( | |||
| description="API URL for Resend email service", | |||
| default=None, | |||
| ) | |||
| SMTP_SERVER: Optional[str] = Field( | |||
| SMTP_SERVER: str | None = Field( | |||
| description="SMTP server hostname", | |||
| default=None, | |||
| ) | |||
| SMTP_PORT: Optional[int] = Field( | |||
| SMTP_PORT: int | None = Field( | |||
| description="SMTP server port number", | |||
| default=465, | |||
| ) | |||
| SMTP_USERNAME: Optional[str] = Field( | |||
| SMTP_USERNAME: str | None = Field( | |||
| description="Username for SMTP authentication", | |||
| default=None, | |||
| ) | |||
| SMTP_PASSWORD: Optional[str] = Field( | |||
| SMTP_PASSWORD: str | None = Field( | |||
| description="Password for SMTP authentication", | |||
| default=None, | |||
| ) | |||
| @@ -733,7 +733,7 @@ class MailConfig(BaseSettings): | |||
| default=50, | |||
| ) | |||
| SENDGRID_API_KEY: Optional[str] = Field( | |||
| SENDGRID_API_KEY: str | None = Field( | |||
| description="API key for SendGrid service", | |||
| default=None, | |||
| ) | |||
| @@ -756,17 +756,17 @@ class RagEtlConfig(BaseSettings): | |||
| default="database", | |||
| ) | |||
| UNSTRUCTURED_API_URL: Optional[str] = Field( | |||
| UNSTRUCTURED_API_URL: str | None = Field( | |||
| description="API URL for Unstructured.io service", | |||
| default=None, | |||
| ) | |||
| UNSTRUCTURED_API_KEY: Optional[str] = Field( | |||
| UNSTRUCTURED_API_KEY: str | None = Field( | |||
| description="API key for Unstructured.io service", | |||
| default="", | |||
| ) | |||
| SCARF_NO_ANALYTICS: Optional[str] = Field( | |||
| SCARF_NO_ANALYTICS: str | None = Field( | |||
| description="This is about whether to disable Scarf analytics in Unstructured library.", | |||
| default="false", | |||
| ) | |||
| @@ -1,5 +1,3 @@ | |||
| from typing import Optional | |||
| from pydantic import Field, NonNegativeInt | |||
| from pydantic_settings import BaseSettings | |||
| @@ -40,17 +38,17 @@ class HostedOpenAiConfig(BaseSettings): | |||
| Configuration for hosted OpenAI service | |||
| """ | |||
| HOSTED_OPENAI_API_KEY: Optional[str] = Field( | |||
| HOSTED_OPENAI_API_KEY: str | None = Field( | |||
| description="API key for hosted OpenAI service", | |||
| default=None, | |||
| ) | |||
| HOSTED_OPENAI_API_BASE: Optional[str] = Field( | |||
| HOSTED_OPENAI_API_BASE: str | None = Field( | |||
| description="Base URL for hosted OpenAI API", | |||
| default=None, | |||
| ) | |||
| HOSTED_OPENAI_API_ORGANIZATION: Optional[str] = Field( | |||
| HOSTED_OPENAI_API_ORGANIZATION: str | None = Field( | |||
| description="Organization ID for hosted OpenAI service", | |||
| default=None, | |||
| ) | |||
| @@ -110,12 +108,12 @@ class HostedAzureOpenAiConfig(BaseSettings): | |||
| default=False, | |||
| ) | |||
| HOSTED_AZURE_OPENAI_API_KEY: Optional[str] = Field( | |||
| HOSTED_AZURE_OPENAI_API_KEY: str | None = Field( | |||
| description="API key for hosted Azure OpenAI service", | |||
| default=None, | |||
| ) | |||
| HOSTED_AZURE_OPENAI_API_BASE: Optional[str] = Field( | |||
| HOSTED_AZURE_OPENAI_API_BASE: str | None = Field( | |||
| description="Base URL for hosted Azure OpenAI API", | |||
| default=None, | |||
| ) | |||
| @@ -131,12 +129,12 @@ class HostedAnthropicConfig(BaseSettings): | |||
| Configuration for hosted Anthropic service | |||
| """ | |||
| HOSTED_ANTHROPIC_API_BASE: Optional[str] = Field( | |||
| HOSTED_ANTHROPIC_API_BASE: str | None = Field( | |||
| description="Base URL for hosted Anthropic API", | |||
| default=None, | |||
| ) | |||
| HOSTED_ANTHROPIC_API_KEY: Optional[str] = Field( | |||
| HOSTED_ANTHROPIC_API_KEY: str | None = Field( | |||
| description="API key for hosted Anthropic service", | |||
| default=None, | |||
| ) | |||
| @@ -1,5 +1,5 @@ | |||
| import os | |||
| from typing import Any, Literal, Optional | |||
| from typing import Any, Literal | |||
| from urllib.parse import parse_qsl, quote_plus | |||
| from pydantic import Field, NonNegativeFloat, NonNegativeInt, PositiveFloat, PositiveInt, computed_field | |||
| @@ -78,18 +78,18 @@ class StorageConfig(BaseSettings): | |||
| class VectorStoreConfig(BaseSettings): | |||
| VECTOR_STORE: Optional[str] = Field( | |||
| VECTOR_STORE: str | None = Field( | |||
| description="Type of vector store to use for efficient similarity search." | |||
| " Set to None if not using a vector store.", | |||
| default=None, | |||
| ) | |||
| VECTOR_STORE_WHITELIST_ENABLE: Optional[bool] = Field( | |||
| VECTOR_STORE_WHITELIST_ENABLE: bool | None = Field( | |||
| description="Enable whitelist for vector store.", | |||
| default=False, | |||
| ) | |||
| VECTOR_INDEX_NAME_PREFIX: Optional[str] = Field( | |||
| VECTOR_INDEX_NAME_PREFIX: str | None = Field( | |||
| description="Prefix used to create collection name in vector database", | |||
| default="Vector_index", | |||
| ) | |||
| @@ -225,26 +225,26 @@ class CeleryConfig(DatabaseConfig): | |||
| default="redis", | |||
| ) | |||
| CELERY_BROKER_URL: Optional[str] = Field( | |||
| CELERY_BROKER_URL: str | None = Field( | |||
| description="URL of the message broker for Celery tasks.", | |||
| default=None, | |||
| ) | |||
| CELERY_USE_SENTINEL: Optional[bool] = Field( | |||
| CELERY_USE_SENTINEL: bool | None = Field( | |||
| description="Whether to use Redis Sentinel for high availability.", | |||
| default=False, | |||
| ) | |||
| CELERY_SENTINEL_MASTER_NAME: Optional[str] = Field( | |||
| CELERY_SENTINEL_MASTER_NAME: str | None = Field( | |||
| description="Name of the Redis Sentinel master.", | |||
| default=None, | |||
| ) | |||
| CELERY_SENTINEL_PASSWORD: Optional[str] = Field( | |||
| CELERY_SENTINEL_PASSWORD: str | None = Field( | |||
| description="Password of the Redis Sentinel master.", | |||
| default=None, | |||
| ) | |||
| CELERY_SENTINEL_SOCKET_TIMEOUT: Optional[PositiveFloat] = Field( | |||
| CELERY_SENTINEL_SOCKET_TIMEOUT: PositiveFloat | None = Field( | |||
| description="Timeout for Redis Sentinel socket operations in seconds.", | |||
| default=0.1, | |||
| ) | |||
| @@ -268,12 +268,12 @@ class InternalTestConfig(BaseSettings): | |||
| Configuration settings for Internal Test | |||
| """ | |||
| AWS_SECRET_ACCESS_KEY: Optional[str] = Field( | |||
| AWS_SECRET_ACCESS_KEY: str | None = Field( | |||
| description="Internal test AWS secret access key", | |||
| default=None, | |||
| ) | |||
| AWS_ACCESS_KEY_ID: Optional[str] = Field( | |||
| AWS_ACCESS_KEY_ID: str | None = Field( | |||
| description="Internal test AWS access key ID", | |||
| default=None, | |||
| ) | |||
| @@ -284,15 +284,15 @@ class DatasetQueueMonitorConfig(BaseSettings): | |||
| Configuration settings for Dataset Queue Monitor | |||
| """ | |||
| QUEUE_MONITOR_THRESHOLD: Optional[NonNegativeInt] = Field( | |||
| QUEUE_MONITOR_THRESHOLD: NonNegativeInt | None = Field( | |||
| description="Threshold for dataset queue monitor", | |||
| default=200, | |||
| ) | |||
| QUEUE_MONITOR_ALERT_EMAILS: Optional[str] = Field( | |||
| QUEUE_MONITOR_ALERT_EMAILS: str | None = Field( | |||
| description="Emails for dataset queue monitor alert, separated by commas", | |||
| default=None, | |||
| ) | |||
| QUEUE_MONITOR_INTERVAL: Optional[NonNegativeFloat] = Field( | |||
| QUEUE_MONITOR_INTERVAL: NonNegativeFloat | None = Field( | |||
| description="Interval for dataset queue monitor in minutes", | |||
| default=30, | |||
| ) | |||
| @@ -1,5 +1,3 @@ | |||
| from typing import Optional | |||
| from pydantic import Field, NonNegativeInt, PositiveFloat, PositiveInt | |||
| from pydantic_settings import BaseSettings | |||
| @@ -19,12 +17,12 @@ class RedisConfig(BaseSettings): | |||
| default=6379, | |||
| ) | |||
| REDIS_USERNAME: Optional[str] = Field( | |||
| REDIS_USERNAME: str | None = Field( | |||
| description="Username for Redis authentication (if required)", | |||
| default=None, | |||
| ) | |||
| REDIS_PASSWORD: Optional[str] = Field( | |||
| REDIS_PASSWORD: str | None = Field( | |||
| description="Password for Redis authentication (if required)", | |||
| default=None, | |||
| ) | |||
| @@ -44,47 +42,47 @@ class RedisConfig(BaseSettings): | |||
| default="CERT_NONE", | |||
| ) | |||
| REDIS_SSL_CA_CERTS: Optional[str] = Field( | |||
| REDIS_SSL_CA_CERTS: str | None = Field( | |||
| description="Path to the CA certificate file for SSL verification", | |||
| default=None, | |||
| ) | |||
| REDIS_SSL_CERTFILE: Optional[str] = Field( | |||
| REDIS_SSL_CERTFILE: str | None = Field( | |||
| description="Path to the client certificate file for SSL authentication", | |||
| default=None, | |||
| ) | |||
| REDIS_SSL_KEYFILE: Optional[str] = Field( | |||
| REDIS_SSL_KEYFILE: str | None = Field( | |||
| description="Path to the client private key file for SSL authentication", | |||
| default=None, | |||
| ) | |||
| REDIS_USE_SENTINEL: Optional[bool] = Field( | |||
| REDIS_USE_SENTINEL: bool | None = Field( | |||
| description="Enable Redis Sentinel mode for high availability", | |||
| default=False, | |||
| ) | |||
| REDIS_SENTINELS: Optional[str] = Field( | |||
| REDIS_SENTINELS: str | None = Field( | |||
| description="Comma-separated list of Redis Sentinel nodes (host:port)", | |||
| default=None, | |||
| ) | |||
| REDIS_SENTINEL_SERVICE_NAME: Optional[str] = Field( | |||
| REDIS_SENTINEL_SERVICE_NAME: str | None = Field( | |||
| description="Name of the Redis Sentinel service to monitor", | |||
| default=None, | |||
| ) | |||
| REDIS_SENTINEL_USERNAME: Optional[str] = Field( | |||
| REDIS_SENTINEL_USERNAME: str | None = Field( | |||
| description="Username for Redis Sentinel authentication (if required)", | |||
| default=None, | |||
| ) | |||
| REDIS_SENTINEL_PASSWORD: Optional[str] = Field( | |||
| REDIS_SENTINEL_PASSWORD: str | None = Field( | |||
| description="Password for Redis Sentinel authentication (if required)", | |||
| default=None, | |||
| ) | |||
| REDIS_SENTINEL_SOCKET_TIMEOUT: Optional[PositiveFloat] = Field( | |||
| REDIS_SENTINEL_SOCKET_TIMEOUT: PositiveFloat | None = Field( | |||
| description="Socket timeout in seconds for Redis Sentinel connections", | |||
| default=0.1, | |||
| ) | |||
| @@ -94,12 +92,12 @@ class RedisConfig(BaseSettings): | |||
| default=False, | |||
| ) | |||
| REDIS_CLUSTERS: Optional[str] = Field( | |||
| REDIS_CLUSTERS: str | None = Field( | |||
| description="Comma-separated list of Redis Clusters nodes (host:port)", | |||
| default=None, | |||
| ) | |||
| REDIS_CLUSTERS_PASSWORD: Optional[str] = Field( | |||
| REDIS_CLUSTERS_PASSWORD: str | None = Field( | |||
| description="Password for Redis Clusters authentication (if required)", | |||
| default=None, | |||
| ) | |||
| @@ -1,5 +1,3 @@ | |||
| from typing import Optional | |||
| from pydantic import Field | |||
| from pydantic_settings import BaseSettings | |||
| @@ -9,37 +7,37 @@ class AliyunOSSStorageConfig(BaseSettings): | |||
| Configuration settings for Aliyun Object Storage Service (OSS) | |||
| """ | |||
| ALIYUN_OSS_BUCKET_NAME: Optional[str] = Field( | |||
| ALIYUN_OSS_BUCKET_NAME: str | None = Field( | |||
| description="Name of the Aliyun OSS bucket to store and retrieve objects", | |||
| default=None, | |||
| ) | |||
| ALIYUN_OSS_ACCESS_KEY: Optional[str] = Field( | |||
| ALIYUN_OSS_ACCESS_KEY: str | None = Field( | |||
| description="Access key ID for authenticating with Aliyun OSS", | |||
| default=None, | |||
| ) | |||
| ALIYUN_OSS_SECRET_KEY: Optional[str] = Field( | |||
| ALIYUN_OSS_SECRET_KEY: str | None = Field( | |||
| description="Secret access key for authenticating with Aliyun OSS", | |||
| default=None, | |||
| ) | |||
| ALIYUN_OSS_ENDPOINT: Optional[str] = Field( | |||
| ALIYUN_OSS_ENDPOINT: str | None = Field( | |||
| description="URL of the Aliyun OSS endpoint for your chosen region", | |||
| default=None, | |||
| ) | |||
| ALIYUN_OSS_REGION: Optional[str] = Field( | |||
| ALIYUN_OSS_REGION: str | None = Field( | |||
| description="Aliyun OSS region where your bucket is located (e.g., 'oss-cn-hangzhou')", | |||
| default=None, | |||
| ) | |||
| ALIYUN_OSS_AUTH_VERSION: Optional[str] = Field( | |||
| ALIYUN_OSS_AUTH_VERSION: str | None = Field( | |||
| description="Version of the authentication protocol to use with Aliyun OSS (e.g., 'v4')", | |||
| default=None, | |||
| ) | |||
| ALIYUN_OSS_PATH: Optional[str] = Field( | |||
| ALIYUN_OSS_PATH: str | None = Field( | |||
| description="Base path within the bucket to store objects (e.g., 'my-app-data/')", | |||
| default=None, | |||
| ) | |||
| @@ -1,4 +1,4 @@ | |||
| from typing import Literal, Optional | |||
| from typing import Literal | |||
| from pydantic import Field | |||
| from pydantic_settings import BaseSettings | |||
| @@ -9,27 +9,27 @@ class S3StorageConfig(BaseSettings): | |||
| Configuration settings for S3-compatible object storage | |||
| """ | |||
| S3_ENDPOINT: Optional[str] = Field( | |||
| S3_ENDPOINT: str | None = Field( | |||
| description="URL of the S3-compatible storage endpoint (e.g., 'https://s3.amazonaws.com')", | |||
| default=None, | |||
| ) | |||
| S3_REGION: Optional[str] = Field( | |||
| S3_REGION: str | None = Field( | |||
| description="Region where the S3 bucket is located (e.g., 'us-east-1')", | |||
| default=None, | |||
| ) | |||
| S3_BUCKET_NAME: Optional[str] = Field( | |||
| S3_BUCKET_NAME: str | None = Field( | |||
| description="Name of the S3 bucket to store and retrieve objects", | |||
| default=None, | |||
| ) | |||
| S3_ACCESS_KEY: Optional[str] = Field( | |||
| S3_ACCESS_KEY: str | None = Field( | |||
| description="Access key ID for authenticating with the S3 service", | |||
| default=None, | |||
| ) | |||
| S3_SECRET_KEY: Optional[str] = Field( | |||
| S3_SECRET_KEY: str | None = Field( | |||
| description="Secret access key for authenticating with the S3 service", | |||
| default=None, | |||
| ) | |||
| @@ -1,5 +1,3 @@ | |||
| from typing import Optional | |||
| from pydantic import Field | |||
| from pydantic_settings import BaseSettings | |||
| @@ -9,22 +7,22 @@ class AzureBlobStorageConfig(BaseSettings): | |||
| Configuration settings for Azure Blob Storage | |||
| """ | |||
| AZURE_BLOB_ACCOUNT_NAME: Optional[str] = Field( | |||
| AZURE_BLOB_ACCOUNT_NAME: str | None = Field( | |||
| description="Name of the Azure Storage account (e.g., 'mystorageaccount')", | |||
| default=None, | |||
| ) | |||
| AZURE_BLOB_ACCOUNT_KEY: Optional[str] = Field( | |||
| AZURE_BLOB_ACCOUNT_KEY: str | None = Field( | |||
| description="Access key for authenticating with the Azure Storage account", | |||
| default=None, | |||
| ) | |||
| AZURE_BLOB_CONTAINER_NAME: Optional[str] = Field( | |||
| AZURE_BLOB_CONTAINER_NAME: str | None = Field( | |||
| description="Name of the Azure Blob container to store and retrieve objects", | |||
| default=None, | |||
| ) | |||
| AZURE_BLOB_ACCOUNT_URL: Optional[str] = Field( | |||
| AZURE_BLOB_ACCOUNT_URL: str | None = Field( | |||
| description="URL of the Azure Blob storage endpoint (e.g., 'https://mystorageaccount.blob.core.windows.net')", | |||
| default=None, | |||
| ) | |||
| @@ -1,5 +1,3 @@ | |||
| from typing import Optional | |||
| from pydantic import Field | |||
| from pydantic_settings import BaseSettings | |||
| @@ -9,22 +7,22 @@ class BaiduOBSStorageConfig(BaseSettings): | |||
| Configuration settings for Baidu Object Storage Service (OBS) | |||
| """ | |||
| BAIDU_OBS_BUCKET_NAME: Optional[str] = Field( | |||
| BAIDU_OBS_BUCKET_NAME: str | None = Field( | |||
| description="Name of the Baidu OBS bucket to store and retrieve objects (e.g., 'my-obs-bucket')", | |||
| default=None, | |||
| ) | |||
| BAIDU_OBS_ACCESS_KEY: Optional[str] = Field( | |||
| BAIDU_OBS_ACCESS_KEY: str | None = Field( | |||
| description="Access Key ID for authenticating with Baidu OBS", | |||
| default=None, | |||
| ) | |||
| BAIDU_OBS_SECRET_KEY: Optional[str] = Field( | |||
| BAIDU_OBS_SECRET_KEY: str | None = Field( | |||
| description="Secret Access Key for authenticating with Baidu OBS", | |||
| default=None, | |||
| ) | |||
| BAIDU_OBS_ENDPOINT: Optional[str] = Field( | |||
| BAIDU_OBS_ENDPOINT: str | None = Field( | |||
| description="URL of the Baidu OSS endpoint for your chosen region (e.g., 'https://.bj.bcebos.com')", | |||
| default=None, | |||
| ) | |||
| @@ -1,7 +1,5 @@ | |||
| """ClickZetta Volume Storage Configuration""" | |||
| from typing import Optional | |||
| from pydantic import Field | |||
| from pydantic_settings import BaseSettings | |||
| @@ -9,17 +7,17 @@ from pydantic_settings import BaseSettings | |||
| class ClickZettaVolumeStorageConfig(BaseSettings): | |||
| """Configuration for ClickZetta Volume storage.""" | |||
| CLICKZETTA_VOLUME_USERNAME: Optional[str] = Field( | |||
| CLICKZETTA_VOLUME_USERNAME: str | None = Field( | |||
| description="Username for ClickZetta Volume authentication", | |||
| default=None, | |||
| ) | |||
| CLICKZETTA_VOLUME_PASSWORD: Optional[str] = Field( | |||
| CLICKZETTA_VOLUME_PASSWORD: str | None = Field( | |||
| description="Password for ClickZetta Volume authentication", | |||
| default=None, | |||
| ) | |||
| CLICKZETTA_VOLUME_INSTANCE: Optional[str] = Field( | |||
| CLICKZETTA_VOLUME_INSTANCE: str | None = Field( | |||
| description="ClickZetta instance identifier", | |||
| default=None, | |||
| ) | |||
| @@ -49,7 +47,7 @@ class ClickZettaVolumeStorageConfig(BaseSettings): | |||
| default="user", | |||
| ) | |||
| CLICKZETTA_VOLUME_NAME: Optional[str] = Field( | |||
| CLICKZETTA_VOLUME_NAME: str | None = Field( | |||
| description="ClickZetta volume name for external volumes", | |||
| default=None, | |||
| ) | |||
| @@ -1,5 +1,3 @@ | |||
| from typing import Optional | |||
| from pydantic import Field | |||
| from pydantic_settings import BaseSettings | |||
| @@ -9,12 +7,12 @@ class GoogleCloudStorageConfig(BaseSettings): | |||
| Configuration settings for Google Cloud Storage | |||
| """ | |||
| GOOGLE_STORAGE_BUCKET_NAME: Optional[str] = Field( | |||
| GOOGLE_STORAGE_BUCKET_NAME: str | None = Field( | |||
| description="Name of the Google Cloud Storage bucket to store and retrieve objects (e.g., 'my-gcs-bucket')", | |||
| default=None, | |||
| ) | |||
| GOOGLE_STORAGE_SERVICE_ACCOUNT_JSON_BASE64: Optional[str] = Field( | |||
| GOOGLE_STORAGE_SERVICE_ACCOUNT_JSON_BASE64: str | None = Field( | |||
| description="Base64-encoded JSON key file for Google Cloud service account authentication", | |||
| default=None, | |||
| ) | |||
| @@ -1,5 +1,3 @@ | |||
| from typing import Optional | |||
| from pydantic import Field | |||
| from pydantic_settings import BaseSettings | |||
| @@ -9,22 +7,22 @@ class HuaweiCloudOBSStorageConfig(BaseSettings): | |||
| Configuration settings for Huawei Cloud Object Storage Service (OBS) | |||
| """ | |||
| HUAWEI_OBS_BUCKET_NAME: Optional[str] = Field( | |||
| HUAWEI_OBS_BUCKET_NAME: str | None = Field( | |||
| description="Name of the Huawei Cloud OBS bucket to store and retrieve objects (e.g., 'my-obs-bucket')", | |||
| default=None, | |||
| ) | |||
| HUAWEI_OBS_ACCESS_KEY: Optional[str] = Field( | |||
| HUAWEI_OBS_ACCESS_KEY: str | None = Field( | |||
| description="Access Key ID for authenticating with Huawei Cloud OBS", | |||
| default=None, | |||
| ) | |||
| HUAWEI_OBS_SECRET_KEY: Optional[str] = Field( | |||
| HUAWEI_OBS_SECRET_KEY: str | None = Field( | |||
| description="Secret Access Key for authenticating with Huawei Cloud OBS", | |||
| default=None, | |||
| ) | |||
| HUAWEI_OBS_SERVER: Optional[str] = Field( | |||
| HUAWEI_OBS_SERVER: str | None = Field( | |||
| description="Endpoint URL for Huawei Cloud OBS (e.g., 'https://obs.cn-north-4.myhuaweicloud.com')", | |||
| default=None, | |||
| ) | |||
| @@ -1,5 +1,3 @@ | |||
| from typing import Optional | |||
| from pydantic import Field | |||
| from pydantic_settings import BaseSettings | |||
| @@ -9,27 +7,27 @@ class OCIStorageConfig(BaseSettings): | |||
| Configuration settings for Oracle Cloud Infrastructure (OCI) Object Storage | |||
| """ | |||
| OCI_ENDPOINT: Optional[str] = Field( | |||
| OCI_ENDPOINT: str | None = Field( | |||
| description="URL of the OCI Object Storage endpoint (e.g., 'https://objectstorage.us-phoenix-1.oraclecloud.com')", | |||
| default=None, | |||
| ) | |||
| OCI_REGION: Optional[str] = Field( | |||
| OCI_REGION: str | None = Field( | |||
| description="OCI region where the bucket is located (e.g., 'us-phoenix-1')", | |||
| default=None, | |||
| ) | |||
| OCI_BUCKET_NAME: Optional[str] = Field( | |||
| OCI_BUCKET_NAME: str | None = Field( | |||
| description="Name of the OCI Object Storage bucket to store and retrieve objects (e.g., 'my-oci-bucket')", | |||
| default=None, | |||
| ) | |||
| OCI_ACCESS_KEY: Optional[str] = Field( | |||
| OCI_ACCESS_KEY: str | None = Field( | |||
| description="Access key (also known as API key) for authenticating with OCI Object Storage", | |||
| default=None, | |||
| ) | |||
| OCI_SECRET_KEY: Optional[str] = Field( | |||
| OCI_SECRET_KEY: str | None = Field( | |||
| description="Secret key associated with the access key for authenticating with OCI Object Storage", | |||
| default=None, | |||
| ) | |||
| @@ -1,5 +1,3 @@ | |||
| from typing import Optional | |||
| from pydantic import Field | |||
| from pydantic_settings import BaseSettings | |||
| @@ -9,17 +7,17 @@ class SupabaseStorageConfig(BaseSettings): | |||
| Configuration settings for Supabase Object Storage Service | |||
| """ | |||
| SUPABASE_BUCKET_NAME: Optional[str] = Field( | |||
| SUPABASE_BUCKET_NAME: str | None = Field( | |||
| description="Name of the Supabase bucket to store and retrieve objects (e.g., 'dify-bucket')", | |||
| default=None, | |||
| ) | |||
| SUPABASE_API_KEY: Optional[str] = Field( | |||
| SUPABASE_API_KEY: str | None = Field( | |||
| description="API KEY for authenticating with Supabase", | |||
| default=None, | |||
| ) | |||
| SUPABASE_URL: Optional[str] = Field( | |||
| SUPABASE_URL: str | None = Field( | |||
| description="URL of the Supabase", | |||
| default=None, | |||
| ) | |||
| @@ -1,5 +1,3 @@ | |||
| from typing import Optional | |||
| from pydantic import Field | |||
| from pydantic_settings import BaseSettings | |||
| @@ -9,27 +7,27 @@ class TencentCloudCOSStorageConfig(BaseSettings): | |||
| Configuration settings for Tencent Cloud Object Storage (COS) | |||
| """ | |||
| TENCENT_COS_BUCKET_NAME: Optional[str] = Field( | |||
| TENCENT_COS_BUCKET_NAME: str | None = Field( | |||
| description="Name of the Tencent Cloud COS bucket to store and retrieve objects", | |||
| default=None, | |||
| ) | |||
| TENCENT_COS_REGION: Optional[str] = Field( | |||
| TENCENT_COS_REGION: str | None = Field( | |||
| description="Tencent Cloud region where the COS bucket is located (e.g., 'ap-guangzhou')", | |||
| default=None, | |||
| ) | |||
| TENCENT_COS_SECRET_ID: Optional[str] = Field( | |||
| TENCENT_COS_SECRET_ID: str | None = Field( | |||
| description="SecretId for authenticating with Tencent Cloud COS (part of API credentials)", | |||
| default=None, | |||
| ) | |||
| TENCENT_COS_SECRET_KEY: Optional[str] = Field( | |||
| TENCENT_COS_SECRET_KEY: str | None = Field( | |||
| description="SecretKey for authenticating with Tencent Cloud COS (part of API credentials)", | |||
| default=None, | |||
| ) | |||
| TENCENT_COS_SCHEME: Optional[str] = Field( | |||
| TENCENT_COS_SCHEME: str | None = Field( | |||
| description="Protocol scheme for COS requests: 'https' (recommended) or 'http'", | |||
| default=None, | |||
| ) | |||
| @@ -1,5 +1,3 @@ | |||
| from typing import Optional | |||
| from pydantic import Field | |||
| from pydantic_settings import BaseSettings | |||
| @@ -9,27 +7,27 @@ class VolcengineTOSStorageConfig(BaseSettings): | |||
| Configuration settings for Volcengine Tinder Object Storage (TOS) | |||
| """ | |||
| VOLCENGINE_TOS_BUCKET_NAME: Optional[str] = Field( | |||
| VOLCENGINE_TOS_BUCKET_NAME: str | None = Field( | |||
| description="Name of the Volcengine TOS bucket to store and retrieve objects (e.g., 'my-tos-bucket')", | |||
| default=None, | |||
| ) | |||
| VOLCENGINE_TOS_ACCESS_KEY: Optional[str] = Field( | |||
| VOLCENGINE_TOS_ACCESS_KEY: str | None = Field( | |||
| description="Access Key ID for authenticating with Volcengine TOS", | |||
| default=None, | |||
| ) | |||
| VOLCENGINE_TOS_SECRET_KEY: Optional[str] = Field( | |||
| VOLCENGINE_TOS_SECRET_KEY: str | None = Field( | |||
| description="Secret Access Key for authenticating with Volcengine TOS", | |||
| default=None, | |||
| ) | |||
| VOLCENGINE_TOS_ENDPOINT: Optional[str] = Field( | |||
| VOLCENGINE_TOS_ENDPOINT: str | None = Field( | |||
| description="URL of the Volcengine TOS endpoint (e.g., 'https://tos-cn-beijing.volces.com')", | |||
| default=None, | |||
| ) | |||
| VOLCENGINE_TOS_REGION: Optional[str] = Field( | |||
| VOLCENGINE_TOS_REGION: str | None = Field( | |||
| description="Volcengine region where the TOS bucket is located (e.g., 'cn-beijing')", | |||
| default=None, | |||
| ) | |||
| @@ -1,5 +1,3 @@ | |||
| from typing import Optional | |||
| from pydantic import Field, PositiveInt | |||
| from pydantic_settings import BaseSettings | |||
| @@ -11,37 +9,37 @@ class AnalyticdbConfig(BaseSettings): | |||
| https://www.alibabacloud.com/help/en/analyticdb-for-postgresql/getting-started/create-an-instance-instances-with-vector-engine-optimization-enabled | |||
| """ | |||
| ANALYTICDB_KEY_ID: Optional[str] = Field( | |||
| ANALYTICDB_KEY_ID: str | None = Field( | |||
| default=None, description="The Access Key ID provided by Alibaba Cloud for API authentication." | |||
| ) | |||
| ANALYTICDB_KEY_SECRET: Optional[str] = Field( | |||
| ANALYTICDB_KEY_SECRET: str | None = Field( | |||
| default=None, description="The Secret Access Key corresponding to the Access Key ID for secure API access." | |||
| ) | |||
| ANALYTICDB_REGION_ID: Optional[str] = Field( | |||
| ANALYTICDB_REGION_ID: str | None = Field( | |||
| default=None, | |||
| description="The region where the AnalyticDB instance is deployed (e.g., 'cn-hangzhou', 'ap-southeast-1').", | |||
| ) | |||
| ANALYTICDB_INSTANCE_ID: Optional[str] = Field( | |||
| ANALYTICDB_INSTANCE_ID: str | None = Field( | |||
| default=None, | |||
| description="The unique identifier of the AnalyticDB instance you want to connect to.", | |||
| ) | |||
| ANALYTICDB_ACCOUNT: Optional[str] = Field( | |||
| ANALYTICDB_ACCOUNT: str | None = Field( | |||
| default=None, | |||
| description="The account name used to log in to the AnalyticDB instance" | |||
| " (usually the initial account created with the instance).", | |||
| ) | |||
| ANALYTICDB_PASSWORD: Optional[str] = Field( | |||
| ANALYTICDB_PASSWORD: str | None = Field( | |||
| default=None, description="The password associated with the AnalyticDB account for database authentication." | |||
| ) | |||
| ANALYTICDB_NAMESPACE: Optional[str] = Field( | |||
| ANALYTICDB_NAMESPACE: str | None = Field( | |||
| default=None, description="The namespace within AnalyticDB for schema isolation (if using namespace feature)." | |||
| ) | |||
| ANALYTICDB_NAMESPACE_PASSWORD: Optional[str] = Field( | |||
| ANALYTICDB_NAMESPACE_PASSWORD: str | None = Field( | |||
| default=None, | |||
| description="The password for accessing the specified namespace within the AnalyticDB instance" | |||
| " (if namespace feature is enabled).", | |||
| ) | |||
| ANALYTICDB_HOST: Optional[str] = Field( | |||
| ANALYTICDB_HOST: str | None = Field( | |||
| default=None, description="The host of the AnalyticDB instance you want to connect to." | |||
| ) | |||
| ANALYTICDB_PORT: PositiveInt = Field( | |||
| @@ -1,5 +1,3 @@ | |||
| from typing import Optional | |||
| from pydantic import Field, NonNegativeInt, PositiveInt | |||
| from pydantic_settings import BaseSettings | |||
| @@ -9,7 +7,7 @@ class BaiduVectorDBConfig(BaseSettings): | |||
| Configuration settings for Baidu Vector Database | |||
| """ | |||
| BAIDU_VECTOR_DB_ENDPOINT: Optional[str] = Field( | |||
| BAIDU_VECTOR_DB_ENDPOINT: str | None = Field( | |||
| description="URL of the Baidu Vector Database service (e.g., 'http://vdb.bj.baidubce.com')", | |||
| default=None, | |||
| ) | |||
| @@ -19,17 +17,17 @@ class BaiduVectorDBConfig(BaseSettings): | |||
| default=30000, | |||
| ) | |||
| BAIDU_VECTOR_DB_ACCOUNT: Optional[str] = Field( | |||
| BAIDU_VECTOR_DB_ACCOUNT: str | None = Field( | |||
| description="Account for authenticating with the Baidu Vector Database", | |||
| default=None, | |||
| ) | |||
| BAIDU_VECTOR_DB_API_KEY: Optional[str] = Field( | |||
| BAIDU_VECTOR_DB_API_KEY: str | None = Field( | |||
| description="API key for authenticating with the Baidu Vector Database service", | |||
| default=None, | |||
| ) | |||
| BAIDU_VECTOR_DB_DATABASE: Optional[str] = Field( | |||
| BAIDU_VECTOR_DB_DATABASE: str | None = Field( | |||
| description="Name of the specific Baidu Vector Database to connect to", | |||
| default=None, | |||
| ) | |||
| @@ -1,5 +1,3 @@ | |||
| from typing import Optional | |||
| from pydantic import Field, PositiveInt | |||
| from pydantic_settings import BaseSettings | |||
| @@ -9,7 +7,7 @@ class ChromaConfig(BaseSettings): | |||
| Configuration settings for Chroma vector database | |||
| """ | |||
| CHROMA_HOST: Optional[str] = Field( | |||
| CHROMA_HOST: str | None = Field( | |||
| description="Hostname or IP address of the Chroma server (e.g., 'localhost' or '192.168.1.100')", | |||
| default=None, | |||
| ) | |||
| @@ -19,22 +17,22 @@ class ChromaConfig(BaseSettings): | |||
| default=8000, | |||
| ) | |||
| CHROMA_TENANT: Optional[str] = Field( | |||
| CHROMA_TENANT: str | None = Field( | |||
| description="Tenant identifier for multi-tenancy support in Chroma", | |||
| default=None, | |||
| ) | |||
| CHROMA_DATABASE: Optional[str] = Field( | |||
| CHROMA_DATABASE: str | None = Field( | |||
| description="Name of the Chroma database to connect to", | |||
| default=None, | |||
| ) | |||
| CHROMA_AUTH_PROVIDER: Optional[str] = Field( | |||
| CHROMA_AUTH_PROVIDER: str | None = Field( | |||
| description="Authentication provider for Chroma (e.g., 'basic', 'token', or a custom provider)", | |||
| default=None, | |||
| ) | |||
| CHROMA_AUTH_CREDENTIALS: Optional[str] = Field( | |||
| CHROMA_AUTH_CREDENTIALS: str | None = Field( | |||
| description="Authentication credentials for Chroma (format depends on the auth provider)", | |||
| default=None, | |||
| ) | |||
| @@ -1,5 +1,3 @@ | |||
| from typing import Optional | |||
| from pydantic import Field | |||
| from pydantic_settings import BaseSettings | |||
| @@ -9,62 +7,62 @@ class ClickzettaConfig(BaseSettings): | |||
| Clickzetta Lakehouse vector database configuration | |||
| """ | |||
| CLICKZETTA_USERNAME: Optional[str] = Field( | |||
| CLICKZETTA_USERNAME: str | None = Field( | |||
| description="Username for authenticating with Clickzetta Lakehouse", | |||
| default=None, | |||
| ) | |||
| CLICKZETTA_PASSWORD: Optional[str] = Field( | |||
| CLICKZETTA_PASSWORD: str | None = Field( | |||
| description="Password for authenticating with Clickzetta Lakehouse", | |||
| default=None, | |||
| ) | |||
| CLICKZETTA_INSTANCE: Optional[str] = Field( | |||
| CLICKZETTA_INSTANCE: str | None = Field( | |||
| description="Clickzetta Lakehouse instance ID", | |||
| default=None, | |||
| ) | |||
| CLICKZETTA_SERVICE: Optional[str] = Field( | |||
| CLICKZETTA_SERVICE: str | None = Field( | |||
| description="Clickzetta API service endpoint (e.g., 'api.clickzetta.com')", | |||
| default="api.clickzetta.com", | |||
| ) | |||
| CLICKZETTA_WORKSPACE: Optional[str] = Field( | |||
| CLICKZETTA_WORKSPACE: str | None = Field( | |||
| description="Clickzetta workspace name", | |||
| default="default", | |||
| ) | |||
| CLICKZETTA_VCLUSTER: Optional[str] = Field( | |||
| CLICKZETTA_VCLUSTER: str | None = Field( | |||
| description="Clickzetta virtual cluster name", | |||
| default="default_ap", | |||
| ) | |||
| CLICKZETTA_SCHEMA: Optional[str] = Field( | |||
| CLICKZETTA_SCHEMA: str | None = Field( | |||
| description="Database schema name in Clickzetta", | |||
| default="public", | |||
| ) | |||
| CLICKZETTA_BATCH_SIZE: Optional[int] = Field( | |||
| CLICKZETTA_BATCH_SIZE: int | None = Field( | |||
| description="Batch size for bulk insert operations", | |||
| default=100, | |||
| ) | |||
| CLICKZETTA_ENABLE_INVERTED_INDEX: Optional[bool] = Field( | |||
| CLICKZETTA_ENABLE_INVERTED_INDEX: bool | None = Field( | |||
| description="Enable inverted index for full-text search capabilities", | |||
| default=True, | |||
| ) | |||
| CLICKZETTA_ANALYZER_TYPE: Optional[str] = Field( | |||
| CLICKZETTA_ANALYZER_TYPE: str | None = Field( | |||
| description="Analyzer type for full-text search: keyword, english, chinese, unicode", | |||
| default="chinese", | |||
| ) | |||
| CLICKZETTA_ANALYZER_MODE: Optional[str] = Field( | |||
| CLICKZETTA_ANALYZER_MODE: str | None = Field( | |||
| description="Analyzer mode for tokenization: max_word (fine-grained) or smart (intelligent)", | |||
| default="smart", | |||
| ) | |||
| CLICKZETTA_VECTOR_DISTANCE_FUNCTION: Optional[str] = Field( | |||
| CLICKZETTA_VECTOR_DISTANCE_FUNCTION: str | None = Field( | |||
| description="Distance function for vector similarity: l2_distance or cosine_distance", | |||
| default="cosine_distance", | |||
| ) | |||
| @@ -1,5 +1,3 @@ | |||
| from typing import Optional | |||
| from pydantic import Field | |||
| from pydantic_settings import BaseSettings | |||
| @@ -9,27 +7,27 @@ class CouchbaseConfig(BaseSettings): | |||
| Couchbase configs | |||
| """ | |||
| COUCHBASE_CONNECTION_STRING: Optional[str] = Field( | |||
| COUCHBASE_CONNECTION_STRING: str | None = Field( | |||
| description="COUCHBASE connection string", | |||
| default=None, | |||
| ) | |||
| COUCHBASE_USER: Optional[str] = Field( | |||
| COUCHBASE_USER: str | None = Field( | |||
| description="COUCHBASE user", | |||
| default=None, | |||
| ) | |||
| COUCHBASE_PASSWORD: Optional[str] = Field( | |||
| COUCHBASE_PASSWORD: str | None = Field( | |||
| description="COUCHBASE password", | |||
| default=None, | |||
| ) | |||
| COUCHBASE_BUCKET_NAME: Optional[str] = Field( | |||
| COUCHBASE_BUCKET_NAME: str | None = Field( | |||
| description="COUCHBASE bucket name", | |||
| default=None, | |||
| ) | |||
| COUCHBASE_SCOPE_NAME: Optional[str] = Field( | |||
| COUCHBASE_SCOPE_NAME: str | None = Field( | |||
| description="COUCHBASE scope name", | |||
| default=None, | |||
| ) | |||
| @@ -1,5 +1,3 @@ | |||
| from typing import Optional | |||
| from pydantic import Field, PositiveInt, model_validator | |||
| from pydantic_settings import BaseSettings | |||
| @@ -10,7 +8,7 @@ class ElasticsearchConfig(BaseSettings): | |||
| Can load from environment variables or .env files. | |||
| """ | |||
| ELASTICSEARCH_HOST: Optional[str] = Field( | |||
| ELASTICSEARCH_HOST: str | None = Field( | |||
| description="Hostname or IP address of the Elasticsearch server (e.g., 'localhost' or '192.168.1.100')", | |||
| default="127.0.0.1", | |||
| ) | |||
| @@ -20,30 +18,28 @@ class ElasticsearchConfig(BaseSettings): | |||
| default=9200, | |||
| ) | |||
| ELASTICSEARCH_USERNAME: Optional[str] = Field( | |||
| ELASTICSEARCH_USERNAME: str | None = Field( | |||
| description="Username for authenticating with Elasticsearch (default is 'elastic')", | |||
| default="elastic", | |||
| ) | |||
| ELASTICSEARCH_PASSWORD: Optional[str] = Field( | |||
| ELASTICSEARCH_PASSWORD: str | None = Field( | |||
| description="Password for authenticating with Elasticsearch (default is 'elastic')", | |||
| default="elastic", | |||
| ) | |||
| # Elastic Cloud (optional) | |||
| ELASTICSEARCH_USE_CLOUD: Optional[bool] = Field( | |||
| ELASTICSEARCH_USE_CLOUD: bool | None = Field( | |||
| description="Set to True to use Elastic Cloud instead of self-hosted Elasticsearch", default=False | |||
| ) | |||
| ELASTICSEARCH_CLOUD_URL: Optional[str] = Field( | |||
| ELASTICSEARCH_CLOUD_URL: str | None = Field( | |||
| description="Full URL for Elastic Cloud deployment (e.g., 'https://example.es.region.aws.found.io:443')", | |||
| default=None, | |||
| ) | |||
| ELASTICSEARCH_API_KEY: Optional[str] = Field( | |||
| description="API key for authenticating with Elastic Cloud", default=None | |||
| ) | |||
| ELASTICSEARCH_API_KEY: str | None = Field(description="API key for authenticating with Elastic Cloud", default=None) | |||
| # Common options | |||
| ELASTICSEARCH_CA_CERTS: Optional[str] = Field( | |||
| ELASTICSEARCH_CA_CERTS: str | None = Field( | |||
| description="Path to CA certificate file for SSL verification", default=None | |||
| ) | |||
| ELASTICSEARCH_VERIFY_CERTS: bool = Field( | |||
| @@ -1,5 +1,3 @@ | |||
| from typing import Optional | |||
| from pydantic import Field | |||
| from pydantic_settings import BaseSettings | |||
| @@ -9,17 +7,17 @@ class HuaweiCloudConfig(BaseSettings): | |||
| Configuration settings for Huawei cloud search service | |||
| """ | |||
| HUAWEI_CLOUD_HOSTS: Optional[str] = Field( | |||
| HUAWEI_CLOUD_HOSTS: str | None = Field( | |||
| description="Hostname or IP address of the Huawei cloud search service instance", | |||
| default=None, | |||
| ) | |||
| HUAWEI_CLOUD_USER: Optional[str] = Field( | |||
| HUAWEI_CLOUD_USER: str | None = Field( | |||
| description="Username for authenticating with Huawei cloud search service", | |||
| default=None, | |||
| ) | |||
| HUAWEI_CLOUD_PASSWORD: Optional[str] = Field( | |||
| HUAWEI_CLOUD_PASSWORD: str | None = Field( | |||
| description="Password for authenticating with Huawei cloud search service", | |||
| default=None, | |||
| ) | |||
| @@ -1,5 +1,3 @@ | |||
| from typing import Optional | |||
| from pydantic import Field | |||
| from pydantic_settings import BaseSettings | |||
| @@ -9,27 +7,27 @@ class LindormConfig(BaseSettings): | |||
| Lindorm configs | |||
| """ | |||
| LINDORM_URL: Optional[str] = Field( | |||
| LINDORM_URL: str | None = Field( | |||
| description="Lindorm url", | |||
| default=None, | |||
| ) | |||
| LINDORM_USERNAME: Optional[str] = Field( | |||
| LINDORM_USERNAME: str | None = Field( | |||
| description="Lindorm user", | |||
| default=None, | |||
| ) | |||
| LINDORM_PASSWORD: Optional[str] = Field( | |||
| LINDORM_PASSWORD: str | None = Field( | |||
| description="Lindorm password", | |||
| default=None, | |||
| ) | |||
| DEFAULT_INDEX_TYPE: Optional[str] = Field( | |||
| DEFAULT_INDEX_TYPE: str | None = Field( | |||
| description="Lindorm Vector Index Type, hnsw or flat is available in dify", | |||
| default="hnsw", | |||
| ) | |||
| DEFAULT_DISTANCE_TYPE: Optional[str] = Field( | |||
| DEFAULT_DISTANCE_TYPE: str | None = Field( | |||
| description="Vector Distance Type, support l2, cosinesimil, innerproduct", default="l2" | |||
| ) | |||
| USING_UGC_INDEX: Optional[bool] = Field( | |||
| USING_UGC_INDEX: bool | None = Field( | |||
| description="Using UGC index will store the same type of Index in a single index but can retrieve separately.", | |||
| default=False, | |||
| ) | |||
| LINDORM_QUERY_TIMEOUT: Optional[float] = Field(description="The lindorm search request timeout (s)", default=2.0) | |||
| LINDORM_QUERY_TIMEOUT: float | None = Field(description="The lindorm search request timeout (s)", default=2.0) | |||
| @@ -1,5 +1,3 @@ | |||
| from typing import Optional | |||
| from pydantic import Field | |||
| from pydantic_settings import BaseSettings | |||
| @@ -9,22 +7,22 @@ class MilvusConfig(BaseSettings): | |||
| Configuration settings for Milvus vector database | |||
| """ | |||
| MILVUS_URI: Optional[str] = Field( | |||
| MILVUS_URI: str | None = Field( | |||
| description="URI for connecting to the Milvus server (e.g., 'http://localhost:19530' or 'https://milvus-instance.example.com:19530')", | |||
| default="http://127.0.0.1:19530", | |||
| ) | |||
| MILVUS_TOKEN: Optional[str] = Field( | |||
| MILVUS_TOKEN: str | None = Field( | |||
| description="Authentication token for Milvus, if token-based authentication is enabled", | |||
| default=None, | |||
| ) | |||
| MILVUS_USER: Optional[str] = Field( | |||
| MILVUS_USER: str | None = Field( | |||
| description="Username for authenticating with Milvus, if username/password authentication is enabled", | |||
| default=None, | |||
| ) | |||
| MILVUS_PASSWORD: Optional[str] = Field( | |||
| MILVUS_PASSWORD: str | None = Field( | |||
| description="Password for authenticating with Milvus, if username/password authentication is enabled", | |||
| default=None, | |||
| ) | |||
| @@ -40,7 +38,7 @@ class MilvusConfig(BaseSettings): | |||
| default=True, | |||
| ) | |||
| MILVUS_ANALYZER_PARAMS: Optional[str] = Field( | |||
| MILVUS_ANALYZER_PARAMS: str | None = Field( | |||
| description='Milvus text analyzer parameters, e.g., {"type": "chinese"} for Chinese segmentation support.', | |||
| default=None, | |||
| ) | |||
| @@ -1,5 +1,3 @@ | |||
| from typing import Optional | |||
| from pydantic import Field, PositiveInt | |||
| from pydantic_settings import BaseSettings | |||
| @@ -9,27 +7,27 @@ class OceanBaseVectorConfig(BaseSettings): | |||
| Configuration settings for OceanBase Vector database | |||
| """ | |||
| OCEANBASE_VECTOR_HOST: Optional[str] = Field( | |||
| OCEANBASE_VECTOR_HOST: str | None = Field( | |||
| description="Hostname or IP address of the OceanBase Vector server (e.g. 'localhost')", | |||
| default=None, | |||
| ) | |||
| OCEANBASE_VECTOR_PORT: Optional[PositiveInt] = Field( | |||
| OCEANBASE_VECTOR_PORT: PositiveInt | None = Field( | |||
| description="Port number on which the OceanBase Vector server is listening (default is 2881)", | |||
| default=2881, | |||
| ) | |||
| OCEANBASE_VECTOR_USER: Optional[str] = Field( | |||
| OCEANBASE_VECTOR_USER: str | None = Field( | |||
| description="Username for authenticating with the OceanBase Vector database", | |||
| default=None, | |||
| ) | |||
| OCEANBASE_VECTOR_PASSWORD: Optional[str] = Field( | |||
| OCEANBASE_VECTOR_PASSWORD: str | None = Field( | |||
| description="Password for authenticating with the OceanBase Vector database", | |||
| default=None, | |||
| ) | |||
| OCEANBASE_VECTOR_DATABASE: Optional[str] = Field( | |||
| OCEANBASE_VECTOR_DATABASE: str | None = Field( | |||
| description="Name of the OceanBase Vector database to connect to", | |||
| default=None, | |||
| ) | |||
| @@ -1,5 +1,3 @@ | |||
| from typing import Optional | |||
| from pydantic import Field, PositiveInt | |||
| from pydantic_settings import BaseSettings | |||
| @@ -9,7 +7,7 @@ class OpenGaussConfig(BaseSettings): | |||
| Configuration settings for OpenGauss | |||
| """ | |||
| OPENGAUSS_HOST: Optional[str] = Field( | |||
| OPENGAUSS_HOST: str | None = Field( | |||
| description="Hostname or IP address of the OpenGauss server(e.g., 'localhost')", | |||
| default=None, | |||
| ) | |||
| @@ -19,17 +17,17 @@ class OpenGaussConfig(BaseSettings): | |||
| default=6600, | |||
| ) | |||
| OPENGAUSS_USER: Optional[str] = Field( | |||
| OPENGAUSS_USER: str | None = Field( | |||
| description="Username for authenticating with the OpenGauss database", | |||
| default=None, | |||
| ) | |||
| OPENGAUSS_PASSWORD: Optional[str] = Field( | |||
| OPENGAUSS_PASSWORD: str | None = Field( | |||
| description="Password for authenticating with the OpenGauss database", | |||
| default=None, | |||
| ) | |||
| OPENGAUSS_DATABASE: Optional[str] = Field( | |||
| OPENGAUSS_DATABASE: str | None = Field( | |||
| description="Name of the OpenGauss database to connect to", | |||
| default=None, | |||
| ) | |||
| @@ -1,5 +1,5 @@ | |||
| from enum import Enum | |||
| from typing import Literal, Optional | |||
| from typing import Literal | |||
| from pydantic import Field, PositiveInt | |||
| from pydantic_settings import BaseSettings | |||
| @@ -18,7 +18,7 @@ class OpenSearchConfig(BaseSettings): | |||
| BASIC = "basic" | |||
| AWS_MANAGED_IAM = "aws_managed_iam" | |||
| OPENSEARCH_HOST: Optional[str] = Field( | |||
| OPENSEARCH_HOST: str | None = Field( | |||
| description="Hostname or IP address of the OpenSearch server (e.g., 'localhost' or 'opensearch.example.com')", | |||
| default=None, | |||
| ) | |||
| @@ -43,21 +43,21 @@ class OpenSearchConfig(BaseSettings): | |||
| default=AuthMethod.BASIC, | |||
| ) | |||
| OPENSEARCH_USER: Optional[str] = Field( | |||
| OPENSEARCH_USER: str | None = Field( | |||
| description="Username for authenticating with OpenSearch", | |||
| default=None, | |||
| ) | |||
| OPENSEARCH_PASSWORD: Optional[str] = Field( | |||
| OPENSEARCH_PASSWORD: str | None = Field( | |||
| description="Password for authenticating with OpenSearch", | |||
| default=None, | |||
| ) | |||
| OPENSEARCH_AWS_REGION: Optional[str] = Field( | |||
| OPENSEARCH_AWS_REGION: str | None = Field( | |||
| description="AWS region for OpenSearch (e.g. 'us-west-2')", | |||
| default=None, | |||
| ) | |||
| OPENSEARCH_AWS_SERVICE: Optional[Literal["es", "aoss"]] = Field( | |||
| OPENSEARCH_AWS_SERVICE: Literal["es", "aoss"] | None = Field( | |||
| description="AWS service for OpenSearch (e.g. 'aoss' for OpenSearch Serverless)", default=None | |||
| ) | |||
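The same conversion applies to parameterized types: `Optional[Literal[...]]` becomes `Literal[...] | None`, and validation is unchanged. A small sketch (the `Cfg` model is a hypothetical stand-in for the config class above):

```python
from typing import Literal

from pydantic import BaseModel, ValidationError

class Cfg(BaseModel):  # hypothetical stand-in for OpenSearchConfig
    OPENSEARCH_AWS_SERVICE: Literal["es", "aoss"] | None = None

print(Cfg(OPENSEARCH_AWS_SERVICE="aoss").OPENSEARCH_AWS_SERVICE)  # aoss
print(Cfg().OPENSEARCH_AWS_SERVICE)                               # None

try:
    Cfg(OPENSEARCH_AWS_SERVICE="s3")
except ValidationError:
    print("rejected: 's3' is not one of 'es' | 'aoss'")
```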
| @@ -1,5 +1,3 @@ | |||
| from typing import Optional | |||
| from pydantic import Field | |||
| from pydantic_settings import BaseSettings | |||
| @@ -9,33 +7,33 @@ class OracleConfig(BaseSettings): | |||
| Configuration settings for Oracle database | |||
| """ | |||
| ORACLE_USER: Optional[str] = Field( | |||
| ORACLE_USER: str | None = Field( | |||
| description="Username for authenticating with the Oracle database", | |||
| default=None, | |||
| ) | |||
| ORACLE_PASSWORD: Optional[str] = Field( | |||
| ORACLE_PASSWORD: str | None = Field( | |||
| description="Password for authenticating with the Oracle database", | |||
| default=None, | |||
| ) | |||
| ORACLE_DSN: Optional[str] = Field( | |||
| ORACLE_DSN: str | None = Field( | |||
| description="Oracle database connection string. For traditional database, use format 'host:port/service_name'. " | |||
| "For autonomous database, use the service name from tnsnames.ora in the wallet", | |||
| default=None, | |||
| ) | |||
| ORACLE_CONFIG_DIR: Optional[str] = Field( | |||
| ORACLE_CONFIG_DIR: str | None = Field( | |||
| description="Directory containing the tnsnames.ora configuration file. Only used in thin mode connection", | |||
| default=None, | |||
| ) | |||
| ORACLE_WALLET_LOCATION: Optional[str] = Field( | |||
| ORACLE_WALLET_LOCATION: str | None = Field( | |||
| description="Oracle wallet directory path containing the wallet files for secure connection", | |||
| default=None, | |||
| ) | |||
| ORACLE_WALLET_PASSWORD: Optional[str] = Field( | |||
| ORACLE_WALLET_PASSWORD: str | None = Field( | |||
| description="Password to decrypt the Oracle wallet, if it is encrypted", | |||
| default=None, | |||
| ) | |||
| @@ -1,5 +1,3 @@ | |||
| from typing import Optional | |||
| from pydantic import Field, PositiveInt | |||
| from pydantic_settings import BaseSettings | |||
| @@ -9,7 +7,7 @@ class PGVectorConfig(BaseSettings): | |||
| Configuration settings for PGVector (PostgreSQL with vector extension) | |||
| """ | |||
| PGVECTOR_HOST: Optional[str] = Field( | |||
| PGVECTOR_HOST: str | None = Field( | |||
| description="Hostname or IP address of the PostgreSQL server with PGVector extension (e.g., 'localhost')", | |||
| default=None, | |||
| ) | |||
| @@ -19,17 +17,17 @@ class PGVectorConfig(BaseSettings): | |||
| default=5433, | |||
| ) | |||
| PGVECTOR_USER: Optional[str] = Field( | |||
| PGVECTOR_USER: str | None = Field( | |||
| description="Username for authenticating with the PostgreSQL database", | |||
| default=None, | |||
| ) | |||
| PGVECTOR_PASSWORD: Optional[str] = Field( | |||
| PGVECTOR_PASSWORD: str | None = Field( | |||
| description="Password for authenticating with the PostgreSQL database", | |||
| default=None, | |||
| ) | |||
| PGVECTOR_DATABASE: Optional[str] = Field( | |||
| PGVECTOR_DATABASE: str | None = Field( | |||
| description="Name of the PostgreSQL database to connect to", | |||
| default=None, | |||
| ) | |||
| @@ -1,5 +1,3 @@ | |||
| from typing import Optional | |||
| from pydantic import Field, PositiveInt | |||
| from pydantic_settings import BaseSettings | |||
| @@ -9,7 +7,7 @@ class PGVectoRSConfig(BaseSettings): | |||
| Configuration settings for PGVecto.RS (Rust-based vector extension for PostgreSQL) | |||
| """ | |||
| PGVECTO_RS_HOST: Optional[str] = Field( | |||
| PGVECTO_RS_HOST: str | None = Field( | |||
| description="Hostname or IP address of the PostgreSQL server with PGVecto.RS extension (e.g., 'localhost')", | |||
| default=None, | |||
| ) | |||
| @@ -19,17 +17,17 @@ class PGVectoRSConfig(BaseSettings): | |||
| default=5431, | |||
| ) | |||
| PGVECTO_RS_USER: Optional[str] = Field( | |||
| PGVECTO_RS_USER: str | None = Field( | |||
| description="Username for authenticating with the PostgreSQL database using PGVecto.RS", | |||
| default=None, | |||
| ) | |||
| PGVECTO_RS_PASSWORD: Optional[str] = Field( | |||
| PGVECTO_RS_PASSWORD: str | None = Field( | |||
| description="Password for authenticating with the PostgreSQL database using PGVecto.RS", | |||
| default=None, | |||
| ) | |||
| PGVECTO_RS_DATABASE: Optional[str] = Field( | |||
| PGVECTO_RS_DATABASE: str | None = Field( | |||
| description="Name of the PostgreSQL database with PGVecto.RS extension to connect to", | |||
| default=None, | |||
| ) | |||
| @@ -1,5 +1,3 @@ | |||
| from typing import Optional | |||
| from pydantic import Field, NonNegativeInt, PositiveInt | |||
| from pydantic_settings import BaseSettings | |||
| @@ -9,12 +7,12 @@ class QdrantConfig(BaseSettings): | |||
| Configuration settings for Qdrant vector database | |||
| """ | |||
| QDRANT_URL: Optional[str] = Field( | |||
| QDRANT_URL: str | None = Field( | |||
| description="URL of the Qdrant server (e.g., 'http://localhost:6333' or 'https://qdrant.example.com')", | |||
| default=None, | |||
| ) | |||
| QDRANT_API_KEY: Optional[str] = Field( | |||
| QDRANT_API_KEY: str | None = Field( | |||
| description="API key for authenticating with the Qdrant server", | |||
| default=None, | |||
| ) | |||
| @@ -1,5 +1,3 @@ | |||
| from typing import Optional | |||
| from pydantic import Field, PositiveInt | |||
| from pydantic_settings import BaseSettings | |||
| @@ -9,7 +7,7 @@ class RelytConfig(BaseSettings): | |||
| Configuration settings for Relyt database | |||
| """ | |||
| RELYT_HOST: Optional[str] = Field( | |||
| RELYT_HOST: str | None = Field( | |||
| description="Hostname or IP address of the Relyt server (e.g., 'localhost' or 'relyt.example.com')", | |||
| default=None, | |||
| ) | |||
| @@ -19,17 +17,17 @@ class RelytConfig(BaseSettings): | |||
| default=9200, | |||
| ) | |||
| RELYT_USER: Optional[str] = Field( | |||
| RELYT_USER: str | None = Field( | |||
| description="Username for authenticating with the Relyt database", | |||
| default=None, | |||
| ) | |||
| RELYT_PASSWORD: Optional[str] = Field( | |||
| RELYT_PASSWORD: str | None = Field( | |||
| description="Password for authenticating with the Relyt database", | |||
| default=None, | |||
| ) | |||
| RELYT_DATABASE: Optional[str] = Field( | |||
| RELYT_DATABASE: str | None = Field( | |||
| description="Name of the Relyt database to connect to (default is 'default')", | |||
| default="default", | |||
| ) | |||
| @@ -1,5 +1,3 @@ | |||
| from typing import Optional | |||
| from pydantic import Field | |||
| from pydantic_settings import BaseSettings | |||
| @@ -9,22 +7,22 @@ class TableStoreConfig(BaseSettings): | |||
| Configuration settings for TableStore. | |||
| """ | |||
| TABLESTORE_ENDPOINT: Optional[str] = Field( | |||
| TABLESTORE_ENDPOINT: str | None = Field( | |||
| description="Endpoint address of the TableStore server (e.g. 'https://instance-name.cn-hangzhou.ots.aliyuncs.com')", | |||
| default=None, | |||
| ) | |||
| TABLESTORE_INSTANCE_NAME: Optional[str] = Field( | |||
| TABLESTORE_INSTANCE_NAME: str | None = Field( | |||
| description="Instance name to access TableStore server (eg. 'instance-name')", | |||
| default=None, | |||
| ) | |||
| TABLESTORE_ACCESS_KEY_ID: Optional[str] = Field( | |||
| TABLESTORE_ACCESS_KEY_ID: str | None = Field( | |||
| description="AccessKey id for the instance name", | |||
| default=None, | |||
| ) | |||
| TABLESTORE_ACCESS_KEY_SECRET: Optional[str] = Field( | |||
| TABLESTORE_ACCESS_KEY_SECRET: str | None = Field( | |||
| description="AccessKey secret for the instance name", | |||
| default=None, | |||
| ) | |||
| @@ -1,5 +1,3 @@ | |||
| from typing import Optional | |||
| from pydantic import Field, NonNegativeInt, PositiveInt | |||
| from pydantic_settings import BaseSettings | |||
| @@ -9,12 +7,12 @@ class TencentVectorDBConfig(BaseSettings): | |||
| Configuration settings for Tencent Vector Database | |||
| """ | |||
| TENCENT_VECTOR_DB_URL: Optional[str] = Field( | |||
| TENCENT_VECTOR_DB_URL: str | None = Field( | |||
| description="URL of the Tencent Vector Database service (e.g., 'https://vectordb.tencentcloudapi.com')", | |||
| default=None, | |||
| ) | |||
| TENCENT_VECTOR_DB_API_KEY: Optional[str] = Field( | |||
| TENCENT_VECTOR_DB_API_KEY: str | None = Field( | |||
| description="API key for authenticating with the Tencent Vector Database service", | |||
| default=None, | |||
| ) | |||
| @@ -24,12 +22,12 @@ class TencentVectorDBConfig(BaseSettings): | |||
| default=30, | |||
| ) | |||
| TENCENT_VECTOR_DB_USERNAME: Optional[str] = Field( | |||
| TENCENT_VECTOR_DB_USERNAME: str | None = Field( | |||
| description="Username for authenticating with the Tencent Vector Database (if required)", | |||
| default=None, | |||
| ) | |||
| TENCENT_VECTOR_DB_PASSWORD: Optional[str] = Field( | |||
| TENCENT_VECTOR_DB_PASSWORD: str | None = Field( | |||
| description="Password for authenticating with the Tencent Vector Database (if required)", | |||
| default=None, | |||
| ) | |||
| @@ -44,7 +42,7 @@ class TencentVectorDBConfig(BaseSettings): | |||
| default=2, | |||
| ) | |||
| TENCENT_VECTOR_DB_DATABASE: Optional[str] = Field( | |||
| TENCENT_VECTOR_DB_DATABASE: str | None = Field( | |||
| description="Name of the specific Tencent Vector Database to connect to", | |||
| default=None, | |||
| ) | |||
| @@ -1,5 +1,3 @@ | |||
| from typing import Optional | |||
| from pydantic import Field, NonNegativeInt, PositiveInt | |||
| from pydantic_settings import BaseSettings | |||
| @@ -9,12 +7,12 @@ class TidbOnQdrantConfig(BaseSettings): | |||
| TiDB on Qdrant configs | |||
| """ | |||
| TIDB_ON_QDRANT_URL: Optional[str] = Field( | |||
| TIDB_ON_QDRANT_URL: str | None = Field( | |||
| description="Tidb on Qdrant url", | |||
| default=None, | |||
| ) | |||
| TIDB_ON_QDRANT_API_KEY: Optional[str] = Field( | |||
| TIDB_ON_QDRANT_API_KEY: str | None = Field( | |||
| description="Tidb on Qdrant api key", | |||
| default=None, | |||
| ) | |||
| @@ -34,37 +32,37 @@ class TidbOnQdrantConfig(BaseSettings): | |||
| default=6334, | |||
| ) | |||
| TIDB_PUBLIC_KEY: Optional[str] = Field( | |||
| TIDB_PUBLIC_KEY: str | None = Field( | |||
| description="Tidb account public key", | |||
| default=None, | |||
| ) | |||
| TIDB_PRIVATE_KEY: Optional[str] = Field( | |||
| TIDB_PRIVATE_KEY: str | None = Field( | |||
| description="Tidb account private key", | |||
| default=None, | |||
| ) | |||
| TIDB_API_URL: Optional[str] = Field( | |||
| TIDB_API_URL: str | None = Field( | |||
| description="Tidb API url", | |||
| default=None, | |||
| ) | |||
| TIDB_IAM_API_URL: Optional[str] = Field( | |||
| TIDB_IAM_API_URL: str | None = Field( | |||
| description="Tidb IAM API url", | |||
| default=None, | |||
| ) | |||
| TIDB_REGION: Optional[str] = Field( | |||
| TIDB_REGION: str | None = Field( | |||
| description="Tidb serverless region", | |||
| default="regions/aws-us-east-1", | |||
| ) | |||
| TIDB_PROJECT_ID: Optional[str] = Field( | |||
| TIDB_PROJECT_ID: str | None = Field( | |||
| description="Tidb project id", | |||
| default=None, | |||
| ) | |||
| TIDB_SPEND_LIMIT: Optional[int] = Field( | |||
| TIDB_SPEND_LIMIT: int | None = Field( | |||
| description="Tidb spend limit", | |||
| default=100, | |||
| ) | |||
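Fields like `TIDB_REGION` and `TIDB_SPEND_LIMIT` keep non-None defaults even though the annotation is now nullable: the union only widens what may be assigned, it does not change the default. A reduced sketch (hypothetical class name):

```python
from pydantic import Field
from pydantic_settings import BaseSettings


class ExampleTidbConfig(BaseSettings):
    # nullable annotation, non-None default: when the env var is unset the
    # value is 100, but an explicit None is also accepted
    TIDB_SPEND_LIMIT: int | None = Field(
        description="TiDB spend limit",
        default=100,
    )


assert ExampleTidbConfig().TIDB_SPEND_LIMIT == 100
assert ExampleTidbConfig(TIDB_SPEND_LIMIT=None).TIDB_SPEND_LIMIT is None
```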
| @@ -1,5 +1,3 @@ | |||
| from typing import Optional | |||
| from pydantic import Field, PositiveInt | |||
| from pydantic_settings import BaseSettings | |||
| @@ -9,27 +7,27 @@ class TiDBVectorConfig(BaseSettings): | |||
| Configuration settings for TiDB Vector database | |||
| """ | |||
| TIDB_VECTOR_HOST: Optional[str] = Field( | |||
| TIDB_VECTOR_HOST: str | None = Field( | |||
| description="Hostname or IP address of the TiDB Vector server (e.g., 'localhost' or 'tidb.example.com')", | |||
| default=None, | |||
| ) | |||
| TIDB_VECTOR_PORT: Optional[PositiveInt] = Field( | |||
| TIDB_VECTOR_PORT: PositiveInt | None = Field( | |||
| description="Port number on which the TiDB Vector server is listening (default is 4000)", | |||
| default=4000, | |||
| ) | |||
| TIDB_VECTOR_USER: Optional[str] = Field( | |||
| TIDB_VECTOR_USER: str | None = Field( | |||
| description="Username for authenticating with the TiDB Vector database", | |||
| default=None, | |||
| ) | |||
| TIDB_VECTOR_PASSWORD: Optional[str] = Field( | |||
| TIDB_VECTOR_PASSWORD: str | None = Field( | |||
| description="Password for authenticating with the TiDB Vector database", | |||
| default=None, | |||
| ) | |||
| TIDB_VECTOR_DATABASE: Optional[str] = Field( | |||
| TIDB_VECTOR_DATABASE: str | None = Field( | |||
| description="Name of the TiDB Vector database to connect to", | |||
| default=None, | |||
| ) | |||
| @@ -1,5 +1,3 @@ | |||
| from typing import Optional | |||
| from pydantic import Field | |||
| from pydantic_settings import BaseSettings | |||
| @@ -9,12 +7,12 @@ class UpstashConfig(BaseSettings): | |||
| Configuration settings for Upstash vector database | |||
| """ | |||
| UPSTASH_VECTOR_URL: Optional[str] = Field( | |||
| UPSTASH_VECTOR_URL: str | None = Field( | |||
| description="URL of the upstash server (e.g., 'https://vector.upstash.io')", | |||
| default=None, | |||
| ) | |||
| UPSTASH_VECTOR_TOKEN: Optional[str] = Field( | |||
| UPSTASH_VECTOR_TOKEN: str | None = Field( | |||
| description="Token for authenticating with the upstash server", | |||
| default=None, | |||
| ) | |||
| @@ -1,5 +1,3 @@ | |||
| from typing import Optional | |||
| from pydantic import Field, PositiveInt | |||
| from pydantic_settings import BaseSettings | |||
| @@ -9,7 +7,7 @@ class VastbaseVectorConfig(BaseSettings): | |||
| Configuration settings for Vastbase Vector (Vastbase with vector extension) | |||
| """ | |||
| VASTBASE_HOST: Optional[str] = Field( | |||
| VASTBASE_HOST: str | None = Field( | |||
| description="Hostname or IP address of the Vastbase server with Vector extension (e.g., 'localhost')", | |||
| default=None, | |||
| ) | |||
| @@ -19,17 +17,17 @@ class VastbaseVectorConfig(BaseSettings): | |||
| default=5432, | |||
| ) | |||
| VASTBASE_USER: Optional[str] = Field( | |||
| VASTBASE_USER: str | None = Field( | |||
| description="Username for authenticating with the Vastbase database", | |||
| default=None, | |||
| ) | |||
| VASTBASE_PASSWORD: Optional[str] = Field( | |||
| VASTBASE_PASSWORD: str | None = Field( | |||
| description="Password for authenticating with the Vastbase database", | |||
| default=None, | |||
| ) | |||
| VASTBASE_DATABASE: Optional[str] = Field( | |||
| VASTBASE_DATABASE: str | None = Field( | |||
| description="Name of the Vastbase database to connect to", | |||
| default=None, | |||
| ) | |||
| @@ -1,5 +1,3 @@ | |||
| from typing import Optional | |||
| from pydantic import Field | |||
| from pydantic_settings import BaseSettings | |||
| @@ -11,14 +9,14 @@ class VikingDBConfig(BaseSettings): | |||
| https://www.volcengine.com/docs/6291/65568 | |||
| """ | |||
| VIKINGDB_ACCESS_KEY: Optional[str] = Field( | |||
| VIKINGDB_ACCESS_KEY: str | None = Field( | |||
| description="The Access Key provided by Volcengine VikingDB for API authentication." | |||
| "Refer to the following documentation for details on obtaining credentials:" | |||
| "https://www.volcengine.com/docs/6291/65568", | |||
| default=None, | |||
| ) | |||
| VIKINGDB_SECRET_KEY: Optional[str] = Field( | |||
| VIKINGDB_SECRET_KEY: str | None = Field( | |||
| description="The Secret Key provided by Volcengine VikingDB for API authentication.", | |||
| default=None, | |||
| ) | |||
| @@ -1,5 +1,3 @@ | |||
| from typing import Optional | |||
| from pydantic import Field, PositiveInt | |||
| from pydantic_settings import BaseSettings | |||
| @@ -9,12 +7,12 @@ class WeaviateConfig(BaseSettings): | |||
| Configuration settings for Weaviate vector database | |||
| """ | |||
| WEAVIATE_ENDPOINT: Optional[str] = Field( | |||
| WEAVIATE_ENDPOINT: str | None = Field( | |||
| description="URL of the Weaviate server (e.g., 'http://localhost:8080' or 'https://weaviate.example.com')", | |||
| default=None, | |||
| ) | |||
| WEAVIATE_API_KEY: Optional[str] = Field( | |||
| WEAVIATE_API_KEY: str | None = Field( | |||
| description="API key for authenticating with the Weaviate server", | |||
| default=None, | |||
| ) | |||
| @@ -1,5 +1,5 @@ | |||
| from collections.abc import Mapping | |||
| from typing import Any, Optional | |||
| from typing import Any | |||
| from pydantic import Field | |||
| from pydantic.fields import FieldInfo | |||
| @@ -15,22 +15,22 @@ class ApolloSettingsSourceInfo(BaseSettings): | |||
| Apollo settings source information | |||
| """ | |||
| APOLLO_APP_ID: Optional[str] = Field( | |||
| APOLLO_APP_ID: str | None = Field( | |||
| description="apollo app_id", | |||
| default=None, | |||
| ) | |||
| APOLLO_CLUSTER: Optional[str] = Field( | |||
| APOLLO_CLUSTER: str | None = Field( | |||
| description="apollo cluster", | |||
| default=None, | |||
| ) | |||
| APOLLO_CONFIG_URL: Optional[str] = Field( | |||
| APOLLO_CONFIG_URL: str | None = Field( | |||
| description="apollo config url", | |||
| default=None, | |||
| ) | |||
| APOLLO_NAMESPACE: Optional[str] = Field( | |||
| APOLLO_NAMESPACE: str | None = Field( | |||
| description="apollo namespace", | |||
| default=None, | |||
| ) | |||
| @@ -1,5 +1,3 @@ | |||
| from typing import Optional | |||
| import flask_restx | |||
| from flask_login import current_user | |||
| from flask_restx import Resource, fields, marshal_with | |||
| @@ -50,7 +48,7 @@ class BaseApiKeyListResource(Resource): | |||
| method_decorators = [account_initialization_required, login_required, setup_required] | |||
| resource_type: str | None = None | |||
| resource_model: Optional[type] = None | |||
| resource_model: type | None = None | |||
| resource_id_field: str | None = None | |||
| token_prefix: str | None = None | |||
| max_keys = 10 | |||
| @@ -103,7 +101,7 @@ class BaseApiKeyResource(Resource): | |||
| method_decorators = [account_initialization_required, login_required, setup_required] | |||
| resource_type: str | None = None | |||
| resource_model: Optional[type] = None | |||
| resource_model: type | None = None | |||
| resource_id_field: str | None = None | |||
| def delete(self, resource_id, api_key_id): | |||
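The `resource_type`/`resource_model` attributes above are class-level declarations that concrete resources override, which is why they are annotated nullable with a None placeholder. A stripped-down sketch of the pattern (names hypothetical, no flask_restx dependency):

```python
class BaseApiKeyList:
    # declared nullable on the base class, filled in by each subclass
    resource_type: str | None = None
    resource_model: type | None = None
    resource_id_field: str | None = None
    max_keys = 10


class DatasetApiKeyList(BaseApiKeyList):
    resource_type = "dataset"
    resource_model = dict  # stand-in for the real ORM model class
    resource_id_field = "dataset_id"
```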
| @@ -1,6 +1,6 @@ | |||
| from collections.abc import Callable | |||
| from functools import wraps | |||
| from typing import Optional, ParamSpec, TypeVar, Union | |||
| from typing import ParamSpec, TypeVar, Union | |||
| from controllers.console.app.error import AppNotFoundError | |||
| from extensions.ext_database import db | |||
| @@ -12,7 +12,7 @@ P = ParamSpec("P") | |||
| R = TypeVar("R") | |||
| def _load_app_model(app_id: str) -> Optional[App]: | |||
| def _load_app_model(app_id: str) -> App | None: | |||
| assert isinstance(current_user, Account) | |||
| app_model = ( | |||
| db.session.query(App) | |||
| @@ -22,7 +22,7 @@ def _load_app_model(app_id: str) -> Optional[App]: | |||
| return app_model | |||
| def get_app_model(view: Optional[Callable[P, R]] = None, *, mode: Union[AppMode, list[AppMode], None] = None): | |||
| def get_app_model(view: Callable[P, R] | None = None, *, mode: Union[AppMode, list[AppMode], None] = None): | |||
| def decorator(view_func: Callable[P, R]): | |||
| @wraps(view_func) | |||
| def decorated_view(*args: P.args, **kwargs: P.kwargs): | |||
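`get_app_model` uses the decorator-with-optional-arguments idiom: `view` is the decorated function when the decorator is applied bare and None when it is applied with keyword arguments, hence `Callable[P, R] | None`. A self-contained sketch of the idiom (decorator body simplified to a pass-through):

```python
from collections.abc import Callable
from functools import wraps
from typing import ParamSpec, TypeVar

P = ParamSpec("P")
R = TypeVar("R")


def with_app_model(view: Callable[P, R] | None = None, *, mode: str | None = None):
    def decorator(view_func: Callable[P, R]) -> Callable[P, R]:
        @wraps(view_func)
        def decorated_view(*args: P.args, **kwargs: P.kwargs) -> R:
            # the real decorator loads the App model here before delegating
            return view_func(*args, **kwargs)

        return decorated_view

    # bare use (@with_app_model) receives the function; parameterized use
    # (@with_app_model(mode="chat")) receives view=None and returns decorator
    if view is None:
        return decorator
    return decorator(view)


@with_app_model
def ping() -> str:
    return "pong"


@with_app_model(mode="chat")
def chat() -> str:
    return "hi"
```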
| @@ -1,5 +1,4 @@ | |||
| import logging | |||
| from typing import Optional | |||
| import requests | |||
| from flask import current_app, redirect, request | |||
| @@ -157,8 +156,8 @@ class OAuthCallback(Resource): | |||
| ) | |||
| def _get_account_by_openid_or_email(provider: str, user_info: OAuthUserInfo) -> Optional[Account]: | |||
| account: Optional[Account] = Account.get_by_openid(provider, user_info.id) | |||
| def _get_account_by_openid_or_email(provider: str, user_info: OAuthUserInfo) -> Account | None: | |||
| account: Account | None = Account.get_by_openid(provider, user_info.id) | |||
| if not account: | |||
| with Session(db.engine) as session: | |||
| @@ -1,6 +1,6 @@ | |||
| from collections.abc import Callable | |||
| from functools import wraps | |||
| from typing import Concatenate, Optional, ParamSpec, TypeVar | |||
| from typing import Concatenate, ParamSpec, TypeVar | |||
| from flask_login import current_user | |||
| from flask_restx import Resource | |||
| @@ -20,7 +20,7 @@ R = TypeVar("R") | |||
| T = TypeVar("T") | |||
| def installed_app_required(view: Optional[Callable[Concatenate[InstalledApp, P], R]] = None): | |||
| def installed_app_required(view: Callable[Concatenate[InstalledApp, P], R] | None = None): | |||
| def decorator(view: Callable[Concatenate[InstalledApp, P], R]): | |||
| @wraps(view) | |||
| def decorated(installed_app_id: str, *args: P.args, **kwargs: P.kwargs): | |||
| @@ -50,7 +50,7 @@ def installed_app_required(view: Optional[Callable[Concatenate[InstalledApp, P], | |||
| return decorator | |||
| def user_allowed_to_access_app(view: Optional[Callable[Concatenate[InstalledApp, P], R]] = None): | |||
| def user_allowed_to_access_app(view: Callable[Concatenate[InstalledApp, P], R] | None = None): | |||
| def decorator(view: Callable[Concatenate[InstalledApp, P], R]): | |||
| @wraps(view) | |||
| def decorated(installed_app: InstalledApp, *args: P.args, **kwargs: P.kwargs): | |||
| @@ -1,5 +1,4 @@ | |||
| from mimetypes import guess_extension | |||
| from typing import Optional | |||
| from flask_restx import Resource, reqparse | |||
| from flask_restx.api import HTTPStatus | |||
| @@ -73,11 +72,11 @@ class PluginUploadFileApi(Resource): | |||
| nonce: str = args["nonce"] | |||
| sign: str = args["sign"] | |||
| tenant_id: str = args["tenant_id"] | |||
| user_id: Optional[str] = args.get("user_id") | |||
| user_id: str | None = args.get("user_id") | |||
| user = get_user(tenant_id, user_id) | |||
| filename: Optional[str] = file.filename | |||
| mimetype: Optional[str] = file.mimetype | |||
| filename: str | None = file.filename | |||
| mimetype: str | None = file.mimetype | |||
| if not filename or not mimetype: | |||
| raise Forbidden("Invalid request.") | |||
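The `filename`/`mimetype` locals rely on the guard that follows them to narrow `str | None` down to `str`. A minimal standalone version of that check (the real handler raises Forbidden rather than ValueError):

```python
def require_file_fields(filename: str | None, mimetype: str | None) -> tuple[str, str]:
    # after this guard a type checker treats both values as plain str
    if not filename or not mimetype:
        raise ValueError("Invalid request.")
    return filename, mimetype
```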
| @@ -1,6 +1,6 @@ | |||
| from collections.abc import Callable | |||
| from functools import wraps | |||
| from typing import Optional, ParamSpec, TypeVar, cast | |||
| from typing import ParamSpec, TypeVar, cast | |||
| from flask import current_app, request | |||
| from flask_login import user_logged_in | |||
| @@ -54,7 +54,7 @@ def get_user(tenant_id: str, user_id: str | None) -> EndUser: | |||
| return user_model | |||
| def get_user_tenant(view: Optional[Callable[P, R]] = None): | |||
| def get_user_tenant(view: Callable[P, R] | None = None): | |||
| def decorator(view_func: Callable[P, R]): | |||
| @wraps(view_func) | |||
| def decorated_view(*args: P.args, **kwargs: P.kwargs): | |||
| @@ -106,7 +106,7 @@ def get_user_tenant(view: Optional[Callable[P, R]] = None): | |||
| return decorator(view) | |||
| def plugin_data(view: Optional[Callable[P, R]] = None, *, payload_type: type[BaseModel]): | |||
| def plugin_data(view: Callable[P, R] | None = None, *, payload_type: type[BaseModel]): | |||
| def decorator(view_func: Callable[P, R]): | |||
| def decorated_view(*args: P.args, **kwargs: P.kwargs): | |||
| try: | |||
| @@ -1,4 +1,4 @@ | |||
| from typing import Optional, Union | |||
| from typing import Union | |||
| from flask import Response | |||
| from flask_restx import Resource, reqparse | |||
| @@ -73,7 +73,7 @@ class MCPAppApi(Resource): | |||
| ValidationError: Invalid request format or parameters | |||
| """ | |||
| args = mcp_request_parser.parse_args() | |||
| request_id: Optional[Union[int, str]] = args.get("id") | |||
| request_id: Union[int, str] | None = args.get("id") | |||
| mcp_request = self._parse_mcp_request(args) | |||
| with Session(db.engine, expire_on_commit=False) as session: | |||
| @@ -107,7 +107,7 @@ class MCPAppApi(Resource): | |||
| def _process_mcp_message( | |||
| self, | |||
| mcp_request: mcp_types.ClientRequest | mcp_types.ClientNotification, | |||
| request_id: Optional[Union[int, str]], | |||
| request_id: Union[int, str] | None, | |||
| app: App, | |||
| mcp_server: AppMCPServer, | |||
| user_input_form: list[VariableEntity], | |||
| @@ -130,7 +130,7 @@ class MCPAppApi(Resource): | |||
| def _handle_request( | |||
| self, | |||
| mcp_request: mcp_types.ClientRequest, | |||
| request_id: Optional[Union[int, str]], | |||
| request_id: Union[int, str] | None, | |||
| app: App, | |||
| mcp_server: AppMCPServer, | |||
| user_input_form: list[VariableEntity], | |||
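The mixed spelling `Union[int, str] | None` that the rewrite produces for `request_id` is equivalent to the original `Optional[Union[int, str]]`: both flatten to the same three-member union at runtime. A quick check (function names hypothetical):

```python
from typing import Optional, Union, get_type_hints


def before(request_id: Optional[Union[int, str]] = None) -> None: ...


def after(request_id: Union[int, str] | None = None) -> None: ...


# both annotations normalize to Union[int, str, None]
assert get_type_hints(before) == get_type_hints(after)
```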
| @@ -3,7 +3,7 @@ from collections.abc import Callable | |||
| from datetime import timedelta | |||
| from enum import StrEnum, auto | |||
| from functools import wraps | |||
| from typing import Concatenate, Optional, ParamSpec, TypeVar | |||
| from typing import Concatenate, ParamSpec, TypeVar | |||
| from flask import current_app, request | |||
| from flask_login import user_logged_in | |||
| @@ -42,7 +42,7 @@ class FetchUserArg(BaseModel): | |||
| required: bool = False | |||
| def validate_app_token(view: Optional[Callable[P, R]] = None, *, fetch_user_arg: Optional[FetchUserArg] = None): | |||
| def validate_app_token(view: Callable[P, R] | None = None, *, fetch_user_arg: FetchUserArg | None = None): | |||
| def decorator(view_func: Callable[P, R]): | |||
| @wraps(view_func) | |||
| def decorated_view(*args: P.args, **kwargs: P.kwargs): | |||
| @@ -189,7 +189,7 @@ def cloud_edition_billing_rate_limit_check(resource: str, api_token_type: str): | |||
| return interceptor | |||
| def validate_dataset_token(view: Optional[Callable[Concatenate[T, P], R]] = None): | |||
| def validate_dataset_token(view: Callable[Concatenate[T, P], R] | None = None): | |||
| def decorator(view: Callable[Concatenate[T, P], R]): | |||
| @wraps(view) | |||
| def decorated(*args: P.args, **kwargs: P.kwargs): | |||
| @@ -267,7 +267,7 @@ def validate_and_get_api_token(scope: str | None = None): | |||
| return api_token | |||
| def create_or_update_end_user_for_user_id(app_model: App, user_id: Optional[str] = None) -> EndUser: | |||
| def create_or_update_end_user_for_user_id(app_model: App, user_id: str | None = None) -> EndUser: | |||
| """ | |||
| Create or update session terminal based on user ID. | |||
| """ | |||
| @@ -1,7 +1,7 @@ | |||
| from collections.abc import Callable | |||
| from datetime import UTC, datetime | |||
| from functools import wraps | |||
| from typing import Concatenate, Optional, ParamSpec, TypeVar | |||
| from typing import Concatenate, ParamSpec, TypeVar | |||
| from flask import request | |||
| from flask_restx import Resource | |||
| @@ -21,7 +21,7 @@ P = ParamSpec("P") | |||
| R = TypeVar("R") | |||
| def validate_jwt_token(view: Optional[Callable[Concatenate[App, EndUser, P], R]] = None): | |||
| def validate_jwt_token(view: Callable[Concatenate[App, EndUser, P], R] | None = None): | |||
| def decorator(view: Callable[Concatenate[App, EndUser, P], R]): | |||
| @wraps(view) | |||
| def decorated(*args: P.args, **kwargs: P.kwargs): | |||
| @@ -1,7 +1,7 @@ | |||
| import json | |||
| import logging | |||
| import uuid | |||
| from typing import Optional, Union, cast | |||
| from typing import Union, cast | |||
| from sqlalchemy import select | |||
| @@ -60,8 +60,8 @@ class BaseAgentRunner(AppRunner): | |||
| message: Message, | |||
| user_id: str, | |||
| model_instance: ModelInstance, | |||
| memory: Optional[TokenBufferMemory] = None, | |||
| prompt_messages: Optional[list[PromptMessage]] = None, | |||
| memory: TokenBufferMemory | None = None, | |||
| prompt_messages: list[PromptMessage] | None = None, | |||
| ): | |||
| self.tenant_id = tenant_id | |||
| self.application_generate_entity = application_generate_entity | |||
| @@ -112,7 +112,7 @@ class BaseAgentRunner(AppRunner): | |||
| features = model_schema.features if model_schema and model_schema.features else [] | |||
| self.stream_tool_call = ModelFeature.STREAM_TOOL_CALL in features | |||
| self.files = application_generate_entity.files if ModelFeature.VISION in features else [] | |||
| self.query: Optional[str] = "" | |||
| self.query: str | None = "" | |||
| self._current_thoughts: list[PromptMessage] = [] | |||
| def _repack_app_generate_entity( | |||
| @@ -1,7 +1,7 @@ | |||
| import json | |||
| from abc import ABC, abstractmethod | |||
| from collections.abc import Generator, Mapping, Sequence | |||
| from typing import Any, Optional | |||
| from typing import Any | |||
| from core.agent.base_agent_runner import BaseAgentRunner | |||
| from core.agent.entities import AgentScratchpadUnit | |||
| @@ -70,12 +70,12 @@ class CotAgentRunner(BaseAgentRunner, ABC): | |||
| self._prompt_messages_tools = prompt_messages_tools | |||
| function_call_state = True | |||
| llm_usage: dict[str, Optional[LLMUsage]] = {"usage": None} | |||
| llm_usage: dict[str, LLMUsage | None] = {"usage": None} | |||
| final_answer = "" | |||
| prompt_messages: list = [] # Initialize prompt_messages | |||
| agent_thought_id = "" # Initialize agent_thought_id | |||
| def increase_usage(final_llm_usage_dict: dict[str, Optional[LLMUsage]], usage: LLMUsage): | |||
| def increase_usage(final_llm_usage_dict: dict[str, LLMUsage | None], usage: LLMUsage): | |||
| if not final_llm_usage_dict["usage"]: | |||
| final_llm_usage_dict["usage"] = usage | |||
| else: | |||
| @@ -122,7 +122,7 @@ class CotAgentRunner(BaseAgentRunner, ABC): | |||
| callbacks=[], | |||
| ) | |||
| usage_dict: dict[str, Optional[LLMUsage]] = {} | |||
| usage_dict: dict[str, LLMUsage | None] = {} | |||
| react_chunks = CotAgentOutputParser.handle_react_stream_output(chunks, usage_dict) | |||
| scratchpad = AgentScratchpadUnit( | |||
| agent_response="", | |||
| @@ -274,7 +274,7 @@ class CotAgentRunner(BaseAgentRunner, ABC): | |||
| action: AgentScratchpadUnit.Action, | |||
| tool_instances: Mapping[str, Tool], | |||
| message_file_ids: list[str], | |||
| trace_manager: Optional[TraceQueueManager] = None, | |||
| trace_manager: TraceQueueManager | None = None, | |||
| ) -> tuple[str, ToolInvokeMeta]: | |||
| """ | |||
| handle invoke action | |||
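The `llm_usage: dict[str, LLMUsage | None]` dictionaries in this runner act as mutable cells: they start as `{"usage": None}` and the nested `increase_usage` helper fills or accumulates them across streaming iterations. A reduced sketch with a stand-in usage type:

```python
from dataclasses import dataclass


@dataclass
class Usage:  # stand-in for LLMUsage
    total_tokens: int = 0


def increase_usage(final_llm_usage_dict: dict[str, Usage | None], usage: Usage) -> None:
    if not final_llm_usage_dict["usage"]:
        # first chunk: adopt the usage object wholesale
        final_llm_usage_dict["usage"] = usage
    else:
        # later chunks: accumulate into the existing object
        final_llm_usage_dict["usage"].total_tokens += usage.total_tokens


llm_usage: dict[str, Usage | None] = {"usage": None}
increase_usage(llm_usage, Usage(total_tokens=10))
increase_usage(llm_usage, Usage(total_tokens=5))
assert llm_usage["usage"] is not None and llm_usage["usage"].total_tokens == 15
```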
| @@ -1,5 +1,4 @@ | |||
| import json | |||
| from typing import Optional | |||
| from core.agent.cot_agent_runner import CotAgentRunner | |||
| from core.model_runtime.entities.message_entities import ( | |||
| @@ -31,7 +30,7 @@ class CotCompletionAgentRunner(CotAgentRunner): | |||
| return system_prompt | |||
| def _organize_historic_prompt(self, current_session_messages: Optional[list[PromptMessage]] = None) -> str: | |||
| def _organize_historic_prompt(self, current_session_messages: list[PromptMessage] | None = None) -> str: | |||
| """ | |||
| Organize historic prompt | |||
| """ | |||
| @@ -1,5 +1,5 @@ | |||
| from enum import StrEnum | |||
| from typing import Any, Optional, Union | |||
| from typing import Any, Union | |||
| from pydantic import BaseModel, Field | |||
| @@ -50,11 +50,11 @@ class AgentScratchpadUnit(BaseModel): | |||
| "action_input": self.action_input, | |||
| } | |||
| agent_response: Optional[str] = None | |||
| thought: Optional[str] = None | |||
| action_str: Optional[str] = None | |||
| observation: Optional[str] = None | |||
| action: Optional[Action] = None | |||
| agent_response: str | None = None | |||
| thought: str | None = None | |||
| action_str: str | None = None | |||
| observation: str | None = None | |||
| action: Action | None = None | |||
| def is_final(self) -> bool: | |||
| """ | |||
| @@ -81,8 +81,8 @@ class AgentEntity(BaseModel): | |||
| provider: str | |||
| model: str | |||
| strategy: Strategy | |||
| prompt: Optional[AgentPromptEntity] = None | |||
| tools: Optional[list[AgentToolEntity]] = None | |||
| prompt: AgentPromptEntity | None = None | |||
| tools: list[AgentToolEntity] | None = None | |||
| max_iteration: int = 10 | |||
| @@ -2,7 +2,7 @@ import json | |||
| import logging | |||
| from collections.abc import Generator | |||
| from copy import deepcopy | |||
| from typing import Any, Optional, Union | |||
| from typing import Any, Union | |||
| from core.agent.base_agent_runner import BaseAgentRunner | |||
| from core.app.apps.base_app_queue_manager import PublishFrom | |||
| @@ -52,14 +52,14 @@ class FunctionCallAgentRunner(BaseAgentRunner): | |||
| # continue to run until there are no more tool calls | |||
| function_call_state = True | |||
| llm_usage: dict[str, Optional[LLMUsage]] = {"usage": None} | |||
| llm_usage: dict[str, LLMUsage | None] = {"usage": None} | |||
| final_answer = "" | |||
| prompt_messages: list = [] # Initialize prompt_messages | |||
| # get tracing instance | |||
| trace_manager = app_generate_entity.trace_manager | |||
| def increase_usage(final_llm_usage_dict: dict[str, Optional[LLMUsage]], usage: LLMUsage): | |||
| def increase_usage(final_llm_usage_dict: dict[str, LLMUsage | None], usage: LLMUsage): | |||
| if not final_llm_usage_dict["usage"]: | |||
| final_llm_usage_dict["usage"] = usage | |||
| else: | |||
| @@ -1,5 +1,5 @@ | |||
| from enum import StrEnum | |||
| from typing import Any, Optional | |||
| from typing import Any | |||
| from pydantic import BaseModel, ConfigDict, Field, ValidationInfo, field_validator | |||
| @@ -53,7 +53,7 @@ class AgentStrategyParameter(PluginParameter): | |||
| return cast_parameter_value(self, value) | |||
| type: AgentStrategyParameterType = Field(..., description="The type of the parameter") | |||
| help: Optional[I18nObject] = None | |||
| help: I18nObject | None = None | |||
| def init_frontend_parameter(self, value: Any): | |||
| return init_frontend_parameter(self, self.type, value) | |||
| @@ -61,7 +61,7 @@ class AgentStrategyParameter(PluginParameter): | |||
| class AgentStrategyProviderEntity(BaseModel): | |||
| identity: AgentStrategyProviderIdentity | |||
| plugin_id: Optional[str] = Field(None, description="The id of the plugin") | |||
| plugin_id: str | None = Field(None, description="The id of the plugin") | |||
| class AgentStrategyIdentity(ToolIdentity): | |||
| @@ -84,9 +84,9 @@ class AgentStrategyEntity(BaseModel): | |||
| identity: AgentStrategyIdentity | |||
| parameters: list[AgentStrategyParameter] = Field(default_factory=list) | |||
| description: I18nObject = Field(..., description="The description of the agent strategy") | |||
| output_schema: Optional[dict] = None | |||
| features: Optional[list[AgentFeature]] = None | |||
| meta_version: Optional[str] = None | |||
| output_schema: dict | None = None | |||
| features: list[AgentFeature] | None = None | |||
| meta_version: str | None = None | |||
| # pydantic configs | |||
| model_config = ConfigDict(protected_namespaces=()) | |||
| @@ -1,6 +1,6 @@ | |||
| from abc import ABC, abstractmethod | |||
| from collections.abc import Generator, Sequence | |||
| from typing import Any, Optional | |||
| from typing import Any | |||
| from core.agent.entities import AgentInvokeMessage | |||
| from core.agent.plugin_entities import AgentStrategyParameter | |||
| @@ -16,10 +16,10 @@ class BaseAgentStrategy(ABC): | |||
| self, | |||
| params: dict[str, Any], | |||
| user_id: str, | |||
| conversation_id: Optional[str] = None, | |||
| app_id: Optional[str] = None, | |||
| message_id: Optional[str] = None, | |||
| credentials: Optional[InvokeCredentials] = None, | |||
| conversation_id: str | None = None, | |||
| app_id: str | None = None, | |||
| message_id: str | None = None, | |||
| credentials: InvokeCredentials | None = None, | |||
| ) -> Generator[AgentInvokeMessage, None, None]: | |||
| """ | |||
| Invoke the agent strategy. | |||
| @@ -37,9 +37,9 @@ class BaseAgentStrategy(ABC): | |||
| self, | |||
| params: dict[str, Any], | |||
| user_id: str, | |||
| conversation_id: Optional[str] = None, | |||
| app_id: Optional[str] = None, | |||
| message_id: Optional[str] = None, | |||
| credentials: Optional[InvokeCredentials] = None, | |||
| conversation_id: str | None = None, | |||
| app_id: str | None = None, | |||
| message_id: str | None = None, | |||
| credentials: InvokeCredentials | None = None, | |||
| ) -> Generator[AgentInvokeMessage, None, None]: | |||
| pass | |||
| @@ -1,5 +1,5 @@ | |||
| from collections.abc import Generator, Sequence | |||
| from typing import Any, Optional | |||
| from typing import Any | |||
| from core.agent.entities import AgentInvokeMessage | |||
| from core.agent.plugin_entities import AgentStrategyEntity, AgentStrategyParameter | |||
| @@ -38,10 +38,10 @@ class PluginAgentStrategy(BaseAgentStrategy): | |||
| self, | |||
| params: dict[str, Any], | |||
| user_id: str, | |||
| conversation_id: Optional[str] = None, | |||
| app_id: Optional[str] = None, | |||
| message_id: Optional[str] = None, | |||
| credentials: Optional[InvokeCredentials] = None, | |||
| conversation_id: str | None = None, | |||
| app_id: str | None = None, | |||
| message_id: str | None = None, | |||
| credentials: InvokeCredentials | None = None, | |||
| ) -> Generator[AgentInvokeMessage, None, None]: | |||
| """ | |||
| Invoke the agent strategy. | |||
| @@ -1,12 +1,10 @@ | |||
| from typing import Optional | |||
| from core.app.app_config.entities import SensitiveWordAvoidanceEntity | |||
| from core.moderation.factory import ModerationFactory | |||
| class SensitiveWordAvoidanceConfigManager: | |||
| @classmethod | |||
| def convert(cls, config: dict) -> Optional[SensitiveWordAvoidanceEntity]: | |||
| def convert(cls, config: dict) -> SensitiveWordAvoidanceEntity | None: | |||
| sensitive_word_avoidance_dict = config.get("sensitive_word_avoidance") | |||
| if not sensitive_word_avoidance_dict: | |||
| return None | |||
| @@ -1,12 +1,10 @@ | |||
| from typing import Optional | |||
| from core.agent.entities import AgentEntity, AgentPromptEntity, AgentToolEntity | |||
| from core.agent.prompt.template import REACT_PROMPT_TEMPLATES | |||
| class AgentConfigManager: | |||
| @classmethod | |||
| def convert(cls, config: dict) -> Optional[AgentEntity]: | |||
| def convert(cls, config: dict) -> AgentEntity | None: | |||
| """ | |||
| Convert model config to agent entity | |||
| @@ -1,5 +1,4 @@ | |||
| import uuid | |||
| from typing import Optional | |||
| from core.app.app_config.entities import ( | |||
| DatasetEntity, | |||
| @@ -14,7 +13,7 @@ from services.dataset_service import DatasetService | |||
| class DatasetConfigManager: | |||
| @classmethod | |||
| def convert(cls, config: dict) -> Optional[DatasetEntity]: | |||
| def convert(cls, config: dict) -> DatasetEntity | None: | |||
| """ | |||
| Convert model config to dataset entity | |||
| @@ -1,6 +1,6 @@ | |||
| from collections.abc import Sequence | |||
| from enum import StrEnum, auto | |||
| from typing import Any, Literal, Optional | |||
| from typing import Any, Literal | |||
| from pydantic import BaseModel, Field, field_validator | |||
| @@ -17,7 +17,7 @@ class ModelConfigEntity(BaseModel): | |||
| provider: str | |||
| model: str | |||
| mode: Optional[str] = None | |||
| mode: str | None = None | |||
| parameters: dict[str, Any] = Field(default_factory=dict) | |||
| stop: list[str] = Field(default_factory=list) | |||
| @@ -53,7 +53,7 @@ class AdvancedCompletionPromptTemplateEntity(BaseModel): | |||
| assistant: str | |||
| prompt: str | |||
| role_prefix: Optional[RolePrefixEntity] = None | |||
| role_prefix: RolePrefixEntity | None = None | |||
| class PromptTemplateEntity(BaseModel): | |||
| @@ -84,9 +84,9 @@ class PromptTemplateEntity(BaseModel): | |||
| raise ValueError(f"invalid prompt type value {value}") | |||
| prompt_type: PromptType | |||
| simple_prompt_template: Optional[str] = None | |||
| advanced_chat_prompt_template: Optional[AdvancedChatPromptTemplateEntity] = None | |||
| advanced_completion_prompt_template: Optional[AdvancedCompletionPromptTemplateEntity] = None | |||
| simple_prompt_template: str | None = None | |||
| advanced_chat_prompt_template: AdvancedChatPromptTemplateEntity | None = None | |||
| advanced_completion_prompt_template: AdvancedCompletionPromptTemplateEntity | None = None | |||
| class VariableEntityType(StrEnum): | |||
| @@ -112,7 +112,7 @@ class VariableEntity(BaseModel): | |||
| type: VariableEntityType | |||
| required: bool = False | |||
| hide: bool = False | |||
| max_length: Optional[int] = None | |||
| max_length: int | None = None | |||
| options: Sequence[str] = Field(default_factory=list) | |||
| allowed_file_types: Sequence[FileType] = Field(default_factory=list) | |||
| allowed_file_extensions: Sequence[str] = Field(default_factory=list) | |||
| @@ -186,8 +186,8 @@ class MetadataFilteringCondition(BaseModel): | |||
| Metadata Filtering Condition. | |||
| """ | |||
| logical_operator: Optional[Literal["and", "or"]] = "and" | |||
| conditions: Optional[list[Condition]] = Field(default=None, deprecated=True) | |||
| logical_operator: Literal["and", "or"] | None = "and" | |||
| conditions: list[Condition] | None = Field(default=None, deprecated=True) | |||
| class DatasetRetrieveConfigEntity(BaseModel): | |||
| @@ -217,18 +217,18 @@ class DatasetRetrieveConfigEntity(BaseModel): | |||
| return mode | |||
| raise ValueError(f"invalid retrieve strategy value {value}") | |||
| query_variable: Optional[str] = None # Only when app mode is completion | |||
| query_variable: str | None = None # Only when app mode is completion | |||
| retrieve_strategy: RetrieveStrategy | |||
| top_k: Optional[int] = None | |||
| score_threshold: Optional[float] = 0.0 | |||
| rerank_mode: Optional[str] = "reranking_model" | |||
| reranking_model: Optional[dict] = None | |||
| weights: Optional[dict] = None | |||
| reranking_enabled: Optional[bool] = True | |||
| metadata_filtering_mode: Optional[Literal["disabled", "automatic", "manual"]] = "disabled" | |||
| metadata_model_config: Optional[ModelConfig] = None | |||
| metadata_filtering_conditions: Optional[MetadataFilteringCondition] = None | |||
| top_k: int | None = None | |||
| score_threshold: float | None = 0.0 | |||
| rerank_mode: str | None = "reranking_model" | |||
| reranking_model: dict | None = None | |||
| weights: dict | None = None | |||
| reranking_enabled: bool | None = True | |||
| metadata_filtering_mode: Literal["disabled", "automatic", "manual"] | None = "disabled" | |||
| metadata_model_config: ModelConfig | None = None | |||
| metadata_filtering_conditions: MetadataFilteringCondition | None = None | |||
| class DatasetEntity(BaseModel): | |||
| @@ -255,8 +255,8 @@ class TextToSpeechEntity(BaseModel): | |||
| """ | |||
| enabled: bool | |||
| voice: Optional[str] = None | |||
| language: Optional[str] = None | |||
| voice: str | None = None | |||
| language: str | None = None | |||
| class TracingConfigEntity(BaseModel): | |||
| @@ -269,15 +269,15 @@ class TracingConfigEntity(BaseModel): | |||
| class AppAdditionalFeatures(BaseModel): | |||
| file_upload: Optional[FileUploadConfig] = None | |||
| opening_statement: Optional[str] = None | |||
| file_upload: FileUploadConfig | None = None | |||
| opening_statement: str | None = None | |||
| suggested_questions: list[str] = [] | |||
| suggested_questions_after_answer: bool = False | |||
| show_retrieve_source: bool = False | |||
| more_like_this: bool = False | |||
| speech_to_text: bool = False | |||
| text_to_speech: Optional[TextToSpeechEntity] = None | |||
| trace_config: Optional[TracingConfigEntity] = None | |||
| text_to_speech: TextToSpeechEntity | None = None | |||
| trace_config: TracingConfigEntity | None = None | |||
| class AppConfig(BaseModel): | |||
| @@ -290,7 +290,7 @@ class AppConfig(BaseModel): | |||
| app_mode: AppMode | |||
| additional_features: AppAdditionalFeatures | |||
| variables: list[VariableEntity] = [] | |||
| sensitive_word_avoidance: Optional[SensitiveWordAvoidanceEntity] = None | |||
| sensitive_word_avoidance: SensitiveWordAvoidanceEntity | None = None | |||
| class EasyUIBasedAppModelConfigFrom(StrEnum): | |||
| @@ -313,7 +313,7 @@ class EasyUIBasedAppConfig(AppConfig): | |||
| app_model_config_dict: dict | |||
| model: ModelConfigEntity | |||
| prompt_template: PromptTemplateEntity | |||
| dataset: Optional[DatasetEntity] = None | |||
| dataset: DatasetEntity | None = None | |||
| external_data_variables: list[ExternalDataVariableEntity] = [] | |||
| @@ -3,7 +3,7 @@ import logging | |||
| import threading | |||
| import uuid | |||
| from collections.abc import Generator, Mapping | |||
| from typing import Any, Literal, Optional, Union, overload | |||
| from typing import Any, Literal, Union, overload | |||
| from flask import Flask, current_app | |||
| from pydantic import ValidationError | |||
| @@ -390,7 +390,7 @@ class AdvancedChatAppGenerator(MessageBasedAppGenerator): | |||
| application_generate_entity: AdvancedChatAppGenerateEntity, | |||
| workflow_execution_repository: WorkflowExecutionRepository, | |||
| workflow_node_execution_repository: WorkflowNodeExecutionRepository, | |||
| conversation: Optional[Conversation] = None, | |||
| conversation: Conversation | None = None, | |||
| stream: bool = True, | |||
| variable_loader: VariableLoader = DUMMY_VARIABLE_LOADER, | |||
| ) -> Mapping[str, Any] | Generator[str | Mapping[str, Any], Any, None]: | |||
| @@ -1,6 +1,6 @@ | |||
| import logging | |||
| from collections.abc import Mapping | |||
| from typing import Any, Optional, cast | |||
| from typing import Any, cast | |||
| from sqlalchemy import select | |||
| from sqlalchemy.orm import Session | |||
| @@ -231,7 +231,7 @@ class AdvancedChatAppRunner(WorkflowBasedAppRunner): | |||
| def query_app_annotations_to_reply( | |||
| self, app_record: App, message: Message, query: str, user_id: str, invoke_from: InvokeFrom | |||
| ) -> Optional[MessageAnnotation]: | |||
| ) -> MessageAnnotation | None: | |||
| """ | |||
| Query app annotations to reply | |||
| :param app_record: app record | |||
| @@ -4,7 +4,7 @@ import time | |||
| from collections.abc import Callable, Generator, Mapping | |||
| from contextlib import contextmanager | |||
| from threading import Thread | |||
| from typing import Any, Optional, Union | |||
| from typing import Any, Union | |||
| from sqlalchemy import select | |||
| from sqlalchemy.orm import Session | |||
| @@ -233,7 +233,7 @@ class AdvancedChatAppGenerateTaskPipeline: | |||
| return None | |||
| def _wrapper_process_stream_response( | |||
| self, trace_manager: Optional[TraceQueueManager] = None | |||
| self, trace_manager: TraceQueueManager | None = None | |||
| ) -> Generator[StreamResponse, None, None]: | |||
| tts_publisher = None | |||
| task_id = self._application_generate_entity.task_id | |||
| @@ -294,7 +294,7 @@ class AdvancedChatAppGenerateTaskPipeline: | |||
| if not self._workflow_run_id: | |||
| raise ValueError("workflow run not initialized.") | |||
| def _ensure_graph_runtime_initialized(self, graph_runtime_state: Optional[GraphRuntimeState]) -> GraphRuntimeState: | |||
| def _ensure_graph_runtime_initialized(self, graph_runtime_state: GraphRuntimeState | None) -> GraphRuntimeState: | |||
| """Fluent validation for graph runtime state.""" | |||
| if not graph_runtime_state: | |||
| raise ValueError("graph runtime state not initialized.") | |||
| @@ -411,8 +411,8 @@ class AdvancedChatAppGenerateTaskPipeline: | |||
| self, | |||
| event: QueueTextChunkEvent, | |||
| *, | |||
| tts_publisher: Optional[AppGeneratorTTSPublisher] = None, | |||
| queue_message: Optional[Union[WorkflowQueueMessage, MessageQueueMessage]] = None, | |||
| tts_publisher: AppGeneratorTTSPublisher | None = None, | |||
| queue_message: Union[WorkflowQueueMessage, MessageQueueMessage] | None = None, | |||
| **kwargs, | |||
| ) -> Generator[StreamResponse, None, None]: | |||
| """Handle text chunk events.""" | |||
| @@ -538,8 +538,8 @@ class AdvancedChatAppGenerateTaskPipeline: | |||
| self, | |||
| event: QueueWorkflowSucceededEvent, | |||
| *, | |||
| graph_runtime_state: Optional[GraphRuntimeState] = None, | |||
| trace_manager: Optional[TraceQueueManager] = None, | |||
| graph_runtime_state: GraphRuntimeState | None = None, | |||
| trace_manager: TraceQueueManager | None = None, | |||
| **kwargs, | |||
| ) -> Generator[StreamResponse, None, None]: | |||
| """Handle workflow succeeded events.""" | |||
| @@ -569,8 +569,8 @@ class AdvancedChatAppGenerateTaskPipeline: | |||
| self, | |||
| event: QueueWorkflowPartialSuccessEvent, | |||
| *, | |||
| graph_runtime_state: Optional[GraphRuntimeState] = None, | |||
| trace_manager: Optional[TraceQueueManager] = None, | |||
| graph_runtime_state: GraphRuntimeState | None = None, | |||
| trace_manager: TraceQueueManager | None = None, | |||
| **kwargs, | |||
| ) -> Generator[StreamResponse, None, None]: | |||
| """Handle workflow partial success events.""" | |||
| @@ -601,8 +601,8 @@ class AdvancedChatAppGenerateTaskPipeline: | |||
| self, | |||
| event: QueueWorkflowFailedEvent, | |||
| *, | |||
| graph_runtime_state: Optional[GraphRuntimeState] = None, | |||
| trace_manager: Optional[TraceQueueManager] = None, | |||
| graph_runtime_state: GraphRuntimeState | None = None, | |||
| trace_manager: TraceQueueManager | None = None, | |||
| **kwargs, | |||
| ) -> Generator[StreamResponse, None, None]: | |||
| """Handle workflow failed events.""" | |||
| @@ -636,8 +636,8 @@ class AdvancedChatAppGenerateTaskPipeline: | |||
| self, | |||
| event: QueueStopEvent, | |||
| *, | |||
| graph_runtime_state: Optional[GraphRuntimeState] = None, | |||
| trace_manager: Optional[TraceQueueManager] = None, | |||
| graph_runtime_state: GraphRuntimeState | None = None, | |||
| trace_manager: TraceQueueManager | None = None, | |||
| **kwargs, | |||
| ) -> Generator[StreamResponse, None, None]: | |||
| """Handle stop events.""" | |||
| @@ -677,7 +677,7 @@ class AdvancedChatAppGenerateTaskPipeline: | |||
| self, | |||
| event: QueueAdvancedChatMessageEndEvent, | |||
| *, | |||
| graph_runtime_state: Optional[GraphRuntimeState] = None, | |||
| graph_runtime_state: GraphRuntimeState | None = None, | |||
| **kwargs, | |||
| ) -> Generator[StreamResponse, None, None]: | |||
| """Handle advanced chat message end events.""" | |||
| @@ -775,10 +775,10 @@ class AdvancedChatAppGenerateTaskPipeline: | |||
| self, | |||
| event: Any, | |||
| *, | |||
| graph_runtime_state: Optional[GraphRuntimeState] = None, | |||
| tts_publisher: Optional[AppGeneratorTTSPublisher] = None, | |||
| trace_manager: Optional[TraceQueueManager] = None, | |||
| queue_message: Optional[Union[WorkflowQueueMessage, MessageQueueMessage]] = None, | |||
| graph_runtime_state: GraphRuntimeState | None = None, | |||
| tts_publisher: AppGeneratorTTSPublisher | None = None, | |||
| trace_manager: TraceQueueManager | None = None, | |||
| queue_message: Union[WorkflowQueueMessage, MessageQueueMessage] | None = None, | |||
| ) -> Generator[StreamResponse, None, None]: | |||
| """Dispatch events using elegant pattern matching.""" | |||
| handlers = self._get_event_handlers() | |||
| @@ -830,15 +830,15 @@ class AdvancedChatAppGenerateTaskPipeline: | |||
| def _process_stream_response( | |||
| self, | |||
| tts_publisher: Optional[AppGeneratorTTSPublisher] = None, | |||
| trace_manager: Optional[TraceQueueManager] = None, | |||
| tts_publisher: AppGeneratorTTSPublisher | None = None, | |||
| trace_manager: TraceQueueManager | None = None, | |||
| ) -> Generator[StreamResponse, None, None]: | |||
| """ | |||
| Process stream response using elegant Fluent Python patterns. | |||
| Maintains the exact same functionality as the original 57-if-statement version. | |||
| """ | |||
| # Initialize graph runtime state | |||
| graph_runtime_state: Optional[GraphRuntimeState] = None | |||
| graph_runtime_state: GraphRuntimeState | None = None | |||
| for queue_message in self._base_task_pipeline.queue_manager.listen(): | |||
| event = queue_message.event | |||
| @@ -888,7 +888,7 @@ class AdvancedChatAppGenerateTaskPipeline: | |||
| if self._conversation_name_generate_thread: | |||
| self._conversation_name_generate_thread.join() | |||
| def _save_message(self, *, session: Session, graph_runtime_state: Optional[GraphRuntimeState] = None): | |||
| def _save_message(self, *, session: Session, graph_runtime_state: GraphRuntimeState | None = None): | |||
| message = self._get_message(session=session) | |||
| # If there are assistant files, remove markdown image links from answer | |||
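Each handler in this pipeline shares one shape: a positional event, keyword-only optional collaborators, and `**kwargs`, so the dispatcher can call any handler with a single uniform keyword set. A compressed sketch (collaborator types reduced to `object`):

```python
from collections.abc import Generator
from typing import Any


def handle_workflow_failed(
    event: Any,
    *,
    graph_runtime_state: object | None = None,
    trace_manager: object | None = None,
    **kwargs: Any,
) -> Generator[str, None, None]:
    # each handler validates only the optional state it actually needs
    if graph_runtime_state is None:
        raise ValueError("graph runtime state not initialized.")
    yield "workflow_finished"


# the dispatcher passes the full keyword set to whichever handler matches
assert list(handle_workflow_failed(object(), graph_runtime_state=object())) == ["workflow_finished"]
```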
| @@ -1,6 +1,6 @@ | |||
| import uuid | |||
| from collections.abc import Mapping | |||
| from typing import Any, Optional, cast | |||
| from typing import Any, cast | |||
| from core.agent.entities import AgentEntity | |||
| from core.app.app_config.base_app_config_manager import BaseAppConfigManager | |||
| @@ -30,7 +30,7 @@ class AgentChatAppConfig(EasyUIBasedAppConfig): | |||
| Agent Chatbot App Config Entity. | |||
| """ | |||
| agent: Optional[AgentEntity] = None | |||
| agent: AgentEntity | None = None | |||
| class AgentChatAppConfigManager(BaseAppConfigManager): | |||
| @@ -39,8 +39,8 @@ class AgentChatAppConfigManager(BaseAppConfigManager): | |||
| cls, | |||
| app_model: App, | |||
| app_model_config: AppModelConfig, | |||
| conversation: Optional[Conversation] = None, | |||
| override_config_dict: Optional[dict] = None, | |||
| conversation: Conversation | None = None, | |||
| override_config_dict: dict | None = None, | |||
| ) -> AgentChatAppConfig: | |||
| """ | |||
| Convert app model config to agent chat app config | |||
| @@ -1,5 +1,5 @@ | |||
| from collections.abc import Generator, Mapping, Sequence | |||
| from typing import TYPE_CHECKING, Any, Optional, Union, final | |||
| from typing import TYPE_CHECKING, Any, Union, final | |||
| from sqlalchemy.orm import Session | |||
| @@ -24,7 +24,7 @@ class BaseAppGenerator: | |||
| def _prepare_user_inputs( | |||
| self, | |||
| *, | |||
| user_inputs: Optional[Mapping[str, Any]], | |||
| user_inputs: Mapping[str, Any] | None, | |||
| variables: Sequence["VariableEntity"], | |||
| tenant_id: str, | |||
| strict_type_validation: bool = False, | |||
| @@ -2,7 +2,7 @@ import queue | |||
| import time | |||
| from abc import abstractmethod | |||
| from enum import IntEnum, auto | |||
| from typing import Any, Optional | |||
| from typing import Any | |||
| from sqlalchemy.orm import DeclarativeMeta | |||
| @@ -116,7 +116,7 @@ class AppQueueManager: | |||
| Set task stop flag | |||
| :return: | |||
| """ | |||
| result: Optional[Any] = redis_client.get(cls._generate_task_belong_cache_key(task_id)) | |||
| result: Any | None = redis_client.get(cls._generate_task_belong_cache_key(task_id)) | |||
| if result is None: | |||
| return | |||
| @@ -1,7 +1,7 @@ | |||
| import logging | |||
| import time | |||
| from collections.abc import Generator, Mapping, Sequence | |||
| from typing import TYPE_CHECKING, Any, Optional, Union | |||
| from typing import TYPE_CHECKING, Any, Union | |||
| from core.app.app_config.entities import ExternalDataVariableEntity, PromptTemplateEntity | |||
| from core.app.apps.base_app_queue_manager import AppQueueManager, PublishFrom | |||
| @@ -82,11 +82,11 @@ class AppRunner: | |||
| prompt_template_entity: PromptTemplateEntity, | |||
| inputs: Mapping[str, str], | |||
| files: Sequence["File"], | |||
| query: Optional[str] = None, | |||
| context: Optional[str] = None, | |||
| memory: Optional[TokenBufferMemory] = None, | |||
| image_detail_config: Optional[ImagePromptMessageContent.DETAIL] = None, | |||
| ) -> tuple[list[PromptMessage], Optional[list[str]]]: | |||
| query: str | None = None, | |||
| context: str | None = None, | |||
| memory: TokenBufferMemory | None = None, | |||
| image_detail_config: ImagePromptMessageContent.DETAIL | None = None, | |||
| ) -> tuple[list[PromptMessage], list[str] | None]: | |||
| """ | |||
| Organize prompt messages | |||
| :param context: | |||
| @@ -161,7 +161,7 @@ class AppRunner: | |||
| prompt_messages: list, | |||
| text: str, | |||
| stream: bool, | |||
| usage: Optional[LLMUsage] = None, | |||
| usage: LLMUsage | None = None, | |||
| ): | |||
| """ | |||
| Direct output | |||
| @@ -375,7 +375,7 @@ class AppRunner: | |||
| def query_app_annotations_to_reply( | |||
| self, app_record: App, message: Message, query: str, user_id: str, invoke_from: InvokeFrom | |||
| ) -> Optional[MessageAnnotation]: | |||
| ) -> MessageAnnotation | None: | |||
| """ | |||
| Query app annotations to reply | |||
| :param app_record: app record | |||
| @@ -1,5 +1,3 @@ | |||
| from typing import Optional | |||
| from core.app.app_config.base_app_config_manager import BaseAppConfigManager | |||
| from core.app.app_config.common.sensitive_word_avoidance.manager import SensitiveWordAvoidanceConfigManager | |||
| from core.app.app_config.easy_ui_based_app.dataset.manager import DatasetConfigManager | |||
| @@ -32,8 +30,8 @@ class ChatAppConfigManager(BaseAppConfigManager): | |||
| cls, | |||
| app_model: App, | |||
| app_model_config: AppModelConfig, | |||
| conversation: Optional[Conversation] = None, | |||
| override_config_dict: Optional[dict] = None, | |||
| conversation: Conversation | None = None, | |||
| override_config_dict: dict | None = None, | |||
| ) -> ChatAppConfig: | |||
| """ | |||
| Convert app model config to chat app config | |||
| @@ -1,7 +1,7 @@ | |||
| import time | |||
| from collections.abc import Mapping, Sequence | |||
| from datetime import UTC, datetime | |||
| from typing import Any, Optional, Union, cast | |||
| from typing import Any, Union, cast | |||
| from sqlalchemy.orm import Session | |||
| @@ -140,7 +140,7 @@ class WorkflowResponseConverter: | |||
| event: QueueNodeStartedEvent, | |||
| task_id: str, | |||
| workflow_node_execution: WorkflowNodeExecution, | |||
| ) -> Optional[NodeStartStreamResponse]: | |||
| ) -> NodeStartStreamResponse | None: | |||
| if workflow_node_execution.node_type in {NodeType.ITERATION, NodeType.LOOP}: | |||
| return None | |||
| if not workflow_node_execution.workflow_execution_id: | |||
| @@ -190,7 +190,7 @@ class WorkflowResponseConverter: | |||
| | QueueNodeExceptionEvent, | |||
| task_id: str, | |||
| workflow_node_execution: WorkflowNodeExecution, | |||
| ) -> Optional[NodeFinishStreamResponse]: | |||
| ) -> NodeFinishStreamResponse | None: | |||
| if workflow_node_execution.node_type in {NodeType.ITERATION, NodeType.LOOP}: | |||
| return None | |||
| if not workflow_node_execution.workflow_execution_id: | |||
| @@ -235,7 +235,7 @@ class WorkflowResponseConverter: | |||
| event: QueueNodeRetryEvent, | |||
| task_id: str, | |||
| workflow_node_execution: WorkflowNodeExecution, | |||
| ) -> Optional[Union[NodeRetryStreamResponse, NodeFinishStreamResponse]]: | |||
| ) -> Union[NodeRetryStreamResponse, NodeFinishStreamResponse] | None: | |||
| if workflow_node_execution.node_type in {NodeType.ITERATION, NodeType.LOOP}: | |||
| return None | |||
| if not workflow_node_execution.workflow_execution_id: | |||
| @@ -1,5 +1,3 @@ | |||
| from typing import Optional | |||
| from core.app.app_config.base_app_config_manager import BaseAppConfigManager | |||
| from core.app.app_config.common.sensitive_word_avoidance.manager import SensitiveWordAvoidanceConfigManager | |||
| from core.app.app_config.easy_ui_based_app.dataset.manager import DatasetConfigManager | |||
| @@ -24,7 +22,7 @@ class CompletionAppConfig(EasyUIBasedAppConfig): | |||
| class CompletionAppConfigManager(BaseAppConfigManager): | |||
| @classmethod | |||
| def get_app_config( | |||
| cls, app_model: App, app_model_config: AppModelConfig, override_config_dict: Optional[dict] = None | |||
| cls, app_model: App, app_model_config: AppModelConfig, override_config_dict: dict | None = None | |||
| ) -> CompletionAppConfig: | |||
| """ | |||
| Convert app model config to completion app config | |||
@@ -1,7 +1,7 @@
 import json
 import logging
 from collections.abc import Generator
-from typing import Optional, Union, cast
+from typing import Union, cast

 from sqlalchemy import select
 from sqlalchemy.orm import Session
@@ -84,7 +84,7 @@ class MessageBasedAppGenerator(BaseAppGenerator):
             logger.exception("Failed to handle response, conversation_id: %s", conversation.id)
             raise e

-    def _get_app_model_config(self, app_model: App, conversation: Optional[Conversation] = None) -> AppModelConfig:
+    def _get_app_model_config(self, app_model: App, conversation: Conversation | None = None) -> AppModelConfig:
         if conversation:
             stmt = select(AppModelConfig).where(
                 AppModelConfig.id == conversation.app_model_config_id, AppModelConfig.app_id == app_model.id
@@ -112,7 +112,7 @@ class MessageBasedAppGenerator(BaseAppGenerator):
             AgentChatAppGenerateEntity,
             AdvancedChatAppGenerateEntity,
         ],
-        conversation: Optional[Conversation] = None,
+        conversation: Conversation | None = None,
     ) -> tuple[Conversation, Message]:
         """
         Initialize generate records
@@ -3,7 +3,7 @@ import logging
 import threading
 import uuid
 from collections.abc import Generator, Mapping, Sequence
-from typing import Any, Literal, Optional, Union, overload
+from typing import Any, Literal, Union, overload

 from flask import Flask, current_app
 from pydantic import ValidationError
@@ -53,7 +53,7 @@ class WorkflowAppGenerator(BaseAppGenerator):
         invoke_from: InvokeFrom,
         streaming: Literal[True],
         call_depth: int,
-        workflow_thread_pool_id: Optional[str],
+        workflow_thread_pool_id: str | None,
     ) -> Generator[Mapping | str, None, None]: ...

     @overload
@@ -67,7 +67,7 @@ class WorkflowAppGenerator(BaseAppGenerator):
         invoke_from: InvokeFrom,
         streaming: Literal[False],
         call_depth: int,
-        workflow_thread_pool_id: Optional[str],
+        workflow_thread_pool_id: str | None,
     ) -> Mapping[str, Any]: ...

     @overload
@@ -81,7 +81,7 @@ class WorkflowAppGenerator(BaseAppGenerator):
         invoke_from: InvokeFrom,
         streaming: bool,
         call_depth: int,
-        workflow_thread_pool_id: Optional[str],
+        workflow_thread_pool_id: str | None,
     ) -> Union[Mapping[str, Any], Generator[Mapping | str, None, None]]: ...

     def generate(
@@ -94,7 +94,7 @@ class WorkflowAppGenerator(BaseAppGenerator):
         invoke_from: InvokeFrom,
         streaming: bool = True,
         call_depth: int = 0,
-        workflow_thread_pool_id: Optional[str] = None,
+        workflow_thread_pool_id: str | None = None,
     ) -> Union[Mapping[str, Any], Generator[Mapping | str, None, None]]:
         files: Sequence[Mapping[str, Any]] = args.get("files") or []
@@ -200,7 +200,7 @@ class WorkflowAppGenerator(BaseAppGenerator):
         workflow_execution_repository: WorkflowExecutionRepository,
         workflow_node_execution_repository: WorkflowNodeExecutionRepository,
         streaming: bool = True,
-        workflow_thread_pool_id: Optional[str] = None,
+        workflow_thread_pool_id: str | None = None,
         variable_loader: VariableLoader = DUMMY_VARIABLE_LOADER,
     ) -> Union[Mapping[str, Any], Generator[str | Mapping[str, Any], None, None]]:
         """
@@ -434,7 +434,7 @@ class WorkflowAppGenerator(BaseAppGenerator):
         queue_manager: AppQueueManager,
         context: contextvars.Context,
         variable_loader: VariableLoader,
-        workflow_thread_pool_id: Optional[str] = None,
+        workflow_thread_pool_id: str | None = None,
     ):
         """
         Generate worker in a new thread.
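The three `@overload` stubs above are what let callers keep a precise return type: a `Literal[True]`/`Literal[False]` value of `streaming` selects the generator or mapping signature, while the real implementation accepts a plain `bool`. A minimal standalone sketch of the same pattern (the function and values are illustrative, not from the codebase):

```python
from collections.abc import Generator
from typing import Literal, Union, overload

@overload
def run(streaming: Literal[True]) -> Generator[str, None, None]: ...
@overload
def run(streaming: Literal[False]) -> dict: ...
@overload
def run(streaming: bool) -> Union[dict, Generator[str, None, None]]: ...
def run(streaming: bool = True) -> Union[dict, Generator[str, None, None]]:
    # One runtime implementation; the stubs above exist only for type checkers.
    if streaming:
        return (chunk for chunk in ("hello", "world"))
    return {"result": "hello world"}
```

Under mypy or pyright, `run(streaming=False)` then resolves to `dict` without a cast.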
@@ -1,5 +1,5 @@
 import logging
-from typing import Optional, cast
+from typing import cast

 from configs import dify_config
 from core.app.apps.base_app_queue_manager import AppQueueManager
@@ -31,7 +31,7 @@ class WorkflowAppRunner(WorkflowBasedAppRunner):
         application_generate_entity: WorkflowAppGenerateEntity,
         queue_manager: AppQueueManager,
         variable_loader: VariableLoader,
-        workflow_thread_pool_id: Optional[str] = None,
+        workflow_thread_pool_id: str | None = None,
         workflow: Workflow,
         system_user_id: str,
     ):
@@ -2,7 +2,7 @@ import logging
 import time
 from collections.abc import Callable, Generator
 from contextlib import contextmanager
-from typing import Any, Optional, Union
+from typing import Any, Union

 from sqlalchemy.orm import Session
@@ -206,7 +206,7 @@ class WorkflowAppGenerateTaskPipeline:
         return None

     def _wrapper_process_stream_response(
-        self, trace_manager: Optional[TraceQueueManager] = None
+        self, trace_manager: TraceQueueManager | None = None
     ) -> Generator[StreamResponse, None, None]:
         tts_publisher = None
         task_id = self._application_generate_entity.task_id
@@ -268,7 +268,7 @@ class WorkflowAppGenerateTaskPipeline:
         if not self._workflow_run_id:
             raise ValueError("workflow run not initialized.")

-    def _ensure_graph_runtime_initialized(self, graph_runtime_state: Optional[GraphRuntimeState]) -> GraphRuntimeState:
+    def _ensure_graph_runtime_initialized(self, graph_runtime_state: GraphRuntimeState | None) -> GraphRuntimeState:
         """Fluent validation for graph runtime state."""
         if not graph_runtime_state:
             raise ValueError("graph runtime state not initialized.")
@@ -474,8 +474,8 @@ class WorkflowAppGenerateTaskPipeline:
         self,
         event: QueueWorkflowSucceededEvent,
         *,
-        graph_runtime_state: Optional[GraphRuntimeState] = None,
-        trace_manager: Optional[TraceQueueManager] = None,
+        graph_runtime_state: GraphRuntimeState | None = None,
+        trace_manager: TraceQueueManager | None = None,
         **kwargs,
     ) -> Generator[StreamResponse, None, None]:
         """Handle workflow succeeded events."""
@@ -508,8 +508,8 @@ class WorkflowAppGenerateTaskPipeline:
         self,
         event: QueueWorkflowPartialSuccessEvent,
         *,
-        graph_runtime_state: Optional[GraphRuntimeState] = None,
-        trace_manager: Optional[TraceQueueManager] = None,
+        graph_runtime_state: GraphRuntimeState | None = None,
+        trace_manager: TraceQueueManager | None = None,
         **kwargs,
     ) -> Generator[StreamResponse, None, None]:
         """Handle workflow partial success events."""
@@ -543,8 +543,8 @@ class WorkflowAppGenerateTaskPipeline:
         self,
         event: Union[QueueWorkflowFailedEvent, QueueStopEvent],
         *,
-        graph_runtime_state: Optional[GraphRuntimeState] = None,
-        trace_manager: Optional[TraceQueueManager] = None,
+        graph_runtime_state: GraphRuntimeState | None = None,
+        trace_manager: TraceQueueManager | None = None,
         **kwargs,
     ) -> Generator[StreamResponse, None, None]:
         """Handle workflow failed and stop events."""
@@ -581,8 +581,8 @@ class WorkflowAppGenerateTaskPipeline:
         self,
         event: QueueTextChunkEvent,
         *,
-        tts_publisher: Optional[AppGeneratorTTSPublisher] = None,
-        queue_message: Optional[Union[WorkflowQueueMessage, MessageQueueMessage]] = None,
+        tts_publisher: AppGeneratorTTSPublisher | None = None,
+        queue_message: Union[WorkflowQueueMessage, MessageQueueMessage] | None = None,
         **kwargs,
     ) -> Generator[StreamResponse, None, None]:
         """Handle text chunk events."""
@@ -635,10 +635,10 @@ class WorkflowAppGenerateTaskPipeline:
         self,
         event: Any,
         *,
-        graph_runtime_state: Optional[GraphRuntimeState] = None,
-        tts_publisher: Optional[AppGeneratorTTSPublisher] = None,
-        trace_manager: Optional[TraceQueueManager] = None,
-        queue_message: Optional[Union[WorkflowQueueMessage, MessageQueueMessage]] = None,
+        graph_runtime_state: GraphRuntimeState | None = None,
+        tts_publisher: AppGeneratorTTSPublisher | None = None,
+        trace_manager: TraceQueueManager | None = None,
+        queue_message: Union[WorkflowQueueMessage, MessageQueueMessage] | None = None,
     ) -> Generator[StreamResponse, None, None]:
         """Dispatch events using elegant pattern matching."""
         handlers = self._get_event_handlers()
@@ -701,8 +701,8 @@ class WorkflowAppGenerateTaskPipeline:
     def _process_stream_response(
         self,
-        tts_publisher: Optional[AppGeneratorTTSPublisher] = None,
-        trace_manager: Optional[TraceQueueManager] = None,
+        tts_publisher: AppGeneratorTTSPublisher | None = None,
+        trace_manager: TraceQueueManager | None = None,
     ) -> Generator[StreamResponse, None, None]:
         """
         Process stream response using elegant Fluent Python patterns.
@@ -769,7 +769,7 @@ class WorkflowAppGenerateTaskPipeline:
             session.commit()

     def _text_chunk_to_stream_response(
-        self, text: str, from_variable_selector: Optional[list[str]] = None
+        self, text: str, from_variable_selector: list[str] | None = None
     ) -> TextChunkStreamResponse:
         """
         Handle completed event.
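All of the event handlers above share one signature shape: keyword-only dependencies with `... | None = None` defaults, plus a trailing `**kwargs`. That shape is what lets a single dispatcher pass one uniform keyword set while each handler picks out the context it needs. A stripped-down sketch of the idea (handler names and event shape are hypothetical):

```python
from collections.abc import Callable, Generator
from typing import Any

class MiniPipeline:
    # Keyword-only optional dependencies plus **kwargs mean every handler can
    # be called the same way, regardless of which context it actually uses.
    def _handle_text_chunk(self, event: dict, *, tts_publisher: Any | None = None, **kwargs) -> Generator[str, None, None]:
        yield event["text"]

    def _handle_succeeded(self, event: dict, *, trace_manager: Any | None = None, **kwargs) -> Generator[str, None, None]:
        yield "workflow finished"

    def dispatch(self, event: dict, **context: Any) -> Generator[str, None, None]:
        handlers: dict[str, Callable[..., Generator[str, None, None]]] = {
            "text_chunk": self._handle_text_chunk,
            "succeeded": self._handle_succeeded,
        }
        handler = handlers.get(event["type"])
        if handler is not None:
            yield from handler(event, **context)

pipeline = MiniPipeline()
# Both handlers receive the same context; each consumes only its own slice.
print(list(pipeline.dispatch({"type": "text_chunk", "text": "hi"}, tts_publisher=None, trace_manager=None)))
```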
@@ -1,6 +1,6 @@
 from collections.abc import Mapping, Sequence
 from enum import StrEnum
-from typing import Any, Optional
+from typing import Any

 from pydantic import BaseModel, ConfigDict, Field, ValidationInfo, field_validator
@@ -96,7 +96,7 @@ class AppGenerateEntity(BaseModel):
     # app config
     app_config: Any = None
-    file_upload_config: Optional[FileUploadConfig] = None
+    file_upload_config: FileUploadConfig | None = None

     inputs: Mapping[str, Any]
     files: Sequence[File]
@@ -114,7 +114,7 @@ class AppGenerateEntity(BaseModel):
     # tracing instance
     # Using Any to avoid circular import with TraceQueueManager
-    trace_manager: Optional[Any] = None
+    trace_manager: Any | None = None

 class EasyUIBasedAppGenerateEntity(AppGenerateEntity):
@@ -126,7 +126,7 @@ class EasyUIBasedAppGenerateEntity(AppGenerateEntity):
     app_config: EasyUIBasedAppConfig = None  # type: ignore
     model_conf: ModelConfigWithCredentialsEntity

-    query: Optional[str] = None
+    query: str | None = None

     # pydantic configs
     model_config = ConfigDict(protected_namespaces=())
@@ -137,8 +137,8 @@ class ConversationAppGenerateEntity(AppGenerateEntity):
     Base entity for conversation-based app generation.
     """

-    conversation_id: Optional[str] = None
-    parent_message_id: Optional[str] = Field(
+    conversation_id: str | None = None
+    parent_message_id: str | None = Field(
         default=None,
         description=(
             "Starting from v0.9.0, parent_message_id is used to support message regeneration for internal chat API."
@@ -188,7 +188,7 @@ class AdvancedChatAppGenerateEntity(ConversationAppGenerateEntity):
     # app config
     app_config: WorkflowUIBasedAppConfig = None  # type: ignore

-    workflow_run_id: Optional[str] = None
+    workflow_run_id: str | None = None
     query: str

     class SingleIterationRunEntity(BaseModel):
@@ -199,7 +199,7 @@ class AdvancedChatAppGenerateEntity(ConversationAppGenerateEntity):
         node_id: str
         inputs: Mapping

-    single_iteration_run: Optional[SingleIterationRunEntity] = None
+    single_iteration_run: SingleIterationRunEntity | None = None

     class SingleLoopRunEntity(BaseModel):
         """
@@ -209,7 +209,7 @@ class AdvancedChatAppGenerateEntity(ConversationAppGenerateEntity):
         node_id: str
         inputs: Mapping

-    single_loop_run: Optional[SingleLoopRunEntity] = None
+    single_loop_run: SingleLoopRunEntity | None = None

 class WorkflowAppGenerateEntity(AppGenerateEntity):
@@ -229,7 +229,7 @@ class WorkflowAppGenerateEntity(AppGenerateEntity):
         node_id: str
         inputs: dict

-    single_iteration_run: Optional[SingleIterationRunEntity] = None
+    single_iteration_run: SingleIterationRunEntity | None = None

     class SingleLoopRunEntity(BaseModel):
         """
@@ -239,4 +239,4 @@ class WorkflowAppGenerateEntity(AppGenerateEntity):
         node_id: str
         inputs: dict

-    single_loop_run: Optional[SingleLoopRunEntity] = None
+    single_loop_run: SingleLoopRunEntity | None = None
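Two things are easy to miss in these entity conversions. First, `X | None` is evaluated when the class body runs, so the rewritten models assume a Python 3.10+ runtime. Second, in Pydantic v2 a nullable annotation does not by itself make a field optional; the explicit `= None` defaults preserved above are what keep these fields omittable. A small sketch of the distinction (toy model, assuming Pydantic v2):

```python
from pydantic import BaseModel, ValidationError

class Demo(BaseModel):
    required_but_nullable: str | None   # must be passed, may be None
    truly_optional: str | None = None   # may be omitted entirely

try:
    Demo()                              # missing required_but_nullable
except ValidationError as exc:
    print(exc.error_count(), "validation error")

print(Demo(required_but_nullable=None))  # valid
```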
@@ -1,7 +1,7 @@
 from collections.abc import Mapping, Sequence
 from datetime import datetime
 from enum import StrEnum, auto
-from typing import Any, Optional
+from typing import Any

 from pydantic import BaseModel
@@ -81,20 +81,20 @@ class QueueIterationStartEvent(AppQueueEvent):
     node_id: str
     node_type: NodeType
     node_data: BaseNodeData
-    parallel_id: Optional[str] = None
+    parallel_id: str | None = None
     """parallel id if node is in parallel"""
-    parallel_start_node_id: Optional[str] = None
+    parallel_start_node_id: str | None = None
     """parallel start node id if node is in parallel"""
-    parent_parallel_id: Optional[str] = None
+    parent_parallel_id: str | None = None
     """parent parallel id if node is in parallel"""
-    parent_parallel_start_node_id: Optional[str] = None
+    parent_parallel_start_node_id: str | None = None
     """parent parallel start node id if node is in parallel"""
     start_at: datetime
     node_run_index: int
-    inputs: Optional[Mapping[str, Any]] = None
-    predecessor_node_id: Optional[str] = None
-    metadata: Optional[Mapping[str, Any]] = None
+    inputs: Mapping[str, Any] | None = None
+    predecessor_node_id: str | None = None
+    metadata: Mapping[str, Any] | None = None

 class QueueIterationNextEvent(AppQueueEvent):
@@ -109,19 +109,19 @@
     node_id: str
     node_type: NodeType
     node_data: BaseNodeData
-    parallel_id: Optional[str] = None
+    parallel_id: str | None = None
     """parallel id if node is in parallel"""
-    parallel_start_node_id: Optional[str] = None
+    parallel_start_node_id: str | None = None
     """parallel start node id if node is in parallel"""
-    parent_parallel_id: Optional[str] = None
+    parent_parallel_id: str | None = None
     """parent parallel id if node is in parallel"""
-    parent_parallel_start_node_id: Optional[str] = None
+    parent_parallel_start_node_id: str | None = None
     """parent parallel start node id if node is in parallel"""
-    parallel_mode_run_id: Optional[str] = None
+    parallel_mode_run_id: str | None = None
     """iteration run in parallel mode run id"""
     node_run_index: int
-    output: Optional[Any] = None  # output for the current iteration
-    duration: Optional[float] = None
+    output: Any | None = None  # output for the current iteration
+    duration: float | None = None

 class QueueIterationCompletedEvent(AppQueueEvent):
@@ -135,23 +135,23 @@
     node_id: str
     node_type: NodeType
     node_data: BaseNodeData
-    parallel_id: Optional[str] = None
+    parallel_id: str | None = None
     """parallel id if node is in parallel"""
-    parallel_start_node_id: Optional[str] = None
+    parallel_start_node_id: str | None = None
     """parallel start node id if node is in parallel"""
-    parent_parallel_id: Optional[str] = None
+    parent_parallel_id: str | None = None
     """parent parallel id if node is in parallel"""
-    parent_parallel_start_node_id: Optional[str] = None
+    parent_parallel_start_node_id: str | None = None
     """parent parallel start node id if node is in parallel"""
     start_at: datetime
     node_run_index: int
-    inputs: Optional[Mapping[str, Any]] = None
-    outputs: Optional[Mapping[str, Any]] = None
-    metadata: Optional[Mapping[str, Any]] = None
+    inputs: Mapping[str, Any] | None = None
+    outputs: Mapping[str, Any] | None = None
+    metadata: Mapping[str, Any] | None = None
     steps: int = 0
-    error: Optional[str] = None
+    error: str | None = None

 class QueueLoopStartEvent(AppQueueEvent):
@@ -164,20 +164,20 @@
     node_id: str
     node_type: NodeType
     node_data: BaseNodeData
-    parallel_id: Optional[str] = None
+    parallel_id: str | None = None
     """parallel id if node is in parallel"""
-    parallel_start_node_id: Optional[str] = None
+    parallel_start_node_id: str | None = None
     """parallel start node id if node is in parallel"""
-    parent_parallel_id: Optional[str] = None
+    parent_parallel_id: str | None = None
     """parent parallel id if node is in parallel"""
-    parent_parallel_start_node_id: Optional[str] = None
+    parent_parallel_start_node_id: str | None = None
     """parent parallel start node id if node is in parallel"""
     start_at: datetime
     node_run_index: int
-    inputs: Optional[Mapping[str, Any]] = None
-    predecessor_node_id: Optional[str] = None
-    metadata: Optional[Mapping[str, Any]] = None
+    inputs: Mapping[str, Any] | None = None
+    predecessor_node_id: str | None = None
+    metadata: Mapping[str, Any] | None = None

 class QueueLoopNextEvent(AppQueueEvent):
@@ -192,19 +192,19 @@
     node_id: str
     node_type: NodeType
     node_data: BaseNodeData
-    parallel_id: Optional[str] = None
+    parallel_id: str | None = None
     """parallel id if node is in parallel"""
-    parallel_start_node_id: Optional[str] = None
+    parallel_start_node_id: str | None = None
     """parallel start node id if node is in parallel"""
-    parent_parallel_id: Optional[str] = None
+    parent_parallel_id: str | None = None
     """parent parallel id if node is in parallel"""
-    parent_parallel_start_node_id: Optional[str] = None
+    parent_parallel_start_node_id: str | None = None
     """parent parallel start node id if node is in parallel"""
-    parallel_mode_run_id: Optional[str] = None
+    parallel_mode_run_id: str | None = None
     """iteration run in parallel mode run id"""
     node_run_index: int
-    output: Optional[Any] = None  # output for the current loop
-    duration: Optional[float] = None
+    output: Any | None = None  # output for the current loop
+    duration: float | None = None

 class QueueLoopCompletedEvent(AppQueueEvent):
@@ -218,23 +218,23 @@
     node_id: str
     node_type: NodeType
     node_data: BaseNodeData
-    parallel_id: Optional[str] = None
+    parallel_id: str | None = None
     """parallel id if node is in parallel"""
-    parallel_start_node_id: Optional[str] = None
+    parallel_start_node_id: str | None = None
     """parallel start node id if node is in parallel"""
-    parent_parallel_id: Optional[str] = None
+    parent_parallel_id: str | None = None
     """parent parallel id if node is in parallel"""
-    parent_parallel_start_node_id: Optional[str] = None
+    parent_parallel_start_node_id: str | None = None
     """parent parallel start node id if node is in parallel"""
     start_at: datetime
     node_run_index: int
-    inputs: Optional[Mapping[str, Any]] = None
-    outputs: Optional[Mapping[str, Any]] = None
-    metadata: Optional[Mapping[str, Any]] = None
+    inputs: Mapping[str, Any] | None = None
+    outputs: Mapping[str, Any] | None = None
+    metadata: Mapping[str, Any] | None = None
     steps: int = 0
-    error: Optional[str] = None
+    error: str | None = None

 class QueueTextChunkEvent(AppQueueEvent):
@@ -244,11 +244,11 @@
     event: QueueEvent = QueueEvent.TEXT_CHUNK
     text: str
-    from_variable_selector: Optional[list[str]] = None
+    from_variable_selector: list[str] | None = None
     """from variable selector"""
-    in_iteration_id: Optional[str] = None
+    in_iteration_id: str | None = None
     """iteration id if node is in iteration"""
-    in_loop_id: Optional[str] = None
+    in_loop_id: str | None = None
     """loop id if node is in loop"""
@@ -285,9 +285,9 @@ class QueueRetrieverResourcesEvent(AppQueueEvent):
     event: QueueEvent = QueueEvent.RETRIEVER_RESOURCES
     retriever_resources: Sequence[RetrievalSourceMetadata]
-    in_iteration_id: Optional[str] = None
+    in_iteration_id: str | None = None
     """iteration id if node is in iteration"""
-    in_loop_id: Optional[str] = None
+    in_loop_id: str | None = None
     """loop id if node is in loop"""
@@ -306,7 +306,7 @@ class QueueMessageEndEvent(AppQueueEvent):
     """

     event: QueueEvent = QueueEvent.MESSAGE_END
-    llm_result: Optional[LLMResult] = None
+    llm_result: LLMResult | None = None

 class QueueAdvancedChatMessageEndEvent(AppQueueEvent):
@@ -332,7 +332,7 @@ class QueueWorkflowSucceededEvent(AppQueueEvent):
     """

     event: QueueEvent = QueueEvent.WORKFLOW_SUCCEEDED
-    outputs: Optional[dict[str, Any]] = None
+    outputs: dict[str, Any] | None = None

 class QueueWorkflowFailedEvent(AppQueueEvent):
@@ -352,7 +352,7 @@ class QueueWorkflowPartialSuccessEvent(AppQueueEvent):
     event: QueueEvent = QueueEvent.WORKFLOW_PARTIAL_SUCCEEDED
     exceptions_count: int
-    outputs: Optional[dict[str, Any]] = None
+    outputs: dict[str, Any] | None = None

 class QueueNodeStartedEvent(AppQueueEvent):
@@ -367,23 +367,23 @@
     node_type: NodeType
     node_data: BaseNodeData
     node_run_index: int = 1
-    predecessor_node_id: Optional[str] = None
-    parallel_id: Optional[str] = None
+    predecessor_node_id: str | None = None
+    parallel_id: str | None = None
     """parallel id if node is in parallel"""
-    parallel_start_node_id: Optional[str] = None
+    parallel_start_node_id: str | None = None
     """parallel start node id if node is in parallel"""
-    parent_parallel_id: Optional[str] = None
+    parent_parallel_id: str | None = None
     """parent parallel id if node is in parallel"""
-    parent_parallel_start_node_id: Optional[str] = None
+    parent_parallel_start_node_id: str | None = None
     """parent parallel start node id if node is in parallel"""
-    in_iteration_id: Optional[str] = None
+    in_iteration_id: str | None = None
     """iteration id if node is in iteration"""
-    in_loop_id: Optional[str] = None
+    in_loop_id: str | None = None
     """loop id if node is in loop"""
     start_at: datetime
-    parallel_mode_run_id: Optional[str] = None
+    parallel_mode_run_id: str | None = None
     """iteration run in parallel mode run id"""
-    agent_strategy: Optional[AgentNodeStrategyInit] = None
+    agent_strategy: AgentNodeStrategyInit | None = None

 class QueueNodeSucceededEvent(AppQueueEvent):
@@ -397,30 +397,30 @@
     node_id: str
     node_type: NodeType
    node_data: BaseNodeData
-    parallel_id: Optional[str] = None
+    parallel_id: str | None = None
     """parallel id if node is in parallel"""
-    parallel_start_node_id: Optional[str] = None
+    parallel_start_node_id: str | None = None
     """parallel start node id if node is in parallel"""
-    parent_parallel_id: Optional[str] = None
+    parent_parallel_id: str | None = None
     """parent parallel id if node is in parallel"""
-    parent_parallel_start_node_id: Optional[str] = None
+    parent_parallel_start_node_id: str | None = None
     """parent parallel start node id if node is in parallel"""
-    in_iteration_id: Optional[str] = None
+    in_iteration_id: str | None = None
     """iteration id if node is in iteration"""
-    in_loop_id: Optional[str] = None
+    in_loop_id: str | None = None
     """loop id if node is in loop"""
     start_at: datetime

-    inputs: Optional[Mapping[str, Any]] = None
-    process_data: Optional[Mapping[str, Any]] = None
-    outputs: Optional[Mapping[str, Any]] = None
-    execution_metadata: Optional[Mapping[WorkflowNodeExecutionMetadataKey, Any]] = None
+    inputs: Mapping[str, Any] | None = None
+    process_data: Mapping[str, Any] | None = None
+    outputs: Mapping[str, Any] | None = None
+    execution_metadata: Mapping[WorkflowNodeExecutionMetadataKey, Any] | None = None

-    error: Optional[str] = None
+    error: str | None = None
     """single iteration duration map"""
-    iteration_duration_map: Optional[dict[str, float]] = None
+    iteration_duration_map: dict[str, float] | None = None
     """single loop duration map"""
-    loop_duration_map: Optional[dict[str, float]] = None
+    loop_duration_map: dict[str, float] | None = None

 class QueueAgentLogEvent(AppQueueEvent):
@@ -436,7 +436,7 @@
     error: str | None = None
     status: str
     data: Mapping[str, Any]
-    metadata: Optional[Mapping[str, Any]] = None
+    metadata: Mapping[str, Any] | None = None
     node_id: str
@@ -445,10 +445,10 @@ class QueueNodeRetryEvent(QueueNodeStartedEvent):
     event: QueueEvent = QueueEvent.RETRY

-    inputs: Optional[Mapping[str, Any]] = None
-    process_data: Optional[Mapping[str, Any]] = None
-    outputs: Optional[Mapping[str, Any]] = None
-    execution_metadata: Optional[Mapping[WorkflowNodeExecutionMetadataKey, Any]] = None
+    inputs: Mapping[str, Any] | None = None
+    process_data: Mapping[str, Any] | None = None
+    outputs: Mapping[str, Any] | None = None
+    execution_metadata: Mapping[WorkflowNodeExecutionMetadataKey, Any] | None = None
     error: str
     retry_index: int  # retry index
@@ -465,24 +465,24 @@ class QueueNodeInIterationFailedEvent(AppQueueEvent):
     node_id: str
     node_type: NodeType
     node_data: BaseNodeData
-    parallel_id: Optional[str] = None
+    parallel_id: str | None = None
     """parallel id if node is in parallel"""
-    parallel_start_node_id: Optional[str] = None
+    parallel_start_node_id: str | None = None
     """parallel start node id if node is in parallel"""
-    parent_parallel_id: Optional[str] = None
+    parent_parallel_id: str | None = None
     """parent parallel id if node is in parallel"""
-    parent_parallel_start_node_id: Optional[str] = None
+    parent_parallel_start_node_id: str | None = None
     """parent parallel start node id if node is in parallel"""
-    in_iteration_id: Optional[str] = None
+    in_iteration_id: str | None = None
     """iteration id if node is in iteration"""
-    in_loop_id: Optional[str] = None
+    in_loop_id: str | None = None
     """loop id if node is in loop"""
     start_at: datetime

-    inputs: Optional[Mapping[str, Any]] = None
-    process_data: Optional[Mapping[str, Any]] = None
-    outputs: Optional[Mapping[str, Any]] = None
-    execution_metadata: Optional[Mapping[WorkflowNodeExecutionMetadataKey, Any]] = None
+    inputs: Mapping[str, Any] | None = None
+    process_data: Mapping[str, Any] | None = None
+    outputs: Mapping[str, Any] | None = None
+    execution_metadata: Mapping[WorkflowNodeExecutionMetadataKey, Any] | None = None
     error: str
@@ -498,24 +498,24 @@ class QueueNodeInLoopFailedEvent(AppQueueEvent):
     node_id: str
     node_type: NodeType
     node_data: BaseNodeData
-    parallel_id: Optional[str] = None
+    parallel_id: str | None = None
     """parallel id if node is in parallel"""
-    parallel_start_node_id: Optional[str] = None
+    parallel_start_node_id: str | None = None
     """parallel start node id if node is in parallel"""
-    parent_parallel_id: Optional[str] = None
+    parent_parallel_id: str | None = None
     """parent parallel id if node is in parallel"""
-    parent_parallel_start_node_id: Optional[str] = None
+    parent_parallel_start_node_id: str | None = None
     """parent parallel start node id if node is in parallel"""
-    in_iteration_id: Optional[str] = None
+    in_iteration_id: str | None = None
     """iteration id if node is in iteration"""
-    in_loop_id: Optional[str] = None
+    in_loop_id: str | None = None
     """loop id if node is in loop"""
     start_at: datetime

-    inputs: Optional[Mapping[str, Any]] = None
-    process_data: Optional[Mapping[str, Any]] = None
-    outputs: Optional[Mapping[str, Any]] = None
-    execution_metadata: Optional[Mapping[WorkflowNodeExecutionMetadataKey, Any]] = None
+    inputs: Mapping[str, Any] | None = None
+    process_data: Mapping[str, Any] | None = None
+    outputs: Mapping[str, Any] | None = None
+    execution_metadata: Mapping[WorkflowNodeExecutionMetadataKey, Any] | None = None
     error: str
@@ -531,24 +531,24 @@ class QueueNodeExceptionEvent(AppQueueEvent):
     node_id: str
     node_type: NodeType
     node_data: BaseNodeData
-    parallel_id: Optional[str] = None
+    parallel_id: str | None = None
     """parallel id if node is in parallel"""
-    parallel_start_node_id: Optional[str] = None
+    parallel_start_node_id: str | None = None
     """parallel start node id if node is in parallel"""
-    parent_parallel_id: Optional[str] = None
+    parent_parallel_id: str | None = None
     """parent parallel id if node is in parallel"""
-    parent_parallel_start_node_id: Optional[str] = None
+    parent_parallel_start_node_id: str | None = None
     """parent parallel start node id if node is in parallel"""
-    in_iteration_id: Optional[str] = None
+    in_iteration_id: str | None = None
     """iteration id if node is in iteration"""
-    in_loop_id: Optional[str] = None
+    in_loop_id: str | None = None
     """loop id if node is in loop"""
     start_at: datetime

-    inputs: Optional[Mapping[str, Any]] = None
-    process_data: Optional[Mapping[str, Any]] = None
-    outputs: Optional[Mapping[str, Any]] = None
-    execution_metadata: Optional[Mapping[WorkflowNodeExecutionMetadataKey, Any]] = None
+    inputs: Mapping[str, Any] | None = None
+    process_data: Mapping[str, Any] | None = None
+    outputs: Mapping[str, Any] | None = None
+    execution_metadata: Mapping[WorkflowNodeExecutionMetadataKey, Any] | None = None
     error: str
@@ -564,24 +564,24 @@ class QueueNodeFailedEvent(AppQueueEvent):
     node_id: str
     node_type: NodeType
     node_data: BaseNodeData
-    parallel_id: Optional[str] = None
+    parallel_id: str | None = None
     """parallel id if node is in parallel"""
-    parallel_start_node_id: Optional[str] = None
+    parallel_start_node_id: str | None = None
     """parallel start node id if node is in parallel"""
-    parent_parallel_id: Optional[str] = None
+    parent_parallel_id: str | None = None
     """parent parallel id if node is in parallel"""
-    parent_parallel_start_node_id: Optional[str] = None
+    parent_parallel_start_node_id: str | None = None
     """parent parallel start node id if node is in parallel"""
-    in_iteration_id: Optional[str] = None
+    in_iteration_id: str | None = None
     """iteration id if node is in iteration"""
-    in_loop_id: Optional[str] = None
+    in_loop_id: str | None = None
     """loop id if node is in loop"""
     start_at: datetime

-    inputs: Optional[Mapping[str, Any]] = None
-    process_data: Optional[Mapping[str, Any]] = None
-    outputs: Optional[Mapping[str, Any]] = None
-    execution_metadata: Optional[Mapping[WorkflowNodeExecutionMetadataKey, Any]] = None
+    inputs: Mapping[str, Any] | None = None
+    process_data: Mapping[str, Any] | None = None
+    outputs: Mapping[str, Any] | None = None
+    execution_metadata: Mapping[WorkflowNodeExecutionMetadataKey, Any] | None = None
     error: str
@@ -610,7 +610,7 @@ class QueueErrorEvent(AppQueueEvent):
     """

     event: QueueEvent = QueueEvent.ERROR
-    error: Optional[Any] = None
+    error: Any | None = None

 class QueuePingEvent(AppQueueEvent):
@@ -689,13 +689,13 @@ class QueueParallelBranchRunStartedEvent(AppQueueEvent):
     parallel_id: str
     parallel_start_node_id: str
-    parent_parallel_id: Optional[str] = None
+    parent_parallel_id: str | None = None
     """parent parallel id if node is in parallel"""
-    parent_parallel_start_node_id: Optional[str] = None
+    parent_parallel_start_node_id: str | None = None
     """parent parallel start node id if node is in parallel"""
-    in_iteration_id: Optional[str] = None
+    in_iteration_id: str | None = None
     """iteration id if node is in iteration"""
-    in_loop_id: Optional[str] = None
+    in_loop_id: str | None = None
     """loop id if node is in loop"""
@@ -708,13 +708,13 @@ class QueueParallelBranchRunSucceededEvent(AppQueueEvent):
     parallel_id: str
     parallel_start_node_id: str
-    parent_parallel_id: Optional[str] = None
+    parent_parallel_id: str | None = None
     """parent parallel id if node is in parallel"""
-    parent_parallel_start_node_id: Optional[str] = None
+    parent_parallel_start_node_id: str | None = None
     """parent parallel start node id if node is in parallel"""
-    in_iteration_id: Optional[str] = None
+    in_iteration_id: str | None = None
     """iteration id if node is in iteration"""
-    in_loop_id: Optional[str] = None
+    in_loop_id: str | None = None
     """loop id if node is in loop"""
@@ -727,12 +727,12 @@ class QueueParallelBranchRunFailedEvent(AppQueueEvent):
     parallel_id: str
     parallel_start_node_id: str
-    parent_parallel_id: Optional[str] = None
+    parent_parallel_id: str | None = None
     """parent parallel id if node is in parallel"""
-    parent_parallel_start_node_id: Optional[str] = None
+    parent_parallel_start_node_id: str | None = None
     """parent parallel start node id if node is in parallel"""
-    in_iteration_id: Optional[str] = None
+    in_iteration_id: str | None = None
     """iteration id if node is in iteration"""
-    in_loop_id: Optional[str] = None
+    in_loop_id: str | None = None
     """loop id if node is in loop"""
     error: str
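One subclassing detail from this file: `QueueNodeRetryEvent` extends `QueueNodeStartedEvent` and re-declares several `... | None` fields while adding a required `error: str`. Pydantic lets a subclass's annotations override the parent's field definitions, which is what makes that hunk valid. A toy illustration (not the real event classes):

```python
from pydantic import BaseModel

class NodeStarted(BaseModel):
    node_id: str
    outputs: dict | None = None

class NodeRetry(NodeStarted):
    # Re-declaring a field overrides the inherited definition;
    # `error` is newly required on the subclass.
    outputs: dict | None = None
    error: str

print(NodeRetry(node_id="n1", error="timeout"))
```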
@@ -1,6 +1,6 @@
 from collections.abc import Mapping, Sequence
 from enum import StrEnum, auto
-from typing import Any, Optional
+from typing import Any

 from pydantic import BaseModel, ConfigDict, Field
@@ -110,7 +110,7 @@ class MessageStreamResponse(StreamResponse):
     event: StreamEvent = StreamEvent.MESSAGE
     id: str
     answer: str
-    from_variable_selector: Optional[list[str]] = None
+    from_variable_selector: list[str] | None = None

 class MessageAudioStreamResponse(StreamResponse):
@@ -139,7 +139,7 @@ class MessageEndStreamResponse(StreamResponse):
     event: StreamEvent = StreamEvent.MESSAGE_END
     id: str
     metadata: dict = Field(default_factory=dict)
-    files: Optional[Sequence[Mapping[str, Any]]] = None
+    files: Sequence[Mapping[str, Any]] | None = None

 class MessageFileStreamResponse(StreamResponse):
@@ -172,12 +172,12 @@ class AgentThoughtStreamResponse(StreamResponse):
     event: StreamEvent = StreamEvent.AGENT_THOUGHT
     id: str
     position: int
-    thought: Optional[str] = None
-    observation: Optional[str] = None
-    tool: Optional[str] = None
-    tool_labels: Optional[dict] = None
-    tool_input: Optional[str] = None
-    message_files: Optional[list[str]] = None
+    thought: str | None = None
+    observation: str | None = None
+    tool: str | None = None
+    tool_labels: dict | None = None
+    tool_input: str | None = None
+    message_files: list[str] | None = None

 class AgentMessageStreamResponse(StreamResponse):
@@ -223,16 +223,16 @@ class WorkflowFinishStreamResponse(StreamResponse):
         id: str
         workflow_id: str
         status: str
-        outputs: Optional[Mapping[str, Any]] = None
-        error: Optional[str] = None
+        outputs: Mapping[str, Any] | None = None
+        error: str | None = None
         elapsed_time: float
         total_tokens: int
         total_steps: int
-        created_by: Optional[dict] = None
+        created_by: dict | None = None
         created_at: int
         finished_at: int
-        exceptions_count: Optional[int] = 0
-        files: Optional[Sequence[Mapping[str, Any]]] = []
+        exceptions_count: int | None = 0
+        files: Sequence[Mapping[str, Any]] | None = []

     event: StreamEvent = StreamEvent.WORKFLOW_FINISHED
     workflow_run_id: str
@@ -254,18 +254,18 @@ class NodeStartStreamResponse(StreamResponse):
         node_type: str
         title: str
         index: int
-        predecessor_node_id: Optional[str] = None
-        inputs: Optional[Mapping[str, Any]] = None
+        predecessor_node_id: str | None = None
+        inputs: Mapping[str, Any] | None = None
         created_at: int
         extras: dict = Field(default_factory=dict)
-        parallel_id: Optional[str] = None
-        parallel_start_node_id: Optional[str] = None
-        parent_parallel_id: Optional[str] = None
-        parent_parallel_start_node_id: Optional[str] = None
-        iteration_id: Optional[str] = None
-        loop_id: Optional[str] = None
-        parallel_run_id: Optional[str] = None
-        agent_strategy: Optional[AgentNodeStrategyInit] = None
+        parallel_id: str | None = None
+        parallel_start_node_id: str | None = None
+        parent_parallel_id: str | None = None
+        parent_parallel_start_node_id: str | None = None
+        iteration_id: str | None = None
+        loop_id: str | None = None
+        parallel_run_id: str | None = None
+        agent_strategy: AgentNodeStrategyInit | None = None

     event: StreamEvent = StreamEvent.NODE_STARTED
     workflow_run_id: str
@@ -311,23 +311,23 @@ class NodeFinishStreamResponse(StreamResponse):
         node_type: str
         title: str
         index: int
-        predecessor_node_id: Optional[str] = None
-        inputs: Optional[Mapping[str, Any]] = None
-        process_data: Optional[Mapping[str, Any]] = None
-        outputs: Optional[Mapping[str, Any]] = None
+        predecessor_node_id: str | None = None
+        inputs: Mapping[str, Any] | None = None
+        process_data: Mapping[str, Any] | None = None
+        outputs: Mapping[str, Any] | None = None
         status: str
-        error: Optional[str] = None
+        error: str | None = None
         elapsed_time: float
-        execution_metadata: Optional[Mapping[WorkflowNodeExecutionMetadataKey, Any]] = None
+        execution_metadata: Mapping[WorkflowNodeExecutionMetadataKey, Any] | None = None
         created_at: int
         finished_at: int
-        files: Optional[Sequence[Mapping[str, Any]]] = []
-        parallel_id: Optional[str] = None
-        parallel_start_node_id: Optional[str] = None
-        parent_parallel_id: Optional[str] = None
-        parent_parallel_start_node_id: Optional[str] = None
-        iteration_id: Optional[str] = None
-        loop_id: Optional[str] = None
+        files: Sequence[Mapping[str, Any]] | None = []
+        parallel_id: str | None = None
+        parallel_start_node_id: str | None = None
+        parent_parallel_id: str | None = None
+        parent_parallel_start_node_id: str | None = None
+        iteration_id: str | None = None
+        loop_id: str | None = None

     event: StreamEvent = StreamEvent.NODE_FINISHED
     workflow_run_id: str
@@ -380,23 +380,23 @@ class NodeRetryStreamResponse(StreamResponse):
         node_type: str
         title: str
         index: int
-        predecessor_node_id: Optional[str] = None
-        inputs: Optional[Mapping[str, Any]] = None
-        process_data: Optional[Mapping[str, Any]] = None
-        outputs: Optional[Mapping[str, Any]] = None
+        predecessor_node_id: str | None = None
+        inputs: Mapping[str, Any] | None = None
+        process_data: Mapping[str, Any] | None = None
+        outputs: Mapping[str, Any] | None = None
         status: str
-        error: Optional[str] = None
+        error: str | None = None
         elapsed_time: float
-        execution_metadata: Optional[Mapping[WorkflowNodeExecutionMetadataKey, Any]] = None
+        execution_metadata: Mapping[WorkflowNodeExecutionMetadataKey, Any] | None = None
         created_at: int
         finished_at: int
-        files: Optional[Sequence[Mapping[str, Any]]] = []
-        parallel_id: Optional[str] = None
-        parallel_start_node_id: Optional[str] = None
-        parent_parallel_id: Optional[str] = None
-        parent_parallel_start_node_id: Optional[str] = None
-        iteration_id: Optional[str] = None
-        loop_id: Optional[str] = None
+        files: Sequence[Mapping[str, Any]] | None = []
+        parallel_id: str | None = None
+        parallel_start_node_id: str | None = None
+        parent_parallel_id: str | None = None
+        parent_parallel_start_node_id: str | None = None
+        iteration_id: str | None = None
+        loop_id: str | None = None
         retry_index: int = 0

     event: StreamEvent = StreamEvent.NODE_RETRY
@@ -448,10 +448,10 @@ class ParallelBranchStartStreamResponse(StreamResponse):
         parallel_id: str
         parallel_branch_id: str
-        parent_parallel_id: Optional[str] = None
-        parent_parallel_start_node_id: Optional[str] = None
-        iteration_id: Optional[str] = None
-        loop_id: Optional[str] = None
+        parent_parallel_id: str | None = None
+        parent_parallel_start_node_id: str | None = None
+        iteration_id: str | None = None
+        loop_id: str | None = None
         created_at: int

     event: StreamEvent = StreamEvent.PARALLEL_BRANCH_STARTED
@@ -471,12 +471,12 @@ class ParallelBranchFinishedStreamResponse(StreamResponse):
         parallel_id: str
         parallel_branch_id: str
-        parent_parallel_id: Optional[str] = None
-        parent_parallel_start_node_id: Optional[str] = None
-        iteration_id: Optional[str] = None
-        loop_id: Optional[str] = None
+        parent_parallel_id: str | None = None
+        parent_parallel_start_node_id: str | None = None
+        iteration_id: str | None = None
+        loop_id: str | None = None
         status: str
-        error: Optional[str] = None
+        error: str | None = None
         created_at: int

     event: StreamEvent = StreamEvent.PARALLEL_BRANCH_FINISHED
@@ -502,8 +502,8 @@ class IterationNodeStartStreamResponse(StreamResponse):
         extras: dict = Field(default_factory=dict)
         metadata: Mapping = {}
         inputs: Mapping = {}
-        parallel_id: Optional[str] = None
-        parallel_start_node_id: Optional[str] = None
+        parallel_id: str | None = None
+        parallel_start_node_id: str | None = None

     event: StreamEvent = StreamEvent.ITERATION_STARTED
     workflow_run_id: str
@@ -526,12 +526,12 @@ class IterationNodeNextStreamResponse(StreamResponse):
         title: str
         index: int
         created_at: int
-        pre_iteration_output: Optional[Any] = None
+        pre_iteration_output: Any | None = None
         extras: dict = Field(default_factory=dict)
-        parallel_id: Optional[str] = None
-        parallel_start_node_id: Optional[str] = None
-        parallel_mode_run_id: Optional[str] = None
-        duration: Optional[float] = None
+        parallel_id: str | None = None
+        parallel_start_node_id: str | None = None
+        parallel_mode_run_id: str | None = None
+        duration: float | None = None

     event: StreamEvent = StreamEvent.ITERATION_NEXT
     workflow_run_id: str
@@ -552,19 +552,19 @@ class IterationNodeCompletedStreamResponse(StreamResponse):
         node_id: str
         node_type: str
         title: str
-        outputs: Optional[Mapping] = None
+        outputs: Mapping | None = None
         created_at: int
-        extras: Optional[dict] = None
-        inputs: Optional[Mapping] = None
+        extras: dict | None = None
+        inputs: Mapping | None = None
         status: WorkflowNodeExecutionStatus
-        error: Optional[str] = None
+        error: str | None = None
         elapsed_time: float
         total_tokens: int
-        execution_metadata: Optional[Mapping] = None
+        execution_metadata: Mapping | None = None
         finished_at: int
         steps: int
-        parallel_id: Optional[str] = None
-        parallel_start_node_id: Optional[str] = None
+        parallel_id: str | None = None
+        parallel_start_node_id: str | None = None

     event: StreamEvent = StreamEvent.ITERATION_COMPLETED
     workflow_run_id: str
@@ -589,8 +589,8 @@ class LoopNodeStartStreamResponse(StreamResponse):
         extras: dict = Field(default_factory=dict)
         metadata: Mapping = {}
         inputs: Mapping = {}
-        parallel_id: Optional[str] = None
-        parallel_start_node_id: Optional[str] = None
+        parallel_id: str | None = None
+        parallel_start_node_id: str | None = None

     event: StreamEvent = StreamEvent.LOOP_STARTED
     workflow_run_id: str
@@ -613,12 +613,12 @@ class LoopNodeNextStreamResponse(StreamResponse):
         title: str
         index: int
         created_at: int
-        pre_loop_output: Optional[Any] = None
+        pre_loop_output: Any | None = None
         extras: dict = Field(default_factory=dict)
-        parallel_id: Optional[str] = None
-        parallel_start_node_id: Optional[str] = None
-        parallel_mode_run_id: Optional[str] = None
-        duration: Optional[float] = None
+        parallel_id: str | None = None
+        parallel_start_node_id: str | None = None
+        parallel_mode_run_id: str | None = None
+        duration: float | None = None

     event: StreamEvent = StreamEvent.LOOP_NEXT
     workflow_run_id: str
@@ -639,19 +639,19 @@ class LoopNodeCompletedStreamResponse(StreamResponse):
         node_id: str
         node_type: str
        title: str
-        outputs: Optional[Mapping] = None
+        outputs: Mapping | None = None
         created_at: int
-        extras: Optional[dict] = None
-        inputs: Optional[Mapping] = None
+        extras: dict | None = None
+        inputs: Mapping | None = None
         status: WorkflowNodeExecutionStatus
-        error: Optional[str] = None
+        error: str | None = None
         elapsed_time: float
         total_tokens: int
-        execution_metadata: Optional[Mapping] = None
+        execution_metadata: Mapping | None = None
         finished_at: int
         steps: int
-        parallel_id: Optional[str] = None
-        parallel_start_node_id: Optional[str] = None
+        parallel_id: str | None = None
+        parallel_start_node_id: str | None = None

     event: StreamEvent = StreamEvent.LOOP_COMPLETED
     workflow_run_id: str
@@ -669,7 +669,7 @@ class TextChunkStreamResponse(StreamResponse):
         """

         text: str
-        from_variable_selector: Optional[list[str]] = None
+        from_variable_selector: list[str] | None = None

     event: StreamEvent = StreamEvent.TEXT_CHUNK
     data: Data
@@ -731,7 +731,7 @@ class WorkflowAppStreamResponse(AppStreamResponse):
     WorkflowAppStreamResponse entity
     """

-    workflow_run_id: Optional[str] = None
+    workflow_run_id: str | None = None

 class AppBlockingResponse(BaseModel):
@@ -796,8 +796,8 @@ class WorkflowAppBlockingResponse(AppBlockingResponse):
         id: str
         workflow_id: str
         status: str
-        outputs: Optional[Mapping[str, Any]] = None
-        error: Optional[str] = None
+        outputs: Mapping[str, Any] | None = None
+        error: str | None = None
         elapsed_time: float
         total_tokens: int
         total_steps: int
@@ -825,7 +825,7 @@ class AgentLogStreamResponse(StreamResponse):
         error: str | None = None
         status: str
         data: Mapping[str, Any]
-        metadata: Optional[Mapping[str, Any]] = None
+        metadata: Mapping[str, Any] | None = None
         node_id: str

     event: StreamEvent = StreamEvent.AGENT_LOG
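A detail worth flagging in `WorkflowFinishStreamResponse`, `NodeFinishStreamResponse`, and `NodeRetryStreamResponse` above: fields such as `files: Sequence[Mapping[str, Any]] | None = []` keep a mutable default. That would be a classic bug with plain classes or dataclasses, but Pydantic copies default values per instance, so the annotation rewrite leaves behavior unchanged. A quick demonstration (assuming Pydantic v2; toy model):

```python
from pydantic import BaseModel

class Response(BaseModel):
    files: list[str] | None = []

a, b = Response(), Response()
assert a.files is not None
a.files.append("report.pdf")
assert b.files == []  # defaults are copied, not shared, across instances
```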
@@ -1,5 +1,4 @@
 import logging
-from typing import Optional

 from sqlalchemy import select
@@ -17,7 +16,7 @@ logger = logging.getLogger(__name__)
 class AnnotationReplyFeature:
     def query(
         self, app_record: App, message: Message, query: str, user_id: str, invoke_from: InvokeFrom
-    ) -> Optional[MessageAnnotation]:
+    ) -> MessageAnnotation | None:
         """
         Query app annotations to reply
         :param app_record: app record
@@ -3,7 +3,7 @@ import time
 import uuid
 from collections.abc import Generator, Mapping
 from datetime import timedelta
-from typing import Any, Optional, Union
+from typing import Any, Union

 from core.errors.error import AppInvokeQuotaExceededError
 from extensions.ext_redis import redis_client
@@ -63,7 +63,7 @@ class RateLimit:
             if timeout_requests:
                 redis_client.hdel(self.active_requests_key, *timeout_requests)

-    def enter(self, request_id: Optional[str] = None) -> str:
+    def enter(self, request_id: str | None = None) -> str:
         if self.disabled():
             return RateLimit._UNLIMITED_REQUEST_ID
         if time.time() - self.last_recalculate_time > RateLimit._ACTIVE_REQUESTS_COUNT_FLUSH_INTERVAL:
@@ -1,6 +1,5 @@
 import logging
 import time
-from typing import Optional

 from sqlalchemy import select
 from sqlalchemy.orm import Session
@@ -101,7 +100,7 @@ class BasedGenerateTaskPipeline:
         """
         return PingStreamResponse(task_id=self._application_generate_entity.task_id)

-    def _init_output_moderation(self) -> Optional[OutputModeration]:
+    def _init_output_moderation(self) -> OutputModeration | None:
         """
         Init output moderation.
         :return:
@@ -118,7 +117,7 @@ class BasedGenerateTaskPipeline:
             )
         return None

-    def handle_output_moderation_when_task_finished(self, completion: str) -> Optional[str]:
+    def handle_output_moderation_when_task_finished(self, completion: str) -> str | None:
         """
         Handle output moderation when task finished.
         :param completion: completion
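Both helpers above now advertise the conventional "absent result" contract, `-> X | None`, which forces callers (and type checkers) to narrow before use. A minimal illustration of the caller side (hypothetical function, not from the codebase):

```python
def moderated_text(completion: str) -> str | None:
    # Return a replacement string, or None when moderation leaves the text alone.
    return None if "ok" in completion else "[flagged]"

result = moderated_text("this is ok")
if result is not None:  # narrowing required before using str methods
    print(result.upper())
```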
@@ -2,7 +2,7 @@ import logging
 import time
 from collections.abc import Generator
 from threading import Thread
-from typing import Optional, Union, cast
+from typing import Union, cast

 from sqlalchemy import select
 from sqlalchemy.orm import Session
@@ -109,7 +109,7 @@ class EasyUIBasedGenerateTaskPipeline(BasedGenerateTaskPipeline):
             task_state=self._task_state,
         )

-        self._conversation_name_generate_thread: Optional[Thread] = None
+        self._conversation_name_generate_thread: Thread | None = None

     def process(
         self,
@@ -209,7 +209,7 @@ class EasyUIBasedGenerateTaskPipeline(BasedGenerateTaskPipeline):
         return None

     def _wrapper_process_stream_response(
-        self, trace_manager: Optional[TraceQueueManager] = None
+        self, trace_manager: TraceQueueManager | None = None
     ) -> Generator[StreamResponse, None, None]:
         tenant_id = self._application_generate_entity.app_config.tenant_id
         task_id = self._application_generate_entity.task_id
@@ -252,7 +252,7 @@ class EasyUIBasedGenerateTaskPipeline(BasedGenerateTaskPipeline):
                 yield MessageAudioEndStreamResponse(audio="", task_id=task_id)

     def _process_stream_response(
-        self, publisher: Optional[AppGeneratorTTSPublisher], trace_manager: Optional[TraceQueueManager] = None
+        self, publisher: AppGeneratorTTSPublisher | None, trace_manager: TraceQueueManager | None = None
     ) -> Generator[StreamResponse, None, None]:
         """
         Process stream response.
@@ -362,7 +362,7 @@ class EasyUIBasedGenerateTaskPipeline(BasedGenerateTaskPipeline):
         if self._conversation_name_generate_thread:
             self._conversation_name_generate_thread.join()

-    def _save_message(self, *, session: Session, trace_manager: Optional[TraceQueueManager] = None):
+    def _save_message(self, *, session: Session, trace_manager: TraceQueueManager | None = None):
         """
         Save message.
         :return:
         """
@@ -466,14 +466,14 @@ class EasyUIBasedGenerateTaskPipeline(BasedGenerateTaskPipeline):
             task_id=self._application_generate_entity.task_id, id=message_id, answer=answer
         )

-    def _agent_thought_to_stream_response(self, event: QueueAgentThoughtEvent) -> Optional[AgentThoughtStreamResponse]:
+    def _agent_thought_to_stream_response(self, event: QueueAgentThoughtEvent) -> AgentThoughtStreamResponse | None:
         """
         Agent thought to stream response.
         :param event: agent thought event
         :return:
         """
         with Session(db.engine, expire_on_commit=False) as session:
-            agent_thought: Optional[MessageAgentThought] = (
+            agent_thought: MessageAgentThought | None = (
                 session.query(MessageAgentThought).where(MessageAgentThought.id == event.agent_thought_id).first()
             )
@@ -1,6 +1,6 @@
 import logging
 from threading import Thread
-from typing import Optional, Union
+from typing import Union

 from flask import Flask, current_app
 from sqlalchemy import select
@@ -52,7 +52,7 @@ class MessageCycleManager:
         self._application_generate_entity = application_generate_entity
         self._task_state = task_state

-    def generate_conversation_name(self, *, conversation_id: str, query: str) -> Optional[Thread]:
+    def generate_conversation_name(self, *, conversation_id: str, query: str) -> Thread | None:
         """
         Generate conversation name.
         :param conversation_id: conversation id
@@ -111,7 +111,7 @@ class MessageCycleManager:
             db.session.commit()
             db.session.close()

-    def handle_annotation_reply(self, event: QueueAnnotationReplyEvent) -> Optional[MessageAnnotation]:
+    def handle_annotation_reply(self, event: QueueAnnotationReplyEvent) -> MessageAnnotation | None:
         """
         Handle annotation reply.
         :param event: event
@@ -141,7 +141,7 @@ class MessageCycleManager:
         if self._application_generate_entity.app_config.additional_features.show_retrieve_source:
             self._task_state.metadata.retriever_resources = event.retriever_resources

-    def message_file_to_stream_response(self, event: QueueMessageFileEvent) -> Optional[MessageFileStreamResponse]:
+    def message_file_to_stream_response(self, event: QueueMessageFileEvent) -> MessageFileStreamResponse | None:
         """
         Message file to stream response.
         :param event: event
@@ -180,7 +180,7 @@ class MessageCycleManager:
         return None

     def message_to_stream_response(
-        self, answer: str, message_id: str, from_variable_selector: Optional[list[str]] = None
+        self, answer: str, message_id: str, from_variable_selector: list[str] | None = None
     ) -> MessageStreamResponse:
         """
         Message to stream response.
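The `Thread | None` return of `generate_conversation_name` pairs with the guarded `join()` seen earlier in the EasyUI pipeline: the manager may or may not spawn a background thread, and the caller joins only when one exists. A sketch of that contract (illustrative worker):

```python
import threading

def maybe_start_worker(should_run: bool) -> threading.Thread | None:
    if not should_run:
        return None
    worker = threading.Thread(target=lambda: print("naming conversation..."))
    worker.start()
    return worker

thread = maybe_start_worker(True)
if thread:  # join only when a thread was actually started
    thread.join()
```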
@@ -5,7 +5,6 @@ import queue
 import re
 import threading
 from collections.abc import Iterable
-from typing import Optional
 
 from core.app.entities.queue_entities import (
     MessageQueueMessage,
@@ -56,7 +55,7 @@ def _process_future(
 
 
 class AppGeneratorTTSPublisher:
-    def __init__(self, tenant_id: str, voice: str, language: Optional[str] = None):
+    def __init__(self, tenant_id: str, voice: str, language: str | None = None):
         self.logger = logging.getLogger(__name__)
         self.tenant_id = tenant_id
         self.msg_text = ""
@@ -73,7 +72,7 @@ class AppGeneratorTTSPublisher:
         if not voice or voice not in values:
            self.voice = self.voices[0].get("value")
         self.max_sentence = 2
-        self._last_audio_event: Optional[AudioTrunk] = None
+        self._last_audio_event: AudioTrunk | None = None
         # FIXME better way to handle this threading.start
         threading.Thread(target=self._runtime).start()
         self.executor = concurrent.futures.ThreadPoolExecutor(max_workers=3)
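One caveat the annotation-only diff hides: `AudioTrunk | None` is evaluated at runtime when it appears in a module or class body, so both operands must be real objects. Quoted forward references cannot take the operator piecewise, which is why this migration has to special-case them. A hedged sketch of the failure mode (the class below is a stand-in, not the real `AudioTrunk`):

```python
class AudioTrunk:  # stand-in so the annotations below can be evaluated
    pass

buffered: AudioTrunk | None = None       # fine: both operands are real types
# buffered: "AudioTrunk" | None          # TypeError: unsupported operand type(s) for |
buffered2: "AudioTrunk | None" = None    # fine: the whole annotation is one lazy string
```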
@@ -1,5 +1,5 @@
 from collections.abc import Iterable, Mapping
-from typing import Any, Optional, TextIO, Union
+from typing import Any, TextIO, Union
 
 from pydantic import BaseModel
@@ -23,7 +23,7 @@ def get_colored_text(text: str, color: str) -> str:
     return f"\u001b[{color_str}m\033[1;3m{text}\u001b[0m"
 
 
-def print_text(text: str, color: Optional[str] = None, end: str = "", file: Optional[TextIO] = None):
+def print_text(text: str, color: str | None = None, end: str = "", file: TextIO | None = None):
     """Print text with highlighting and no end characters."""
     text_to_print = get_colored_text(text, color) if color else text
     print(text_to_print, end=end, file=file)
@@ -34,10 +34,10 @@ def print_text(text: str, color: Optional[str] = None, end: str = "", file: Optional[TextIO] = None):
 class DifyAgentCallbackHandler(BaseModel):
     """Callback Handler that prints to std out."""
 
-    color: Optional[str] = ""
+    color: str | None = ""
     current_loop: int = 1
 
-    def __init__(self, color: Optional[str] = None):
+    def __init__(self, color: str | None = None):
         super().__init__()
         """Initialize callback handler."""
         # use a specific color is not specified
@@ -58,9 +58,9 @@ class DifyAgentCallbackHandler(BaseModel):
         tool_name: str,
         tool_inputs: Mapping[str, Any],
         tool_outputs: Iterable[ToolInvokeMessage] | str,
-        message_id: Optional[str] = None,
-        timer: Optional[Any] = None,
-        trace_manager: Optional[TraceQueueManager] = None,
+        message_id: str | None = None,
+        timer: Any | None = None,
+        trace_manager: TraceQueueManager | None = None,
     ):
         """If not the final action, print out observation."""
         if dify_config.DEBUG:
@@ -98,7 +98,7 @@ class DifyAgentCallbackHandler(BaseModel):
         else:
             print_text("\n[on_agent_start] \nCurrent Loop: " + str(self.current_loop) + "\n", color=self.color)
 
-    def on_agent_finish(self, color: Optional[str] = None, **kwargs: Any):
+    def on_agent_finish(self, color: str | None = None, **kwargs: Any):
         """Run on agent end."""
         if dify_config.DEBUG:
             print_text("\n[on_agent_finish]\n Loop: " + str(self.current_loop) + "\n", color=self.color)
@@ -1,5 +1,5 @@
 from collections.abc import Generator, Iterable, Mapping
-from typing import Any, Optional
+from typing import Any
 
 from core.callback_handler.agent_tool_callback_handler import DifyAgentCallbackHandler, print_text
 from core.ops.ops_trace_manager import TraceQueueManager
@@ -14,9 +14,9 @@ class DifyWorkflowCallbackHandler(DifyAgentCallbackHandler):
         tool_name: str,
         tool_inputs: Mapping[str, Any],
         tool_outputs: Iterable[ToolInvokeMessage],
-        message_id: Optional[str] = None,
-        timer: Optional[Any] = None,
-        trace_manager: Optional[TraceQueueManager] = None,
+        message_id: str | None = None,
+        timer: Any | None = None,
+        trace_manager: TraceQueueManager | None = None,
     ) -> Generator[ToolInvokeMessage, None, None]:
         for tool_output in tool_outputs:
             print_text("\n[on_tool_execution]\n", color=self.color)
@@ -1,11 +1,9 @@
-from typing import Optional
-
 from pydantic import BaseModel
 
 
 class PreviewDetail(BaseModel):
     content: str
-    child_chunks: Optional[list[str]] = None
+    child_chunks: list[str] | None = None
 
 
 class QAPreviewDetail(BaseModel):
@@ -16,4 +14,4 @@ class QAPreviewDetail(BaseModel):
 class IndexingEstimate(BaseModel):
     total_segments: int
     preview: list[PreviewDetail]
-    qa_preview: Optional[list[QAPreviewDetail]] = None
+    qa_preview: list[QAPreviewDetail] | None = None
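Pydantic resolves `list[str] | None` and `Optional[list[str]]` to the same schema, and a field stays required unless it has an explicit default, so the `= None` is still doing the work in these models. A minimal sketch with a hypothetical stand-in model:

```python
from pydantic import BaseModel


class PreviewDemo(BaseModel):  # hypothetical stand-in for PreviewDetail
    content: str
    child_chunks: list[str] | None = None

p = PreviewDemo(content="chunk text")
assert p.child_chunks is None  # optional because of `= None`, not because of the union
# PreviewDemo(child_chunks=["a"]) would still raise: `content` remains required
```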
@@ -1,6 +1,5 @@
 from collections.abc import Sequence
 from enum import StrEnum, auto
-from typing import Optional
 
 from pydantic import BaseModel, ConfigDict
@@ -29,8 +28,8 @@ class SimpleModelProviderEntity(BaseModel):
     provider: str
     label: I18nObject
-    icon_small: Optional[I18nObject] = None
-    icon_large: Optional[I18nObject] = None
+    icon_small: I18nObject | None = None
+    icon_large: I18nObject | None = None
     supported_model_types: list[ModelType]
 
     def __init__(self, provider_entity: ProviderEntity):
@@ -92,8 +91,8 @@ class DefaultModelProviderEntity(BaseModel):
     provider: str
     label: I18nObject
-    icon_small: Optional[I18nObject] = None
-    icon_large: Optional[I18nObject] = None
+    icon_small: I18nObject | None = None
+    icon_large: I18nObject | None = None
     supported_model_types: Sequence[ModelType] = []
@@ -4,7 +4,6 @@ import re
 from collections import defaultdict
 from collections.abc import Iterator, Sequence
 from json import JSONDecodeError
-from typing import Optional
 
 from pydantic import BaseModel, ConfigDict, Field
 from sqlalchemy import func, select
@@ -92,7 +91,7 @@ class ProviderConfiguration(BaseModel):
         ):
             self.provider.configurate_methods.append(ConfigurateMethod.PREDEFINED_MODEL)
 
-    def get_current_credentials(self, model_type: ModelType, model: str) -> Optional[dict]:
+    def get_current_credentials(self, model_type: ModelType, model: str) -> dict | None:
         """
         Get current credentials.
@@ -165,7 +164,7 @@ class ProviderConfiguration(BaseModel):
         return credentials
 
-    def get_system_configuration_status(self) -> Optional[SystemConfigurationStatus]:
+    def get_system_configuration_status(self) -> SystemConfigurationStatus | None:
         """
         Get system configuration status.
         :return:
@@ -793,9 +792,7 @@ class ProviderConfiguration(BaseModel):
                 stmt = stmt.where(ProviderModelCredential.id != exclude_id)
             return session.execute(stmt).scalar_one_or_none() is not None
 
-    def get_custom_model_credential(
-        self, model_type: ModelType, model: str, credential_id: str | None
-    ) -> Optional[dict]:
+    def get_custom_model_credential(self, model_type: ModelType, model: str, credential_id: str | None) -> dict | None:
         """
         Get custom model credentials.
@@ -1272,7 +1269,7 @@ class ProviderConfiguration(BaseModel):
         return model_setting
 
-    def get_provider_model_setting(self, model_type: ModelType, model: str) -> Optional[ProviderModelSetting]:
+    def get_provider_model_setting(self, model_type: ModelType, model: str) -> ProviderModelSetting | None:
         """
         Get provider model setting.
         :param model_type: model type
@@ -1448,7 +1445,7 @@ class ProviderConfiguration(BaseModel):
     def get_provider_model(
         self, model_type: ModelType, model: str, only_active: bool = False
-    ) -> Optional[ModelWithProviderEntity]:
+    ) -> ModelWithProviderEntity | None:
         """
         Get provider model.
         :param model_type: model type
@@ -1465,7 +1462,7 @@ class ProviderConfiguration(BaseModel):
         return None
 
     def get_provider_models(
-        self, model_type: Optional[ModelType] = None, only_active: bool = False, model: Optional[str] = None
+        self, model_type: ModelType | None = None, only_active: bool = False, model: str | None = None
     ) -> list[ModelWithProviderEntity]:
         """
         Get provider models.
@@ -1649,7 +1646,7 @@ class ProviderConfiguration(BaseModel):
         model_types: Sequence[ModelType],
         provider_schema: ProviderEntity,
         model_setting_map: dict[ModelType, dict[str, ModelSettings]],
-        model: Optional[str] = None,
+        model: str | None = None,
     ) -> list[ModelWithProviderEntity]:
         """
         Get custom provider models.
@@ -1783,7 +1780,7 @@ class ProviderConfigurations(BaseModel):
         super().__init__(tenant_id=tenant_id)
 
     def get_models(
-        self, provider: Optional[str] = None, model_type: Optional[ModelType] = None, only_active: bool = False
+        self, provider: str | None = None, model_type: ModelType | None = None, only_active: bool = False
     ) -> list[ModelWithProviderEntity]:
         """
         Get available models.
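If anything in the codebase introspects these annotations (serializers, validators, dependency injection), note that the two spellings compare equal but are not identical objects: `typing.get_origin` reports `typing.Union` for the old form and `types.UnionType` for the new one on Python 3.10–3.13. A defensive check that accepts both, offered as a sketch rather than code from this PR:

```python
import types
import typing


def is_optional(tp: object) -> bool:
    """True for both Optional[T] and T | None spellings."""
    origin = typing.get_origin(tp)
    return origin in (typing.Union, types.UnionType) and type(None) in typing.get_args(tp)

assert is_optional(typing.Optional[int])
assert is_optional(str | None)
assert not is_optional(str)
```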
@@ -1,5 +1,5 @@
 from enum import StrEnum, auto
-from typing import Optional, Union
+from typing import Union
 
 from pydantic import BaseModel, ConfigDict, Field
@@ -49,7 +49,7 @@ class SystemConfigurationStatus(StrEnum):
 class RestrictModel(BaseModel):
     model: str
-    base_model_name: Optional[str] = None
+    base_model_name: str | None = None
     model_type: ModelType
 
     # pydantic configs
@@ -84,9 +84,9 @@ class SystemConfiguration(BaseModel):
     """
 
     enabled: bool
-    current_quota_type: Optional[ProviderQuotaType] = None
+    current_quota_type: ProviderQuotaType | None = None
     quota_configurations: list[QuotaConfiguration] = []
-    credentials: Optional[dict] = None
+    credentials: dict | None = None
 
 
 class CustomProviderConfiguration(BaseModel):
@@ -95,8 +95,8 @@ class CustomProviderConfiguration(BaseModel):
     """
 
     credentials: dict
-    current_credential_id: Optional[str] = None
-    current_credential_name: Optional[str] = None
+    current_credential_id: str | None = None
+    current_credential_name: str | None = None
     available_credentials: list[CredentialConfiguration] = []
@@ -108,10 +108,10 @@ class CustomModelConfiguration(BaseModel):
     model: str
     model_type: ModelType
     credentials: dict | None = None
-    current_credential_id: Optional[str] = None
-    current_credential_name: Optional[str] = None
+    current_credential_id: str | None = None
+    current_credential_name: str | None = None
     available_model_credentials: list[CredentialConfiguration] = []
-    unadded_to_model_list: Optional[bool] = False
+    unadded_to_model_list: bool | None = False
 
     # pydantic configs
     model_config = ConfigDict(protected_namespaces=())
@@ -131,7 +131,7 @@ class CustomConfiguration(BaseModel):
     Model class for provider custom configuration.
     """
 
-    provider: Optional[CustomProviderConfiguration] = None
+    provider: CustomProviderConfiguration | None = None
     models: list[CustomModelConfiguration] = []
     can_added_models: list[UnaddedModelConfiguration] = []
@@ -205,12 +205,12 @@ class ProviderConfig(BasicProviderConfig):
     scope: AppSelectorScope | ModelSelectorScope | ToolSelectorScope | None = None
     required: bool = False
-    default: Optional[Union[int, str, float, bool]] = None
-    options: Optional[list[Option]] = None
-    label: Optional[I18nObject] = None
-    help: Optional[I18nObject] = None
-    url: Optional[str] = None
-    placeholder: Optional[I18nObject] = None
+    default: Union[int, str, float, bool] | None = None
+    options: list[Option] | None = None
+    label: I18nObject | None = None
+    help: I18nObject | None = None
+    url: str | None = None
+    placeholder: I18nObject | None = None
 
     def to_basic_provider_config(self) -> BasicProviderConfig:
         return BasicProviderConfig(type=self.type, name=self.name)
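The `default` field above ends up mixing the old and new spellings (`Union[int, str, float, bool] | None`). That is harmless: Python flattens nested unions, so the mixed form is equivalent to one flat union, as this quick check (not from the PR) shows:

```python
from typing import Union

# Appending `| None` to an existing typing.Union flattens into a single union.
mixed = Union[int, str, float, bool] | None
assert mixed == (int | str | float | bool | None)
assert type(None) in mixed.__args__
```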
@@ -1,12 +1,9 @@
-from typing import Optional
-
-
 class LLMError(ValueError):
     """Base class for all LLM exceptions."""
 
-    description: Optional[str] = None
+    description: str | None = None
 
-    def __init__(self, description: Optional[str] = None):
+    def __init__(self, description: str | None = None):
         self.description = description
@@ -4,7 +4,7 @@ import logging
 import os
 from enum import StrEnum, auto
 from pathlib import Path
-from typing import Any, Optional
+from typing import Any
 
 from pydantic import BaseModel
@@ -19,12 +19,12 @@ class ExtensionModule(StrEnum):
 class ModuleExtension(BaseModel):
-    extension_class: Optional[Any] = None
+    extension_class: Any | None = None
     name: str
-    label: Optional[dict] = None
-    form_schema: Optional[list] = None
+    label: dict | None = None
+    form_schema: list | None = None
     builtin: bool = True
-    position: Optional[int] = None
+    position: int | None = None
 
 
 class Extensible:
@@ -32,9 +32,9 @@ class Extensible:
     name: str
     tenant_id: str
-    config: Optional[dict] = None
+    config: dict | None = None
 
-    def __init__(self, tenant_id: str, config: Optional[dict] = None):
+    def __init__(self, tenant_id: str, config: dict | None = None):
         self.tenant_id = tenant_id
         self.config = config
@@ -1,5 +1,3 @@
-from typing import Optional
-
 from sqlalchemy import select
 
 from core.extension.api_based_extension_requestor import APIBasedExtensionRequestor
@@ -39,7 +37,7 @@ class ApiExternalDataTool(ExternalDataTool):
         if not api_based_extension:
             raise ValueError("api_based_extension_id is invalid")
 
-    def query(self, inputs: dict, query: Optional[str] = None) -> str:
+    def query(self, inputs: dict, query: str | None = None) -> str:
         """
         Query the external data tool.
@@ -1,5 +1,4 @@
 from abc import ABC, abstractmethod
-from typing import Optional
 
 from core.extension.extensible import Extensible, ExtensionModule
@@ -16,7 +15,7 @@ class ExternalDataTool(Extensible, ABC):
     variable: str
     """the tool variable name of app tool"""
 
-    def __init__(self, tenant_id: str, app_id: str, variable: str, config: Optional[dict] = None):
+    def __init__(self, tenant_id: str, app_id: str, variable: str, config: dict | None = None):
         super().__init__(tenant_id, config)
         self.app_id = app_id
         self.variable = variable
@@ -34,7 +33,7 @@ class ExternalDataTool(Extensible, ABC):
         raise NotImplementedError
 
     @abstractmethod
-    def query(self, inputs: dict, query: Optional[str] = None) -> str:
+    def query(self, inputs: dict, query: str | None = None) -> str:
         """
         Query the external data tool.
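Implementers of the abstract `query` pick up the same spelling. The sketch below uses hypothetical stand-in classes; only the signature mirrors the diff:

```python
from abc import ABC, abstractmethod


class DemoTool(ABC):  # hypothetical stand-in for ExternalDataTool
    @abstractmethod
    def query(self, inputs: dict, query: str | None = None) -> str: ...


class EchoTool(DemoTool):
    def query(self, inputs: dict, query: str | None = None) -> str:
        # `query` is genuinely optional, so the None branch is handled explicitly.
        return query if query is not None else str(inputs)

assert EchoTool().query({"k": "v"}) == "{'k': 'v'}"
assert EchoTool().query({}, query="hello") == "hello"
```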