Co-authored-by: -LAN- <laipz8200@outlook.com>
@@ -1,5 +1,7 @@
import os

+from configs.app_configs import DifyConfigs

if not os.environ.get("DEBUG") or os.environ.get("DEBUG").lower() != 'true':
    from gevent import monkey
@@ -74,10 +76,19 @@ config_type = os.getenv('EDITION', default='SELF_HOSTED')  # ce edition first

# ----------------------------
# Application Factory Function
# ----------------------------

+def create_flask_app_with_configs() -> Flask:
+    """
+    create a raw flask app
+    with configs loaded from .env file
+    """
+    dify_app = DifyApp(__name__)
+    dify_app.config.from_object(Config())
+    dify_app.config.from_mapping(DifyConfigs().model_dump())
+    return dify_app

def create_app() -> Flask:
-    app = DifyApp(__name__)
-    app.config.from_object(Config())
+    app = create_flask_app_with_configs()
    app.secret_key = app.config['SECRET_KEY']
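
A minimal sketch of the loading pattern introduced above, outside Dify's codebase (DemoConfigs and its field are illustrative, not part of this change): pydantic-settings resolves each UPPER_CASE field from the environment or a .env file, and model_dump() flattens the result into a plain dict that Flask's from_mapping() accepts.

import flask
from pydantic_settings import BaseSettings

class DemoConfigs(BaseSettings):
    LOG_LEVEL: str = 'INFO'  # overridable via a LOG_LEVEL env var or a .env entry

app = flask.Flask(__name__)
# model_dump() yields {'LOG_LEVEL': 'INFO'}, so the field lands in app.config
app.config.from_mapping(DemoConfigs().model_dump())
assert app.config['LOG_LEVEL'] == 'INFO'  # holds in a clean environment
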
@@ -2,52 +2,27 @@ import os

import dotenv

dotenv.load_dotenv()

DEFAULTS = {
    'EDITION': 'SELF_HOSTED',
    'DB_USERNAME': 'postgres',
    'DB_PASSWORD': '',
    'DB_HOST': 'localhost',
    'DB_PORT': '5432',
    'DB_DATABASE': 'dify',
    'DB_CHARSET': '',
    'REDIS_HOST': 'localhost',
    'REDIS_PORT': '6379',
    'REDIS_DB': '0',
    'REDIS_USE_SSL': 'False',
    'OAUTH_REDIRECT_PATH': '/console/api/oauth/authorize',
    'OAUTH_REDIRECT_INDEX_PATH': '/',
    'CONSOLE_WEB_URL': 'https://cloud.dify.ai',
    'CONSOLE_API_URL': 'https://cloud.dify.ai',
    'SERVICE_API_URL': 'https://api.dify.ai',
    'APP_WEB_URL': 'https://udify.app',
    'FILES_URL': '',
    'FILES_ACCESS_TIMEOUT': 300,
    'S3_USE_AWS_MANAGED_IAM': 'False',
    'S3_ADDRESS_STYLE': 'auto',
    'STORAGE_TYPE': 'local',
    'STORAGE_LOCAL_PATH': 'storage',
    'CHECK_UPDATE_URL': 'https://updates.dify.ai',
    'DEPLOY_ENV': 'PRODUCTION',
    'SQLALCHEMY_DATABASE_URI_SCHEME': 'postgresql',
    'SQLALCHEMY_POOL_SIZE': 30,
    'SQLALCHEMY_MAX_OVERFLOW': 10,
    'SQLALCHEMY_POOL_RECYCLE': 3600,
    'SQLALCHEMY_POOL_PRE_PING': 'False',
    'SQLALCHEMY_ECHO': 'False',
    'SENTRY_TRACES_SAMPLE_RATE': 1.0,
    'SENTRY_PROFILES_SAMPLE_RATE': 1.0,
    'WEAVIATE_GRPC_ENABLED': 'True',
    'WEAVIATE_BATCH_SIZE': 100,
    'QDRANT_CLIENT_TIMEOUT': 20,
    'QDRANT_GRPC_ENABLED': 'False',
    'QDRANT_GRPC_PORT': '6334',
    'CELERY_BACKEND': 'database',
    'LOG_LEVEL': 'INFO',
    'LOG_FILE': '',
    'LOG_FORMAT': '%(asctime)s.%(msecs)03d %(levelname)s [%(threadName)s] [%(filename)s:%(lineno)d] - %(message)s',
    'LOG_DATEFORMAT': '%Y-%m-%d %H:%M:%S',
    'HOSTED_OPENAI_QUOTA_LIMIT': 200,
    'HOSTED_OPENAI_TRIAL_ENABLED': 'False',
    'HOSTED_OPENAI_TRIAL_MODELS': 'gpt-3.5-turbo,gpt-3.5-turbo-1106,gpt-3.5-turbo-instruct,gpt-3.5-turbo-16k,gpt-3.5-turbo-16k-0613,gpt-3.5-turbo-0613,gpt-3.5-turbo-0125,text-davinci-003',
@@ -62,31 +37,7 @@ DEFAULTS = {
    'HOSTED_MODERATION_PROVIDERS': '',
    'HOSTED_FETCH_APP_TEMPLATES_MODE': 'remote',
    'HOSTED_FETCH_APP_TEMPLATES_REMOTE_DOMAIN': 'https://tmpl.dify.ai',
    'CLEAN_DAY_SETTING': 30,
    'UPLOAD_FILE_SIZE_LIMIT': 15,
    'UPLOAD_FILE_BATCH_LIMIT': 5,
    'UPLOAD_IMAGE_FILE_SIZE_LIMIT': 10,
    'OUTPUT_MODERATION_BUFFER_SIZE': 300,
    'MULTIMODAL_SEND_IMAGE_FORMAT': 'base64',
    'INVITE_EXPIRY_HOURS': 72,
    'BILLING_ENABLED': 'False',
    'CAN_REPLACE_LOGO': 'False',
    'MODEL_LB_ENABLED': 'False',
    'ETL_TYPE': 'dify',
    'KEYWORD_STORE': 'jieba',
    'BATCH_UPLOAD_LIMIT': 20,
    'CODE_EXECUTION_ENDPOINT': 'http://sandbox:8194',
    'CODE_EXECUTION_API_KEY': 'dify-sandbox',
    'TOOL_ICON_CACHE_MAX_AGE': 3600,
    'MILVUS_DATABASE': 'default',
    'KEYWORD_DATA_SOURCE_TYPE': 'database',
    'INNER_API': 'False',
    'ENTERPRISE_ENABLED': 'False',
    'INDEXING_MAX_SEGMENTATION_TOKENS_LENGTH': 1000,
    'WORKFLOW_MAX_EXECUTION_STEPS': 500,
    'WORKFLOW_MAX_EXECUTION_TIME': 1200,
    'WORKFLOW_CALL_MAX_DEPTH': 5,
    'APP_MAX_EXECUTION_TIME': 1200,
}
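
For context on what the new pydantic classes replace: the legacy config.py resolves every value through two small helpers layered over this DEFAULTS dict. A condensed sketch of those helpers (bodies paraphrased from how they are used throughout this file, not copied from the diff; DEFAULTS comes from the surrounding module):

import os

def get_env(key):
    # an environment variable wins; otherwise fall back to the DEFAULTS entry
    return os.environ.get(key, DEFAULTS.get(key))

def get_bool_env(key):
    # flags are stored as strings: only a case-insensitive 'true' counts as True
    value = get_env(key)
    return value.lower() == 'true' if value is not None else False

Every consumer then has to cast for itself (int(get_env(...)), float(get_env(...))), which is exactly the per-call-site boilerplate the typed Field declarations below eliminate.
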
@@ -114,65 +65,16 @@ class Config:
    """Application configuration class."""

    def __init__(self):
        # ------------------------
        # General Configurations.
        # ------------------------
        self.CURRENT_VERSION = "0.6.11"
        self.COMMIT_SHA = get_env('COMMIT_SHA')
        self.EDITION = get_env('EDITION')
        self.DEPLOY_ENV = get_env('DEPLOY_ENV')

        dotenv.load_dotenv()

        self.TESTING = False

        self.LOG_LEVEL = get_env('LOG_LEVEL')
        self.LOG_FILE = get_env('LOG_FILE')
        self.LOG_FORMAT = get_env('LOG_FORMAT')
        self.LOG_DATEFORMAT = get_env('LOG_DATEFORMAT')

        self.API_COMPRESSION_ENABLED = get_bool_env('API_COMPRESSION_ENABLED')

        # The backend URL prefix of the console API.
        # used to concatenate the login authorization callback or notion integration callback.
        self.CONSOLE_API_URL = get_env('CONSOLE_API_URL')

        # The front-end URL prefix of the console web.
        # used to concatenate some front-end addresses and for CORS configuration use.
        self.CONSOLE_WEB_URL = get_env('CONSOLE_WEB_URL')

        # WebApp Url prefix.
        # used to display WebApp API Base Url to the front-end.
        self.APP_WEB_URL = get_env('APP_WEB_URL')

        # Service API Url prefix.
        # used to display Service API Base Url to the front-end.
        self.SERVICE_API_URL = get_env('SERVICE_API_URL')

        # File preview or download Url prefix.
        # used to display File preview or download Url to the front-end or as multimodal inputs;
        # Url is signed and has expiration time.
        self.FILES_URL = get_env('FILES_URL') if get_env('FILES_URL') else self.CONSOLE_API_URL

        # File Access Time specifies a time interval in seconds for the file to be accessed.
        # The default value is 300 seconds.
        self.FILES_ACCESS_TIMEOUT = int(get_env('FILES_ACCESS_TIMEOUT'))

        # Your App secret key will be used for securely signing the session cookie.
        # Make sure you are changing this key for your deployment with a strong key.
        # You can generate a strong key using `openssl rand -base64 42`.
        # Alternatively you can set it with `SECRET_KEY` environment variable.
        self.SECRET_KEY = get_env('SECRET_KEY')

        # Enable or disable the inner API.
        self.INNER_API = get_bool_env('INNER_API')
        # The inner API key is used to authenticate the inner API.
        self.INNER_API_KEY = get_env('INNER_API_KEY')

        # cors settings
        self.CONSOLE_CORS_ALLOW_ORIGINS = get_cors_allow_origins(
-            'CONSOLE_CORS_ALLOW_ORIGINS', self.CONSOLE_WEB_URL)
+            'CONSOLE_CORS_ALLOW_ORIGINS', get_env('CONSOLE_WEB_URL'))
        self.WEB_API_CORS_ALLOW_ORIGINS = get_cors_allow_origins(
            'WEB_API_CORS_ALLOW_ORIGINS', '*')

        # check update url
        self.CHECK_UPDATE_URL = get_env('CHECK_UPDATE_URL')

        # ------------------------
        # Database Configurations.
        # ------------------------
@@ -195,16 +97,6 @@ class Config:
        self.SQLALCHEMY_ECHO = get_bool_env('SQLALCHEMY_ECHO')

        # ------------------------
        # Redis Configurations.
        # ------------------------
        self.REDIS_HOST = get_env('REDIS_HOST')
        self.REDIS_PORT = get_env('REDIS_PORT')
        self.REDIS_USERNAME = get_env('REDIS_USERNAME')
        self.REDIS_PASSWORD = get_env('REDIS_PASSWORD')
        self.REDIS_DB = get_env('REDIS_DB')
        self.REDIS_USE_SSL = get_bool_env('REDIS_USE_SSL')

        # ------------------------
        # Celery worker Configurations.
        # ------------------------
@@ -212,19 +104,8 @@ class Config:
        self.CELERY_BACKEND = get_env('CELERY_BACKEND')
        self.CELERY_RESULT_BACKEND = 'db+{}'.format(self.SQLALCHEMY_DATABASE_URI) \
            if self.CELERY_BACKEND == 'database' else self.CELERY_BROKER_URL
-        self.BROKER_USE_SSL = self.CELERY_BROKER_URL.startswith('rediss://')
+        self.BROKER_USE_SSL = self.CELERY_BROKER_URL.startswith('rediss://') if self.CELERY_BROKER_URL else False

        # ------------------------
        # Code Execution Sandbox Configurations.
        # ------------------------
        self.CODE_EXECUTION_ENDPOINT = get_env('CODE_EXECUTION_ENDPOINT')
        self.CODE_EXECUTION_API_KEY = get_env('CODE_EXECUTION_API_KEY')

        # ------------------------
        # File Storage Configurations.
        # ------------------------
        self.STORAGE_TYPE = get_env('STORAGE_TYPE')
        self.STORAGE_LOCAL_PATH = get_env('STORAGE_LOCAL_PATH')

        # S3 Storage settings
        self.S3_USE_AWS_MANAGED_IAM = get_bool_env('S3_USE_AWS_MANAGED_IAM')
@@ -264,8 +145,6 @@ class Config:
        # Vector Store Configurations.
        # Currently, only support: qdrant, milvus, zilliz, weaviate, relyt, pgvector
        # ------------------------
        self.VECTOR_STORE = get_env('VECTOR_STORE')
        self.KEYWORD_STORE = get_env('KEYWORD_STORE')

        # qdrant settings
        self.QDRANT_URL = get_env('QDRANT_URL')
@@ -302,7 +181,6 @@ class Config:
        self.RELYT_PASSWORD = get_env('RELYT_PASSWORD')
        self.RELYT_DATABASE = get_env('RELYT_DATABASE')

        # tencent settings
        self.TENCENT_VECTOR_DB_URL = get_env('TENCENT_VECTOR_DB_URL')
        self.TENCENT_VECTOR_DB_API_KEY = get_env('TENCENT_VECTOR_DB_API_KEY')
@@ -341,85 +219,9 @@ class Config:
        self.CHROMA_AUTH_PROVIDER = get_env('CHROMA_AUTH_PROVIDER')
        self.CHROMA_AUTH_CREDENTIALS = get_env('CHROMA_AUTH_CREDENTIALS')
        # ------------------------
        # Mail Configurations.
        # ------------------------
        self.MAIL_TYPE = get_env('MAIL_TYPE')
        self.MAIL_DEFAULT_SEND_FROM = get_env('MAIL_DEFAULT_SEND_FROM')
        self.RESEND_API_KEY = get_env('RESEND_API_KEY')
        self.RESEND_API_URL = get_env('RESEND_API_URL')
        # SMTP settings
        self.SMTP_SERVER = get_env('SMTP_SERVER')
        self.SMTP_PORT = get_env('SMTP_PORT')
        self.SMTP_USERNAME = get_env('SMTP_USERNAME')
        self.SMTP_PASSWORD = get_env('SMTP_PASSWORD')
        self.SMTP_USE_TLS = get_bool_env('SMTP_USE_TLS')
        self.SMTP_OPPORTUNISTIC_TLS = get_bool_env('SMTP_OPPORTUNISTIC_TLS')

        # ------------------------
        # Workspace Configurations.
        # ------------------------
        self.INVITE_EXPIRY_HOURS = int(get_env('INVITE_EXPIRY_HOURS'))

        # ------------------------
        # Sentry Configurations.
        # ------------------------
        self.SENTRY_DSN = get_env('SENTRY_DSN')
        self.SENTRY_TRACES_SAMPLE_RATE = float(get_env('SENTRY_TRACES_SAMPLE_RATE'))
        self.SENTRY_PROFILES_SAMPLE_RATE = float(get_env('SENTRY_PROFILES_SAMPLE_RATE'))
        # ------------------------
        # Business Configurations.
        # ------------------------

        # multimodal send image format, supports base64 or url, default is base64
        self.MULTIMODAL_SEND_IMAGE_FORMAT = get_env('MULTIMODAL_SEND_IMAGE_FORMAT')

        # Dataset Configurations.
        self.CLEAN_DAY_SETTING = get_env('CLEAN_DAY_SETTING')

        # File upload Configurations.
        self.UPLOAD_FILE_SIZE_LIMIT = int(get_env('UPLOAD_FILE_SIZE_LIMIT'))
        self.UPLOAD_FILE_BATCH_LIMIT = int(get_env('UPLOAD_FILE_BATCH_LIMIT'))
        self.UPLOAD_IMAGE_FILE_SIZE_LIMIT = int(get_env('UPLOAD_IMAGE_FILE_SIZE_LIMIT'))
        self.BATCH_UPLOAD_LIMIT = get_env('BATCH_UPLOAD_LIMIT')

        # RAG ETL Configurations.
        self.ETL_TYPE = get_env('ETL_TYPE')
        self.UNSTRUCTURED_API_URL = get_env('UNSTRUCTURED_API_URL')
        self.UNSTRUCTURED_API_KEY = get_env('UNSTRUCTURED_API_KEY')
        self.KEYWORD_DATA_SOURCE_TYPE = get_env('KEYWORD_DATA_SOURCE_TYPE')

        # Indexing Configurations.
        self.INDEXING_MAX_SEGMENTATION_TOKENS_LENGTH = get_env('INDEXING_MAX_SEGMENTATION_TOKENS_LENGTH')

        # Tool Configurations.
        self.TOOL_ICON_CACHE_MAX_AGE = get_env('TOOL_ICON_CACHE_MAX_AGE')

        self.WORKFLOW_MAX_EXECUTION_STEPS = int(get_env('WORKFLOW_MAX_EXECUTION_STEPS'))
        self.WORKFLOW_MAX_EXECUTION_TIME = int(get_env('WORKFLOW_MAX_EXECUTION_TIME'))
        self.WORKFLOW_CALL_MAX_DEPTH = int(get_env('WORKFLOW_CALL_MAX_DEPTH'))
        self.APP_MAX_EXECUTION_TIME = int(get_env('APP_MAX_EXECUTION_TIME'))

        # Moderation in app Configurations.
        self.OUTPUT_MODERATION_BUFFER_SIZE = int(get_env('OUTPUT_MODERATION_BUFFER_SIZE'))

        # Notion integration setting
        self.NOTION_CLIENT_ID = get_env('NOTION_CLIENT_ID')
        self.NOTION_CLIENT_SECRET = get_env('NOTION_CLIENT_SECRET')
        self.NOTION_INTEGRATION_TYPE = get_env('NOTION_INTEGRATION_TYPE')
        self.NOTION_INTERNAL_SECRET = get_env('NOTION_INTERNAL_SECRET')
        self.NOTION_INTEGRATION_TOKEN = get_env('NOTION_INTEGRATION_TOKEN')
        # ------------------------
        # Platform Configurations.
        # ------------------------
        self.GITHUB_CLIENT_ID = get_env('GITHUB_CLIENT_ID')
        self.GITHUB_CLIENT_SECRET = get_env('GITHUB_CLIENT_SECRET')
        self.GOOGLE_CLIENT_ID = get_env('GOOGLE_CLIENT_ID')
        self.GOOGLE_CLIENT_SECRET = get_env('GOOGLE_CLIENT_SECRET')
        self.OAUTH_REDIRECT_PATH = get_env('OAUTH_REDIRECT_PATH')

        self.HOSTED_OPENAI_API_KEY = get_env('HOSTED_OPENAI_API_KEY')
        self.HOSTED_OPENAI_API_BASE = get_env('HOSTED_OPENAI_API_BASE')
        self.HOSTED_OPENAI_API_ORGANIZATION = get_env('HOSTED_OPENAI_API_ORGANIZATION')
@@ -450,16 +252,3 @@ class Config:
        # fetch app templates mode, remote, builtin, db(only for dify SaaS), default: remote
        self.HOSTED_FETCH_APP_TEMPLATES_MODE = get_env('HOSTED_FETCH_APP_TEMPLATES_MODE')
        self.HOSTED_FETCH_APP_TEMPLATES_REMOTE_DOMAIN = get_env('HOSTED_FETCH_APP_TEMPLATES_REMOTE_DOMAIN')

        # Model Load Balancing Configurations.
        self.MODEL_LB_ENABLED = get_bool_env('MODEL_LB_ENABLED')

        # Platform Billing Configurations.
        self.BILLING_ENABLED = get_bool_env('BILLING_ENABLED')

        # ------------------------
        # Enterprise feature Configurations.
        # **Before using, please contact business@dify.ai by email to inquire about licensing matters.**
        # ------------------------
        self.ENTERPRISE_ENABLED = get_bool_env('ENTERPRISE_ENABLED')
        self.CAN_REPLACE_LOGO = get_bool_env('CAN_REPLACE_LOGO')
@@ -0,0 +1,42 @@
from pydantic_settings import BaseSettings, SettingsConfigDict

from configs.deploy import DeploymentConfigs
from configs.enterprise import EnterpriseFeatureConfigs
from configs.extra import ExtraServiceConfigs
from configs.feature import FeatureConfigs
from configs.middleware import MiddlewareConfigs
from configs.packaging import PackagingInfo


class DifyConfigs(
    # based on pydantic-settings
    BaseSettings,

    # Packaging info
    PackagingInfo,

    # Deployment configs
    DeploymentConfigs,

    # Feature configs
    FeatureConfigs,

    # Middleware configs
    MiddlewareConfigs,

    # Extra service configs
    ExtraServiceConfigs,

    # Enterprise feature configs
    # **Before using, please contact business@dify.ai by email to inquire about licensing matters.**
    EnterpriseFeatureConfigs,
):
    model_config = SettingsConfigDict(
        # read from dotenv format config file
        env_file='.env',
        env_file_encoding='utf-8',

        # ignore extra attributes
        extra='ignore',
    )
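
A quick usage sketch for the class above (values illustrative): fields resolve from the process environment first, then from the .env file named in model_config, then from the field defaults.

import os

from configs.app_configs import DifyConfigs

os.environ['DEPLOY_ENV'] = 'TESTING'      # process env beats .env and defaults
configs = DifyConfigs()                   # also reads ./.env if present

assert configs.DEPLOY_ENV == 'TESTING'
assert configs.EDITION == 'SELF_HOSTED'   # untouched field keeps its default
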
@@ -0,0 +1,16 @@
from pydantic import BaseModel, Field


class DeploymentConfigs(BaseModel):
    """
    Deployment configs
    """
    EDITION: str = Field(
        description='deployment edition',
        default='SELF_HOSTED',
    )

    DEPLOY_ENV: str = Field(
        description='deployment environment, default to PRODUCTION.',
        default='PRODUCTION',
    )
@@ -0,0 +1,18 @@
from pydantic import BaseModel, Field


class EnterpriseFeatureConfigs(BaseModel):
    """
    Enterprise feature configs.
    **Before using, please contact business@dify.ai by email to inquire about licensing matters.**
    """
    ENTERPRISE_ENABLED: bool = Field(
        description='whether to enable enterprise features. '
                    'Before using, please contact business@dify.ai by email to inquire about licensing matters.',
        default=False,
    )

    CAN_REPLACE_LOGO: bool = Field(
        description='whether to allow replacing enterprise logo.',
        default=False,
    )
@@ -0,0 +1,12 @@
from pydantic import BaseModel

from configs.extra.notion_configs import NotionConfigs
from configs.extra.sentry_configs import SentryConfigs


class ExtraServiceConfigs(
    # place the configs in alphabetical order
    NotionConfigs,
    SentryConfigs,
):
    pass
@@ -0,0 +1,33 @@
from typing import Optional

from pydantic import BaseModel, Field


class NotionConfigs(BaseModel):
    """
    Notion integration configs
    """
    NOTION_CLIENT_ID: Optional[str] = Field(
        description='Notion client ID',
        default=None,
    )

    NOTION_CLIENT_SECRET: Optional[str] = Field(
        description='Notion client secret key',
        default=None,
    )

    NOTION_INTEGRATION_TYPE: Optional[str] = Field(
        description='Notion integration type, default to None, available values: internal.',
        default=None,
    )

    NOTION_INTERNAL_SECRET: Optional[str] = Field(
        description='Notion internal secret key',
        default=None,
    )

    NOTION_INTEGRATION_TOKEN: Optional[str] = Field(
        description='Notion integration token',
        default=None,
    )
@@ -0,0 +1,23 @@
from typing import Optional

from pydantic import BaseModel, Field, PositiveFloat


class SentryConfigs(BaseModel):
    """
    Sentry configs
    """
    SENTRY_DSN: Optional[str] = Field(
        description='Sentry DSN',
        default=None,
    )

    SENTRY_TRACES_SAMPLE_RATE: PositiveFloat = Field(
        description='Sentry trace sample rate',
        default=1.0,
    )

    SENTRY_PROFILES_SAMPLE_RATE: PositiveFloat = Field(
        description='Sentry profiles sample rate',
        default=1.0,
    )
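
One behavioral nuance, based on my reading of pydantic's constrained types rather than anything stated in the diff: PositiveFloat requires a value strictly greater than zero, so setting a sample rate to 0 to disable sampling now fails validation, where the old float(get_env(...)) accepted it.

import pytest
from pydantic import ValidationError

from configs.extra.sentry_configs import SentryConfigs

SentryConfigs(SENTRY_TRACES_SAMPLE_RATE=0.5)    # fine: strictly positive

with pytest.raises(ValidationError):
    SentryConfigs(SENTRY_TRACES_SAMPLE_RATE=0)  # rejected by PositiveFloat
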
@@ -0,0 +1,420 @@
from typing import Optional

from pydantic import AliasChoices, BaseModel, Field, NonNegativeInt, PositiveInt


class SecurityConfigs(BaseModel):
    """
    Secret Key configs
    """
    SECRET_KEY: Optional[str] = Field(
        description='Your App secret key will be used for securely signing the session cookie. '
                    'Make sure you are changing this key for your deployment with a strong key. '
                    'You can generate a strong key using `openssl rand -base64 42`. '
                    'Alternatively you can set it with `SECRET_KEY` environment variable.',
        default=None,
    )


class AppExecutionConfigs(BaseModel):
    """
    App Execution configs
    """
    APP_MAX_EXECUTION_TIME: PositiveInt = Field(
        description='execution timeout in seconds for app execution',
        default=1200,
    )


class CodeExecutionSandboxConfigs(BaseModel):
    """
    Code Execution Sandbox configs
    """
    CODE_EXECUTION_ENDPOINT: str = Field(
        description='endpoint URL of the code execution service',
        default='http://sandbox:8194',
    )

    CODE_EXECUTION_API_KEY: str = Field(
        description='API key for code execution service',
        default='dify-sandbox',
    )


class EndpointConfigs(BaseModel):
    """
    Module URL configs
    """
    CONSOLE_API_URL: str = Field(
        description='The backend URL prefix of the console API. '
                    'Used to concatenate the login authorization callback or notion integration callback.',
        default='https://cloud.dify.ai',
    )

    CONSOLE_WEB_URL: str = Field(
        description='The front-end URL prefix of the console web. '
                    'Used to concatenate some front-end addresses and for CORS configuration use.',
        default='https://cloud.dify.ai',
    )

    SERVICE_API_URL: str = Field(
        description='Service API Url prefix. '
                    'Used to display Service API Base Url to the front-end.',
        default='https://api.dify.ai',
    )

    APP_WEB_URL: str = Field(
        description='WebApp Url prefix. '
                    'Used to display WebApp API Base Url to the front-end.',
        default='https://udify.app',
    )


class FileAccessConfigs(BaseModel):
    """
    File Access configs
    """
    FILES_URL: str = Field(
        description='File preview or download Url prefix. '
                    'Used to display File preview or download Url to the front-end or as multimodal inputs; '
                    'Url is signed and has expiration time.',
        validation_alias=AliasChoices('FILES_URL', 'CONSOLE_API_URL'),
        alias_priority=1,
        default='https://cloud.dify.ai',
    )

    FILES_ACCESS_TIMEOUT: int = Field(
        description='timeout in seconds for file accessing',
        default=300,
    )
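
A minimal sketch of the AliasChoices fallback declared above (the import path is inferred from this diff's own imports): when no FILES_URL is supplied, the field is populated from CONSOLE_API_URL, and only then from the declared default.

from configs.feature import FileAccessConfigs

# explicit FILES_URL wins, since it is the first alias choice
assert FileAccessConfigs(FILES_URL='https://files.example.com').FILES_URL == 'https://files.example.com'

# without FILES_URL, the value falls back to CONSOLE_API_URL
assert FileAccessConfigs(CONSOLE_API_URL='https://console.example.com').FILES_URL == 'https://console.example.com'

# with neither present, the declared default applies
assert FileAccessConfigs().FILES_URL == 'https://cloud.dify.ai'
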
class FileUploadConfigs(BaseModel):
    """
    File Uploading configs
    """
    UPLOAD_FILE_SIZE_LIMIT: NonNegativeInt = Field(
        description='size limit in Megabytes for uploading files',
        default=15,
    )

    UPLOAD_FILE_BATCH_LIMIT: NonNegativeInt = Field(
        description='batch size limit for uploading files',
        default=5,
    )

    UPLOAD_IMAGE_FILE_SIZE_LIMIT: NonNegativeInt = Field(
        description='image file size limit in Megabytes for uploading files',
        default=10,
    )

    BATCH_UPLOAD_LIMIT: NonNegativeInt = Field(
        description='',  # todo: to be clarified
        default=20,
    )


class HttpConfigs(BaseModel):
    """
    HTTP configs
    """
    API_COMPRESSION_ENABLED: bool = Field(
        description='whether to enable HTTP response compression of gzip',
        default=False,
    )


class InnerAPIConfigs(BaseModel):
    """
    Inner API configs
    """
    INNER_API: bool = Field(
        description='whether to enable the inner API',
        default=False,
    )

    INNER_API_KEY: Optional[str] = Field(
        description='The inner API key is used to authenticate the inner API',
        default=None,
    )


class LoggingConfigs(BaseModel):
    """
    Logging configs
    """
    LOG_LEVEL: str = Field(
        description='Log output level, default to INFO. '
                    'It is recommended to set it to ERROR for production.',
        default='INFO',
    )

    LOG_FILE: Optional[str] = Field(
        description='logging output file path',
        default=None,
    )

    LOG_FORMAT: str = Field(
        description='log format',
        default='%(asctime)s.%(msecs)03d %(levelname)s [%(threadName)s] [%(filename)s:%(lineno)d] - %(message)s',
    )

    LOG_DATEFORMAT: Optional[str] = Field(
        description='log date format',
        default=None,
    )
class ModelLoadBalanceConfigs(BaseModel):
    """
    Model load balance configs
    """
    MODEL_LB_ENABLED: bool = Field(
        description='whether to enable model load balancing',
        default=False,
    )


class BillingConfigs(BaseModel):
    """
    Platform Billing Configurations
    """
    BILLING_ENABLED: bool = Field(
        description='whether to enable billing',
        default=False,
    )


class UpdateConfigs(BaseModel):
    """
    Update configs
    """
    CHECK_UPDATE_URL: str = Field(
        description='url for checking updates',
        default='https://updates.dify.ai',
    )


class WorkflowConfigs(BaseModel):
    """
    Workflow feature configs
    """
    WORKFLOW_MAX_EXECUTION_STEPS: PositiveInt = Field(
        description='max execution steps in a single workflow execution',
        default=500,
    )

    WORKFLOW_MAX_EXECUTION_TIME: PositiveInt = Field(
        description='max execution time in seconds in a single workflow execution',
        default=1200,
    )

    WORKFLOW_CALL_MAX_DEPTH: PositiveInt = Field(
        description='max depth of calling in a single workflow execution',
        default=5,
    )


class OAuthConfigs(BaseModel):
    """
    oauth configs
    """
    OAUTH_REDIRECT_PATH: str = Field(
        description='redirect path for OAuth',
        default='/console/api/oauth/authorize',
    )

    GITHUB_CLIENT_ID: Optional[str] = Field(
        description='GitHub client id for OAuth',
        default=None,
    )

    GITHUB_CLIENT_SECRET: Optional[str] = Field(
        description='GitHub client secret key for OAuth',
        default=None,
    )

    GOOGLE_CLIENT_ID: Optional[str] = Field(
        description='Google client id for OAuth',
        default=None,
    )

    GOOGLE_CLIENT_SECRET: Optional[str] = Field(
        description='Google client secret key for OAuth',
        default=None,
    )


class ModerationConfigs(BaseModel):
    """
    Moderation in app configs.
    """
    # todo: to be clarified in usage and unit
    OUTPUT_MODERATION_BUFFER_SIZE: PositiveInt = Field(
        description='buffer size for moderation',
        default=300,
    )
class ToolConfigs(BaseModel):
    """
    Tool configs
    """
    TOOL_ICON_CACHE_MAX_AGE: PositiveInt = Field(
        description='max age in seconds for tool icon caching',
        default=3600,
    )


class MailConfigs(BaseModel):
    """
    Mail Configurations
    """
    MAIL_TYPE: Optional[str] = Field(
        description='Mail provider type name, default to None, available values are `smtp` and `resend`.',
        default=None,
    )

    MAIL_DEFAULT_SEND_FROM: Optional[str] = Field(
        description='default email address for sending from',
        default=None,
    )

    RESEND_API_KEY: Optional[str] = Field(
        description='API key for Resend',
        default=None,
    )

    RESEND_API_URL: Optional[str] = Field(
        description='API URL for Resend',
        default=None,
    )

    SMTP_SERVER: Optional[str] = Field(
        description='smtp server host',
        default=None,
    )

    SMTP_PORT: Optional[int] = Field(
        description='smtp server port',
        default=None,
    )

    SMTP_USERNAME: Optional[str] = Field(
        description='smtp server username',
        default=None,
    )

    SMTP_PASSWORD: Optional[str] = Field(
        description='smtp server password',
        default=None,
    )

    SMTP_USE_TLS: bool = Field(
        description='whether to use TLS connection to smtp server',
        default=False,
    )

    SMTP_OPPORTUNISTIC_TLS: bool = Field(
        description='whether to use opportunistic TLS connection to smtp server',
        default=False,
    )


class RagEtlConfigs(BaseModel):
    """
    RAG ETL Configurations.
    """
    ETL_TYPE: str = Field(
        description='RAG ETL type name, default to `dify`, available values are `dify` and `Unstructured`.',
        default='dify',
    )

    KEYWORD_DATA_SOURCE_TYPE: str = Field(
        description='source type for keyword data, default to `database`, available values are `database`.',
        default='database',
    )

    UNSTRUCTURED_API_URL: Optional[str] = Field(
        description='API URL for Unstructured',
        default=None,
    )

    UNSTRUCTURED_API_KEY: Optional[str] = Field(
        description='API key for Unstructured',
        default=None,
    )
class DataSetConfigs(BaseModel):
    """
    Dataset configs
    """
    CLEAN_DAY_SETTING: PositiveInt = Field(
        description='interval in days for cleaning up dataset',
        default=30,
    )


class WorkspaceConfigs(BaseModel):
    """
    Workspace configs
    """
    INVITE_EXPIRY_HOURS: PositiveInt = Field(
        description='workspaces invitation expiration in hours',
        default=72,
    )


class IndexingConfigs(BaseModel):
    """
    Indexing configs.
    """
    INDEXING_MAX_SEGMENTATION_TOKENS_LENGTH: PositiveInt = Field(
        description='max segmentation token length for indexing',
        default=1000,
    )


class ImageFormatConfigs(BaseModel):
    MULTIMODAL_SEND_IMAGE_FORMAT: str = Field(
        description='format for sending images in multimodal requests, supports `base64` and `url`, default is `base64`',
        default='base64',
    )


class FeatureConfigs(
    # place the configs in alphabetical order
    AppExecutionConfigs,
    BillingConfigs,
    CodeExecutionSandboxConfigs,
    DataSetConfigs,
    EndpointConfigs,
    FileAccessConfigs,
    FileUploadConfigs,
    HttpConfigs,
    ImageFormatConfigs,
    InnerAPIConfigs,
    IndexingConfigs,
    LoggingConfigs,
    MailConfigs,
    ModelLoadBalanceConfigs,
    ModerationConfigs,
    OAuthConfigs,
    RagEtlConfigs,
    SecurityConfigs,
    ToolConfigs,
    UpdateConfigs,
    WorkflowConfigs,
    WorkspaceConfigs,
):
    pass
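
FeatureConfigs (and DifyConfigs above it) assemble settings purely through multiple inheritance: each group stays a small, testable BaseModel, and pydantic merges the fields of every base along the MRO. A toy illustration of the pattern (class names invented):

from pydantic import BaseModel

class GroupA(BaseModel):
    A_FLAG: bool = False

class GroupB(BaseModel):
    B_LIMIT: int = 10

class Combined(GroupA, GroupB):
    pass

# fields from every base are merged into the composed model
assert set(Combined.model_fields) == {'A_FLAG', 'B_LIMIT'}
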
@@ -0,0 +1,43 @@
from typing import Optional

from pydantic import BaseModel, Field

from configs.middleware.redis_configs import RedisConfigs


class StorageConfigs(BaseModel):
    STORAGE_TYPE: str = Field(
        description='storage type,'
                    ' default to `local`,'
                    ' available values are `local`, `s3`, `azure-blob`, `aliyun-oss`, `google-storage`.',
        default='local',
    )

    STORAGE_LOCAL_PATH: str = Field(
        description='local storage path',
        default='storage',
    )


class VectorStoreConfigs(BaseModel):
    VECTOR_STORE: Optional[str] = Field(
        description='vector store type',
        default=None,
    )


class KeywordStoreConfigs(BaseModel):
    KEYWORD_STORE: str = Field(
        description='keyword store type',
        default='jieba',
    )


class MiddlewareConfigs(
    # place the configs in alphabetical order
    KeywordStoreConfigs,
    RedisConfigs,
    StorageConfigs,
    VectorStoreConfigs,
):
    pass
@@ -0,0 +1,38 @@
from typing import Optional

from pydantic import BaseModel, Field, NonNegativeInt, PositiveInt


class RedisConfigs(BaseModel):
    """
    Redis configs
    """
    REDIS_HOST: str = Field(
        description='Redis host',
        default='localhost',
    )

    REDIS_PORT: PositiveInt = Field(
        description='Redis port',
        default=6379,
    )

    REDIS_USERNAME: Optional[str] = Field(
        description='Redis username',
        default=None,
    )

    REDIS_PASSWORD: Optional[str] = Field(
        description='Redis password',
        default=None,
    )

    REDIS_DB: NonNegativeInt = Field(
        description='Redis database id, default to 0',
        default=0,
    )

    REDIS_USE_SSL: bool = Field(
        description='whether to use SSL for Redis connection',
        default=False,
    )
@@ -0,0 +1,17 @@
from pydantic import BaseModel, Field


class PackagingInfo(BaseModel):
    """
    Packaging build information
    """
    CURRENT_VERSION: str = Field(
        description='Dify version',
        default='0.6.11',
    )

    COMMIT_SHA: str = Field(
        description='SHA-1 checksum of the git commit used to build the app',
        default='',
    )
@@ -104,7 +104,7 @@ class ToolBuiltinProviderIconApi(Resource):
    @setup_required
    def get(self, provider):
        icon_bytes, mimetype = BuiltinToolManageService.get_builtin_tool_provider_icon(provider)
-        icon_cache_max_age = int(current_app.config.get('TOOL_ICON_CACHE_MAX_AGE'))
+        icon_cache_max_age = current_app.config.get('TOOL_ICON_CACHE_MAX_AGE')
        return send_file(io.BytesIO(icon_bytes), mimetype=mimetype, max_age=icon_cache_max_age)


class ToolApiProviderAddApi(Resource):
@@ -2,7 +2,7 @@ from flask import Flask


def init_app(app: Flask):
-    if app.config.get('API_COMPRESSION_ENABLED', False):
+    if app.config.get('API_COMPRESSION_ENABLED'):
        from flask_compress import Compress

        app.config['COMPRESS_MIMETYPES'] = [
@@ -6,15 +6,15 @@ redis_client = redis.Redis()


def init_app(app):
    connection_class = Connection
-    if app.config.get('REDIS_USE_SSL', False):
+    if app.config.get('REDIS_USE_SSL'):
        connection_class = SSLConnection

    redis_client.connection_pool = redis.ConnectionPool(**{
-        'host': app.config.get('REDIS_HOST', 'localhost'),
-        'port': app.config.get('REDIS_PORT', 6379),
-        'username': app.config.get('REDIS_USERNAME', None),
-        'password': app.config.get('REDIS_PASSWORD', None),
-        'db': app.config.get('REDIS_DB', 0),
+        'host': app.config.get('REDIS_HOST'),
+        'port': app.config.get('REDIS_PORT'),
+        'username': app.config.get('REDIS_USERNAME'),
+        'password': app.config.get('REDIS_PASSWORD'),
+        'db': app.config.get('REDIS_DB'),
        'encoding': 'utf-8',
        'encoding_errors': 'strict',
        'decode_responses': False
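
The three hunks above share one pattern: now that DifyConfigs feeds typed, defaulted values into app.config, the per-call-site int() casts and the config.get(key, fallback) duplicates of the defaults are redundant. A hedged check of that assumption (field names from this diff; run with no overriding environment):

from configs.app_configs import DifyConfigs

configs = DifyConfigs()
# pydantic already coerced and defaulted these values
assert isinstance(configs.TOOL_ICON_CACHE_MAX_AGE, int)
assert configs.REDIS_DB == 0   # the default now lives in RedisConfigs alone
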
@@ -1 +1,2 @@
Single-database configuration for Flask.
@@ -5881,6 +5881,25 @@ phonenumbers = ["phonenumbers (>=8,<9)"]
pycountry = ["pycountry (>=23)"]
python-ulid = ["python-ulid (>=1,<2)", "python-ulid (>=1,<3)"]

+[[package]]
+name = "pydantic-settings"
+version = "2.3.3"
+description = "Settings management using Pydantic"
+optional = false
+python-versions = ">=3.8"
+files = [
+    {file = "pydantic_settings-2.3.3-py3-none-any.whl", hash = "sha256:e4ed62ad851670975ec11285141db888fd24947f9440bd4380d7d8788d4965de"},
+    {file = "pydantic_settings-2.3.3.tar.gz", hash = "sha256:87fda838b64b5039b970cd47c3e8a1ee460ce136278ff672980af21516f6e6ce"},
+]
+
+[package.dependencies]
+pydantic = ">=2.7.0"
+python-dotenv = ">=0.21.0"
+
+[package.extras]
+toml = ["tomli (>=2.0.1)"]
+yaml = ["pyyaml (>=6.0.1)"]

[[package]]
name = "pydub"
version = "0.25.1"
@@ -8979,4 +8998,4 @@ testing = ["coverage (>=5.0.3)", "zope.event", "zope.testing"]
[metadata]
lock-version = "2.0"
python-versions = "^3.10"
-content-hash = "367a4b0ad745a48263dd44711be28c4c076dee983e3f5d1ac56c22bbb2eed531"
+content-hash = "5e63749820d62d42f8f0d38104ea135f68361bde660131a93fe7ad08141b51b1"
@@ -170,6 +170,7 @@ lxml = "5.1.0"
xlrd = "~2.0.1"
pydantic = "~2.7.4"
pydantic_extra_types = "~2.8.1"
+pydantic-settings = "~2.3.3"
pgvecto-rs = "0.1.4"
firecrawl-py = "0.0.5"
oss2 = "2.18.5"
@@ -203,4 +204,3 @@ optional = true

[tool.poetry.group.lint.dependencies]
ruff = "~0.4.8"
dotenv-linter = "~0.5.0"
@@ -77,6 +77,7 @@ azure-identity==1.16.1
lxml==5.1.0
pydantic~=2.7.4
pydantic_extra_types~=2.8.1
+pydantic-settings~=2.3.3
pgvecto-rs==0.1.4
tcvectordb==1.3.2
firecrawl-py==0.0.5
@@ -0,0 +1,62 @@
from textwrap import dedent

import pytest
from flask import Flask

from configs.app_configs import DifyConfigs

EXAMPLE_ENV_FILENAME = '.env'


@pytest.fixture
def example_env_file(tmp_path, monkeypatch) -> str:
    monkeypatch.chdir(tmp_path)
    file_path = tmp_path.joinpath(EXAMPLE_ENV_FILENAME)
    file_path.write_text(dedent(
        """
        CONSOLE_API_URL=https://example.com
        """))
    return str(file_path)


def test_dify_configs_undefined_entry(example_env_file):
    # load dotenv file with pydantic-settings
    settings = DifyConfigs(_env_file=example_env_file)

    # dict-style subscript access is not supported on the settings object
    with pytest.raises(TypeError):
        # TypeError: 'DifyConfigs' object is not subscriptable
        assert settings['LOG_LEVEL'] == 'INFO'


def test_dify_configs(example_env_file):
    # load dotenv file with pydantic-settings
    settings = DifyConfigs(_env_file=example_env_file)

    # constant values
    assert settings.COMMIT_SHA == ''

    # default values
    assert settings.EDITION == 'SELF_HOSTED'
    assert settings.API_COMPRESSION_ENABLED is False
    assert settings.SENTRY_TRACES_SAMPLE_RATE == 1.0


def test_flask_configs(example_env_file):
    flask_app = Flask('app')
    flask_app.config.from_mapping(DifyConfigs(_env_file=example_env_file).model_dump())
    config = flask_app.config

    # defaults applied for entries absent from the env file
    assert config['LOG_LEVEL'] == 'INFO'

    # configs read from pydantic-settings
    assert config['COMMIT_SHA'] == ''
    assert config['EDITION'] == 'SELF_HOSTED'
    assert config['API_COMPRESSION_ENABLED'] is False
    assert config['SENTRY_TRACES_SAMPLE_RATE'] == 1.0

    # value from env file
    assert config['CONSOLE_API_URL'] == 'https://example.com'

    # fallback to alias choices value as CONSOLE_API_URL
    assert config['FILES_URL'] == 'https://example.com'
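
A closing note on the fixture design: monkeypatch.chdir(tmp_path) matters because model_config declares env_file='.env' relative to the working directory. The _env_file keyword used in these tests is the explicit equivalent (path illustrative):

from configs.app_configs import DifyConfigs

settings = DifyConfigs(_env_file='/tmp/example/.env')  # explicit dotenv path, as in the tests
settings = DifyConfigs()                               # falls back to ./.env via model_config
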