- # Your app's secret key is used to securely sign the session cookie.
- # Make sure to change it to a strong key for your deployment.
- # You can generate a strong key using `openssl rand -base64 42`.
- # Alternatively, you can set it via the `SECRET_KEY` environment variable.
- SECRET_KEY=
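- # Illustrative example of setting it from a POSIX shell environment instead of this file:
- #   export SECRET_KEY=$(openssl rand -base64 42)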
-
- # Ensure UTF-8 encoding
- LANG=en_US.UTF-8
- LC_ALL=en_US.UTF-8
- PYTHONIOENCODING=utf-8
-
- # Console API base URL
- CONSOLE_API_URL=http://localhost:5001
- CONSOLE_WEB_URL=http://localhost:3000
-
- # Service API base URL
- SERVICE_API_URL=http://localhost:5001
-
- # Web APP base URL
- APP_WEB_URL=http://localhost:3000
-
- # Files URL
- FILES_URL=http://localhost:5001
-
- # INTERNAL_FILES_URL is used for plugin daemon communication within Docker network.
- # Set this to the internal Docker service URL for proper plugin file access.
- # Example: INTERNAL_FILES_URL=http://api:5001
- INTERNAL_FILES_URL=http://127.0.0.1:5001
-
- # Time in seconds after which a file access signature is rejected
- FILES_ACCESS_TIMEOUT=300
-
- # Access token expiration time in minutes
- ACCESS_TOKEN_EXPIRE_MINUTES=60
-
- # Refresh token expiration time in days
- REFRESH_TOKEN_EXPIRE_DAYS=30
-
- # Redis configuration
- REDIS_HOST=localhost
- REDIS_PORT=6379
- REDIS_USERNAME=
- REDIS_PASSWORD=difyai123456
- REDIS_USE_SSL=false
- # SSL configuration for Redis (when REDIS_USE_SSL=true)
- # Options: CERT_NONE, CERT_OPTIONAL, CERT_REQUIRED
- REDIS_SSL_CERT_REQS=CERT_NONE
- # Path to CA certificate file for SSL verification
- REDIS_SSL_CA_CERTS=
- # Path to client certificate file for SSL authentication
- REDIS_SSL_CERTFILE=
- # Path to client private key file for SSL authentication
- REDIS_SSL_KEYFILE=
- REDIS_DB=0
-
- # Redis Sentinel configuration
- REDIS_USE_SENTINEL=false
- REDIS_SENTINELS=
- REDIS_SENTINEL_SERVICE_NAME=
- REDIS_SENTINEL_USERNAME=
- REDIS_SENTINEL_PASSWORD=
- REDIS_SENTINEL_SOCKET_TIMEOUT=0.1
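- # Illustrative example (assumed format: comma-separated host:port pairs):
- #   REDIS_SENTINELS=sentinel-1:26379,sentinel-2:26379
- #   REDIS_SENTINEL_SERVICE_NAME=mymaster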
-
- # Redis Cluster configuration
- REDIS_USE_CLUSTERS=false
- REDIS_CLUSTERS=
- REDIS_CLUSTERS_PASSWORD=
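- # Illustrative example (assumed format: comma-separated host:port pairs):
- #   REDIS_CLUSTERS=redis-node-1:6379,redis-node-2:6379,redis-node-3:6379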
-
- # Celery configuration
- CELERY_BROKER_URL=redis://:difyai123456@localhost:${REDIS_PORT}/1
- CELERY_BACKEND=redis
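- # The broker URL above follows the standard Redis URL scheme:
- #   redis://:<password>@<host>:<port>/<database>
-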
- # PostgreSQL database configuration
- DB_USERNAME=postgres
- DB_PASSWORD=difyai123456
- DB_HOST=localhost
- DB_PORT=5432
- DB_DATABASE=dify
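- # For reference, the settings above assemble into a connection string like (illustrative):
- #   postgresql://postgres:difyai123456@localhost:5432/dify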
-
- # Storage configuration
- # used to store uploaded files, private keys, ...
- # storage type: opendal, s3, aliyun-oss, azure-blob, baidu-obs, google-storage, huawei-obs, oci-storage, tencent-cos, volcengine-tos, supabase
- STORAGE_TYPE=opendal
-
- # Apache OpenDAL storage configuration, refer to https://github.com/apache/opendal
- OPENDAL_SCHEME=fs
- OPENDAL_FS_ROOT=storage
-
- # S3 Storage configuration
- S3_USE_AWS_MANAGED_IAM=false
- S3_ENDPOINT=https://your-bucket-name.storage.s3.cloudflare.com
- S3_BUCKET_NAME=your-bucket-name
- S3_ACCESS_KEY=your-access-key
- S3_SECRET_KEY=your-secret-key
- S3_REGION=your-region
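- # Illustrative example for an AWS-hosted bucket (assumption; adjust for your provider):
- #   S3_ENDPOINT=https://s3.us-east-1.amazonaws.com
- #   S3_REGION=us-east-1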
-
- # Azure Blob Storage configuration
- AZURE_BLOB_ACCOUNT_NAME=your-account-name
- AZURE_BLOB_ACCOUNT_KEY=your-account-key
- AZURE_BLOB_CONTAINER_NAME=your-container-name
- AZURE_BLOB_ACCOUNT_URL=https://<your_account_name>.blob.core.windows.net
-
- # Aliyun OSS Storage configuration
- ALIYUN_OSS_BUCKET_NAME=your-bucket-name
- ALIYUN_OSS_ACCESS_KEY=your-access-key
- ALIYUN_OSS_SECRET_KEY=your-secret-key
- ALIYUN_OSS_ENDPOINT=your-endpoint
- ALIYUN_OSS_AUTH_VERSION=v1
- ALIYUN_OSS_REGION=your-region
- # Don't start with '/'. OSS doesn't support leading slash in object names.
- ALIYUN_OSS_PATH=your-path
-
- # Google Storage configuration
- GOOGLE_STORAGE_BUCKET_NAME=your-bucket-name
- GOOGLE_STORAGE_SERVICE_ACCOUNT_JSON_BASE64=your-google-service-account-json-base64-string
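- # Illustrative example: base64-encode the service account JSON before pasting it here, e.g.
- #   base64 -w 0 service-account.json   (GNU coreutils; on macOS use `base64 -i service-account.json`)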
-
- # Tencent COS Storage configuration
- TENCENT_COS_BUCKET_NAME=your-bucket-name
- TENCENT_COS_SECRET_KEY=your-secret-key
- TENCENT_COS_SECRET_ID=your-secret-id
- TENCENT_COS_REGION=your-region
- TENCENT_COS_SCHEME=your-scheme
-
- # Huawei OBS Storage Configuration
- HUAWEI_OBS_BUCKET_NAME=your-bucket-name
- HUAWEI_OBS_SECRET_KEY=your-secret-key
- HUAWEI_OBS_ACCESS_KEY=your-access-key
- HUAWEI_OBS_SERVER=your-server-url
-
- # Baidu OBS Storage Configuration
- BAIDU_OBS_BUCKET_NAME=your-bucket-name
- BAIDU_OBS_SECRET_KEY=your-secret-key
- BAIDU_OBS_ACCESS_KEY=your-access-key
- BAIDU_OBS_ENDPOINT=your-server-url
-
- # OCI Storage configuration
- OCI_ENDPOINT=your-endpoint
- OCI_BUCKET_NAME=your-bucket-name
- OCI_ACCESS_KEY=your-access-key
- OCI_SECRET_KEY=your-secret-key
- OCI_REGION=your-region
-
- # Volcengine TOS Storage configuration
- VOLCENGINE_TOS_ENDPOINT=your-endpoint
- VOLCENGINE_TOS_BUCKET_NAME=your-bucket-name
- VOLCENGINE_TOS_ACCESS_KEY=your-access-key
- VOLCENGINE_TOS_SECRET_KEY=your-secret-key
- VOLCENGINE_TOS_REGION=your-region
-
- # Supabase Storage Configuration
- SUPABASE_BUCKET_NAME=your-bucket-name
- SUPABASE_API_KEY=your-access-key
- SUPABASE_URL=your-server-url
-
- # CORS configuration
- WEB_API_CORS_ALLOW_ORIGINS=http://localhost:3000,*
- CONSOLE_CORS_ALLOW_ORIGINS=http://localhost:3000,*
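- # Both values are comma-separated lists of allowed origins; `*` allows all origins. Illustrative example:
- #   CONSOLE_CORS_ALLOW_ORIGINS=https://console.example.com,https://admin.example.com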
-
- # Vector database configuration
- # Supported values are `weaviate`, `qdrant`, `milvus`, `myscale`, `relyt`, `pgvector`, `pgvecto-rs`, `chroma`, `opensearch`, `oracle`, `tencent`, `elasticsearch`, `elasticsearch-ja`, `analyticdb`, `couchbase`, `vikingdb`, `oceanbase`, `opengauss`, `tablestore`, `vastbase`, `tidb`, `tidb_on_qdrant`, `baidu`, `lindorm`, `huawei_cloud`, `upstash`, `matrixone`.
- VECTOR_STORE=weaviate
- # Prefix used to create collection names in the vector database
- VECTOR_INDEX_NAME_PREFIX=Vector_index
-
- # Weaviate configuration
- WEAVIATE_ENDPOINT=http://localhost:8080
- WEAVIATE_API_KEY=WVF5YThaHlkYwhGUSmCRgsX3tD5ngdN8pkih
- WEAVIATE_GRPC_ENABLED=false
- WEAVIATE_BATCH_SIZE=100
-
- # Qdrant configuration, use `http://localhost:6333` for local mode or `https://your-qdrant-cluster-url.qdrant.io` for remote mode
- QDRANT_URL=http://localhost:6333
- QDRANT_API_KEY=difyai123456
- QDRANT_CLIENT_TIMEOUT=20
- QDRANT_GRPC_ENABLED=false
- QDRANT_GRPC_PORT=6334
- QDRANT_REPLICATION_FACTOR=1
-
- # Couchbase configuration
- COUCHBASE_CONNECTION_STRING=127.0.0.1
- COUCHBASE_USER=Administrator
- COUCHBASE_PASSWORD=password
- COUCHBASE_BUCKET_NAME=Embeddings
- COUCHBASE_SCOPE_NAME=_default
-
- # Milvus configuration
- MILVUS_URI=http://127.0.0.1:19530
- MILVUS_TOKEN=
- MILVUS_USER=root
- MILVUS_PASSWORD=Milvus
- MILVUS_ANALYZER_PARAMS=
-
- # MyScale configuration
- MYSCALE_HOST=127.0.0.1
- MYSCALE_PORT=8123
- MYSCALE_USER=default
- MYSCALE_PASSWORD=
- MYSCALE_DATABASE=default
- MYSCALE_FTS_PARAMS=
-
- # Relyt configuration
- RELYT_HOST=127.0.0.1
- RELYT_PORT=5432
- RELYT_USER=postgres
- RELYT_PASSWORD=postgres
- RELYT_DATABASE=postgres
-
- # Tencent Vector DB configuration
- TENCENT_VECTOR_DB_URL=http://127.0.0.1
- TENCENT_VECTOR_DB_API_KEY=dify
- TENCENT_VECTOR_DB_TIMEOUT=30
- TENCENT_VECTOR_DB_USERNAME=dify
- TENCENT_VECTOR_DB_DATABASE=dify
- TENCENT_VECTOR_DB_SHARD=1
- TENCENT_VECTOR_DB_REPLICAS=2
- TENCENT_VECTOR_DB_ENABLE_HYBRID_SEARCH=false
-
- # Elasticsearch configuration
- ELASTICSEARCH_HOST=127.0.0.1
- ELASTICSEARCH_PORT=9200
- ELASTICSEARCH_USERNAME=elastic
- ELASTICSEARCH_PASSWORD=elastic
-
- # PGVECTO_RS configuration
- PGVECTO_RS_HOST=localhost
- PGVECTO_RS_PORT=5431
- PGVECTO_RS_USER=postgres
- PGVECTO_RS_PASSWORD=difyai123456
- PGVECTO_RS_DATABASE=postgres
-
- # PGVector configuration
- PGVECTOR_HOST=127.0.0.1
- PGVECTOR_PORT=5433
- PGVECTOR_USER=postgres
- PGVECTOR_PASSWORD=postgres
- PGVECTOR_DATABASE=postgres
- PGVECTOR_MIN_CONNECTION=1
- PGVECTOR_MAX_CONNECTION=5
-
- # TableStore Vector configuration
- TABLESTORE_ENDPOINT=https://instance-name.cn-hangzhou.ots.aliyuncs.com
- TABLESTORE_INSTANCE_NAME=instance-name
- TABLESTORE_ACCESS_KEY_ID=xxx
- TABLESTORE_ACCESS_KEY_SECRET=xxx
- TABLESTORE_NORMALIZE_FULLTEXT_BM25_SCORE=false
-
- # TiDB Vector configuration
- TIDB_VECTOR_HOST=xxx.eu-central-1.xxx.aws.tidbcloud.com
- TIDB_VECTOR_PORT=4000
- TIDB_VECTOR_USER=xxx.root
- TIDB_VECTOR_PASSWORD=xxxxxx
- TIDB_VECTOR_DATABASE=dify
-
- # TiDB on Qdrant configuration
- TIDB_ON_QDRANT_URL=http://127.0.0.1
- TIDB_ON_QDRANT_API_KEY=dify
- TIDB_ON_QDRANT_CLIENT_TIMEOUT=20
- TIDB_ON_QDRANT_GRPC_ENABLED=false
- TIDB_ON_QDRANT_GRPC_PORT=6334
- TIDB_PUBLIC_KEY=dify
- TIDB_PRIVATE_KEY=dify
- TIDB_API_URL=http://127.0.0.1
- TIDB_IAM_API_URL=http://127.0.0.1
- TIDB_REGION=regions/aws-us-east-1
- TIDB_PROJECT_ID=dify
- TIDB_SPEND_LIMIT=100
-
- # Chroma configuration
- CHROMA_HOST=127.0.0.1
- CHROMA_PORT=8000
- CHROMA_TENANT=default_tenant
- CHROMA_DATABASE=default_database
- CHROMA_AUTH_PROVIDER=chromadb.auth.token_authn.TokenAuthenticationServerProvider
- CHROMA_AUTH_CREDENTIALS=difyai123456
-
- # AnalyticDB configuration
- ANALYTICDB_KEY_ID=your-ak
- ANALYTICDB_KEY_SECRET=your-sk
- ANALYTICDB_REGION_ID=cn-hangzhou
- ANALYTICDB_INSTANCE_ID=gp-ab123456
- ANALYTICDB_ACCOUNT=testaccount
- ANALYTICDB_PASSWORD=testpassword
- ANALYTICDB_NAMESPACE=dify
- ANALYTICDB_NAMESPACE_PASSWORD=difypassword
- ANALYTICDB_HOST=gp-test.aliyuncs.com
- ANALYTICDB_PORT=5432
- ANALYTICDB_MIN_CONNECTION=1
- ANALYTICDB_MAX_CONNECTION=5
-
- # OpenSearch configuration
- OPENSEARCH_HOST=127.0.0.1
- OPENSEARCH_PORT=9200
- OPENSEARCH_USER=admin
- OPENSEARCH_PASSWORD=admin
- OPENSEARCH_SECURE=true
- OPENSEARCH_VERIFY_CERTS=true
-
- # Baidu Vector DB configuration
- BAIDU_VECTOR_DB_ENDPOINT=http://127.0.0.1:5287
- BAIDU_VECTOR_DB_CONNECTION_TIMEOUT_MS=30000
- BAIDU_VECTOR_DB_ACCOUNT=root
- BAIDU_VECTOR_DB_API_KEY=dify
- BAIDU_VECTOR_DB_DATABASE=dify
- BAIDU_VECTOR_DB_SHARD=1
- BAIDU_VECTOR_DB_REPLICAS=3
-
- # Upstash configuration
- UPSTASH_VECTOR_URL=your-server-url
- UPSTASH_VECTOR_TOKEN=your-access-token
-
- # ViKingDB configuration
- VIKINGDB_ACCESS_KEY=your-ak
- VIKINGDB_SECRET_KEY=your-sk
- VIKINGDB_REGION=cn-shanghai
- VIKINGDB_HOST=api-vikingdb.xxx.volces.com
- VIKINGDB_SCHEMA=http
- VIKINGDB_CONNECTION_TIMEOUT=30
- VIKINGDB_SOCKET_TIMEOUT=30
-
- # Matrixone configuration
- MATRIXONE_HOST=127.0.0.1
- MATRIXONE_PORT=6001
- MATRIXONE_USER=dump
- MATRIXONE_PASSWORD=111
- MATRIXONE_DATABASE=dify
-
- # Lindorm configuration
- LINDORM_URL=http://ld-*******************-proxy-search-pub.lindorm.aliyuncs.com:30070
- LINDORM_USERNAME=admin
- LINDORM_PASSWORD=admin
- USING_UGC_INDEX=False
- LINDORM_QUERY_TIMEOUT=1
-
- # OceanBase Vector configuration
- OCEANBASE_VECTOR_HOST=127.0.0.1
- OCEANBASE_VECTOR_PORT=2881
- OCEANBASE_VECTOR_USER=root@test
- OCEANBASE_VECTOR_PASSWORD=difyai123456
- OCEANBASE_VECTOR_DATABASE=test
- OCEANBASE_MEMORY_LIMIT=6G
- OCEANBASE_ENABLE_HYBRID_SEARCH=false
-
- # openGauss configuration
- OPENGAUSS_HOST=127.0.0.1
- OPENGAUSS_PORT=6600
- OPENGAUSS_USER=postgres
- OPENGAUSS_PASSWORD=Dify@123
- OPENGAUSS_DATABASE=dify
- OPENGAUSS_MIN_CONNECTION=1
- OPENGAUSS_MAX_CONNECTION=5
-
- # Upload configuration (file size limits are in MB)
- UPLOAD_FILE_SIZE_LIMIT=15
- UPLOAD_FILE_BATCH_LIMIT=5
- UPLOAD_IMAGE_FILE_SIZE_LIMIT=10
- UPLOAD_VIDEO_FILE_SIZE_LIMIT=100
- UPLOAD_AUDIO_FILE_SIZE_LIMIT=50
-
- # Model configuration
- MULTIMODAL_SEND_FORMAT=base64
- PROMPT_GENERATION_MAX_TOKENS=512
- CODE_GENERATION_MAX_TOKENS=1024
- PLUGIN_BASED_TOKEN_COUNTING_ENABLED=false
-
- # Mail configuration, supported types: resend, smtp, sendgrid
- MAIL_TYPE=
- # If using SendGrid, use the 'from' field for authentication if necessary.
- MAIL_DEFAULT_SEND_FROM=no-reply <no-reply@dify.ai>
- # resend configuration
- RESEND_API_KEY=
- RESEND_API_URL=https://api.resend.com
- # smtp configuration
- SMTP_SERVER=smtp.gmail.com
- SMTP_PORT=465
- SMTP_USERNAME=123
- SMTP_PASSWORD=abc
- SMTP_USE_TLS=true
- SMTP_OPPORTUNISTIC_TLS=false
- # SendGrid configuration
- SENDGRID_API_KEY=
- # Sentry configuration
- SENTRY_DSN=
-
- # DEBUG
- DEBUG=false
- ENABLE_REQUEST_LOGGING=False
- SQLALCHEMY_ECHO=false
-
- # Notion import configuration, supported types: public and internal
- NOTION_INTEGRATION_TYPE=public
- NOTION_CLIENT_SECRET=your-client-secret
- NOTION_CLIENT_ID=your-client-id
- NOTION_INTERNAL_SECRET=your-internal-secret
-
- ETL_TYPE=dify
- UNSTRUCTURED_API_URL=
- UNSTRUCTURED_API_KEY=
- SCARF_NO_ANALYTICS=true
-
- # SSRF proxy configuration
- SSRF_PROXY_HTTP_URL=
- SSRF_PROXY_HTTPS_URL=
- SSRF_DEFAULT_MAX_RETRIES=3
- SSRF_DEFAULT_TIME_OUT=5
- SSRF_DEFAULT_CONNECT_TIME_OUT=5
- SSRF_DEFAULT_READ_TIME_OUT=5
- SSRF_DEFAULT_WRITE_TIME_OUT=5
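- # Illustrative example (assumed host/port, e.g. a forward proxy such as the ssrf_proxy container):
- #   SSRF_PROXY_HTTP_URL=http://ssrf_proxy:3128
- #   SSRF_PROXY_HTTPS_URL=http://ssrf_proxy:3128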
-
- BATCH_UPLOAD_LIMIT=10
- KEYWORD_DATA_SOURCE_TYPE=database
-
- # Workflow file upload limit
- WORKFLOW_FILE_UPLOAD_LIMIT=10
-
- # CODE EXECUTION CONFIGURATION
- CODE_EXECUTION_ENDPOINT=http://127.0.0.1:8194
- CODE_EXECUTION_API_KEY=dify-sandbox
- CODE_MAX_NUMBER=9223372036854775807
- CODE_MIN_NUMBER=-9223372036854775808
- CODE_MAX_STRING_LENGTH=80000
- TEMPLATE_TRANSFORM_MAX_LENGTH=80000
- CODE_MAX_STRING_ARRAY_LENGTH=30
- CODE_MAX_OBJECT_ARRAY_LENGTH=30
- CODE_MAX_NUMBER_ARRAY_LENGTH=1000
-
- # API Tool configuration
- API_TOOL_DEFAULT_CONNECT_TIMEOUT=10
- API_TOOL_DEFAULT_READ_TIMEOUT=60
-
- # HTTP Node configuration
- HTTP_REQUEST_MAX_CONNECT_TIMEOUT=300
- HTTP_REQUEST_MAX_READ_TIMEOUT=600
- HTTP_REQUEST_MAX_WRITE_TIMEOUT=600
- HTTP_REQUEST_NODE_MAX_BINARY_SIZE=10485760
- HTTP_REQUEST_NODE_MAX_TEXT_SIZE=1048576
- HTTP_REQUEST_NODE_SSL_VERIFY=True
-
- # Respect X-Forwarded-* headers when redirecting clients
- RESPECT_XFORWARD_HEADERS_ENABLED=false
-
- # Log file path
- LOG_FILE=
- # Log file max size, in MB
- LOG_FILE_MAX_SIZE=20
- # Log file max backup count
- LOG_FILE_BACKUP_COUNT=5
- # Log date format
- LOG_DATEFORMAT=%Y-%m-%d %H:%M:%S
- # Log Timezone
- LOG_TZ=UTC
- # Log format
- LOG_FORMAT=%(asctime)s,%(msecs)d %(levelname)-2s [%(filename)s:%(lineno)d] %(req_id)s %(message)s
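- # Illustrative example (assumed path): write logs to a rotating file
- #   LOG_FILE=logs/dify-api.log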
-
- # Indexing configuration
- INDEXING_MAX_SEGMENTATION_TOKENS_LENGTH=4000
-
- # Workflow runtime configuration
- WORKFLOW_MAX_EXECUTION_STEPS=500
- WORKFLOW_MAX_EXECUTION_TIME=1200
- WORKFLOW_CALL_MAX_DEPTH=5
- WORKFLOW_PARALLEL_DEPTH_LIMIT=3
- MAX_VARIABLE_SIZE=204800
-
- # Workflow storage configuration
- # Options: rdbms, hybrid
- # rdbms: Use only the relational database (default)
- # hybrid: Save new data to object storage, read from both object storage and RDBMS
- WORKFLOW_NODE_EXECUTION_STORAGE=rdbms
-
- # Repository configuration
- # Core workflow execution repository implementation
- CORE_WORKFLOW_EXECUTION_REPOSITORY=core.repositories.sqlalchemy_workflow_execution_repository.SQLAlchemyWorkflowExecutionRepository
-
- # Core workflow node execution repository implementation
- CORE_WORKFLOW_NODE_EXECUTION_REPOSITORY=core.repositories.sqlalchemy_workflow_node_execution_repository.SQLAlchemyWorkflowNodeExecutionRepository
-
- # API workflow node execution repository implementation
- API_WORKFLOW_NODE_EXECUTION_REPOSITORY=repositories.sqlalchemy_api_workflow_node_execution_repository.DifyAPISQLAlchemyWorkflowNodeExecutionRepository
-
- # API workflow run repository implementation
- API_WORKFLOW_RUN_REPOSITORY=repositories.sqlalchemy_api_workflow_run_repository.DifyAPISQLAlchemyWorkflowRunRepository
-
- # Workflow log cleanup configuration
- # Enable automatic cleanup of workflow run logs to manage database size
- WORKFLOW_LOG_CLEANUP_ENABLED=true
- # Number of days to retain workflow run logs (default: 30 days)
- WORKFLOW_LOG_RETENTION_DAYS=30
- # Batch size for workflow log cleanup operations (default: 100)
- WORKFLOW_LOG_CLEANUP_BATCH_SIZE=100
-
- # App configuration
- APP_MAX_EXECUTION_TIME=1200
- APP_MAX_ACTIVE_REQUESTS=0
-
- # Celery beat configuration
- CELERY_BEAT_SCHEDULER_TIME=1
-
- # Celery schedule tasks configuration
- ENABLE_CLEAN_EMBEDDING_CACHE_TASK=false
- ENABLE_CLEAN_UNUSED_DATASETS_TASK=false
- ENABLE_CREATE_TIDB_SERVERLESS_TASK=false
- ENABLE_UPDATE_TIDB_SERVERLESS_STATUS_TASK=false
- ENABLE_CLEAN_MESSAGES=false
- ENABLE_MAIL_CLEAN_DOCUMENT_NOTIFY_TASK=false
- ENABLE_DATASETS_QUEUE_MONITOR=false
- ENABLE_CHECK_UPGRADABLE_PLUGIN_TASK=true
-
- # Position configuration
- POSITION_TOOL_PINS=
- POSITION_TOOL_INCLUDES=
- POSITION_TOOL_EXCLUDES=
-
- POSITION_PROVIDER_PINS=
- POSITION_PROVIDER_INCLUDES=
- POSITION_PROVIDER_EXCLUDES=
-
- # Plugin configuration
- PLUGIN_DAEMON_KEY=lYkiYYT6owG+71oLerGzA7GXCgOT++6ovaezWAjpCjf+Sjc3ZtU+qUEi
- PLUGIN_DAEMON_URL=http://127.0.0.1:5002
- PLUGIN_REMOTE_INSTALL_PORT=5003
- PLUGIN_REMOTE_INSTALL_HOST=localhost
- PLUGIN_MAX_PACKAGE_SIZE=15728640
- INNER_API_KEY_FOR_PLUGIN=QaHbTe77CtuXmsfyhR7+vRjI/+XbV1AaFy691iy+kGDv2Jvy0/eAh8Y1
-
- # Marketplace configuration
- MARKETPLACE_ENABLED=true
- MARKETPLACE_API_URL=https://marketplace.dify.ai
-
- # Endpoint configuration
- ENDPOINT_URL_TEMPLATE=http://localhost:5002/e/{hook_id}
-
- # Reset password token expiry minutes
- RESET_PASSWORD_TOKEN_EXPIRY_MINUTES=5
- CHANGE_EMAIL_TOKEN_EXPIRY_MINUTES=5
- OWNER_TRANSFER_TOKEN_EXPIRY_MINUTES=5
-
- CREATE_TIDB_SERVICE_JOB_ENABLED=false
-
- # Maximum number of tasks submitted to a ThreadPool for parallel node execution
- MAX_SUBMIT_COUNT=100
- # Lockout duration in seconds
- LOGIN_LOCKOUT_DURATION=86400
-
- # Enable OpenTelemetry
- ENABLE_OTEL=false
- OTLP_TRACE_ENDPOINT=
- OTLP_METRIC_ENDPOINT=
- OTLP_BASE_ENDPOINT=http://localhost:4318
- OTLP_API_KEY=
- OTEL_EXPORTER_OTLP_PROTOCOL=
- OTEL_EXPORTER_TYPE=otlp
- OTEL_SAMPLING_RATE=0.1
- OTEL_BATCH_EXPORT_SCHEDULE_DELAY=5000
- OTEL_MAX_QUEUE_SIZE=2048
- OTEL_MAX_EXPORT_BATCH_SIZE=512
- OTEL_METRIC_EXPORT_INTERVAL=60000
- OTEL_BATCH_EXPORT_TIMEOUT=10000
- OTEL_METRIC_EXPORT_TIMEOUT=30000
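- # Illustrative example (assumption): export to a local OTLP/HTTP collector on the default port
- #   ENABLE_OTEL=true
- #   OTLP_BASE_ENDPOINT=http://localhost:4318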
-
- # Prevent Clickjacking
- ALLOW_EMBED=false
-
- # Dataset queue monitor configuration
- QUEUE_MONITOR_THRESHOLD=200
- # You can configure multiple recipients, separated by commas, e.g. test1@dify.ai,test2@dify.ai
- QUEUE_MONITOR_ALERT_EMAILS=
- # Monitoring interval in minutes (default: 30)
- QUEUE_MONITOR_INTERVAL=30
-
- # Swagger UI configuration
- SWAGGER_UI_ENABLED=true
- SWAGGER_UI_PATH=/swagger-ui.html