
chore: bump minimum supported Python version to 3.11 (#10386)

tags/0.12.0
Bowen Liang 11 months ago
commit 6c8e208ef3
81 changed files with 271 additions and 300 deletions
  1. .github/actions/setup-poetry/action.yml (+1, -1)
  2. .github/workflows/api-tests.yml (+0, -1)
  3. .github/workflows/vdb-tests.yml (+0, -1)
  4. api/app.py (+5, -3)
  5. api/controllers/console/app/conversation.py (+2, -2)
  6. api/controllers/console/app/site.py (+3, -3)
  7. api/controllers/console/auth/activate.py (+1, -1)
  8. api/controllers/console/auth/oauth.py (+2, -2)
  9. api/controllers/console/datasets/data_source.py (+2, -2)
  10. api/controllers/console/datasets/datasets_document.py (+9, -9)
  11. api/controllers/console/datasets/datasets_segments.py (+2, -2)
  12. api/controllers/console/explore/completion.py (+3, -3)
  13. api/controllers/console/explore/installed_app.py (+2, -2)
  14. api/controllers/console/workspace/account.py (+2, -2)
  15. api/controllers/service_api/wraps.py (+2, -2)
  16. api/core/agent/base_agent_runner.py (+2, -2)
  17. api/core/app/app_config/entities.py (+2, -2)
  18. api/core/app/apps/message_based_app_generator.py (+2, -2)
  19. api/core/app/entities/queue_entities.py (+2, -2)
  20. api/core/app/task_pipeline/workflow_cycle_manage.py (+8, -8)
  21. api/core/entities/provider_configuration.py (+6, -6)
  22. api/core/file/enums.py (+6, -6)
  23. api/core/helper/code_executor/code_executor.py (+2, -2)
  24. api/core/indexing_runner.py (+14, -14)
  25. api/core/model_runtime/entities/message_entities.py (+3, -3)
  26. api/core/model_runtime/entities/model_entities.py (+2, -2)
  27. api/core/ops/entities/trace_entity.py (+2, -2)
  28. api/core/ops/langfuse_trace/entities/langfuse_trace_entity.py (+3, -3)
  29. api/core/ops/langsmith_trace/entities/langsmith_trace_entity.py (+2, -2)
  30. api/core/prompt/simple_prompt_transform.py (+1, -1)
  31. api/core/rag/datasource/keyword/keyword_type.py (+2, -2)
  32. api/core/rag/datasource/vdb/vector_type.py (+2, -2)
  33. api/core/rag/extractor/word_extractor.py (+2, -2)
  34. api/core/rag/rerank/rerank_type.py (+2, -2)
  35. api/core/tools/entities/tool_entities.py (+2, -2)
  36. api/core/tools/provider/builtin/time/tools/current_time.py (+2, -2)
  37. api/core/tools/tool/tool.py (+2, -2)
  38. api/core/tools/tool_engine.py (+3, -3)
  39. api/core/variables/types.py (+2, -2)
  40. api/core/workflow/entities/node_entities.py (+2, -2)
  41. api/core/workflow/enums.py (+2, -2)
  42. api/core/workflow/graph_engine/entities/runtime_route_state.py (+3, -3)
  43. api/core/workflow/nodes/enums.py (+2, -2)
  44. api/core/workflow/nodes/iteration/entities.py (+2, -2)
  45. api/core/workflow/nodes/iteration/iteration_node.py (+6, -6)
  46. api/core/workflow/nodes/variable_assigner/node_data.py (+2, -2)
  47. api/events/event_handlers/create_document_index.py (+1, -1)
  48. api/events/event_handlers/update_provider_last_used_at_when_message_created.py (+2, -2)
  49. api/extensions/storage/azure_blob_storage.py (+2, -2)
  50. api/extensions/storage/storage_type.py (+2, -2)
  51. api/libs/oauth_data_source.py (+3, -3)
  52. api/models/account.py (+3, -3)
  53. api/models/dataset.py (+1, -1)
  54. api/models/enums.py (+4, -4)
  55. api/models/model.py (+2, -2)
  56. api/models/task.py (+4, -4)
  57. api/models/workflow.py (+2, -2)
  58. api/poetry.lock (+46, -75)
  59. api/pyproject.toml (+2, -2)
  60. api/services/account_service.py (+9, -9)
  61. api/services/annotation_service.py (+1, -1)
  62. api/services/app_dsl_service.py (+3, -3)
  63. api/services/app_service.py (+6, -6)
  64. api/services/auth/auth_type.py (+2, -2)
  65. api/services/conversation_service.py (+2, -2)
  66. api/services/dataset_service.py (+13, -13)
  67. api/services/external_knowledge_service.py (+2, -2)
  68. api/services/feature_service.py (+2, -2)
  69. api/services/file_service.py (+3, -3)
  70. api/services/model_load_balancing_service.py (+1, -1)
  71. api/services/recommend_app/recommend_app_type.py (+2, -2)
  72. api/services/workflow_service.py (+5, -5)
  73. api/tasks/add_document_to_index_task.py (+1, -1)
  74. api/tasks/annotation/enable_annotation_reply_task.py (+1, -1)
  75. api/tasks/batch_create_segment_to_index_task.py (+2, -2)
  76. api/tasks/create_segment_to_index_task.py (+3, -3)
  77. api/tasks/document_indexing_sync_task.py (+1, -1)
  78. api/tasks/document_indexing_task.py (+2, -2)
  79. api/tasks/document_indexing_update_task.py (+1, -1)
  80. api/tasks/enable_segment_to_index_task.py (+1, -1)
  81. api/tests/unit_tests/core/workflow/nodes/answer/test_answer_stream_processor.py (+3, -3)

.github/actions/setup-poetry/action.yml (+1, -1)

  python-version:
    description: Python version to use and the Poetry installed with
    required: true
-   default: '3.10'
+   default: '3.11'
  poetry-version:
    description: Poetry version to set up
    required: true

.github/workflows/api-tests.yml (+0, -1)

  strategy:
    matrix:
      python-version:
-       - "3.10"
        - "3.11"
        - "3.12"



.github/workflows/vdb-tests.yml (+0, -1)

  strategy:
    matrix:
      python-version:
-       - "3.10"
        - "3.11"
        - "3.12"



api/app.py (+5, -3)

  import os
  import sys

+ python_version = sys.version_info
+ if not ((3, 11) <= python_version < (3, 13)):
+     print(f"Python 3.11 or 3.12 is required, current version is {python_version.major}.{python_version.minor}")
+     raise SystemExit(1)
+
  from configs import dify_config

  if not dify_config.DEBUG:

  # DO NOT REMOVE ABOVE

- if sys.version_info[:2] == (3, 10):
-     print("Warning: Python 3.10 will not be supported in the next version.")
-
  warnings.simplefilter("ignore", ResourceWarning)
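
The new guard at the top of api/app.py relies on sys.version_info comparing element-wise against plain tuples, so any interpreter outside the 3.11–3.12 range is rejected before the rest of the module is imported. A minimal standalone sketch of the same pattern (hypothetical script, not the project's code):

import sys

# sys.version_info is a tuple subclass, so it compares element-wise against
# plain tuples: (3, 11) <= (3, 12, 1, 'final', 0) < (3, 13) is True.
if not ((3, 11) <= sys.version_info < (3, 13)):
    print(f"Python 3.11 or 3.12 is required, current version is "
          f"{sys.version_info.major}.{sys.version_info.minor}")
    raise SystemExit(1)

print("interpreter accepted:", sys.version.split()[0])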



api/controllers/console/app/conversation.py (+2, -2)

- from datetime import datetime, timezone
+ from datetime import UTC, datetime

  import pytz
  from flask_login import current_user

  raise NotFound("Conversation Not Exists.")

  if not conversation.read_at:
-     conversation.read_at = datetime.now(timezone.utc).replace(tzinfo=None)
+     conversation.read_at = datetime.now(UTC).replace(tzinfo=None)
      conversation.read_account_id = current_user.id
      db.session.commit()
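
This is the pattern repeated across most of the files below: datetime.UTC, added in Python 3.11, is an alias for datetime.timezone.utc, so the rewrite is purely cosmetic and the stored naive-UTC values are unchanged. A short sketch of the equivalence (assumes a 3.11+ interpreter):

from datetime import UTC, datetime, timezone

assert UTC is timezone.utc  # UTC is a re-exported alias of the same singleton

aware = datetime.now(UTC)            # timezone-aware "now" in UTC
naive = aware.replace(tzinfo=None)   # naive UTC, as stored in the DB columns
print(aware.tzinfo, naive.tzinfo)    # UTC None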



api/controllers/console/app/site.py (+3, -3)

- from datetime import datetime, timezone
+ from datetime import UTC, datetime

  from flask_login import current_user
  from flask_restful import Resource, marshal_with, reqparse

  setattr(site, attr_name, value)

  site.updated_by = current_user.id
- site.updated_at = datetime.now(timezone.utc).replace(tzinfo=None)
+ site.updated_at = datetime.now(UTC).replace(tzinfo=None)
  db.session.commit()

  return site

  site.code = Site.generate_code(16)
  site.updated_by = current_user.id
- site.updated_at = datetime.now(timezone.utc).replace(tzinfo=None)
+ site.updated_at = datetime.now(UTC).replace(tzinfo=None)
  db.session.commit()

  return site

api/controllers/console/auth/activate.py (+1, -1)

  account.timezone = args["timezone"]
  account.interface_theme = "light"
  account.status = AccountStatus.ACTIVE.value
- account.initialized_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
+ account.initialized_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
  db.session.commit()

  token_pair = AccountService.login(account, ip_address=extract_remote_ip(request))

api/controllers/console/auth/oauth.py (+2, -2)

  import logging
- from datetime import datetime, timezone
+ from datetime import UTC, datetime
  from typing import Optional

  import requests

  if account.status == AccountStatus.PENDING.value:
      account.status = AccountStatus.ACTIVE.value
-     account.initialized_at = datetime.now(timezone.utc).replace(tzinfo=None)
+     account.initialized_at = datetime.now(UTC).replace(tzinfo=None)
      db.session.commit()

  try:

api/controllers/console/datasets/data_source.py (+2, -2)

  if action == "enable":
      if data_source_binding.disabled:
          data_source_binding.disabled = False
-         data_source_binding.updated_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
+         data_source_binding.updated_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
          db.session.add(data_source_binding)
          db.session.commit()
      else:

  if action == "disable":
      if not data_source_binding.disabled:
          data_source_binding.disabled = True
-         data_source_binding.updated_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
+         data_source_binding.updated_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
          db.session.add(data_source_binding)
          db.session.commit()
      else:

api/controllers/console/datasets/datasets_document.py (+9, -9)

  import logging
  from argparse import ArgumentTypeError
- from datetime import datetime, timezone
+ from datetime import UTC, datetime

  from flask import request
  from flask_login import current_user

  raise InvalidActionError("Document not in indexing state.")

  document.paused_by = current_user.id
- document.paused_at = datetime.now(timezone.utc).replace(tzinfo=None)
+ document.paused_at = datetime.now(UTC).replace(tzinfo=None)
  document.is_paused = True
  db.session.commit()

  document.doc_metadata[key] = value

  document.doc_type = doc_type
- document.updated_at = datetime.now(timezone.utc).replace(tzinfo=None)
+ document.updated_at = datetime.now(UTC).replace(tzinfo=None)
  db.session.commit()

  return {"result": "success", "message": "Document metadata updated."}, 200

  document.enabled = True
  document.disabled_at = None
  document.disabled_by = None
- document.updated_at = datetime.now(timezone.utc).replace(tzinfo=None)
+ document.updated_at = datetime.now(UTC).replace(tzinfo=None)
  db.session.commit()

  # Set cache to prevent indexing the same document multiple times

  raise InvalidActionError("Document already disabled.")

  document.enabled = False
- document.disabled_at = datetime.now(timezone.utc).replace(tzinfo=None)
+ document.disabled_at = datetime.now(UTC).replace(tzinfo=None)
  document.disabled_by = current_user.id
- document.updated_at = datetime.now(timezone.utc).replace(tzinfo=None)
+ document.updated_at = datetime.now(UTC).replace(tzinfo=None)
  db.session.commit()

  # Set cache to prevent indexing the same document multiple times

  raise InvalidActionError("Document already archived.")

  document.archived = True
- document.archived_at = datetime.now(timezone.utc).replace(tzinfo=None)
+ document.archived_at = datetime.now(UTC).replace(tzinfo=None)
  document.archived_by = current_user.id
- document.updated_at = datetime.now(timezone.utc).replace(tzinfo=None)
+ document.updated_at = datetime.now(UTC).replace(tzinfo=None)
  db.session.commit()

  if document.enabled:
      document.archived = False
      document.archived_at = None
      document.archived_by = None
-     document.updated_at = datetime.now(timezone.utc).replace(tzinfo=None)
+     document.updated_at = datetime.now(UTC).replace(tzinfo=None)
      db.session.commit()

  # Set cache to prevent indexing the same document multiple times

api/controllers/console/datasets/datasets_segments.py (+2, -2)

  import uuid
- from datetime import datetime, timezone
+ from datetime import UTC, datetime

  import pandas as pd
  from flask import request

  raise InvalidActionError("Segment is already disabled.")

  segment.enabled = False
- segment.disabled_at = datetime.now(timezone.utc).replace(tzinfo=None)
+ segment.disabled_at = datetime.now(UTC).replace(tzinfo=None)
  segment.disabled_by = current_user.id
  db.session.commit()



api/controllers/console/explore/completion.py (+3, -3)

  import logging
- from datetime import datetime, timezone
+ from datetime import UTC, datetime

  from flask_login import current_user
  from flask_restful import reqparse

  streaming = args["response_mode"] == "streaming"
  args["auto_generate_name"] = False

- installed_app.last_used_at = datetime.now(timezone.utc).replace(tzinfo=None)
+ installed_app.last_used_at = datetime.now(UTC).replace(tzinfo=None)
  db.session.commit()

  try:

  args["auto_generate_name"] = False

- installed_app.last_used_at = datetime.now(timezone.utc).replace(tzinfo=None)
+ installed_app.last_used_at = datetime.now(UTC).replace(tzinfo=None)
  db.session.commit()

  try:

api/controllers/console/explore/installed_app.py (+2, -2)

- from datetime import datetime, timezone
+ from datetime import UTC, datetime

  from flask_login import current_user
  from flask_restful import Resource, inputs, marshal_with, reqparse

  tenant_id=current_tenant_id,
  app_owner_tenant_id=app.tenant_id,
  is_pinned=False,
- last_used_at=datetime.now(timezone.utc).replace(tzinfo=None),
+ last_used_at=datetime.now(UTC).replace(tzinfo=None),
  )
  db.session.add(new_installed_app)
  db.session.commit()

api/controllers/console/workspace/account.py (+2, -2)

  raise InvalidInvitationCodeError()

  invitation_code.status = "used"
- invitation_code.used_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
+ invitation_code.used_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
  invitation_code.used_by_tenant_id = account.current_tenant_id
  invitation_code.used_by_account_id = account.id

  account.timezone = args["timezone"]
  account.interface_theme = "light"
  account.status = "active"
- account.initialized_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
+ account.initialized_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
  db.session.commit()

  return {"result": "success"}

api/controllers/service_api/wraps.py (+2, -2)

  from collections.abc import Callable
- from datetime import datetime, timezone
+ from datetime import UTC, datetime
  from enum import Enum
  from functools import wraps
  from typing import Optional

  if not api_token:
      raise Unauthorized("Access token is invalid")

- api_token.last_used_at = datetime.now(timezone.utc).replace(tzinfo=None)
+ api_token.last_used_at = datetime.now(UTC).replace(tzinfo=None)
  db.session.commit()

  return api_token

api/core/agent/base_agent_runner.py (+2, -2)

  import logging
  import uuid
  from collections.abc import Mapping, Sequence
- from datetime import datetime, timezone
+ from datetime import UTC, datetime
  from typing import Optional, Union, cast

  from core.agent.entities import AgentEntity, AgentToolEntity

  .first()
  )

- db_variables.updated_at = datetime.now(timezone.utc).replace(tzinfo=None)
+ db_variables.updated_at = datetime.now(UTC).replace(tzinfo=None)
  db_variables.variables_str = json.dumps(jsonable_encoder(tool_variables.pool))
  db.session.commit()
  db.session.close()

api/core/app/app_config/entities.py (+2, -2)

  from collections.abc import Sequence
- from enum import Enum
+ from enum import Enum, StrEnum
  from typing import Any, Optional

  from pydantic import BaseModel, Field, field_validator

  advanced_completion_prompt_template: Optional[AdvancedCompletionPromptTemplateEntity] = None


- class VariableEntityType(str, Enum):
+ class VariableEntityType(StrEnum):
      TEXT_INPUT = "text-input"
      SELECT = "select"
      PARAGRAPH = "paragraph"
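
The other recurring pattern in this commit is the move from class Foo(str, Enum) to enum.StrEnum, which also requires Python 3.11. The swap is not purely cosmetic: 3.11 changed Enum.__format__ for mixed-in enums to include the class name, so f-string interpolation of (str, Enum) members no longer yields the bare value, while StrEnum members always stringify to their value. A sketch with hypothetical enum names:

from enum import Enum, StrEnum  # StrEnum requires Python >= 3.11

class OldStyle(str, Enum):   # stand-in for the replaced pattern
    SELECT = "select"

class NewStyle(StrEnum):     # stand-in for the replacement
    SELECT = "select"

print(str(OldStyle.SELECT))  # 'OldStyle.SELECT'
print(f"{OldStyle.SELECT}")  # 'OldStyle.SELECT' on 3.11+ ('select' on 3.10)
print(str(NewStyle.SELECT))  # 'select'
print(f"{NewStyle.SELECT}")  # 'select'
print(NewStyle.SELECT == "select" == OldStyle.SELECT)  # True: both are str subclasses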

api/core/app/apps/message_based_app_generator.py (+2, -2)

  import json
  import logging
  from collections.abc import Generator
- from datetime import datetime, timezone
+ from datetime import UTC, datetime
  from typing import Optional, Union

  from sqlalchemy import and_

  db.session.commit()
  db.session.refresh(conversation)
  else:
-     conversation.updated_at = datetime.now(timezone.utc).replace(tzinfo=None)
+     conversation.updated_at = datetime.now(UTC).replace(tzinfo=None)
      db.session.commit()

  message = Message(

api/core/app/entities/queue_entities.py (+2, -2)

  from datetime import datetime
- from enum import Enum
+ from enum import Enum, StrEnum
  from typing import Any, Optional

  from pydantic import BaseModel, field_validator

  from core.workflow.nodes.base import BaseNodeData


- class QueueEvent(str, Enum):
+ class QueueEvent(StrEnum):
      """
      QueueEvent enum
      """

api/core/app/task_pipeline/workflow_cycle_manage.py (+8, -8)

  import json
  import time
  from collections.abc import Mapping, Sequence
- from datetime import datetime, timezone
+ from datetime import UTC, datetime
  from typing import Any, Optional, Union, cast

  from sqlalchemy.orm import Session

  workflow_run.elapsed_time = time.perf_counter() - start_at
  workflow_run.total_tokens = total_tokens
  workflow_run.total_steps = total_steps
- workflow_run.finished_at = datetime.now(timezone.utc).replace(tzinfo=None)
+ workflow_run.finished_at = datetime.now(UTC).replace(tzinfo=None)

  db.session.commit()
  db.session.refresh(workflow_run)

  workflow_run.elapsed_time = time.perf_counter() - start_at
  workflow_run.total_tokens = total_tokens
  workflow_run.total_steps = total_steps
- workflow_run.finished_at = datetime.now(timezone.utc).replace(tzinfo=None)
+ workflow_run.finished_at = datetime.now(UTC).replace(tzinfo=None)

  db.session.commit()

  for workflow_node_execution in running_workflow_node_executions:
      workflow_node_execution.status = WorkflowNodeExecutionStatus.FAILED.value
      workflow_node_execution.error = error
-     workflow_node_execution.finished_at = datetime.now(timezone.utc).replace(tzinfo=None)
+     workflow_node_execution.finished_at = datetime.now(UTC).replace(tzinfo=None)
      workflow_node_execution.elapsed_time = (
          workflow_node_execution.finished_at - workflow_node_execution.created_at
      ).total_seconds()

  NodeRunMetadataKey.ITERATION_ID: event.in_iteration_id,
  }
  )
- workflow_node_execution.created_at = datetime.now(timezone.utc).replace(tzinfo=None)
+ workflow_node_execution.created_at = datetime.now(UTC).replace(tzinfo=None)

  session.add(workflow_node_execution)
  session.commit()

  execution_metadata = (
      json.dumps(jsonable_encoder(event.execution_metadata)) if event.execution_metadata else None
  )
- finished_at = datetime.now(timezone.utc).replace(tzinfo=None)
+ finished_at = datetime.now(UTC).replace(tzinfo=None)
  elapsed_time = (finished_at - event.start_at).total_seconds()

  db.session.query(WorkflowNodeExecution).filter(WorkflowNodeExecution.id == workflow_node_execution.id).update(

  inputs = WorkflowEntry.handle_special_values(event.inputs)
  process_data = WorkflowEntry.handle_special_values(event.process_data)
  outputs = WorkflowEntry.handle_special_values(event.outputs)
- finished_at = datetime.now(timezone.utc).replace(tzinfo=None)
+ finished_at = datetime.now(UTC).replace(tzinfo=None)
  elapsed_time = (finished_at - event.start_at).total_seconds()
  execution_metadata = (
      json.dumps(jsonable_encoder(event.execution_metadata)) if event.execution_metadata else None

  if event.error is None
  else WorkflowNodeExecutionStatus.FAILED,
  error=None,
- elapsed_time=(datetime.now(timezone.utc).replace(tzinfo=None) - event.start_at).total_seconds(),
+ elapsed_time=(datetime.now(UTC).replace(tzinfo=None) - event.start_at).total_seconds(),
  total_tokens=event.metadata.get("total_tokens", 0) if event.metadata else 0,
  execution_metadata=event.metadata,
  finished_at=int(time.time()),

api/core/entities/provider_configuration.py (+6, -6)

  if provider_record:
      provider_record.encrypted_config = json.dumps(credentials)
      provider_record.is_valid = True
-     provider_record.updated_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
+     provider_record.updated_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
      db.session.commit()
  else:
      provider_record = Provider(

  if provider_model_record:
      provider_model_record.encrypted_config = json.dumps(credentials)
      provider_model_record.is_valid = True
-     provider_model_record.updated_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
+     provider_model_record.updated_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
      db.session.commit()
  else:
      provider_model_record = ProviderModel(

  if model_setting:
      model_setting.enabled = True
-     model_setting.updated_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
+     model_setting.updated_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
      db.session.commit()
  else:
      model_setting = ProviderModelSetting(

  if model_setting:
      model_setting.enabled = False
-     model_setting.updated_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
+     model_setting.updated_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
      db.session.commit()
  else:
      model_setting = ProviderModelSetting(

  if model_setting:
      model_setting.load_balancing_enabled = True
-     model_setting.updated_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
+     model_setting.updated_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
      db.session.commit()
  else:
      model_setting = ProviderModelSetting(

  if model_setting:
      model_setting.load_balancing_enabled = False
-     model_setting.updated_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
+     model_setting.updated_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
      db.session.commit()
  else:
      model_setting = ProviderModelSetting(

api/core/file/enums.py (+6, -6)

- from enum import Enum
+ from enum import StrEnum


- class FileType(str, Enum):
+ class FileType(StrEnum):
      IMAGE = "image"
      DOCUMENT = "document"
      AUDIO = "audio"

  raise ValueError(f"No matching enum found for value '{value}'")


- class FileTransferMethod(str, Enum):
+ class FileTransferMethod(StrEnum):
      REMOTE_URL = "remote_url"
      LOCAL_FILE = "local_file"
      TOOL_FILE = "tool_file"

  raise ValueError(f"No matching enum found for value '{value}'")


- class FileBelongsTo(str, Enum):
+ class FileBelongsTo(StrEnum):
      USER = "user"
      ASSISTANT = "assistant"

  raise ValueError(f"No matching enum found for value '{value}'")


- class FileAttribute(str, Enum):
+ class FileAttribute(StrEnum):
      TYPE = "type"
      SIZE = "size"
      NAME = "name"
      EXTENSION = "extension"


- class ArrayFileAttribute(str, Enum):
+ class ArrayFileAttribute(StrEnum):
      LENGTH = "length"

api/core/helper/code_executor/code_executor.py (+2, -2)

  import logging
  from collections.abc import Mapping
- from enum import Enum
+ from enum import StrEnum
  from threading import Lock
  from typing import Any, Optional

  data: Data


- class CodeLanguage(str, Enum):
+ class CodeLanguage(StrEnum):
      PYTHON3 = "python3"
      JINJA2 = "jinja2"
      JAVASCRIPT = "javascript"

api/core/indexing_runner.py (+14, -14)

  except ProviderTokenNotInitError as e:
      dataset_document.indexing_status = "error"
      dataset_document.error = str(e.description)
-     dataset_document.stopped_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
+     dataset_document.stopped_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
      db.session.commit()
  except ObjectDeletedError:
      logging.warning("Document deleted, document id: {}".format(dataset_document.id))

      logging.exception("consume document failed")
      dataset_document.indexing_status = "error"
      dataset_document.error = str(e)
-     dataset_document.stopped_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
+     dataset_document.stopped_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
      db.session.commit()

  def run_in_splitting_status(self, dataset_document: DatasetDocument):

  except ProviderTokenNotInitError as e:
      dataset_document.indexing_status = "error"
      dataset_document.error = str(e.description)
-     dataset_document.stopped_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
+     dataset_document.stopped_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
      db.session.commit()
  except Exception as e:
      logging.exception("consume document failed")
      dataset_document.indexing_status = "error"
      dataset_document.error = str(e)
-     dataset_document.stopped_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
+     dataset_document.stopped_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
      db.session.commit()

  def run_in_indexing_status(self, dataset_document: DatasetDocument):

  except ProviderTokenNotInitError as e:
      dataset_document.indexing_status = "error"
      dataset_document.error = str(e.description)
-     dataset_document.stopped_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
+     dataset_document.stopped_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
      db.session.commit()
  except Exception as e:
      logging.exception("consume document failed")
      dataset_document.indexing_status = "error"
      dataset_document.error = str(e)
-     dataset_document.stopped_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
+     dataset_document.stopped_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
      db.session.commit()

  def indexing_estimate(

  after_indexing_status="splitting",
  extra_update_params={
      DatasetDocument.word_count: sum(len(text_doc.page_content) for text_doc in text_docs),
-     DatasetDocument.parsing_completed_at: datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None),
+     DatasetDocument.parsing_completed_at: datetime.datetime.now(datetime.UTC).replace(tzinfo=None),
  },
  )

  doc_store.add_documents(documents)

  # update document status to indexing
- cur_time = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
+ cur_time = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
  self._update_document_index_status(
      document_id=dataset_document.id,
      after_indexing_status="indexing",

  dataset_document_id=dataset_document.id,
  update_params={
      DocumentSegment.status: "indexing",
-     DocumentSegment.indexing_at: datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None),
+     DocumentSegment.indexing_at: datetime.datetime.now(datetime.UTC).replace(tzinfo=None),
  },
  )

  after_indexing_status="completed",
  extra_update_params={
      DatasetDocument.tokens: tokens,
-     DatasetDocument.completed_at: datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None),
+     DatasetDocument.completed_at: datetime.datetime.now(datetime.UTC).replace(tzinfo=None),
      DatasetDocument.indexing_latency: indexing_end_at - indexing_start_at,
      DatasetDocument.error: None,
  },

  {
      DocumentSegment.status: "completed",
      DocumentSegment.enabled: True,
-     DocumentSegment.completed_at: datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None),
+     DocumentSegment.completed_at: datetime.datetime.now(datetime.UTC).replace(tzinfo=None),
  }
  )

  {
      DocumentSegment.status: "completed",
      DocumentSegment.enabled: True,
-     DocumentSegment.completed_at: datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None),
+     DocumentSegment.completed_at: datetime.datetime.now(datetime.UTC).replace(tzinfo=None),
  }
  )

  doc_store.add_documents(documents)

  # update document status to indexing
- cur_time = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
+ cur_time = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
  self._update_document_index_status(
      document_id=dataset_document.id,
      after_indexing_status="indexing",

  dataset_document_id=dataset_document.id,
  update_params={
      DocumentSegment.status: "indexing",
-     DocumentSegment.indexing_at: datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None),
+     DocumentSegment.indexing_at: datetime.datetime.now(datetime.UTC).replace(tzinfo=None),
  },
  )
  pass

api/core/model_runtime/entities/message_entities.py (+3, -3)

  from abc import ABC
  from collections.abc import Sequence
- from enum import Enum
+ from enum import Enum, StrEnum
  from typing import Literal, Optional

  from pydantic import BaseModel, Field, field_validator

  function: PromptMessageTool


- class PromptMessageContentType(str, Enum):
+ class PromptMessageContentType(StrEnum):
      """
      Enum class for prompt message content type.
      """

  Model class for image prompt message content.
  """

- class DETAIL(str, Enum):
+ class DETAIL(StrEnum):
      LOW = "low"
      HIGH = "high"



api/core/model_runtime/entities/model_entities.py (+2, -2)

  from decimal import Decimal
- from enum import Enum
+ from enum import Enum, StrEnum
  from typing import Any, Optional

  from pydantic import BaseModel, ConfigDict

  AUDIO = "audio"


- class DefaultParameterName(str, Enum):
+ class DefaultParameterName(StrEnum):
      """
      Enum class for parameter template variable.
      """

api/core/ops/entities/trace_entity.py (+2, -2)

  from datetime import datetime
- from enum import Enum
+ from enum import StrEnum
  from typing import Any, Optional, Union

  from pydantic import BaseModel, ConfigDict, field_validator

  }


- class TraceTaskName(str, Enum):
+ class TraceTaskName(StrEnum):
      CONVERSATION_TRACE = "conversation"
      WORKFLOW_TRACE = "workflow"
      MESSAGE_TRACE = "message"

api/core/ops/langfuse_trace/entities/langfuse_trace_entity.py (+3, -3)

  from datetime import datetime
- from enum import Enum
+ from enum import StrEnum
  from typing import Any, Optional, Union

  from pydantic import BaseModel, ConfigDict, Field, field_validator

  return v


- class LevelEnum(str, Enum):
+ class LevelEnum(StrEnum):
      DEBUG = "DEBUG"
      WARNING = "WARNING"
      ERROR = "ERROR"

  return validate_input_output(v, field_name)


- class UnitEnum(str, Enum):
+ class UnitEnum(StrEnum):
      CHARACTERS = "CHARACTERS"
      TOKENS = "TOKENS"
      SECONDS = "SECONDS"

api/core/ops/langsmith_trace/entities/langsmith_trace_entity.py (+2, -2)

  from datetime import datetime
- from enum import Enum
+ from enum import StrEnum
  from typing import Any, Optional, Union

  from pydantic import BaseModel, Field, field_validator

  from core.ops.utils import replace_text_with_content


- class LangSmithRunType(str, Enum):
+ class LangSmithRunType(StrEnum):
      tool = "tool"
      chain = "chain"
      llm = "llm"

api/core/prompt/simple_prompt_transform.py (+1, -1)

  from core.file.models import File


- class ModelMode(str, enum.Enum):
+ class ModelMode(enum.StrEnum):
      COMPLETION = "completion"
      CHAT = "chat"



api/core/rag/datasource/keyword/keyword_type.py (+2, -2)

- from enum import Enum
+ from enum import StrEnum


- class KeyWordType(str, Enum):
+ class KeyWordType(StrEnum):
      JIEBA = "jieba"

api/core/rag/datasource/vdb/vector_type.py (+2, -2)

- from enum import Enum
+ from enum import StrEnum


- class VectorType(str, Enum):
+ class VectorType(StrEnum):
      ANALYTICDB = "analyticdb"
      CHROMA = "chroma"
      MILVUS = "milvus"

api/core/rag/extractor/word_extractor.py (+2, -2)

  mime_type=mime_type or "",
  created_by=self.user_id,
  created_by_role=CreatedByRole.ACCOUNT,
- created_at=datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None),
+ created_at=datetime.datetime.now(datetime.UTC).replace(tzinfo=None),
  used=True,
  used_by=self.user_id,
- used_at=datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None),
+ used_at=datetime.datetime.now(datetime.UTC).replace(tzinfo=None),
  )

  db.session.add(upload_file)

api/core/rag/rerank/rerank_type.py (+2, -2)

- from enum import Enum
+ from enum import StrEnum


- class RerankMode(str, Enum):
+ class RerankMode(StrEnum):
      RERANKING_MODEL = "reranking_model"
      WEIGHTED_SCORE = "weighted_score"

api/core/tools/entities/tool_entities.py (+2, -2)

- from enum import Enum
+ from enum import Enum, StrEnum
  from typing import Any, Optional, Union, cast

  from pydantic import BaseModel, Field, field_validator


  class ToolParameter(BaseModel):
-     class ToolParameterType(str, Enum):
+     class ToolParameterType(StrEnum):
          STRING = "string"
          NUMBER = "number"
          BOOLEAN = "boolean"

api/core/tools/provider/builtin/time/tools/current_time.py (+2, -2)

- from datetime import datetime, timezone
+ from datetime import UTC, datetime
  from typing import Any, Union

  from pytz import timezone as pytz_timezone

  tz = tool_parameters.get("timezone", "UTC")
  fm = tool_parameters.get("format") or "%Y-%m-%d %H:%M:%S %Z"
  if tz == "UTC":
-     return self.create_text_message(f"{datetime.now(timezone.utc).strftime(fm)}")
+     return self.create_text_message(f"{datetime.now(UTC).strftime(fm)}")

  try:
      tz = pytz_timezone(tz)

api/core/tools/tool/tool.py (+2, -2)

  from abc import ABC, abstractmethod
  from collections.abc import Mapping
  from copy import deepcopy
- from enum import Enum
+ from enum import Enum, StrEnum
  from typing import TYPE_CHECKING, Any, Optional, Union

  from pydantic import BaseModel, ConfigDict, field_validator

  def __init__(self, **data: Any):
      super().__init__(**data)

- class VariableKey(str, Enum):
+ class VariableKey(StrEnum):
      IMAGE = "image"
      DOCUMENT = "document"
      VIDEO = "video"

api/core/tools/tool_engine.py (+3, -3)

  import json
  from collections.abc import Mapping
  from copy import deepcopy
- from datetime import datetime, timezone
+ from datetime import UTC, datetime
  from mimetypes import guess_type
  from typing import Any, Optional, Union

  """
  Invoke the tool with the given arguments.
  """
- started_at = datetime.now(timezone.utc)
+ started_at = datetime.now(UTC)
  meta = ToolInvokeMeta(
      time_cost=0.0,
      error=None,

  meta.error = str(e)
  raise ToolEngineInvokeError(meta)
  finally:
-     ended_at = datetime.now(timezone.utc)
+     ended_at = datetime.now(UTC)
      meta.time_cost = (ended_at - started_at).total_seconds()

  return meta, response
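
Unlike the .replace(tzinfo=None) call sites elsewhere in this commit, the timestamps here stay timezone-aware; since both operands are aware, the subtraction is well-defined (mixing a naive and an aware datetime would raise TypeError). A small sketch of the timing pattern, with sleep standing in for the tool call:

import time
from datetime import UTC, datetime

started_at = datetime.now(UTC)   # aware timestamp
time.sleep(0.05)                 # stand-in for the actual tool invocation
ended_at = datetime.now(UTC)     # aware timestamp

time_cost = (ended_at - started_at).total_seconds()
print(f"time_cost: {time_cost:.3f}s")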

api/core/variables/types.py (+2, -2)

- from enum import Enum
+ from enum import StrEnum


- class SegmentType(str, Enum):
+ class SegmentType(StrEnum):
      NONE = "none"
      NUMBER = "number"
      STRING = "string"

api/core/workflow/entities/node_entities.py (+2, -2)

  from collections.abc import Mapping
- from enum import Enum
+ from enum import StrEnum
  from typing import Any, Optional

  from pydantic import BaseModel

  from models.workflow import WorkflowNodeExecutionStatus


- class NodeRunMetadataKey(str, Enum):
+ class NodeRunMetadataKey(StrEnum):
      """
      Node Run Metadata Key.
      """

api/core/workflow/enums.py (+2, -2)

- from enum import Enum
+ from enum import StrEnum


- class SystemVariableKey(str, Enum):
+ class SystemVariableKey(StrEnum):
      """
      System Variables.
      """

api/core/workflow/graph_engine/entities/runtime_route_state.py (+3, -3)

  import uuid
- from datetime import datetime, timezone
+ from datetime import UTC, datetime
  from enum import Enum
  from typing import Optional

  raise Exception(f"Invalid route status {run_result.status}")

  self.node_run_result = run_result
- self.finished_at = datetime.now(timezone.utc).replace(tzinfo=None)
+ self.finished_at = datetime.now(UTC).replace(tzinfo=None)


  class RuntimeRouteState(BaseModel):

  :param node_id: node id
  """
- state = RouteNodeState(node_id=node_id, start_at=datetime.now(timezone.utc).replace(tzinfo=None))
+ state = RouteNodeState(node_id=node_id, start_at=datetime.now(UTC).replace(tzinfo=None))
  self.node_state_mapping[state.id] = state
  return state



api/core/workflow/nodes/enums.py (+2, -2)

- from enum import Enum
+ from enum import StrEnum


- class NodeType(str, Enum):
+ class NodeType(StrEnum):
      START = "start"
      END = "end"
      ANSWER = "answer"

api/core/workflow/nodes/iteration/entities.py (+2, -2)

- from enum import Enum
+ from enum import StrEnum
  from typing import Any, Optional

  from pydantic import Field

  from core.workflow.nodes.base import BaseIterationNodeData, BaseIterationState, BaseNodeData


- class ErrorHandleMode(str, Enum):
+ class ErrorHandleMode(StrEnum):
      TERMINATED = "terminated"
      CONTINUE_ON_ERROR = "continue-on-error"
      REMOVE_ABNORMAL_OUTPUT = "remove-abnormal-output"

api/core/workflow/nodes/iteration/iteration_node.py (+6, -6)

  import uuid
  from collections.abc import Generator, Mapping, Sequence
  from concurrent.futures import Future, wait
- from datetime import datetime, timezone
+ from datetime import UTC, datetime
  from queue import Empty, Queue
  from typing import TYPE_CHECKING, Any, Optional, cast

  thread_pool_id=self.thread_pool_id,
  )

- start_at = datetime.now(timezone.utc).replace(tzinfo=None)
+ start_at = datetime.now(UTC).replace(tzinfo=None)

  yield IterationRunStartedEvent(
      iteration_id=self.id,

  """
  run single iteration
  """
- iter_start_at = datetime.now(timezone.utc).replace(tzinfo=None)
+ iter_start_at = datetime.now(UTC).replace(tzinfo=None)

  try:
      rst = graph_engine.run()

  variable_pool.add([self.node_id, "index"], next_index)
  if next_index < len(iterator_list_value):
      variable_pool.add([self.node_id, "item"], iterator_list_value[next_index])
- duration = (datetime.now(timezone.utc).replace(tzinfo=None) - iter_start_at).total_seconds()
+ duration = (datetime.now(UTC).replace(tzinfo=None) - iter_start_at).total_seconds()
  iter_run_map[iteration_run_id] = duration
  yield IterationRunNextEvent(
      iteration_id=self.id,

  if next_index < len(iterator_list_value):
      variable_pool.add([self.node_id, "item"], iterator_list_value[next_index])
- duration = (datetime.now(timezone.utc).replace(tzinfo=None) - iter_start_at).total_seconds()
+ duration = (datetime.now(UTC).replace(tzinfo=None) - iter_start_at).total_seconds()
  iter_run_map[iteration_run_id] = duration
  yield IterationRunNextEvent(
      iteration_id=self.id,

  if next_index < len(iterator_list_value):
      variable_pool.add([self.node_id, "item"], iterator_list_value[next_index])
- duration = (datetime.now(timezone.utc).replace(tzinfo=None) - iter_start_at).total_seconds()
+ duration = (datetime.now(UTC).replace(tzinfo=None) - iter_start_at).total_seconds()
  iter_run_map[iteration_run_id] = duration
  yield IterationRunNextEvent(
      iteration_id=self.id,

api/core/workflow/nodes/variable_assigner/node_data.py (+2, -2)

  from collections.abc import Sequence
- from enum import Enum
+ from enum import StrEnum
  from typing import Optional

  from core.workflow.nodes.base import BaseNodeData


- class WriteMode(str, Enum):
+ class WriteMode(StrEnum):
      OVER_WRITE = "over-write"
      APPEND = "append"
      CLEAR = "clear"

api/events/event_handlers/create_document_index.py (+1, -1)

  raise NotFound("Document not found")

  document.indexing_status = "parsing"
- document.processing_started_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
+ document.processing_started_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
  documents.append(document)
  db.session.add(document)
  db.session.commit()

api/events/event_handlers/update_provider_last_used_at_when_message_created.py (+2, -2)

- from datetime import datetime, timezone
+ from datetime import UTC, datetime

  from core.app.entities.app_invoke_entities import AgentChatAppGenerateEntity, ChatAppGenerateEntity
  from events.message_event import message_was_created

  db.session.query(Provider).filter(
      Provider.tenant_id == application_generate_entity.app_config.tenant_id,
      Provider.provider_name == application_generate_entity.model_conf.provider,
- ).update({"last_used": datetime.now(timezone.utc).replace(tzinfo=None)})
+ ).update({"last_used": datetime.now(UTC).replace(tzinfo=None)})
  db.session.commit()

api/extensions/storage/azure_blob_storage.py (+2, -2)

  from collections.abc import Generator
- from datetime import datetime, timedelta, timezone
+ from datetime import UTC, datetime, timedelta

  from azure.storage.blob import AccountSasPermissions, BlobServiceClient, ResourceTypes, generate_account_sas

  account_key=self.account_key,
  resource_types=ResourceTypes(service=True, container=True, object=True),
  permission=AccountSasPermissions(read=True, write=True, delete=True, list=True, add=True, create=True),
- expiry=datetime.now(timezone.utc).replace(tzinfo=None) + timedelta(hours=1),
+ expiry=datetime.now(UTC).replace(tzinfo=None) + timedelta(hours=1),
  )
  redis_client.set(cache_key, sas_token, ex=3000)
  return BlobServiceClient(account_url=self.account_url, credential=sas_token)

api/extensions/storage/storage_type.py (+2, -2)

- from enum import Enum
+ from enum import StrEnum


- class StorageType(str, Enum):
+ class StorageType(StrEnum):
      ALIYUN_OSS = "aliyun-oss"
      AZURE_BLOB = "azure-blob"
      BAIDU_OBS = "baidu-obs"

api/libs/oauth_data_source.py (+3, -3)

  if data_source_binding:
      data_source_binding.source_info = source_info
      data_source_binding.disabled = False
-     data_source_binding.updated_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
+     data_source_binding.updated_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
      db.session.commit()
  else:
      new_data_source_binding = DataSourceOauthBinding(

  if data_source_binding:
      data_source_binding.source_info = source_info
      data_source_binding.disabled = False
-     data_source_binding.updated_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
+     data_source_binding.updated_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
      db.session.commit()
  else:
      new_data_source_binding = DataSourceOauthBinding(

  }
  data_source_binding.source_info = new_source_info
  data_source_binding.disabled = False
- data_source_binding.updated_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
+ data_source_binding.updated_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
  db.session.commit()
  else:
      raise ValueError("Data source binding not found")

+ 3
- 3
api/models/account.py

from .types import StringUUID


- class AccountStatus(str, enum.Enum):
+ class AccountStatus(enum.StrEnum):
    PENDING = "pending"
    UNINITIALIZED = "uninitialized"
    ACTIVE = "active"

    return self._current_tenant.current_role == TenantAccountRole.DATASET_OPERATOR


- class TenantStatus(str, enum.Enum):
+ class TenantStatus(enum.StrEnum):
    NORMAL = "normal"
    ARCHIVE = "archive"


- class TenantAccountRole(str, enum.Enum):
+ class TenantAccountRole(enum.StrEnum):
    OWNER = "owner"
    ADMIN = "admin"
    EDITOR = "editor"

+ 1
- 1
api/models/dataset.py

from .types import StringUUID


- class DatasetPermissionEnum(str, enum.Enum):
+ class DatasetPermissionEnum(enum.StrEnum):
    ONLY_ME = "only_me"
    ALL_TEAM = "all_team_members"
    PARTIAL_TEAM = "partial_members"

+ 4
- 4
api/models/enums.py

- from enum import Enum
+ from enum import StrEnum


- class CreatedByRole(str, Enum):
+ class CreatedByRole(StrEnum):
    ACCOUNT = "account"
    END_USER = "end_user"


- class UserFrom(str, Enum):
+ class UserFrom(StrEnum):
    ACCOUNT = "account"
    END_USER = "end-user"


- class WorkflowRunTriggeredFrom(str, Enum):
+ class WorkflowRunTriggeredFrom(StrEnum):
    DEBUGGING = "debugging"
    APP_RUN = "app-run"

+ 2
- 2
api/models/model.py

import uuid
from collections.abc import Mapping
from datetime import datetime
- from enum import Enum
+ from enum import Enum, StrEnum
from typing import Any, Literal, Optional

import sqlalchemy as sa

setup_at = db.Column(db.DateTime, nullable=False, server_default=db.text("CURRENT_TIMESTAMP(0)"))


- class AppMode(str, Enum):
+ class AppMode(StrEnum):
    COMPLETION = "completion"
    WORKFLOW = "workflow"
    CHAT = "chat"

+ 4
- 4
api/models/task.py

- from datetime import datetime, timezone
+ from datetime import UTC, datetime

from celery import states

result = db.Column(db.PickleType, nullable=True)
date_done = db.Column(
    db.DateTime,
-   default=lambda: datetime.now(timezone.utc).replace(tzinfo=None),
-   onupdate=lambda: datetime.now(timezone.utc).replace(tzinfo=None),
+   default=lambda: datetime.now(UTC).replace(tzinfo=None),
+   onupdate=lambda: datetime.now(UTC).replace(tzinfo=None),
    nullable=True,
)
traceback = db.Column(db.Text, nullable=True)

id = db.Column(db.Integer, db.Sequence("taskset_id_sequence"), autoincrement=True, primary_key=True)
taskset_id = db.Column(db.String(155), unique=True)
result = db.Column(db.PickleType, nullable=True)
- date_done = db.Column(db.DateTime, default=lambda: datetime.now(timezone.utc).replace(tzinfo=None), nullable=True)
+ date_done = db.Column(db.DateTime, default=lambda: datetime.now(UTC).replace(tzinfo=None), nullable=True)
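A side note on the lambdas being edited here: SQLAlchemy invokes a callable column default on every insert (and onupdate on every update), so the lambda yields a fresh timestamp each time, whereas a bare datetime.now(UTC) would be evaluated once at import and frozen. A minimal sketch of the distinction:

    from datetime import UTC, datetime

    frozen = datetime.now(UTC)           # evaluated once, right now
    fresh = lambda: datetime.now(UTC)    # re-evaluated on every call

    assert fresh() >= frozen             # each call moves forward; frozen never does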

+ 2
- 2
api/models/workflow.py

import json
from collections.abc import Mapping, Sequence
- from datetime import datetime, timezone
+ from datetime import UTC, datetime
from enum import Enum
from typing import Any, Optional, Union

)
updated_by: Mapped[Optional[str]] = mapped_column(StringUUID)
updated_at: Mapped[datetime] = mapped_column(
-   sa.DateTime, nullable=False, default=datetime.now(tz=timezone.utc), server_onupdate=func.current_timestamp()
+   sa.DateTime, nullable=False, default=datetime.now(tz=UTC), server_onupdate=func.current_timestamp()
)
_environment_variables: Mapped[str] = mapped_column(
    "environment_variables", db.Text, nullable=False, server_default="{}"

+ 46
- 75
api/poetry.lock

[package.dependencies]
aiohappyeyeballs = ">=2.3.0"
aiosignal = ">=1.1.2"
- async-timeout = {version = ">=4.0,<5.0", markers = "python_version < \"3.11\""}
attrs = ">=17.3.0"
frozenlist = ">=1.1.1"
multidict = ">=4.5,<7.0"
]
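aiohttp only needed the third-party async-timeout package on interpreters older than 3.11, where the stdlib gained asyncio.timeout(); that is why the marker line above drops out of the lock. A minimal sketch of the stdlib replacement (the coroutine name is made up):

    import asyncio

    async def fetch_with_deadline():
        # asyncio.timeout() is new in Python 3.11; it raises TimeoutError
        # if the enclosed block does not finish in time
        async with asyncio.timeout(5):
            await asyncio.sleep(1)

    asyncio.run(fetch_with_deadline())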


[package.dependencies]
- exceptiongroup = {version = ">=1.0.2", markers = "python_version < \"3.11\""}
idna = ">=2.8"
sniffio = ">=1.1"
- typing-extensions = {version = ">=4.1", markers = "python_version < \"3.11\""}

[package.extras]
doc = ["Sphinx (>=7.4,<8.0)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme"]

{file = "asgiref-3.8.1.tar.gz", hash = "sha256:c343bd80a0bec947a9860adb4c432ffa7db769836c64238fc34bdc3fec84d590"},
]

- [package.dependencies]
- typing-extensions = {version = ">=4", markers = "python_version < \"3.11\""}

[package.extras]
tests = ["mypy (>=0.800)", "pytest", "pytest-asyncio"]


{file = "Brotli-1.1.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:a37b8f0391212d29b3a91a799c8e4a2855e0576911cdfb2515487e30e322253d"}, {file = "Brotli-1.1.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:a37b8f0391212d29b3a91a799c8e4a2855e0576911cdfb2515487e30e322253d"},
{file = "Brotli-1.1.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:e84799f09591700a4154154cab9787452925578841a94321d5ee8fb9a9a328f0"}, {file = "Brotli-1.1.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:e84799f09591700a4154154cab9787452925578841a94321d5ee8fb9a9a328f0"},
{file = "Brotli-1.1.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f66b5337fa213f1da0d9000bc8dc0cb5b896b726eefd9c6046f699b169c41b9e"}, {file = "Brotli-1.1.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f66b5337fa213f1da0d9000bc8dc0cb5b896b726eefd9c6046f699b169c41b9e"},
{file = "Brotli-1.1.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:5dab0844f2cf82be357a0eb11a9087f70c5430b2c241493fc122bb6f2bb0917c"},
{file = "Brotli-1.1.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e4fe605b917c70283db7dfe5ada75e04561479075761a0b3866c081d035b01c1"},
{file = "Brotli-1.1.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:1e9a65b5736232e7a7f91ff3d02277f11d339bf34099a56cdab6a8b3410a02b2"},
{file = "Brotli-1.1.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:58d4b711689366d4a03ac7957ab8c28890415e267f9b6589969e74b6e42225ec"},
{file = "Brotli-1.1.0-cp310-cp310-win32.whl", hash = "sha256:be36e3d172dc816333f33520154d708a2657ea63762ec16b62ece02ab5e4daf2"}, {file = "Brotli-1.1.0-cp310-cp310-win32.whl", hash = "sha256:be36e3d172dc816333f33520154d708a2657ea63762ec16b62ece02ab5e4daf2"},
{file = "Brotli-1.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:0c6244521dda65ea562d5a69b9a26120769b7a9fb3db2fe9545935ed6735b128"}, {file = "Brotli-1.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:0c6244521dda65ea562d5a69b9a26120769b7a9fb3db2fe9545935ed6735b128"},
{file = "Brotli-1.1.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:a3daabb76a78f829cafc365531c972016e4aa8d5b4bf60660ad8ecee19df7ccc"}, {file = "Brotli-1.1.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:a3daabb76a78f829cafc365531c972016e4aa8d5b4bf60660ad8ecee19df7ccc"},
{file = "Brotli-1.1.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:19c116e796420b0cee3da1ccec3b764ed2952ccfcc298b55a10e5610ad7885f9"}, {file = "Brotli-1.1.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:19c116e796420b0cee3da1ccec3b764ed2952ccfcc298b55a10e5610ad7885f9"},
{file = "Brotli-1.1.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:510b5b1bfbe20e1a7b3baf5fed9e9451873559a976c1a78eebaa3b86c57b4265"}, {file = "Brotli-1.1.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:510b5b1bfbe20e1a7b3baf5fed9e9451873559a976c1a78eebaa3b86c57b4265"},
{file = "Brotli-1.1.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:a1fd8a29719ccce974d523580987b7f8229aeace506952fa9ce1d53a033873c8"}, {file = "Brotli-1.1.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:a1fd8a29719ccce974d523580987b7f8229aeace506952fa9ce1d53a033873c8"},
{file = "Brotli-1.1.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:c247dd99d39e0338a604f8c2b3bc7061d5c2e9e2ac7ba9cc1be5a69cb6cd832f"},
{file = "Brotli-1.1.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:1b2c248cd517c222d89e74669a4adfa5577e06ab68771a529060cf5a156e9757"},
{file = "Brotli-1.1.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:2a24c50840d89ded6c9a8fdc7b6ed3692ed4e86f1c4a4a938e1e92def92933e0"},
{file = "Brotli-1.1.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:f31859074d57b4639318523d6ffdca586ace54271a73ad23ad021acd807eb14b"},
{file = "Brotli-1.1.0-cp311-cp311-win32.whl", hash = "sha256:39da8adedf6942d76dc3e46653e52df937a3c4d6d18fdc94a7c29d263b1f5b50"}, {file = "Brotli-1.1.0-cp311-cp311-win32.whl", hash = "sha256:39da8adedf6942d76dc3e46653e52df937a3c4d6d18fdc94a7c29d263b1f5b50"},
{file = "Brotli-1.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:aac0411d20e345dc0920bdec5548e438e999ff68d77564d5e9463a7ca9d3e7b1"}, {file = "Brotli-1.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:aac0411d20e345dc0920bdec5548e438e999ff68d77564d5e9463a7ca9d3e7b1"},
{file = "Brotli-1.1.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:32d95b80260d79926f5fab3c41701dbb818fde1c9da590e77e571eefd14abe28"},
{file = "Brotli-1.1.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:b760c65308ff1e462f65d69c12e4ae085cff3b332d894637f6273a12a482d09f"},
{file = "Brotli-1.1.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:316cc9b17edf613ac76b1f1f305d2a748f1b976b033b049a6ecdfd5612c70409"}, {file = "Brotli-1.1.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:316cc9b17edf613ac76b1f1f305d2a748f1b976b033b049a6ecdfd5612c70409"},
{file = "Brotli-1.1.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:caf9ee9a5775f3111642d33b86237b05808dafcd6268faa492250e9b78046eb2"}, {file = "Brotli-1.1.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:caf9ee9a5775f3111642d33b86237b05808dafcd6268faa492250e9b78046eb2"},
{file = "Brotli-1.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:70051525001750221daa10907c77830bc889cb6d865cc0b813d9db7fefc21451"}, {file = "Brotli-1.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:70051525001750221daa10907c77830bc889cb6d865cc0b813d9db7fefc21451"},
{file = "Brotli-1.1.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:4093c631e96fdd49e0377a9c167bfd75b6d0bad2ace734c6eb20b348bc3ea180"}, {file = "Brotli-1.1.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:4093c631e96fdd49e0377a9c167bfd75b6d0bad2ace734c6eb20b348bc3ea180"},
{file = "Brotli-1.1.0-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:7e4c4629ddad63006efa0ef968c8e4751c5868ff0b1c5c40f76524e894c50248"}, {file = "Brotli-1.1.0-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:7e4c4629ddad63006efa0ef968c8e4751c5868ff0b1c5c40f76524e894c50248"},
{file = "Brotli-1.1.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:861bf317735688269936f755fa136a99d1ed526883859f86e41a5d43c61d8966"}, {file = "Brotli-1.1.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:861bf317735688269936f755fa136a99d1ed526883859f86e41a5d43c61d8966"},
{file = "Brotli-1.1.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:87a3044c3a35055527ac75e419dfa9f4f3667a1e887ee80360589eb8c90aabb9"},
{file = "Brotli-1.1.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:c5529b34c1c9d937168297f2c1fde7ebe9ebdd5e121297ff9c043bdb2ae3d6fb"},
{file = "Brotli-1.1.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:ca63e1890ede90b2e4454f9a65135a4d387a4585ff8282bb72964fab893f2111"},
{file = "Brotli-1.1.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e79e6520141d792237c70bcd7a3b122d00f2613769ae0cb61c52e89fd3443839"},
{file = "Brotli-1.1.0-cp312-cp312-win32.whl", hash = "sha256:5f4d5ea15c9382135076d2fb28dde923352fe02951e66935a9efaac8f10e81b0"}, {file = "Brotli-1.1.0-cp312-cp312-win32.whl", hash = "sha256:5f4d5ea15c9382135076d2fb28dde923352fe02951e66935a9efaac8f10e81b0"},
{file = "Brotli-1.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:906bc3a79de8c4ae5b86d3d75a8b77e44404b0f4261714306e3ad248d8ab0951"}, {file = "Brotli-1.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:906bc3a79de8c4ae5b86d3d75a8b77e44404b0f4261714306e3ad248d8ab0951"},
{file = "Brotli-1.1.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:8bf32b98b75c13ec7cf774164172683d6e7891088f6316e54425fde1efc276d5"},
{file = "Brotli-1.1.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:7bc37c4d6b87fb1017ea28c9508b36bbcb0c3d18b4260fcdf08b200c74a6aee8"},
{file = "Brotli-1.1.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3c0ef38c7a7014ffac184db9e04debe495d317cc9c6fb10071f7fefd93100a4f"},
{file = "Brotli-1.1.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:91d7cc2a76b5567591d12c01f019dd7afce6ba8cba6571187e21e2fc418ae648"},
{file = "Brotli-1.1.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a93dde851926f4f2678e704fadeb39e16c35d8baebd5252c9fd94ce8ce68c4a0"},
{file = "Brotli-1.1.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f0db75f47be8b8abc8d9e31bc7aad0547ca26f24a54e6fd10231d623f183d089"},
{file = "Brotli-1.1.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:6967ced6730aed543b8673008b5a391c3b1076d834ca438bbd70635c73775368"},
{file = "Brotli-1.1.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:7eedaa5d036d9336c95915035fb57422054014ebdeb6f3b42eac809928e40d0c"},
{file = "Brotli-1.1.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:d487f5432bf35b60ed625d7e1b448e2dc855422e87469e3f450aa5552b0eb284"},
{file = "Brotli-1.1.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:832436e59afb93e1836081a20f324cb185836c617659b07b129141a8426973c7"},
{file = "Brotli-1.1.0-cp313-cp313-win32.whl", hash = "sha256:43395e90523f9c23a3d5bdf004733246fba087f2948f87ab28015f12359ca6a0"},
{file = "Brotli-1.1.0-cp313-cp313-win_amd64.whl", hash = "sha256:9011560a466d2eb3f5a6e4929cf4a09be405c64154e12df0dd72713f6500e32b"},
{file = "Brotli-1.1.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:a090ca607cbb6a34b0391776f0cb48062081f5f60ddcce5d11838e67a01928d1"}, {file = "Brotli-1.1.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:a090ca607cbb6a34b0391776f0cb48062081f5f60ddcce5d11838e67a01928d1"},
{file = "Brotli-1.1.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2de9d02f5bda03d27ede52e8cfe7b865b066fa49258cbab568720aa5be80a47d"}, {file = "Brotli-1.1.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2de9d02f5bda03d27ede52e8cfe7b865b066fa49258cbab568720aa5be80a47d"},
{file = "Brotli-1.1.0-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2333e30a5e00fe0fe55903c8832e08ee9c3b1382aacf4db26664a16528d51b4b"}, {file = "Brotli-1.1.0-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2333e30a5e00fe0fe55903c8832e08ee9c3b1382aacf4db26664a16528d51b4b"},
{file = "Brotli-1.1.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:fd5f17ff8f14003595ab414e45fce13d073e0762394f957182e69035c9f3d7c2"}, {file = "Brotli-1.1.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:fd5f17ff8f14003595ab414e45fce13d073e0762394f957182e69035c9f3d7c2"},
{file = "Brotli-1.1.0-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:069a121ac97412d1fe506da790b3e69f52254b9df4eb665cd42460c837193354"}, {file = "Brotli-1.1.0-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:069a121ac97412d1fe506da790b3e69f52254b9df4eb665cd42460c837193354"},
{file = "Brotli-1.1.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:e93dfc1a1165e385cc8239fab7c036fb2cd8093728cbd85097b284d7b99249a2"}, {file = "Brotli-1.1.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:e93dfc1a1165e385cc8239fab7c036fb2cd8093728cbd85097b284d7b99249a2"},
{file = "Brotli-1.1.0-cp36-cp36m-musllinux_1_2_aarch64.whl", hash = "sha256:aea440a510e14e818e67bfc4027880e2fb500c2ccb20ab21c7a7c8b5b4703d75"},
{file = "Brotli-1.1.0-cp36-cp36m-musllinux_1_2_i686.whl", hash = "sha256:6974f52a02321b36847cd19d1b8e381bf39939c21efd6ee2fc13a28b0d99348c"},
{file = "Brotli-1.1.0-cp36-cp36m-musllinux_1_2_ppc64le.whl", hash = "sha256:a7e53012d2853a07a4a79c00643832161a910674a893d296c9f1259859a289d2"},
{file = "Brotli-1.1.0-cp36-cp36m-musllinux_1_2_x86_64.whl", hash = "sha256:d7702622a8b40c49bffb46e1e3ba2e81268d5c04a34f460978c6b5517a34dd52"},
{file = "Brotli-1.1.0-cp36-cp36m-win32.whl", hash = "sha256:a599669fd7c47233438a56936988a2478685e74854088ef5293802123b5b2460"}, {file = "Brotli-1.1.0-cp36-cp36m-win32.whl", hash = "sha256:a599669fd7c47233438a56936988a2478685e74854088ef5293802123b5b2460"},
{file = "Brotli-1.1.0-cp36-cp36m-win_amd64.whl", hash = "sha256:d143fd47fad1db3d7c27a1b1d66162e855b5d50a89666af46e1679c496e8e579"}, {file = "Brotli-1.1.0-cp36-cp36m-win_amd64.whl", hash = "sha256:d143fd47fad1db3d7c27a1b1d66162e855b5d50a89666af46e1679c496e8e579"},
{file = "Brotli-1.1.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:11d00ed0a83fa22d29bc6b64ef636c4552ebafcef57154b4ddd132f5638fbd1c"}, {file = "Brotli-1.1.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:11d00ed0a83fa22d29bc6b64ef636c4552ebafcef57154b4ddd132f5638fbd1c"},
{file = "Brotli-1.1.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:919e32f147ae93a09fe064d77d5ebf4e35502a8df75c29fb05788528e330fe74"}, {file = "Brotli-1.1.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:919e32f147ae93a09fe064d77d5ebf4e35502a8df75c29fb05788528e330fe74"},
{file = "Brotli-1.1.0-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:23032ae55523cc7bccb4f6a0bf368cd25ad9bcdcc1990b64a647e7bbcce9cb5b"}, {file = "Brotli-1.1.0-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:23032ae55523cc7bccb4f6a0bf368cd25ad9bcdcc1990b64a647e7bbcce9cb5b"},
{file = "Brotli-1.1.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:224e57f6eac61cc449f498cc5f0e1725ba2071a3d4f48d5d9dffba42db196438"}, {file = "Brotli-1.1.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:224e57f6eac61cc449f498cc5f0e1725ba2071a3d4f48d5d9dffba42db196438"},
{file = "Brotli-1.1.0-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:cb1dac1770878ade83f2ccdf7d25e494f05c9165f5246b46a621cc849341dc01"},
{file = "Brotli-1.1.0-cp37-cp37m-musllinux_1_2_i686.whl", hash = "sha256:3ee8a80d67a4334482d9712b8e83ca6b1d9bc7e351931252ebef5d8f7335a547"},
{file = "Brotli-1.1.0-cp37-cp37m-musllinux_1_2_ppc64le.whl", hash = "sha256:5e55da2c8724191e5b557f8e18943b1b4839b8efc3ef60d65985bcf6f587dd38"},
{file = "Brotli-1.1.0-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:d342778ef319e1026af243ed0a07c97acf3bad33b9f29e7ae6a1f68fd083e90c"},
{file = "Brotli-1.1.0-cp37-cp37m-win32.whl", hash = "sha256:587ca6d3cef6e4e868102672d3bd9dc9698c309ba56d41c2b9c85bbb903cdb95"}, {file = "Brotli-1.1.0-cp37-cp37m-win32.whl", hash = "sha256:587ca6d3cef6e4e868102672d3bd9dc9698c309ba56d41c2b9c85bbb903cdb95"},
{file = "Brotli-1.1.0-cp37-cp37m-win_amd64.whl", hash = "sha256:2954c1c23f81c2eaf0b0717d9380bd348578a94161a65b3a2afc62c86467dd68"}, {file = "Brotli-1.1.0-cp37-cp37m-win_amd64.whl", hash = "sha256:2954c1c23f81c2eaf0b0717d9380bd348578a94161a65b3a2afc62c86467dd68"},
{file = "Brotli-1.1.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:efa8b278894b14d6da122a72fefcebc28445f2d3f880ac59d46c90f4c13be9a3"}, {file = "Brotli-1.1.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:efa8b278894b14d6da122a72fefcebc28445f2d3f880ac59d46c90f4c13be9a3"},
{file = "Brotli-1.1.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:1ab4fbee0b2d9098c74f3057b2bc055a8bd92ccf02f65944a241b4349229185a"}, {file = "Brotli-1.1.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:1ab4fbee0b2d9098c74f3057b2bc055a8bd92ccf02f65944a241b4349229185a"},
{file = "Brotli-1.1.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:141bd4d93984070e097521ed07e2575b46f817d08f9fa42b16b9b5f27b5ac088"}, {file = "Brotli-1.1.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:141bd4d93984070e097521ed07e2575b46f817d08f9fa42b16b9b5f27b5ac088"},
{file = "Brotli-1.1.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:fce1473f3ccc4187f75b4690cfc922628aed4d3dd013d047f95a9b3919a86596"}, {file = "Brotli-1.1.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:fce1473f3ccc4187f75b4690cfc922628aed4d3dd013d047f95a9b3919a86596"},
{file = "Brotli-1.1.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:d2b35ca2c7f81d173d2fadc2f4f31e88cc5f7a39ae5b6db5513cf3383b0e0ec7"},
{file = "Brotli-1.1.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:af6fa6817889314555aede9a919612b23739395ce767fe7fcbea9a80bf140fe5"},
{file = "Brotli-1.1.0-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:2feb1d960f760a575dbc5ab3b1c00504b24caaf6986e2dc2b01c09c87866a943"},
{file = "Brotli-1.1.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:4410f84b33374409552ac9b6903507cdb31cd30d2501fc5ca13d18f73548444a"},
{file = "Brotli-1.1.0-cp38-cp38-win32.whl", hash = "sha256:db85ecf4e609a48f4b29055f1e144231b90edc90af7481aa731ba2d059226b1b"}, {file = "Brotli-1.1.0-cp38-cp38-win32.whl", hash = "sha256:db85ecf4e609a48f4b29055f1e144231b90edc90af7481aa731ba2d059226b1b"},
{file = "Brotli-1.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:3d7954194c36e304e1523f55d7042c59dc53ec20dd4e9ea9d151f1b62b4415c0"}, {file = "Brotli-1.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:3d7954194c36e304e1523f55d7042c59dc53ec20dd4e9ea9d151f1b62b4415c0"},
{file = "Brotli-1.1.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:5fb2ce4b8045c78ebbc7b8f3c15062e435d47e7393cc57c25115cfd49883747a"}, {file = "Brotli-1.1.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:5fb2ce4b8045c78ebbc7b8f3c15062e435d47e7393cc57c25115cfd49883747a"},
{file = "Brotli-1.1.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:949f3b7c29912693cee0afcf09acd6ebc04c57af949d9bf77d6101ebb61e388c"}, {file = "Brotli-1.1.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:949f3b7c29912693cee0afcf09acd6ebc04c57af949d9bf77d6101ebb61e388c"},
{file = "Brotli-1.1.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:89f4988c7203739d48c6f806f1e87a1d96e0806d44f0fba61dba81392c9e474d"}, {file = "Brotli-1.1.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:89f4988c7203739d48c6f806f1e87a1d96e0806d44f0fba61dba81392c9e474d"},
{file = "Brotli-1.1.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:de6551e370ef19f8de1807d0a9aa2cdfdce2e85ce88b122fe9f6b2b076837e59"}, {file = "Brotli-1.1.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:de6551e370ef19f8de1807d0a9aa2cdfdce2e85ce88b122fe9f6b2b076837e59"},
{file = "Brotli-1.1.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:0737ddb3068957cf1b054899b0883830bb1fec522ec76b1098f9b6e0f02d9419"},
{file = "Brotli-1.1.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:4f3607b129417e111e30637af1b56f24f7a49e64763253bbc275c75fa887d4b2"},
{file = "Brotli-1.1.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:6c6e0c425f22c1c719c42670d561ad682f7bfeeef918edea971a79ac5252437f"},
{file = "Brotli-1.1.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:494994f807ba0b92092a163a0a283961369a65f6cbe01e8891132b7a320e61eb"},
{file = "Brotli-1.1.0-cp39-cp39-win32.whl", hash = "sha256:f0d8a7a6b5983c2496e364b969f0e526647a06b075d034f3297dc66f3b360c64"}, {file = "Brotli-1.1.0-cp39-cp39-win32.whl", hash = "sha256:f0d8a7a6b5983c2496e364b969f0e526647a06b075d034f3297dc66f3b360c64"},
{file = "Brotli-1.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:cdad5b9014d83ca68c25d2e9444e28e967ef16e80f6b436918c700c117a85467"}, {file = "Brotli-1.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:cdad5b9014d83ca68c25d2e9444e28e967ef16e80f6b436918c700c117a85467"},
{file = "Brotli-1.1.0.tar.gz", hash = "sha256:81de08ac11bcb85841e440c13611c00b67d3bf82698314928d0b676362546724"}, {file = "Brotli-1.1.0.tar.gz", hash = "sha256:81de08ac11bcb85841e440c13611c00b67d3bf82698314928d0b676362546724"},


[package.dependencies]
colorama = {version = "*", markers = "os_name == \"nt\""}
- importlib-metadata = {version = ">=4.6", markers = "python_full_version < \"3.10.2\""}
packaging = ">=19.1"
pyproject_hooks = "*"
- tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""}

[package.extras]
docs = ["furo (>=2023.08.17)", "sphinx (>=7.0,<8.0)", "sphinx-argparse-cli (>=1.5)", "sphinx-autodoc-typehints (>=1.10)", "sphinx-issues (>=3.0.0)"]

{file = "dataclass_wizard-0.28.0-py2.py3-none-any.whl", hash = "sha256:996fa46475b9192a48a057c34f04597bc97be5bc2f163b99cb1de6f778ca1f7f"},
]

- [package.dependencies]
- typing-extensions = {version = ">=4", markers = "python_version == \"3.9\" or python_version == \"3.10\""}

[package.extras]
dev = ["Sphinx (==7.4.7)", "Sphinx (==8.1.3)", "bump2version (==1.0.1)", "coverage (>=6.2)", "dataclass-factory (==2.16)", "dataclass-wizard[toml]", "dataclasses-json (==0.6.7)", "flake8 (>=3)", "jsons (==1.6.3)", "pip (>=21.3.1)", "pytest (==8.3.3)", "pytest-cov (==6.0.0)", "pytest-mock (>=3.6.1)", "pytimeparse (==1.1.8)", "sphinx-issues (==5.0.0)", "tomli (>=2,<3)", "tomli (>=2,<3)", "tomli-w (>=1,<2)", "tox (==4.23.2)", "twine (==5.1.1)", "watchdog[watchmedo] (==6.0.0)", "wheel (==0.45.0)"]
timedelta = ["pytimeparse (>=1.1.7)"]
[package.extras]
tests = ["pytest"]

- [[package]]
- name = "exceptiongroup"
- version = "1.2.2"
- description = "Backport of PEP 654 (exception groups)"
- optional = false
- python-versions = ">=3.7"
- files = [
-     {file = "exceptiongroup-1.2.2-py3-none-any.whl", hash = "sha256:3111b9d131c238bec2f8f516e123e14ba243563fb135d3fe885990585aa7795b"},
-     {file = "exceptiongroup-1.2.2.tar.gz", hash = "sha256:47c2edf7c6738fafb49fd34290706d1a1a2f4d1c6df275526b62cbb4aa5393cc"},
- ]
-
- [package.extras]
- test = ["pytest (>=6)"]
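The exceptiongroup package disappears from the lock entirely because PEP 654 exception groups are part of the language from 3.11 onward; anyio and pytest pulled the backport in only for older interpreters, per the markers removed above. A minimal sketch of the built-in behavior the backport emulated:

    # Python 3.11+: ExceptionGroup and except* are built in (PEP 654)
    try:
        raise ExceptionGroup("batch failed", [ValueError("a"), KeyError("b")])
    except* ValueError as eg:
        print("ValueError subgroup:", eg.exceptions)
    except* KeyError as eg:
        print("KeyError subgroup:", eg.exceptions)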

[[package]]
name = "faker"
version = "32.1.0"

[package.dependencies]
google-auth = ">=2.14.1,<3.0.dev0"
googleapis-common-protos = ">=1.56.2,<2.0.dev0"
- grpcio = [
-     {version = ">=1.33.2,<2.0dev", optional = true, markers = "python_version < \"3.11\" and extra == \"grpc\""},
-     {version = ">=1.49.1,<2.0dev", optional = true, markers = "python_version >= \"3.11\" and extra == \"grpc\""},
- ]
- grpcio-status = [
-     {version = ">=1.33.2,<2.0.dev0", optional = true, markers = "python_version < \"3.11\" and extra == \"grpc\""},
-     {version = ">=1.49.1,<2.0.dev0", optional = true, markers = "python_version >= \"3.11\" and extra == \"grpc\""},
- ]
+ grpcio = {version = ">=1.49.1,<2.0dev", optional = true, markers = "python_version >= \"3.11\" and extra == \"grpc\""}
+ grpcio-status = {version = ">=1.49.1,<2.0.dev0", optional = true, markers = "python_version >= \"3.11\" and extra == \"grpc\""}
proto-plus = ">=1.22.3,<2.0.0dev"
protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.0 || >4.21.0,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<5.0.0.dev0"
requests = ">=2.18.0,<3.0.0.dev0"
{file = "multidict-6.1.0.tar.gz", hash = "sha256:22ae2ebf9b0c69d206c003e2f6a914ea33f0a932d4aa16f236afc049d9958f4a"}, {file = "multidict-6.1.0.tar.gz", hash = "sha256:22ae2ebf9b0c69d206c003e2f6a914ea33f0a932d4aa16f236afc049d9958f4a"},
] ]


[package.dependencies]
typing-extensions = {version = ">=4.1.0", markers = "python_version < \"3.11\""}

[[package]] [[package]]
name = "multiprocess" name = "multiprocess"
version = "0.70.17" version = "0.70.17"
numba = {version = ">=0.56.4", optional = true, markers = "extra == \"performance\""} numba = {version = ">=0.56.4", optional = true, markers = "extra == \"performance\""}
numexpr = {version = ">=2.8.4", optional = true, markers = "extra == \"performance\""} numexpr = {version = ">=2.8.4", optional = true, markers = "extra == \"performance\""}
numpy = [ numpy = [
{version = ">=1.22.4", markers = "python_version < \"3.11\""},
{version = ">=1.23.2", markers = "python_version == \"3.11\""}, {version = ">=1.23.2", markers = "python_version == \"3.11\""},
{version = ">=1.26.0", markers = "python_version >= \"3.12\""}, {version = ">=1.26.0", markers = "python_version >= \"3.12\""},
] ]
deprecation = ">=2.1.0,<3.0.0" deprecation = ">=2.1.0,<3.0.0"
httpx = {version = ">=0.26,<0.28", extras = ["http2"]} httpx = {version = ">=0.26,<0.28", extras = ["http2"]}
pydantic = ">=1.9,<3.0" pydantic = ">=1.9,<3.0"
strenum = {version = ">=0.4.9,<0.5.0", markers = "python_version < \"3.11\""}


[[package]]
name = "posthog"

{file = "pypdf-5.1.0.tar.gz", hash = "sha256:425a129abb1614183fd1aca6982f650b47f8026867c0ce7c4b9f281c443d2740"},
]

- [package.dependencies]
- typing_extensions = {version = ">=4.0", markers = "python_version < \"3.11\""}

[package.extras]
crypto = ["cryptography"]
cryptodome = ["PyCryptodome"]


[package.dependencies]
colorama = {version = "*", markers = "sys_platform == \"win32\""}
- exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""}
iniconfig = "*"
packaging = "*"
pluggy = ">=1.5,<2"
- tomli = {version = ">=1", markers = "python_version < \"3.11\""}

[package.extras]
dev = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"]

[package.dependencies]
pytest = ">=8.3.3"
- tomli = {version = ">=2.0.1", markers = "python_version < \"3.11\""}

[package.extras]
testing = ["covdefaults (>=2.3)", "coverage (>=7.6.1)", "pytest-mock (>=3.14)"]
[package.dependencies]
markdown-it-py = ">=2.2.0"
pygments = ">=2.13.0,<3.0.0"
- typing-extensions = {version = ">=4.0.0,<5.0", markers = "python_version < \"3.11\""}

[package.extras]
jupyter = ["ipywidgets (>=7.5.1,<9)"]

python-dateutil = ">=2.8.2,<3.0.0"
typing-extensions = ">=4.2.0,<5.0.0"


- [[package]]
- name = "strenum"
- version = "0.4.15"
- description = "An Enum that inherits from str."
- optional = false
- python-versions = "*"
- files = [
-     {file = "StrEnum-0.4.15-py3-none-any.whl", hash = "sha256:a30cda4af7cc6b5bf52c8055bc4bf4b2b6b14a93b574626da33df53cf7740659"},
-     {file = "StrEnum-0.4.15.tar.gz", hash = "sha256:878fb5ab705442070e4dd1929bb5e2249511c0bcf2b0eeacf3bcd80875c82eff"},
- ]
-
- [package.extras]
- docs = ["myst-parser[linkify]", "sphinx", "sphinx-rtd-theme"]
- release = ["twine"]
- test = ["pylint", "pytest", "pytest-black", "pytest-cov", "pytest-pylint"]

[[package]]
name = "strictyaml"
version = "1.7.3"

{file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"},
]

- [[package]]
- name = "tomli"
- version = "2.1.0"
- description = "A lil' TOML parser"
- optional = false
- python-versions = ">=3.8"
- files = [
-     {file = "tomli-2.1.0-py3-none-any.whl", hash = "sha256:a5c57c3d1c56f5ccdf89f6523458f60ef716e210fc47c4cfb188c5ba473e0391"},
-     {file = "tomli-2.1.0.tar.gz", hash = "sha256:3f646cae2aec94e17d04973e4249548320197cfabdf130015d023de4b74d8ab8"},
- ]

[[package]]
name = "tos"
version = "2.7.2"

httptools = {version = ">=0.5.0", optional = true, markers = "extra == \"standard\""}
python-dotenv = {version = ">=0.13", optional = true, markers = "extra == \"standard\""}
pyyaml = {version = ">=5.1", optional = true, markers = "extra == \"standard\""}
- typing-extensions = {version = ">=4.0", markers = "python_version < \"3.11\""}
uvloop = {version = ">=0.14.0,<0.15.0 || >0.15.0,<0.15.1 || >0.15.1", optional = true, markers = "(sys_platform != \"win32\" and sys_platform != \"cygwin\") and platform_python_implementation != \"PyPy\" and extra == \"standard\""}
watchfiles = {version = ">=0.13", optional = true, markers = "extra == \"standard\""}
websockets = {version = ">=10.4", optional = true, markers = "extra == \"standard\""}

[metadata]
lock-version = "2.0"
- python-versions = ">=3.10,<3.13"
- content-hash = "152b8e11ceffaa482fee6920b7991f52427aa1ffed75614e78ec1065dd5f6898"
+ python-versions = ">=3.11,<3.13"
+ content-hash = "75175c3427d13c41d84374ff2bb6f5c6cb157e3783107f9d22fad15c9eb8c177"

+ 2
- 2
api/pyproject.toml

[project]
- requires-python = ">=3.10,<3.13"
+ requires-python = ">=3.11,<3.13"

[build-system]
requires = ["poetry-core"]

pydantic_extra_types = "~2.9.0"
pyjwt = "~2.8.0"
pypdfium2 = "~4.17.0"
- python = ">=3.10,<3.13"
+ python = ">=3.11,<3.13"
python-docx = "~1.1.0"
python-dotenv = "1.0.0"
pyyaml = "~6.0.1"

+ 9
- 9
api/services/account_service.py

import random
import secrets
import uuid
- from datetime import datetime, timedelta, timezone
+ from datetime import UTC, datetime, timedelta
from hashlib import sha256
from typing import Any, Optional

available_ta.current = True
db.session.commit()

- if datetime.now(timezone.utc).replace(tzinfo=None) - account.last_active_at > timedelta(minutes=10):
-     account.last_active_at = datetime.now(timezone.utc).replace(tzinfo=None)
+ if datetime.now(UTC).replace(tzinfo=None) - account.last_active_at > timedelta(minutes=10):
+     account.last_active_at = datetime.now(UTC).replace(tzinfo=None)
      db.session.commit()

return account

@staticmethod
def get_account_jwt_token(account: Account) -> str:
-   exp_dt = datetime.now(timezone.utc) + timedelta(minutes=dify_config.ACCESS_TOKEN_EXPIRE_MINUTES)
+   exp_dt = datetime.now(UTC) + timedelta(minutes=dify_config.ACCESS_TOKEN_EXPIRE_MINUTES)
    exp = int(exp_dt.timestamp())
    payload = {
        "user_id": account.id,

if account.status == AccountStatus.PENDING.value:
    account.status = AccountStatus.ACTIVE.value
-   account.initialized_at = datetime.now(timezone.utc).replace(tzinfo=None)
+   account.initialized_at = datetime.now(UTC).replace(tzinfo=None)

db.session.commit()

# If it exists, update the record
account_integrate.open_id = open_id
account_integrate.encrypted_token = ""  # todo
- account_integrate.updated_at = datetime.now(timezone.utc).replace(tzinfo=None)
+ account_integrate.updated_at = datetime.now(UTC).replace(tzinfo=None)
else:
    # If it does not exist, create a new record
    account_integrate = AccountIntegrate(

@staticmethod
def update_login_info(account: Account, *, ip_address: str) -> None:
    """Update last login time and ip"""
-   account.last_login_at = datetime.now(timezone.utc).replace(tzinfo=None)
+   account.last_login_at = datetime.now(UTC).replace(tzinfo=None)
    account.last_login_ip = ip_address
    db.session.add(account)
    db.session.commit()
)

account.last_login_ip = ip_address
- account.initialized_at = datetime.now(timezone.utc).replace(tzinfo=None)
+ account.initialized_at = datetime.now(UTC).replace(tzinfo=None)

TenantService.create_owner_tenant_if_not_exist(account=account, is_setup=True)

    is_setup=is_setup,
)
account.status = AccountStatus.ACTIVE.value if not status else status.value
- account.initialized_at = datetime.now(timezone.utc).replace(tzinfo=None)
+ account.initialized_at = datetime.now(UTC).replace(tzinfo=None)

if open_id is not None or provider is not None:
    AccountService.link_account_integrate(provider, open_id, account)
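A subtlety worth noting while reviewing this file: the model columns hold naive datetimes, so both operands are stripped to naive before the subtraction in the last_active_at check, while get_account_jwt_token keeps the aware value because timestamp() handles the conversion itself. A sketch of both patterns (the 10-minute window is from the code above; 60 is a stand-in for dify_config.ACCESS_TOKEN_EXPIRE_MINUTES):

    from datetime import UTC, datetime, timedelta

    # Naive-vs-naive arithmetic, as in the last_active_at refresh
    now_naive = datetime.now(UTC).replace(tzinfo=None)
    last_active_at = now_naive - timedelta(minutes=15)   # pretend stored value
    assert now_naive - last_active_at > timedelta(minutes=10)

    # Aware datetime -> Unix timestamp, as in the JWT "exp" claim
    exp_dt = datetime.now(UTC) + timedelta(minutes=60)
    exp = int(exp_dt.timestamp())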

+ 1
- 1
api/services/annotation_service.py

raise NotFound("App annotation not found")
annotation_setting.score_threshold = args["score_threshold"]
annotation_setting.updated_user_id = current_user.id
- annotation_setting.updated_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
+ annotation_setting.updated_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
db.session.add(annotation_setting)
db.session.commit()



+ 3
- 3
api/services/app_dsl_service.py

import logging
import uuid
- from enum import Enum
+ from enum import StrEnum
from typing import Optional
from uuid import uuid4

CURRENT_DSL_VERSION = "0.1.3"


- class ImportMode(str, Enum):
+ class ImportMode(StrEnum):
    YAML_CONTENT = "yaml-content"
    YAML_URL = "yaml-url"


- class ImportStatus(str, Enum):
+ class ImportStatus(StrEnum):
    COMPLETED = "completed"
    COMPLETED_WITH_WARNINGS = "completed-with-warnings"
    PENDING = "pending"

+ 6
- 6
api/services/app_service.py

import json
import logging
- from datetime import datetime, timezone
+ from datetime import UTC, datetime
from typing import cast

from flask_login import current_user

app.icon_background = args.get("icon_background")
app.use_icon_as_answer_icon = args.get("use_icon_as_answer_icon", False)
app.updated_by = current_user.id
- app.updated_at = datetime.now(timezone.utc).replace(tzinfo=None)
+ app.updated_at = datetime.now(UTC).replace(tzinfo=None)
db.session.commit()

if app.max_active_requests is not None:

"""
app.name = name
app.updated_by = current_user.id
- app.updated_at = datetime.now(timezone.utc).replace(tzinfo=None)
+ app.updated_at = datetime.now(UTC).replace(tzinfo=None)
db.session.commit()

return app

app.icon = icon
app.icon_background = icon_background
app.updated_by = current_user.id
- app.updated_at = datetime.now(timezone.utc).replace(tzinfo=None)
+ app.updated_at = datetime.now(UTC).replace(tzinfo=None)
db.session.commit()

return app

app.enable_site = enable_site
app.updated_by = current_user.id
- app.updated_at = datetime.now(timezone.utc).replace(tzinfo=None)
+ app.updated_at = datetime.now(UTC).replace(tzinfo=None)
db.session.commit()

return app

app.enable_api = enable_api
app.updated_by = current_user.id
- app.updated_at = datetime.now(timezone.utc).replace(tzinfo=None)
+ app.updated_at = datetime.now(UTC).replace(tzinfo=None)
db.session.commit()

return app

+ 2
- 2
api/services/auth/auth_type.py

- from enum import Enum
+ from enum import StrEnum


- class AuthType(str, Enum):
+ class AuthType(StrEnum):
    FIRECRAWL = "firecrawl"
    JINA = "jinareader"

+ 2
- 2
api/services/conversation_service.py

- from datetime import datetime, timezone
+ from datetime import UTC, datetime
from typing import Optional, Union

from sqlalchemy import asc, desc, or_

    return cls.auto_generate_name(app_model, conversation)
else:
    conversation.name = name
-   conversation.updated_at = datetime.now(timezone.utc).replace(tzinfo=None)
+   conversation.updated_at = datetime.now(UTC).replace(tzinfo=None)
    db.session.commit()

return conversation

+ 13
- 13
api/services/dataset_service.py

# update document to be paused
document.is_paused = True
document.paused_by = current_user.id
- document.paused_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
+ document.paused_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)

db.session.add(document)
db.session.commit()

document.parsing_completed_at = None
document.cleaning_completed_at = None
document.splitting_completed_at = None
- document.updated_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
+ document.updated_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
document.created_from = created_from
document.doc_form = document_data["doc_form"]
db.session.add(document)

word_count=len(content),
tokens=tokens,
status="completed",
- indexing_at=datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None),
- completed_at=datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None),
+ indexing_at=datetime.datetime.now(datetime.UTC).replace(tzinfo=None),
+ completed_at=datetime.datetime.now(datetime.UTC).replace(tzinfo=None),
created_by=current_user.id,
)
if document.doc_form == "qa_model":

except Exception as e:
    logging.exception("create segment index failed")
    segment_document.enabled = False
-   segment_document.disabled_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
+   segment_document.disabled_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
    segment_document.status = "error"
    segment_document.error = str(e)
    db.session.commit()

word_count=len(content),
tokens=tokens,
status="completed",
- indexing_at=datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None),
- completed_at=datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None),
+ indexing_at=datetime.datetime.now(datetime.UTC).replace(tzinfo=None),
+ completed_at=datetime.datetime.now(datetime.UTC).replace(tzinfo=None),
created_by=current_user.id,
)
if document.doc_form == "qa_model":

logging.exception("create segment index failed")
for segment_document in segment_data_list:
    segment_document.enabled = False
-   segment_document.disabled_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
+   segment_document.disabled_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
    segment_document.status = "error"
    segment_document.error = str(e)
db.session.commit()

if segment.enabled != action:
    if not action:
        segment.enabled = action
-       segment.disabled_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
+       segment.disabled_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
        segment.disabled_by = current_user.id
        db.session.add(segment)
        db.session.commit()

segment.word_count = len(content)
segment.tokens = tokens
segment.status = "completed"
- segment.indexing_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
- segment.completed_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
+ segment.indexing_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
+ segment.completed_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
segment.updated_by = current_user.id
- segment.updated_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
+ segment.updated_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
segment.enabled = True
segment.disabled_at = None
segment.disabled_by = None

except Exception as e:
    logging.exception("update segment index failed")
    segment.enabled = False
-   segment.disabled_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
+   segment.disabled_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
    segment.status = "error"
    segment.error = str(e)
    db.session.commit()

+ 2
- 2
api/services/external_knowledge_service.py

import json
from copy import deepcopy
- from datetime import datetime, timezone
+ from datetime import UTC, datetime
from typing import Any, Optional, Union

import httpx

external_knowledge_api.description = args.get("description", "")
external_knowledge_api.settings = json.dumps(args.get("settings"), ensure_ascii=False)
external_knowledge_api.updated_by = user_id
- external_knowledge_api.updated_at = datetime.now(timezone.utc).replace(tzinfo=None)
+ external_knowledge_api.updated_at = datetime.now(UTC).replace(tzinfo=None)
db.session.commit()

return external_knowledge_api

+ 2
- 2
api/services/feature_service.py

- from enum import Enum
+ from enum import StrEnum

from pydantic import BaseModel, ConfigDict

limit: int = 0


- class LicenseStatus(str, Enum):
+ class LicenseStatus(StrEnum):
    NONE = "none"
    INACTIVE = "inactive"
    ACTIVE = "active"

+ 3
- 3
api/services/file_service.py

mime_type=mimetype,
created_by_role=(CreatedByRole.ACCOUNT if isinstance(user, Account) else CreatedByRole.END_USER),
created_by=user.id,
- created_at=datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None),
+ created_at=datetime.datetime.now(datetime.UTC).replace(tzinfo=None),
used=False,
hash=hashlib.sha3_256(content).hexdigest(),
source_url=source_url,

mime_type="text/plain",
created_by=current_user.id,
created_by_role=CreatedByRole.ACCOUNT,
- created_at=datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None),
+ created_at=datetime.datetime.now(datetime.UTC).replace(tzinfo=None),
used=True,
used_by=current_user.id,
- used_at=datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None),
+ used_at=datetime.datetime.now(datetime.UTC).replace(tzinfo=None),
)

db.session.add(upload_file)

+ 1
- 1
api/services/model_load_balancing_service.py

load_balancing_config.name = name
load_balancing_config.enabled = enabled
- load_balancing_config.updated_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
+ load_balancing_config.updated_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
db.session.commit()

self._clear_credentials_cache(tenant_id, config_id)

+ 2
- 2
api/services/recommend_app/recommend_app_type.py

- from enum import Enum
+ from enum import StrEnum


- class RecommendAppType(str, Enum):
+ class RecommendAppType(StrEnum):
    REMOTE = "remote"
    BUILDIN = "builtin"
    DATABASE = "db"

+ 5
- 5
api/services/workflow_service.py

import json
import time
from collections.abc import Sequence
- from datetime import datetime, timezone
+ from datetime import UTC, datetime
from typing import Optional

from core.app.apps.advanced_chat.app_config_manager import AdvancedChatAppConfigManager

workflow.graph = json.dumps(graph)
workflow.features = json.dumps(features)
workflow.updated_by = account.id
- workflow.updated_at = datetime.now(timezone.utc).replace(tzinfo=None)
+ workflow.updated_at = datetime.now(UTC).replace(tzinfo=None)
workflow.environment_variables = environment_variables
workflow.conversation_variables = conversation_variables

tenant_id=app_model.tenant_id,
app_id=app_model.id,
type=draft_workflow.type,
- version=str(datetime.now(timezone.utc).replace(tzinfo=None)),
+ version=str(datetime.now(UTC).replace(tzinfo=None)),
graph=draft_workflow.graph,
features=draft_workflow.features,
created_by=account.id,

workflow_node_execution.elapsed_time = time.perf_counter() - start_at
workflow_node_execution.created_by_role = CreatedByRole.ACCOUNT.value
workflow_node_execution.created_by = account.id
- workflow_node_execution.created_at = datetime.now(timezone.utc).replace(tzinfo=None)
- workflow_node_execution.finished_at = datetime.now(timezone.utc).replace(tzinfo=None)
+ workflow_node_execution.created_at = datetime.now(UTC).replace(tzinfo=None)
+ workflow_node_execution.finished_at = datetime.now(UTC).replace(tzinfo=None)

if run_succeeded and node_run_result:
    # create workflow node execution

+ 1
- 1
api/tasks/add_document_to_index_task.py

except Exception as e:
    logging.exception("add document to index failed")
    dataset_document.enabled = False
-   dataset_document.disabled_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
+   dataset_document.disabled_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
    dataset_document.status = "error"
    dataset_document.error = str(e)
    db.session.commit()

+1 -1  api/tasks/annotation/enable_annotation_reply_task.py

  annotation_setting.score_threshold = score_threshold
  annotation_setting.collection_binding_id = dataset_collection_binding.id
  annotation_setting.updated_user_id = user_id
- annotation_setting.updated_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
+ annotation_setting.updated_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
  db.session.add(annotation_setting)
  else:
      new_app_annotation_setting = AppAnnotationSetting(

+2 -2  api/tasks/batch_create_segment_to_index_task.py

  word_count=len(content),
  tokens=tokens,
  created_by=user_id,
- indexing_at=datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None),
+ indexing_at=datetime.datetime.now(datetime.UTC).replace(tzinfo=None),
  status="completed",
- completed_at=datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None),
+ completed_at=datetime.datetime.now(datetime.UTC).replace(tzinfo=None),
  )
  if dataset_document.doc_form == "qa_model":
      segment_document.answer = segment["answer"]

+3 -3  api/tasks/create_segment_to_index_task.py

  # update segment status to indexing
  update_params = {
      DocumentSegment.status: "indexing",
-     DocumentSegment.indexing_at: datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None),
+     DocumentSegment.indexing_at: datetime.datetime.now(datetime.UTC).replace(tzinfo=None),
  }
  DocumentSegment.query.filter_by(id=segment.id).update(update_params)
  db.session.commit()
  # update segment to completed
  update_params = {
      DocumentSegment.status: "completed",
-     DocumentSegment.completed_at: datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None),
+     DocumentSegment.completed_at: datetime.datetime.now(datetime.UTC).replace(tzinfo=None),
  }
  DocumentSegment.query.filter_by(id=segment.id).update(update_params)
  db.session.commit()
  except Exception as e:
      logging.exception("create segment to index failed")
      segment.enabled = False
-     segment.disabled_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
+     segment.disabled_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
      segment.status = "error"
      segment.error = str(e)
      db.session.commit()
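One detail worth flagging in this hunk: the update_params keys are mapped column attributes (DocumentSegment.status), not strings. SQLAlchemy's legacy Query.update() accepts attribute keys and emits a single UPDATE without loading the row into the session. A hedged sketch of the same call shape; the imports assume this repository's layout and are not part of the diff:

    import datetime

    from extensions.ext_database import db      # assumed app-local import
    from models.dataset import DocumentSegment  # assumed app-local import

    def mark_segment_indexing(segment_id: str) -> None:
        # Column-attribute keys translate to SET clauses of one UPDATE
        # statement filtered by id; no ORM instance is fetched or mutated.
        DocumentSegment.query.filter_by(id=segment_id).update(
            {
                DocumentSegment.status: "indexing",
                DocumentSegment.indexing_at: datetime.datetime.now(datetime.UTC).replace(tzinfo=None),
            }
        )
        db.session.commit()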

+1 -1  api/tasks/document_indexing_sync_task.py

  # check the page is updated
  if last_edited_time != page_edited_time:
      document.indexing_status = "parsing"
-     document.processing_started_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
+     document.processing_started_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
      db.session.commit()

      # delete all document segment and index

+2 -2  api/tasks/document_indexing_task.py

  if document:
      document.indexing_status = "error"
      document.error = str(e)
-     document.stopped_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
+     document.stopped_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
      db.session.add(document)
      db.session.commit()
      return

  if document:
      document.indexing_status = "parsing"
-     document.processing_started_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
+     document.processing_started_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
      documents.append(document)
      db.session.add(document)
      db.session.commit()

+1 -1  api/tasks/document_indexing_update_task.py

  raise NotFound("Document not found")

  document.indexing_status = "parsing"
- document.processing_started_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
+ document.processing_started_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
  db.session.commit()

  # delete all document segment and index

+1 -1  api/tasks/enable_segment_to_index_task.py

  except Exception as e:
      logging.exception("enable segment to index failed")
      segment.enabled = False
-     segment.disabled_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
+     segment.disabled_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
      segment.status = "error"
      segment.error = str(e)
      db.session.commit()

+3 -3  api/tests/unit_tests/core/workflow/nodes/answer/test_answer_stream_processor.py

  import uuid
  from collections.abc import Generator
- from datetime import datetime, timezone
+ from datetime import UTC, datetime, timezone

  from core.workflow.entities.variable_pool import VariablePool
  from core.workflow.enums import SystemVariableKey


  def _publish_events(graph: Graph, next_node_id: str) -> Generator[GraphEngineEvent, None, None]:
-     route_node_state = RouteNodeState(node_id=next_node_id, start_at=datetime.now(timezone.utc).replace(tzinfo=None))
+     route_node_state = RouteNodeState(node_id=next_node_id, start_at=datetime.now(UTC).replace(tzinfo=None))

      parallel_id = graph.node_parallel_mapping.get(next_node_id)
      parallel_start_node_id = None
  )

  route_node_state.status = RouteNodeState.Status.SUCCESS
- route_node_state.finished_at = datetime.now(timezone.utc).replace(tzinfo=None)
+ route_node_state.finished_at = datetime.now(UTC).replace(tzinfo=None)
  yield NodeRunSucceededEvent(
      id=node_execution_id,
      node_id=next_node_id,
