View source

Merge remote-tracking branch 'origin/main' into feat/queue-based-graph-engine

tags/2.0.0-beta.2^2
-LAN- 1 month ago
parent
commit
6c3302a192
No account is linked to the commit author's email
100 files changed, with 1210 additions and 204 deletions
  1. +4 -0    .github/workflows/style.yml
  2. +3 -0    api/app_factory.py
  3. +4 -4    api/controllers/service_api/dataset/segment.py
  4. +11 -8   api/core/indexing_runner.py
  5. +1 -0    api/core/model_runtime/entities/llm_entities.py
  6. +4 -1    api/core/ops/weave_trace/weave_trace.py
  7. +24 -3   api/core/plugin/entities/plugin.py
  8. +2 -2    api/core/plugin/utils/chunk_merger.py
  9. +1 -3    api/core/rag/extractor/firecrawl/firecrawl_app.py
  10. +1 -1   api/core/rag/index_processor/processor/parent_child_index_processor.py
  11. +1 -1   api/core/tools/tool_manager.py
  12. +1 -0   api/core/workflow/node_events/node.py
  13. +18 -1  api/core/workflow/nodes/llm/entities.py
  14. +98 -4  api/core/workflow/nodes/llm/node.py
  15. +0 -25  api/mypy.ini
  16. +5 -3   api/pyproject.toml
  17. +23 -42 api/pyrightconfig.json
  18. +2 -0   api/services/account_service.py
  19. +1 -1   api/services/dataset_service.py
  20. +1 -1   api/services/tools/builtin_tools_manage_service.py
  21. +16 -5  api/tests/test_containers_integration_tests/services/test_webapp_auth_service.py
  22. +0 -0   api/tests/test_containers_integration_tests/tasks/__init__.py
  23. +786 -0 api/tests/test_containers_integration_tests/tasks/test_add_document_to_index_task.py
  24. +64 -0  api/tests/unit_tests/core/workflow/nodes/llm/test_node.py
  25. +42 -3  api/uv.lock
  26. +0 -10  dev/mypy-check
  27. +25 -14 sdks/python-client/dify_client/client.py
  28. +1 -1   web/app/(commonLayout)/app/(appDetailLayout)/[appId]/overview/tracing/panel.tsx
  29. +1 -1   web/app/components/app/type-selector/index.tsx
  30. +1 -1   web/app/components/apps/app-card.tsx
  31. +1 -1   web/app/components/base/icons/IconBase.tsx
  32. +1 -1   web/app/components/base/icons/script.mjs
  33. +1 -1   web/app/components/base/icons/src/public/avatar/Robot.tsx
  34. +1 -1   web/app/components/base/icons/src/public/avatar/User.tsx
  35. +1 -1   web/app/components/base/icons/src/public/billing/ArCube1.tsx
  36. +1 -1   web/app/components/base/icons/src/public/billing/Asterisk.tsx
  37. +1 -1   web/app/components/base/icons/src/public/billing/AwsMarketplace.tsx
  38. +1 -1   web/app/components/base/icons/src/public/billing/Azure.tsx
  39. +1 -1   web/app/components/base/icons/src/public/billing/Buildings.tsx
  40. +1 -1   web/app/components/base/icons/src/public/billing/Diamond.tsx
  41. +1 -1   web/app/components/base/icons/src/public/billing/GoogleCloud.tsx
  42. +1 -1   web/app/components/base/icons/src/public/billing/Group2.tsx
  43. +1 -1   web/app/components/base/icons/src/public/billing/Keyframe.tsx
  44. +1 -1   web/app/components/base/icons/src/public/billing/Sparkles.tsx
  45. +1 -1   web/app/components/base/icons/src/public/billing/SparklesSoft.tsx
  46. +1 -1   web/app/components/base/icons/src/public/common/D.tsx
  47. +1 -1   web/app/components/base/icons/src/public/common/DiagonalDividingLine.tsx
  48. +1 -1   web/app/components/base/icons/src/public/common/Dify.tsx
  49. +1 -1   web/app/components/base/icons/src/public/common/Gdpr.tsx
  50. +1 -1   web/app/components/base/icons/src/public/common/Github.tsx
  51. +1 -1   web/app/components/base/icons/src/public/common/Highlight.tsx
  52. +1 -1   web/app/components/base/icons/src/public/common/Iso.tsx
  53. +1 -1   web/app/components/base/icons/src/public/common/Line3.tsx
  54. +1 -1   web/app/components/base/icons/src/public/common/Lock.tsx
  55. +1 -1   web/app/components/base/icons/src/public/common/MessageChatSquare.tsx
  56. +1 -1   web/app/components/base/icons/src/public/common/MultiPathRetrieval.tsx
  57. +1 -1   web/app/components/base/icons/src/public/common/NTo1Retrieval.tsx
  58. +1 -1   web/app/components/base/icons/src/public/common/Notion.tsx
  59. +1 -1   web/app/components/base/icons/src/public/common/Soc2.tsx
  60. +1 -1   web/app/components/base/icons/src/public/common/SparklesSoft.tsx
  61. +1 -1   web/app/components/base/icons/src/public/common/SparklesSoftAccent.tsx
  62. +1 -1   web/app/components/base/icons/src/public/education/Triangle.tsx
  63. +1 -1   web/app/components/base/icons/src/public/files/Csv.tsx
  64. +1 -1   web/app/components/base/icons/src/public/files/Doc.tsx
  65. +1 -1   web/app/components/base/icons/src/public/files/Docx.tsx
  66. +1 -1   web/app/components/base/icons/src/public/files/Html.tsx
  67. +1 -1   web/app/components/base/icons/src/public/files/Json.tsx
  68. +1 -1   web/app/components/base/icons/src/public/files/Md.tsx
  69. +1 -1   web/app/components/base/icons/src/public/files/Pdf.tsx
  70. +1 -1   web/app/components/base/icons/src/public/files/Txt.tsx
  71. +1 -1   web/app/components/base/icons/src/public/files/Unknown.tsx
  72. +1 -1   web/app/components/base/icons/src/public/files/Xlsx.tsx
  73. +1 -1   web/app/components/base/icons/src/public/files/Yaml.tsx
  74. +1 -1   web/app/components/base/icons/src/public/knowledge/Chunk.tsx
  75. +1 -1   web/app/components/base/icons/src/public/knowledge/Collapse.tsx
  76. +1 -1   web/app/components/base/icons/src/public/knowledge/GeneralType.tsx
  77. +1 -1   web/app/components/base/icons/src/public/knowledge/LayoutRight2LineMod.tsx
  78. +1 -1   web/app/components/base/icons/src/public/knowledge/ParentChildType.tsx
  79. +1 -1   web/app/components/base/icons/src/public/knowledge/SelectionMod.tsx
  80. +1 -1   web/app/components/base/icons/src/public/llm/Anthropic.tsx
  81. +1 -1   web/app/components/base/icons/src/public/llm/AnthropicDark.tsx
  82. +1 -1   web/app/components/base/icons/src/public/llm/AnthropicLight.tsx
  83. +1 -1   web/app/components/base/icons/src/public/llm/AnthropicText.tsx
  84. +1 -1   web/app/components/base/icons/src/public/llm/AzureOpenaiService.tsx
  85. +1 -1   web/app/components/base/icons/src/public/llm/AzureOpenaiServiceText.tsx
  86. +1 -1   web/app/components/base/icons/src/public/llm/Azureai.tsx
  87. +1 -1   web/app/components/base/icons/src/public/llm/AzureaiText.tsx
  88. +1 -1   web/app/components/base/icons/src/public/llm/Baichuan.tsx
  89. +1 -1   web/app/components/base/icons/src/public/llm/BaichuanText.tsx
  90. +1 -1   web/app/components/base/icons/src/public/llm/Chatglm.tsx
  91. +1 -1   web/app/components/base/icons/src/public/llm/ChatglmText.tsx
  92. +1 -1   web/app/components/base/icons/src/public/llm/Cohere.tsx
  93. +1 -1   web/app/components/base/icons/src/public/llm/CohereText.tsx
  94. +1 -1   web/app/components/base/icons/src/public/llm/Gpt3.tsx
  95. +1 -1   web/app/components/base/icons/src/public/llm/Gpt4.tsx
  96. +1 -1   web/app/components/base/icons/src/public/llm/Huggingface.tsx
  97. +1 -1   web/app/components/base/icons/src/public/llm/HuggingfaceText.tsx
  98. +1 -1   web/app/components/base/icons/src/public/llm/HuggingfaceTextHub.tsx
  99. +1 -1   web/app/components/base/icons/src/public/llm/IflytekSpark.tsx
  100. +0 -0  web/app/components/base/icons/src/public/llm/IflytekSparkText.tsx

+4 -0   .github/workflows/style.yml   View file

@@ -47,6 +47,10 @@ jobs:
if: steps.changed-files.outputs.any_changed == 'true'
run: dev/basedpyright-check

- name: Run Mypy Type Checks
if: steps.changed-files.outputs.any_changed == 'true'
run: uv --directory api run mypy --exclude-gitignore --exclude 'tests/' --exclude 'migrations/' --check-untyped-defs --disable-error-code=import-untyped .

- name: Dotenv check
if: steps.changed-files.outputs.any_changed == 'true'
run: uv run --project api dotenv-linter ./api/.env.example ./web/.env.example

+3 -0   api/app_factory.py   View file

@@ -25,6 +25,9 @@ def create_flask_app_with_configs() -> DifyApp:
# add an unique identifier to each request
RecyclableContextVar.increment_thread_recycles()

# Capture the decorator's return value to avoid pyright reportUnusedFunction
_ = before_request

return dify_app
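A minimal sketch (not part of the commit) of the pattern above, assuming a bare Flask app: strict pyright can flag a decorated inner function as unused, and binding its return value to `_` marks the name as referenced.

from flask import Flask

app = Flask(__name__)

def register_hooks(app: Flask) -> None:
    @app.before_request
    def before_request() -> None:
        # per-request setup would go here
        pass

    # Reference the name so strict type checkers do not report it unused
    _ = before_request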



+4 -4   api/controllers/service_api/dataset/segment.py   View file

@@ -440,7 +440,7 @@ class DatasetChildChunkApi(DatasetApiResource):
raise NotFound("Segment not found.")

# validate segment belongs to the specified document
if segment.document_id != document_id:
if str(segment.document_id) != str(document_id):
raise NotFound("Document not found.")

# check child chunk
@@ -451,7 +451,7 @@ class DatasetChildChunkApi(DatasetApiResource):
raise NotFound("Child chunk not found.")

# validate child chunk belongs to the specified segment
if child_chunk.segment_id != segment.id:
if str(child_chunk.segment_id) != str(segment.id):
raise NotFound("Child chunk not found.")

try:
@@ -500,7 +500,7 @@ class DatasetChildChunkApi(DatasetApiResource):
raise NotFound("Segment not found.")

# validate segment belongs to the specified document
if segment.document_id != document_id:
if str(segment.document_id) != str(document_id):
raise NotFound("Segment not found.")

# get child chunk
@@ -511,7 +511,7 @@ class DatasetChildChunkApi(DatasetApiResource):
raise NotFound("Child chunk not found.")

# validate child chunk belongs to the specified segment
if child_chunk.segment_id != segment.id:
if str(child_chunk.segment_id) != str(segment.id):
raise NotFound("Child chunk not found.")

# validate args
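A small illustration (hypothetical IDs, not from the commit) of why the str() normalization above matters: the ORM can hand back uuid.UUID values while route parameters arrive as plain strings, and the two never compare equal directly.

import uuid

document_id = "0b9c6f3a-1d2e-4f5a-8b7c-9d0e1f2a3b4c"  # path parameter (str)
segment_document_id = uuid.UUID(document_id)           # ORM column value

assert segment_document_id != document_id              # different types: never equal
assert str(segment_document_id) == str(document_id)    # normalized comparison works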

+11 -8   api/core/indexing_runner.py   View file

@@ -270,7 +270,9 @@ class IndexingRunner:
tenant_id=tenant_id,
model_type=ModelType.TEXT_EMBEDDING,
)
preview_texts = [] # type: ignore
# keep separate, avoid union-list ambiguity
preview_texts: list[PreviewDetail] = []
qa_preview_texts: list[QAPreviewDetail] = []

total_segments = 0
index_type = doc_form
@@ -293,14 +295,14 @@ class IndexingRunner:
for document in documents:
if len(preview_texts) < 10:
if doc_form and doc_form == "qa_model":
preview_detail = QAPreviewDetail(
qa_detail = QAPreviewDetail(
question=document.page_content, answer=document.metadata.get("answer") or ""
)
preview_texts.append(preview_detail)
qa_preview_texts.append(qa_detail)
else:
preview_detail = PreviewDetail(content=document.page_content) # type: ignore
preview_detail = PreviewDetail(content=document.page_content)
if document.children:
preview_detail.child_chunks = [child.page_content for child in document.children] # type: ignore
preview_detail.child_chunks = [child.page_content for child in document.children]
preview_texts.append(preview_detail)

# delete image files and related db records
@@ -321,8 +323,8 @@ class IndexingRunner:
db.session.delete(image_file)

if doc_form and doc_form == "qa_model":
return IndexingEstimate(total_segments=total_segments * 20, qa_preview=preview_texts, preview=[])
return IndexingEstimate(total_segments=total_segments, preview=preview_texts) # type: ignore
return IndexingEstimate(total_segments=total_segments * 20, qa_preview=qa_preview_texts, preview=[])
return IndexingEstimate(total_segments=total_segments, preview=preview_texts)

def _extract(
self, index_processor: BaseIndexProcessor, dataset_document: DatasetDocument, process_rule: dict
@@ -424,6 +426,7 @@ class IndexingRunner:
"""
Get the NodeParser object according to the processing rule.
"""
character_splitter: TextSplitter
if processing_rule_mode in ["custom", "hierarchical"]:
# The user-defined segmentation rule
max_segmentation_tokens_length = dify_config.INDEXING_MAX_SEGMENTATION_TOKENS_LENGTH
@@ -450,7 +453,7 @@ class IndexingRunner:
embedding_model_instance=embedding_model_instance,
)

return character_splitter # type: ignore
return character_splitter

def _split_to_documents_for_estimate(
self, text_docs: list[Document], splitter: TextSplitter, processing_rule: DatasetProcessRule
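A sketch of the typing pattern adopted above, with simplified stand-ins for PreviewDetail and QAPreviewDetail: two homogeneous lists let the checker verify every append, where a single union-typed list (the old `# type: ignore` version) could not.

from dataclasses import dataclass, field

@dataclass
class PreviewDetail:  # simplified stand-in for the real model
    content: str
    child_chunks: list[str] = field(default_factory=list)

@dataclass
class QAPreviewDetail:  # simplified stand-in for the real model
    question: str
    answer: str

preview_texts: list[PreviewDetail] = []
qa_preview_texts: list[QAPreviewDetail] = []

preview_texts.append(PreviewDetail(content="chunk"))                # checked
qa_preview_texts.append(QAPreviewDetail(question="q", answer="a"))  # checked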

+1 -0   api/core/model_runtime/entities/llm_entities.py   View file

@@ -156,6 +156,7 @@ class LLMResult(BaseModel):
message: AssistantPromptMessage
usage: LLMUsage
system_fingerprint: Optional[str] = None
reasoning_content: Optional[str] = None


class LLMStructuredOutput(BaseModel):

+4 -1   api/core/ops/weave_trace/weave_trace.py   View file

@@ -119,7 +119,7 @@ class WeaveDataTrace(BaseTraceInstance):
workflow_attributes["trace_id"] = trace_id
workflow_attributes["start_time"] = trace_info.start_time
workflow_attributes["end_time"] = trace_info.end_time
workflow_attributes["tags"] = ["workflow"]
workflow_attributes["tags"] = ["dify_workflow"]

workflow_run = WeaveTraceModel(
file_list=trace_info.file_list,
@@ -155,6 +155,9 @@ class WeaveDataTrace(BaseTraceInstance):
workflow_run_id=trace_info.workflow_run_id
)

# rearrange workflow_node_executions by starting time
workflow_node_executions = sorted(workflow_node_executions, key=lambda x: x.created_at)

for node_execution in workflow_node_executions:
node_execution_id = node_execution.id
tenant_id = trace_info.tenant_id # Use from trace_info instead
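The sort added above simply orders node executions chronologically before spans are emitted; a toy sketch (illustrative tuples, not the real execution objects):

from datetime import datetime

executions = [("node_b", datetime(2025, 1, 2)), ("node_a", datetime(2025, 1, 1))]
executions = sorted(executions, key=lambda x: x[1])  # oldest first, as in the trace
assert [name for name, _ in executions] == ["node_a", "node_b"]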

+24 -3   api/core/plugin/entities/plugin.py   View file

@@ -3,7 +3,8 @@ import enum
from collections.abc import Mapping
from typing import Any, Optional

from pydantic import BaseModel, Field, model_validator
from packaging.version import InvalidVersion, Version
from pydantic import BaseModel, Field, field_validator, model_validator

from core.agent.plugin_entities import AgentStrategyProviderEntity
from core.model_runtime.entities.provider_entities import ProviderEntity
@@ -69,10 +70,21 @@ class PluginDeclaration(BaseModel):
endpoints: Optional[list[str]] = Field(default_factory=list[str])

class Meta(BaseModel):
minimum_dify_version: Optional[str] = Field(default=None, pattern=r"^\d{1,4}(\.\d{1,4}){1,3}(-\w{1,16})?$")
minimum_dify_version: Optional[str] = Field(default=None)
version: Optional[str] = Field(default=None)

version: str = Field(..., pattern=r"^\d{1,4}(\.\d{1,4}){1,3}(-\w{1,16})?$")
@field_validator("minimum_dify_version")
@classmethod
def validate_minimum_dify_version(cls, v: Optional[str]) -> Optional[str]:
if v is None:
return v
try:
Version(v)
return v
except InvalidVersion as e:
raise ValueError(f"Invalid version format: {v}") from e

version: str = Field(...)
author: Optional[str] = Field(..., pattern=r"^[a-zA-Z0-9_-]{1,64}$")
name: str = Field(..., pattern=r"^[a-z0-9_-]{1,128}$")
description: I18nObject
@@ -92,6 +104,15 @@ class PluginDeclaration(BaseModel):
agent_strategy: Optional[AgentStrategyProviderEntity] = None
meta: Meta

@field_validator("version")
@classmethod
def validate_version(cls, v: str) -> str:
try:
Version(v)
return v
except InvalidVersion as e:
raise ValueError(f"Invalid version format: {v}") from e

@model_validator(mode="before")
@classmethod
def validate_category(cls, values: dict) -> dict:
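Replacing the regex with packaging's Version makes validation follow PEP 440; a quick check (example strings are illustrative) of what the validators above accept and reject:

from packaging.version import InvalidVersion, Version

for candidate in ("1.2.3", "2.0.0-beta.2", "not-a-version"):
    try:
        print(candidate, "->", Version(candidate))  # normalized, e.g. 2.0.0b2
    except InvalidVersion:
        print(candidate, "-> rejected")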

+2 -2   api/core/plugin/utils/chunk_merger.py   View file

@@ -1,6 +1,6 @@
from collections.abc import Generator
from dataclasses import dataclass, field
from typing import TypeVar, Union
from typing import TypeVar, Union, cast

from core.agent.entities import AgentInvokeMessage
from core.tools.entities.tool_entities import ToolInvokeMessage
@@ -85,7 +85,7 @@ def merge_blob_chunks(
message=ToolInvokeMessage.BlobMessage(blob=files[chunk_id].data[: files[chunk_id].bytes_written]),
meta=resp.meta,
)
yield merged_message
yield cast(MessageType, merged_message)
# Clean up the buffer
del files[chunk_id]
else:
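The cast() above reasserts the generator's TypeVar after a concrete message is constructed. A minimal analogy of the pattern (toy types, not the real chunk-merger entities):

from typing import TypeVar, cast

T = TypeVar("T", bound=str)

def first_upper(items: list[T]) -> T:
    result = items[0].upper()  # inferred as str, not T
    return cast(T, result)     # reassert the caller's type parameter; no runtime cost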

+1 -3   api/core/rag/extractor/firecrawl/firecrawl_app.py   View file

@@ -22,7 +22,6 @@ class FirecrawlApp:
"formats": ["markdown"],
"onlyMainContent": True,
"timeout": 30000,
"integration": "dify",
}
if params:
json_data.update(params)
@@ -40,7 +39,7 @@ class FirecrawlApp:
def crawl_url(self, url, params=None) -> str:
# Documentation: https://docs.firecrawl.dev/api-reference/endpoint/crawl-post
headers = self._prepare_headers()
json_data = {"url": url, "integration": "dify"}
json_data = {"url": url}
if params:
json_data.update(params)
response = self._post_request(f"{self.base_url}/v1/crawl", json_data, headers)
@@ -138,7 +137,6 @@ class FirecrawlApp:
"timeout": 60000,
"ignoreInvalidURLs": False,
"scrapeOptions": {},
"integration": "dify",
}
if params:
json_data.update(params)

+1 -1   api/core/rag/index_processor/processor/parent_child_index_processor.py   View file

@@ -36,7 +36,7 @@ class ParentChildIndexProcessor(BaseIndexProcessor):
if not process_rule.get("rules"):
raise ValueError("No rules found in process rule.")
rules = Rule(**process_rule.get("rules"))
all_documents = [] # type: ignore
all_documents: list[Document] = []
if rules.parent_mode == ParentMode.PARAGRAPH:
# Split the text documents into nodes.
if not rules.segmentation:

+1 -1   api/core/tools/tool_manager.py   View file

@@ -646,7 +646,7 @@ class ToolManager:
include_set=dify_config.POSITION_TOOL_INCLUDES_SET,
exclude_set=dify_config.POSITION_TOOL_EXCLUDES_SET,
data=provider,
name_func=lambda x: x.identity.name,
name_func=lambda x: x.entity.identity.name,
):
continue
user_provider = ToolTransformService.builtin_provider_to_user_provider(

+1 -0   api/core/workflow/node_events/node.py   View file

@@ -19,6 +19,7 @@ class ModelInvokeCompletedEvent(NodeEventBase):
text: str
usage: LLMUsage
finish_reason: str | None = None
reasoning_content: str | None = None


class RunRetryEvent(NodeEventBase):

+18 -1   api/core/workflow/nodes/llm/entities.py   View file

@@ -1,5 +1,5 @@
from collections.abc import Mapping, Sequence
from typing import Any, Optional
from typing import Any, Literal, Optional

from pydantic import BaseModel, Field, field_validator

@@ -68,6 +68,23 @@ class LLMNodeData(BaseNodeData):
structured_output: Mapping[str, Any] | None = None
# We used 'structured_output_enabled' in the past, but it's not a good name.
structured_output_switch_on: bool = Field(False, alias="structured_output_enabled")
reasoning_format: Literal["separated", "tagged"] = Field(
# Keep tagged as default for backward compatibility
default="tagged",
description=(
"""
Strategy for handling model reasoning output.

separated: Return clean text (without <think> tags) + reasoning_content field.
Recommended for new workflows. Enables safe downstream parsing and
workflow variable access: {{#node_id.reasoning_content#}}

tagged : Return original text (with <think> tags) + reasoning_content field.
Maintains full backward compatibility while still providing reasoning_content
for workflow automation. Frontend thinking panels work as before.
"""
),
)

@field_validator("prompt_config", mode="before")
@classmethod
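A self-contained sketch of the field mechanics, assuming only pydantic: the Literal restricts the accepted values, and the default keeps existing workflows on tagged.

from typing import Literal
from pydantic import BaseModel, Field

class Demo(BaseModel):
    reasoning_format: Literal["separated", "tagged"] = Field(default="tagged")

assert Demo().reasoning_format == "tagged"  # backward-compatible default
assert Demo(reasoning_format="separated").reasoning_format == "separated"
# Demo(reasoning_format="raw") would raise a ValidationError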

+98 -4   api/core/workflow/nodes/llm/node.py   View file

@@ -2,8 +2,9 @@ import base64
import io
import json
import logging
import re
from collections.abc import Generator, Mapping, Sequence
from typing import TYPE_CHECKING, Any, Optional
from typing import TYPE_CHECKING, Any, Literal, Optional

from core.app.entities.app_invoke_entities import ModelConfigWithCredentialsEntity
from core.file import FileType, file_manager
@@ -101,6 +102,9 @@ class LLMNode(Node):

_node_data: LLMNodeData

# Compiled regex for extracting <think> blocks (with compatibility for attributes)
_THINK_PATTERN = re.compile(r"<think[^>]*>(.*?)</think>", re.IGNORECASE | re.DOTALL)

# Instance attributes specific to LLMNode.
# Output variable for file
_file_outputs: list["File"]
@@ -163,6 +167,7 @@ class LLMNode(Node):
result_text = ""
usage = LLMUsage.empty_usage()
finish_reason = None
reasoning_content = None
variable_pool = self.graph_runtime_state.variable_pool

try:
@@ -250,6 +255,7 @@ class LLMNode(Node):
file_outputs=self._file_outputs,
node_id=self._node_id,
node_type=self.node_type,
reasoning_format=self._node_data.reasoning_format,
)

structured_output: LLMStructuredOutput | None = None
@@ -258,9 +264,20 @@ class LLMNode(Node):
if isinstance(event, StreamChunkEvent):
yield event
elif isinstance(event, ModelInvokeCompletedEvent):
# Raw text
result_text = event.text
usage = event.usage
finish_reason = event.finish_reason
reasoning_content = event.reasoning_content or ""

# For downstream nodes, determine clean text based on reasoning_format
if self._node_data.reasoning_format == "tagged":
# Keep <think> tags for backward compatibility
clean_text = result_text
else:
# Extract clean text from <think> tags
clean_text, _ = LLMNode._split_reasoning(result_text, self._node_data.reasoning_format)

# deduct quota
llm_utils.deduct_llm_quota(tenant_id=self.tenant_id, model_instance=model_instance, usage=usage)
break
@@ -278,7 +295,12 @@ class LLMNode(Node):
"model_name": model_config.model,
}

outputs = {"text": result_text, "usage": jsonable_encoder(usage), "finish_reason": finish_reason}
outputs = {
"text": clean_text,
"reasoning_content": reasoning_content,
"usage": jsonable_encoder(usage),
"finish_reason": finish_reason,
}
if structured_output:
outputs["structured_output"] = structured_output.structured_output
if self._file_outputs:
@@ -340,6 +362,7 @@ class LLMNode(Node):
file_outputs: list["File"],
node_id: str,
node_type: NodeType,
reasoning_format: Literal["separated", "tagged"] = "tagged",
) -> Generator[NodeEventBase | LLMStructuredOutput, None, None]:
model_schema = model_instance.model_type_instance.get_model_schema(
node_data_model.name, model_instance.credentials
@@ -377,6 +400,7 @@ class LLMNode(Node):
file_outputs=file_outputs,
node_id=node_id,
node_type=node_type,
reasoning_format=reasoning_format,
)

@staticmethod
@@ -387,6 +411,7 @@ class LLMNode(Node):
file_outputs: list["File"],
node_id: str,
node_type: NodeType,
reasoning_format: Literal["separated", "tagged"] = "tagged",
) -> Generator[NodeEventBase | LLMStructuredOutput, None, None]:
# For blocking mode
if isinstance(invoke_result, LLMResult):
@@ -394,6 +419,7 @@ class LLMNode(Node):
invoke_result=invoke_result,
saver=file_saver,
file_outputs=file_outputs,
reasoning_format=reasoning_format,
)
yield event
return
@@ -438,13 +464,66 @@ class LLMNode(Node):
except OutputParserError as e:
raise LLMNodeError(f"Failed to parse structured output: {e}")

yield ModelInvokeCompletedEvent(text=full_text_buffer.getvalue(), usage=usage, finish_reason=finish_reason)
# Extract reasoning content from <think> tags in the main text
full_text = full_text_buffer.getvalue()

if reasoning_format == "tagged":
# Keep <think> tags in text for backward compatibility
clean_text = full_text
reasoning_content = ""
else:
# Extract clean text and reasoning from <think> tags
clean_text, reasoning_content = LLMNode._split_reasoning(full_text, reasoning_format)

yield ModelInvokeCompletedEvent(
# Use clean_text for separated mode, full_text for tagged mode
text=clean_text if reasoning_format == "separated" else full_text,
usage=usage,
finish_reason=finish_reason,
# Reasoning content for workflow variables and downstream nodes
reasoning_content=reasoning_content,
)

@staticmethod
def _image_file_to_markdown(file: "File", /):
text_chunk = f"![]({file.generate_url()})"
return text_chunk

@classmethod
def _split_reasoning(
cls, text: str, reasoning_format: Literal["separated", "tagged"] = "tagged"
) -> tuple[str, str]:
"""
Split reasoning content from text based on reasoning_format strategy.

Args:
text: Full text that may contain <think> blocks
reasoning_format: Strategy for handling reasoning content
- "separated": Remove <think> tags and return clean text + reasoning_content field
- "tagged": Keep <think> tags in text, return empty reasoning_content

Returns:
tuple of (clean_text, reasoning_content)
"""

if reasoning_format == "tagged":
return text, ""

# Find all <think>...</think> blocks (case-insensitive)
matches = cls._THINK_PATTERN.findall(text)

# Extract reasoning content from all <think> blocks
reasoning_content = "\n".join(match.strip() for match in matches) if matches else ""

# Remove all <think>...</think> blocks from original text
clean_text = cls._THINK_PATTERN.sub("", text)

# Clean up extra whitespace
clean_text = re.sub(r"\n\s*\n", "\n\n", clean_text).strip()

# Separated mode: always return clean text and reasoning_content
return clean_text, reasoning_content or ""

def _transform_chat_messages(
self, messages: Sequence[LLMNodeChatModelMessage] | LLMNodeCompletionModelPromptTemplate, /
) -> Sequence[LLMNodeChatModelMessage] | LLMNodeCompletionModelPromptTemplate:
@@ -972,6 +1051,7 @@ class LLMNode(Node):
invoke_result: LLMResult,
saver: LLMFileSaver,
file_outputs: list["File"],
reasoning_format: Literal["separated", "tagged"] = "tagged",
) -> ModelInvokeCompletedEvent:
buffer = io.StringIO()
for text_part in LLMNode._save_multimodal_output_and_convert_result_to_markdown(
@@ -981,10 +1061,24 @@ class LLMNode(Node):
):
buffer.write(text_part)

# Extract reasoning content from <think> tags in the main text
full_text = buffer.getvalue()

if reasoning_format == "tagged":
# Keep <think> tags in text for backward compatibility
clean_text = full_text
reasoning_content = ""
else:
# Extract clean text and reasoning from <think> tags
clean_text, reasoning_content = LLMNode._split_reasoning(full_text, reasoning_format)

return ModelInvokeCompletedEvent(
text=buffer.getvalue(),
# Use clean_text for separated mode, full_text for tagged mode
text=clean_text if reasoning_format == "separated" else full_text,
usage=invoke_result.usage,
finish_reason=None,
# Reasoning content for workflow variables and downstream nodes
reasoning_content=reasoning_content,
)

@staticmethod
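A quick illustration of the two modes of the _split_reasoning helper above (it mirrors the unit tests added later in this commit):

text = "<think>Check the docs first.</think>The answer is 42."

clean, reasoning = LLMNode._split_reasoning(text, "separated")
assert clean == "The answer is 42."
assert reasoning == "Check the docs first."

tagged, empty = LLMNode._split_reasoning(text, "tagged")
assert tagged == text  # tags preserved for backward compatibility
assert empty == ""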

+0 -25   api/mypy.ini   View file

@@ -1,25 +0,0 @@
[mypy]
warn_return_any = True
warn_unused_configs = True
check_untyped_defs = True
cache_fine_grained = True
sqlite_cache = True
exclude = (?x)(
tests/
| migrations/
)

[mypy-flask_login]
ignore_missing_imports=True

[mypy-flask_restx]
ignore_missing_imports=True

[mypy-flask_restx.api]
ignore_missing_imports=True

[mypy-flask_restx.inputs]
ignore_missing_imports=True

[mypy-google.cloud.storage]
ignore_missing_imports=True

+5 -3   api/pyproject.toml   View file

@@ -84,10 +84,11 @@ dependencies = [
"weave~=0.51.0",
"yarl~=1.18.3",
"webvtt-py~=0.5.1",
"sseclient-py>=1.8.0",
"httpx-sse>=0.4.0",
"sseclient-py~=1.8.0",
"httpx-sse~=0.4.0",
"sendgrid~=6.12.3",
"flask-restx>=1.3.0",
"flask-restx~=1.3.0",
"packaging~=23.2",
]
# Before adding new dependency, consider place it in
# alphabet order (a-z) and suitable group.
@@ -167,6 +168,7 @@ dev = [
"import-linter>=2.3",
"types-redis>=4.6.0.20241004",
"celery-types>=0.23.0",
"mypy~=1.17.1",
]

############################################################
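The specifier changes above swap >= for ~= (compatible release). A short check of the difference, using the packaging library this commit adds as a dependency:

from packaging.specifiers import SpecifierSet

assert "1.8.5" in SpecifierSet("~=1.8.0")      # patch updates allowed
assert "1.9.0" not in SpecifierSet("~=1.8.0")  # minor bump excluded
assert "2.0.0" in SpecifierSet(">=1.8.0")      # >= would accept any future version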

+23 -42   api/pyrightconfig.json   View file

@@ -1,47 +1,28 @@
{
"include": ["."],
"exclude": ["tests/", "migrations/", ".venv/"],
"exclude": [
"tests/",
"migrations/",
".venv/",
"models/",
"core/",
"controllers/",
"tasks/",
"services/",
"schedule/",
"extensions/",
"utils/",
"repositories/",
"libs/",
"fields/",
"factories/",
"events/",
"contexts/",
"constants/",
"configs/",
"commands.py"
],
"typeCheckingMode": "strict",
"pythonVersion": "3.11",
"pythonPlatform": "All",
"reportMissingTypeStubs": false,
"reportOptionalMemberAccess": "none",
"reportOptionalIterable": "none",
"reportOptionalOperand": "none",
"reportOptionalSubscript": "none",
"reportTypedDictNotRequiredAccess": "none",
"reportPrivateImportUsage": "none",
"reportUnsupportedDunderAll": "none",
"reportUnnecessaryTypeIgnoreComment": "none",
"reportMatchNotExhaustive": "none",
"reportImplicitOverride": "none",
"reportCallInDefaultInitializer": "none",
"reportUnnecessaryIsInstance": "none",
"reportUnnecessaryComparison": "none",
"reportUnknownParameterType": "none",
"reportMissingParameterType": "none",
"reportUnknownArgumentType": "none",
"reportUnknownVariableType": "none",
"reportUnknownMemberType": "none",
"reportMissingTypeArgument": "none",
"reportUntypedFunctionDecorator": "none",
"reportUnknownLambdaType": "none",
"reportPrivateUsage": "none",
"reportConstantRedefinition": "none",
"reportIncompatibleMethodOverride": "none",
"reportIncompatibleVariableOverride": "none",
"reportOverlappingOverload": "none",
"reportPossiblyUnboundVariable": "none",
"reportUnusedImport": "none",
"reportUnusedFunction": "none",
"reportArgumentType": "none",
"reportAssignmentType": "none",
"reportAttributeAccessIssue": "none",
"reportCallIssue": "none",
"reportIndexIssue": "none",
"reportRedeclaration": "none",
"reportReturnType": "none",
"reportOperatorIssue": "none",
"reportTypeCommentUsage": "none",
"reportDeprecated": "none"
"pythonPlatform": "All"
}

+2 -0   api/services/account_service.py   View file

@@ -214,6 +214,7 @@ class AccountService:
base64_password_hashed = base64.b64encode(password_hashed).decode()
account.password = base64_password_hashed
account.password_salt = base64_salt
db.session.add(account)
db.session.commit()
return account

@@ -351,6 +352,7 @@ class AccountService:
@staticmethod
def update_account(account, **kwargs):
"""Update account fields"""
account = db.session.merge(account)
for field, value in kwargs.items():
if hasattr(account, field):
setattr(account, field, value)
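A self-contained sketch of the detached-instance situation that merge() handles, using a toy model and in-memory SQLite (not the real Account schema): an object loaded in one session must be reattached before mutations in another session are tracked.

from sqlalchemy import create_engine
from sqlalchemy.orm import DeclarativeBase, Mapped, Session, mapped_column

class Base(DeclarativeBase):
    pass

class Account(Base):
    __tablename__ = "accounts"
    id: Mapped[int] = mapped_column(primary_key=True)
    name: Mapped[str]

engine = create_engine("sqlite://")
Base.metadata.create_all(engine)

with Session(engine) as s1:
    s1.add(Account(id=1, name="old"))
    s1.commit()
    detached = s1.get(Account, 1)  # becomes detached once s1 closes

with Session(engine) as s2:
    account = s2.merge(detached)   # reattach to the active session
    account.name = "new"
    s2.commit()                    # the UPDATE is tracked and flushed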

+1 -1   api/services/dataset_service.py   View file

@@ -1198,7 +1198,7 @@ class DocumentService:
"Invalid process rule mode: %s, can not find dataset process rule",
process_rule.mode,
)
return
return [], ""
db.session.add(dataset_process_rule)
db.session.commit()
lock_name = f"add_document_lock_dataset_id_{dataset.id}"

+1 -1   api/services/tools/builtin_tools_manage_service.py   View file

@@ -573,7 +573,7 @@ class BuiltinToolManageService:
include_set=dify_config.POSITION_TOOL_INCLUDES_SET, # type: ignore
exclude_set=dify_config.POSITION_TOOL_EXCLUDES_SET, # type: ignore
data=provider_controller,
name_func=lambda x: x.identity.name,
name_func=lambda x: x.entity.identity.name,
):
continue


+16 -5   api/tests/test_containers_integration_tests/services/test_webapp_auth_service.py   View file

@@ -57,10 +57,12 @@ class TestWebAppAuthService:
tuple: (account, tenant) - Created account and tenant instances
"""
fake = Faker()
import uuid

# Create account
# Create account with unique email to avoid collisions
unique_email = f"test_{uuid.uuid4().hex[:8]}@example.com"
account = Account(
email=fake.email(),
email=unique_email,
name=fake.name(),
interface_language="en-US",
status="active",
@@ -109,8 +111,11 @@ class TestWebAppAuthService:
password = fake.password(length=12)

# Create account with password
import uuid

unique_email = f"test_{uuid.uuid4().hex[:8]}@example.com"
account = Account(
email=fake.email(),
email=unique_email,
name=fake.name(),
interface_language="en-US",
status="active",
@@ -322,9 +327,12 @@ class TestWebAppAuthService:
"""
# Arrange: Create account without password
fake = Faker()
import uuid

unique_email = f"test_{uuid.uuid4().hex[:8]}@example.com"

account = Account(
email=fake.email(),
email=unique_email,
name=fake.name(),
interface_language="en-US",
status="active",
@@ -431,9 +439,12 @@ class TestWebAppAuthService:
"""
# Arrange: Create banned account
fake = Faker()
import uuid

unique_email = f"test_{uuid.uuid4().hex[:8]}@example.com"

account = Account(
email=fake.email(),
email=unique_email,
name=fake.name(),
interface_language="en-US",
status=AccountStatus.BANNED.value,

+0 -0   api/tests/test_containers_integration_tests/tasks/__init__.py   View file


+786 -0   api/tests/test_containers_integration_tests/tasks/test_add_document_to_index_task.py   View file

@@ -0,0 +1,786 @@
from unittest.mock import MagicMock, patch

import pytest
from faker import Faker

from core.rag.index_processor.constant.index_type import IndexType
from extensions.ext_database import db
from extensions.ext_redis import redis_client
from models.account import Account, Tenant, TenantAccountJoin, TenantAccountRole
from models.dataset import Dataset, DatasetAutoDisableLog, Document, DocumentSegment
from tasks.add_document_to_index_task import add_document_to_index_task


class TestAddDocumentToIndexTask:
"""Integration tests for add_document_to_index_task using testcontainers."""

@pytest.fixture
def mock_external_service_dependencies(self):
"""Mock setup for external service dependencies."""
with (
patch("tasks.add_document_to_index_task.IndexProcessorFactory") as mock_index_processor_factory,
):
# Setup mock index processor
mock_processor = MagicMock()
mock_index_processor_factory.return_value.init_index_processor.return_value = mock_processor

yield {
"index_processor_factory": mock_index_processor_factory,
"index_processor": mock_processor,
}

def _create_test_dataset_and_document(self, db_session_with_containers, mock_external_service_dependencies):
"""
Helper method to create a test dataset and document for testing.

Args:
db_session_with_containers: Database session from testcontainers infrastructure
mock_external_service_dependencies: Mock dependencies

Returns:
tuple: (dataset, document) - Created dataset and document instances
"""
fake = Faker()

# Create account and tenant
account = Account(
email=fake.email(),
name=fake.name(),
interface_language="en-US",
status="active",
)
db.session.add(account)
db.session.commit()

tenant = Tenant(
name=fake.company(),
status="normal",
)
db.session.add(tenant)
db.session.commit()

# Create tenant-account join
join = TenantAccountJoin(
tenant_id=tenant.id,
account_id=account.id,
role=TenantAccountRole.OWNER.value,
current=True,
)
db.session.add(join)
db.session.commit()

# Create dataset
dataset = Dataset(
id=fake.uuid4(),
tenant_id=tenant.id,
name=fake.company(),
description=fake.text(max_nb_chars=100),
data_source_type="upload_file",
indexing_technique="high_quality",
created_by=account.id,
)
db.session.add(dataset)
db.session.commit()

# Create document
document = Document(
id=fake.uuid4(),
tenant_id=tenant.id,
dataset_id=dataset.id,
position=1,
data_source_type="upload_file",
batch="test_batch",
name=fake.file_name(),
created_from="upload_file",
created_by=account.id,
indexing_status="completed",
enabled=True,
doc_form=IndexType.PARAGRAPH_INDEX,
)
db.session.add(document)
db.session.commit()

# Refresh dataset to ensure doc_form property works correctly
db.session.refresh(dataset)

return dataset, document

def _create_test_segments(self, db_session_with_containers, document, dataset):
"""
Helper method to create test document segments.

Args:
db_session_with_containers: Database session from testcontainers infrastructure
document: Document instance
dataset: Dataset instance

Returns:
list: List of created DocumentSegment instances
"""
fake = Faker()
segments = []

for i in range(3):
segment = DocumentSegment(
id=fake.uuid4(),
tenant_id=document.tenant_id,
dataset_id=dataset.id,
document_id=document.id,
position=i,
content=fake.text(max_nb_chars=200),
word_count=len(fake.text(max_nb_chars=200).split()),
tokens=len(fake.text(max_nb_chars=200).split()) * 2,
index_node_id=f"node_{i}",
index_node_hash=f"hash_{i}",
enabled=False,
status="completed",
created_by=document.created_by,
)
db.session.add(segment)
segments.append(segment)

db.session.commit()
return segments

def test_add_document_to_index_success(self, db_session_with_containers, mock_external_service_dependencies):
"""
Test successful document indexing with paragraph index type.

This test verifies:
- Proper document retrieval from database
- Correct segment processing and document creation
- Index processor integration
- Database state updates
- Segment status changes
- Redis cache key deletion
"""
# Arrange: Create test data
dataset, document = self._create_test_dataset_and_document(
db_session_with_containers, mock_external_service_dependencies
)
segments = self._create_test_segments(db_session_with_containers, document, dataset)

# Set up Redis cache key to simulate indexing in progress
indexing_cache_key = f"document_{document.id}_indexing"
redis_client.set(indexing_cache_key, "processing", ex=300) # 5 minutes expiry

# Verify cache key exists
assert redis_client.exists(indexing_cache_key) == 1

# Act: Execute the task
add_document_to_index_task(document.id)

# Assert: Verify the expected outcomes
# Verify index processor was called correctly
mock_external_service_dependencies["index_processor_factory"].assert_called_once_with(IndexType.PARAGRAPH_INDEX)
mock_external_service_dependencies["index_processor"].load.assert_called_once()

# Verify database state changes
db.session.refresh(document)
for segment in segments:
db.session.refresh(segment)
assert segment.enabled is True
assert segment.disabled_at is None
assert segment.disabled_by is None

# Verify Redis cache key was deleted
assert redis_client.exists(indexing_cache_key) == 0

def test_add_document_to_index_with_different_index_type(
self, db_session_with_containers, mock_external_service_dependencies
):
"""
Test document indexing with different index types.

This test verifies:
- Proper handling of different index types
- Index processor factory integration
- Document processing with various configurations
- Redis cache key deletion
"""
# Arrange: Create test data with different index type
dataset, document = self._create_test_dataset_and_document(
db_session_with_containers, mock_external_service_dependencies
)

# Update document to use different index type
document.doc_form = IndexType.QA_INDEX
db.session.commit()

# Refresh dataset to ensure doc_form property reflects the updated document
db.session.refresh(dataset)

# Create segments
segments = self._create_test_segments(db_session_with_containers, document, dataset)

# Set up Redis cache key
indexing_cache_key = f"document_{document.id}_indexing"
redis_client.set(indexing_cache_key, "processing", ex=300)

# Act: Execute the task
add_document_to_index_task(document.id)

# Assert: Verify different index type handling
mock_external_service_dependencies["index_processor_factory"].assert_called_once_with(IndexType.QA_INDEX)
mock_external_service_dependencies["index_processor"].load.assert_called_once()

# Verify the load method was called with correct parameters
call_args = mock_external_service_dependencies["index_processor"].load.call_args
assert call_args is not None
documents = call_args[0][1] # Second argument should be documents list
assert len(documents) == 3

# Verify database state changes
db.session.refresh(document)
for segment in segments:
db.session.refresh(segment)
assert segment.enabled is True
assert segment.disabled_at is None
assert segment.disabled_by is None

# Verify Redis cache key was deleted
assert redis_client.exists(indexing_cache_key) == 0

def test_add_document_to_index_document_not_found(
self, db_session_with_containers, mock_external_service_dependencies
):
"""
Test handling of non-existent document.

This test verifies:
- Proper error handling for missing documents
- Early return without processing
- Database session cleanup
- No unnecessary index processor calls
- Redis cache key not affected (since it was never created)
"""
# Arrange: Use non-existent document ID
fake = Faker()
non_existent_id = fake.uuid4()

# Act: Execute the task with non-existent document
add_document_to_index_task(non_existent_id)

# Assert: Verify no processing occurred
mock_external_service_dependencies["index_processor_factory"].assert_not_called()
mock_external_service_dependencies["index_processor"].load.assert_not_called()

# Note: redis_client.delete is not called when document is not found
# because indexing_cache_key is not defined in that case

def test_add_document_to_index_invalid_indexing_status(
self, db_session_with_containers, mock_external_service_dependencies
):
"""
Test handling of document with invalid indexing status.

This test verifies:
- Early return when indexing_status is not "completed"
- No index processing for documents not ready for indexing
- Proper database session cleanup
- No unnecessary external service calls
- Redis cache key not affected
"""
# Arrange: Create test data with invalid indexing status
dataset, document = self._create_test_dataset_and_document(
db_session_with_containers, mock_external_service_dependencies
)

# Set invalid indexing status
document.indexing_status = "processing"
db.session.commit()

# Act: Execute the task
add_document_to_index_task(document.id)

# Assert: Verify no processing occurred
mock_external_service_dependencies["index_processor_factory"].assert_not_called()
mock_external_service_dependencies["index_processor"].load.assert_not_called()

def test_add_document_to_index_dataset_not_found(
self, db_session_with_containers, mock_external_service_dependencies
):
"""
Test handling when document's dataset doesn't exist.

This test verifies:
- Proper error handling when dataset is missing
- Document status is set to error
- Document is disabled
- Error information is recorded
- Redis cache is cleared despite error
"""
# Arrange: Create test data
dataset, document = self._create_test_dataset_and_document(
db_session_with_containers, mock_external_service_dependencies
)

# Set up Redis cache key
indexing_cache_key = f"document_{document.id}_indexing"
redis_client.set(indexing_cache_key, "processing", ex=300)

# Delete the dataset to simulate dataset not found scenario
db.session.delete(dataset)
db.session.commit()

# Act: Execute the task
add_document_to_index_task(document.id)

# Assert: Verify error handling
db.session.refresh(document)
assert document.enabled is False
assert document.indexing_status == "error"
assert document.error is not None
assert "doesn't exist" in document.error
assert document.disabled_at is not None

# Verify no index processing occurred
mock_external_service_dependencies["index_processor_factory"].assert_not_called()
mock_external_service_dependencies["index_processor"].load.assert_not_called()

# Verify redis cache was cleared despite error
assert redis_client.exists(indexing_cache_key) == 0

def test_add_document_to_index_with_parent_child_structure(
self, db_session_with_containers, mock_external_service_dependencies
):
"""
Test document indexing with parent-child structure.

This test verifies:
- Proper handling of PARENT_CHILD_INDEX type
- Child document creation from segments
- Correct document structure for parent-child indexing
- Index processor receives properly structured documents
- Redis cache key deletion
"""
# Arrange: Create test data with parent-child index type
dataset, document = self._create_test_dataset_and_document(
db_session_with_containers, mock_external_service_dependencies
)

# Update document to use parent-child index type
document.doc_form = IndexType.PARENT_CHILD_INDEX
db.session.commit()

# Refresh dataset to ensure doc_form property reflects the updated document
db.session.refresh(dataset)

# Create segments with mock child chunks
segments = self._create_test_segments(db_session_with_containers, document, dataset)

# Set up Redis cache key
indexing_cache_key = f"document_{document.id}_indexing"
redis_client.set(indexing_cache_key, "processing", ex=300)

# Mock the get_child_chunks method for each segment
with patch.object(DocumentSegment, "get_child_chunks") as mock_get_child_chunks:
# Setup mock to return child chunks for each segment
mock_child_chunks = []
for i in range(2): # Each segment has 2 child chunks
mock_child = MagicMock()
mock_child.content = f"child_content_{i}"
mock_child.index_node_id = f"child_node_{i}"
mock_child.index_node_hash = f"child_hash_{i}"
mock_child_chunks.append(mock_child)

mock_get_child_chunks.return_value = mock_child_chunks

# Act: Execute the task
add_document_to_index_task(document.id)

# Assert: Verify parent-child index processing
mock_external_service_dependencies["index_processor_factory"].assert_called_once_with(
IndexType.PARENT_CHILD_INDEX
)
mock_external_service_dependencies["index_processor"].load.assert_called_once()

# Verify the load method was called with correct parameters
call_args = mock_external_service_dependencies["index_processor"].load.call_args
assert call_args is not None
documents = call_args[0][1] # Second argument should be documents list
assert len(documents) == 3 # 3 segments

# Verify each document has children
for doc in documents:
assert hasattr(doc, "children")
assert len(doc.children) == 2 # Each document has 2 children

# Verify database state changes
db.session.refresh(document)
for segment in segments:
db.session.refresh(segment)
assert segment.enabled is True
assert segment.disabled_at is None
assert segment.disabled_by is None

# Verify redis cache was cleared
assert redis_client.exists(indexing_cache_key) == 0

def test_add_document_to_index_with_no_segments_to_process(
self, db_session_with_containers, mock_external_service_dependencies
):
"""
Test document indexing when no segments need processing.

This test verifies:
- Proper handling when all segments are already enabled
- Index processing still occurs but with empty documents list
- Auto disable log deletion still occurs
- Redis cache is cleared
"""
# Arrange: Create test data
dataset, document = self._create_test_dataset_and_document(
db_session_with_containers, mock_external_service_dependencies
)

# Create segments that are already enabled
fake = Faker()
segments = []
for i in range(3):
segment = DocumentSegment(
id=fake.uuid4(),
tenant_id=document.tenant_id,
dataset_id=dataset.id,
document_id=document.id,
position=i,
content=fake.text(max_nb_chars=200),
word_count=len(fake.text(max_nb_chars=200).split()),
tokens=len(fake.text(max_nb_chars=200).split()) * 2,
index_node_id=f"node_{i}",
index_node_hash=f"hash_{i}",
enabled=True, # Already enabled
status="completed",
created_by=document.created_by,
)
db.session.add(segment)
segments.append(segment)

db.session.commit()

# Set up Redis cache key
indexing_cache_key = f"document_{document.id}_indexing"
redis_client.set(indexing_cache_key, "processing", ex=300)

# Act: Execute the task
add_document_to_index_task(document.id)

# Assert: Verify index processing occurred but with empty documents list
mock_external_service_dependencies["index_processor_factory"].assert_called_once_with(IndexType.PARAGRAPH_INDEX)
mock_external_service_dependencies["index_processor"].load.assert_called_once()

# Verify the load method was called with empty documents list
call_args = mock_external_service_dependencies["index_processor"].load.call_args
assert call_args is not None
documents = call_args[0][1] # Second argument should be documents list
assert len(documents) == 0 # No segments to process

# Verify redis cache was cleared
assert redis_client.exists(indexing_cache_key) == 0

def test_add_document_to_index_auto_disable_log_deletion(
self, db_session_with_containers, mock_external_service_dependencies
):
"""
Test that auto disable logs are properly deleted during indexing.

This test verifies:
- Auto disable log entries are deleted for the document
- Database state is properly managed
- Index processing continues normally
- Redis cache key deletion
"""
# Arrange: Create test data
dataset, document = self._create_test_dataset_and_document(
db_session_with_containers, mock_external_service_dependencies
)
segments = self._create_test_segments(db_session_with_containers, document, dataset)

# Create some auto disable log entries
fake = Faker()
auto_disable_logs = []
for i in range(2):
log_entry = DatasetAutoDisableLog(
id=fake.uuid4(),
tenant_id=document.tenant_id,
dataset_id=dataset.id,
document_id=document.id,
)
db.session.add(log_entry)
auto_disable_logs.append(log_entry)

db.session.commit()

# Set up Redis cache key
indexing_cache_key = f"document_{document.id}_indexing"
redis_client.set(indexing_cache_key, "processing", ex=300)

# Verify logs exist before processing
existing_logs = (
db.session.query(DatasetAutoDisableLog).where(DatasetAutoDisableLog.document_id == document.id).all()
)
assert len(existing_logs) == 2

# Act: Execute the task
add_document_to_index_task(document.id)

# Assert: Verify auto disable logs were deleted
remaining_logs = (
db.session.query(DatasetAutoDisableLog).where(DatasetAutoDisableLog.document_id == document.id).all()
)
assert len(remaining_logs) == 0

# Verify index processing occurred normally
mock_external_service_dependencies["index_processor_factory"].assert_called_once_with(IndexType.PARAGRAPH_INDEX)
mock_external_service_dependencies["index_processor"].load.assert_called_once()

# Verify segments were enabled
for segment in segments:
db.session.refresh(segment)
assert segment.enabled is True

# Verify redis cache was cleared
assert redis_client.exists(indexing_cache_key) == 0

def test_add_document_to_index_general_exception_handling(
self, db_session_with_containers, mock_external_service_dependencies
):
"""
Test general exception handling during indexing process.

This test verifies:
- Exceptions are properly caught and handled
- Document status is set to error
- Document is disabled
- Error information is recorded
- Redis cache is still cleared
- Database session is properly closed
"""
# Arrange: Create test data
dataset, document = self._create_test_dataset_and_document(
db_session_with_containers, mock_external_service_dependencies
)
segments = self._create_test_segments(db_session_with_containers, document, dataset)

# Set up Redis cache key
indexing_cache_key = f"document_{document.id}_indexing"
redis_client.set(indexing_cache_key, "processing", ex=300)

# Mock the index processor to raise an exception
mock_external_service_dependencies["index_processor"].load.side_effect = Exception("Index processing failed")

# Act: Execute the task
add_document_to_index_task(document.id)

# Assert: Verify error handling
db.session.refresh(document)
assert document.enabled is False
assert document.indexing_status == "error"
assert document.error is not None
assert "Index processing failed" in document.error
assert document.disabled_at is not None

# Verify segments were not enabled due to error
for segment in segments:
db.session.refresh(segment)
assert segment.enabled is False # Should remain disabled due to error

# Verify redis cache was still cleared despite error
assert redis_client.exists(indexing_cache_key) == 0

def test_add_document_to_index_segment_filtering_edge_cases(
self, db_session_with_containers, mock_external_service_dependencies
):
"""
Test segment filtering with various edge cases.

This test verifies:
- Only segments with enabled=False and status="completed" are processed
- Segments are ordered by position correctly
- Mixed segment states are handled properly
- Redis cache key deletion
"""
# Arrange: Create test data
dataset, document = self._create_test_dataset_and_document(
db_session_with_containers, mock_external_service_dependencies
)

# Create segments with mixed states
fake = Faker()
segments = []

# Segment 1: Should be processed (enabled=False, status="completed")
segment1 = DocumentSegment(
id=fake.uuid4(),
tenant_id=document.tenant_id,
dataset_id=dataset.id,
document_id=document.id,
position=0,
content=fake.text(max_nb_chars=200),
word_count=len(fake.text(max_nb_chars=200).split()),
tokens=len(fake.text(max_nb_chars=200).split()) * 2,
index_node_id="node_0",
index_node_hash="hash_0",
enabled=False,
status="completed",
created_by=document.created_by,
)
db.session.add(segment1)
segments.append(segment1)

# Segment 2: Should NOT be processed (enabled=True, status="completed")
segment2 = DocumentSegment(
id=fake.uuid4(),
tenant_id=document.tenant_id,
dataset_id=dataset.id,
document_id=document.id,
position=1,
content=fake.text(max_nb_chars=200),
word_count=len(fake.text(max_nb_chars=200).split()),
tokens=len(fake.text(max_nb_chars=200).split()) * 2,
index_node_id="node_1",
index_node_hash="hash_1",
enabled=True, # Already enabled
status="completed",
created_by=document.created_by,
)
db.session.add(segment2)
segments.append(segment2)

# Segment 3: Should NOT be processed (enabled=False, status="processing")
segment3 = DocumentSegment(
id=fake.uuid4(),
tenant_id=document.tenant_id,
dataset_id=dataset.id,
document_id=document.id,
position=2,
content=fake.text(max_nb_chars=200),
word_count=len(fake.text(max_nb_chars=200).split()),
tokens=len(fake.text(max_nb_chars=200).split()) * 2,
index_node_id="node_2",
index_node_hash="hash_2",
enabled=False,
status="processing", # Not completed
created_by=document.created_by,
)
db.session.add(segment3)
segments.append(segment3)

# Segment 4: Should be processed (enabled=False, status="completed")
segment4 = DocumentSegment(
id=fake.uuid4(),
tenant_id=document.tenant_id,
dataset_id=dataset.id,
document_id=document.id,
position=3,
content=fake.text(max_nb_chars=200),
word_count=len(fake.text(max_nb_chars=200).split()),
tokens=len(fake.text(max_nb_chars=200).split()) * 2,
index_node_id="node_3",
index_node_hash="hash_3",
enabled=False,
status="completed",
created_by=document.created_by,
)
db.session.add(segment4)
segments.append(segment4)

db.session.commit()

# Set up Redis cache key
indexing_cache_key = f"document_{document.id}_indexing"
redis_client.set(indexing_cache_key, "processing", ex=300)

# Act: Execute the task
add_document_to_index_task(document.id)

# Assert: Verify only eligible segments were processed
mock_external_service_dependencies["index_processor_factory"].assert_called_once_with(IndexType.PARAGRAPH_INDEX)
mock_external_service_dependencies["index_processor"].load.assert_called_once()

# Verify the load method was called with correct parameters
call_args = mock_external_service_dependencies["index_processor"].load.call_args
assert call_args is not None
documents = call_args[0][1] # Second argument should be documents list
assert len(documents) == 2 # Only 2 segments should be processed

# Verify correct segments were processed (by position order)
assert documents[0].metadata["doc_id"] == "node_0" # position 0
assert documents[1].metadata["doc_id"] == "node_3" # position 3

# Verify database state changes
db.session.refresh(document)
db.session.refresh(segment1)
db.session.refresh(segment2)
db.session.refresh(segment3)
db.session.refresh(segment4)

# All segments should be enabled because the task updates ALL segments for the document
assert segment1.enabled is True
assert segment2.enabled is True # Was already enabled, now updated to True
assert segment3.enabled is True # Was not processed but still updated to True
assert segment4.enabled is True

# Verify redis cache was cleared
assert redis_client.exists(indexing_cache_key) == 0

def test_add_document_to_index_comprehensive_error_scenarios(
self, db_session_with_containers, mock_external_service_dependencies
):
"""
Test comprehensive error scenarios and recovery.

This test verifies:
- Multiple types of exceptions are handled properly
- Error state is consistently managed
- Resource cleanup occurs in all error cases
- Database session management is robust
- Redis cache key deletion in all scenarios
"""
# Arrange: Create test data
dataset, document = self._create_test_dataset_and_document(
db_session_with_containers, mock_external_service_dependencies
)
segments = self._create_test_segments(db_session_with_containers, document, dataset)

# Test different exception types
test_exceptions = [
("Database connection error", Exception("Database connection failed")),
("Index processor error", RuntimeError("Index processor initialization failed")),
("Memory error", MemoryError("Out of memory")),
("Value error", ValueError("Invalid index type")),
]

for error_name, exception in test_exceptions:
# Reset mocks for each test
mock_external_service_dependencies["index_processor"].load.side_effect = exception

# Reset document state
document.enabled = True
document.indexing_status = "completed"
document.error = None
document.disabled_at = None
db.session.commit()

# Set up Redis cache key
indexing_cache_key = f"document_{document.id}_indexing"
redis_client.set(indexing_cache_key, "processing", ex=300)

# Act: Execute the task
add_document_to_index_task(document.id)

# Assert: Verify consistent error handling
db.session.refresh(document)
assert document.enabled is False, f"Document should be disabled for {error_name}"
assert document.indexing_status == "error", f"Document status should be error for {error_name}"
assert document.error is not None, f"Error should be recorded for {error_name}"
assert str(exception) in document.error, f"Error message should contain exception for {error_name}"
assert document.disabled_at is not None, f"Disabled timestamp should be set for {error_name}"

# Verify segments remain disabled due to error
for segment in segments:
db.session.refresh(segment)
assert segment.enabled is False, f"Segments should remain disabled for {error_name}"

# Verify redis cache was still cleared despite error
assert redis_client.exists(indexing_cache_key) == 0, f"Redis cache should be cleared for {error_name}"

+64 -0   api/tests/unit_tests/core/workflow/nodes/llm/test_node.py   View file

@@ -66,6 +66,7 @@ def llm_node_data() -> LLMNodeData:
detail=ImagePromptMessageContent.DETAIL.HIGH,
),
),
reasoning_format="tagged",
)


@@ -676,3 +677,66 @@ class TestSaveMultimodalOutputAndConvertResultToMarkdown:
assert list(gen) == []
mock_file_saver.save_binary_string.assert_not_called()
mock_file_saver.save_remote_url.assert_not_called()


class TestReasoningFormat:
"""Test cases for reasoning_format functionality"""

def test_split_reasoning_separated_mode(self):
"""Test separated mode: tags are removed and content is extracted"""

text_with_think = """
<think>I need to explain what Dify is. It's an open source AI platform.
</think>Dify is an open source AI platform.
"""

clean_text, reasoning_content = LLMNode._split_reasoning(text_with_think, "separated")

assert clean_text == "Dify is an open source AI platform."
assert reasoning_content == "I need to explain what Dify is. It's an open source AI platform."

def test_split_reasoning_tagged_mode(self):
"""Test tagged mode: original text is preserved"""

text_with_think = """
<think>I need to explain what Dify is. It's an open source AI platform.
</think>Dify is an open source AI platform.
"""

clean_text, reasoning_content = LLMNode._split_reasoning(text_with_think, "tagged")

# Original text unchanged
assert clean_text == text_with_think
# Empty reasoning content in tagged mode
assert reasoning_content == ""

def test_split_reasoning_no_think_blocks(self):
"""Test behavior when no <think> tags are present"""

text_without_think = "This is a simple answer without any thinking blocks."

clean_text, reasoning_content = LLMNode._split_reasoning(text_without_think, "separated")

assert clean_text == text_without_think
assert reasoning_content == ""

def test_reasoning_format_default_value(self):
"""Test that reasoning_format defaults to 'tagged' for backward compatibility"""

node_data = LLMNodeData(
title="Test LLM",
model=ModelConfig(provider="openai", name="gpt-3.5-turbo", mode="chat", completion_params={}),
prompt_template=[],
context=ContextConfig(enabled=False),
)

assert node_data.reasoning_format == "tagged"

text_with_think = """
<think>I need to explain what Dify is. It's an open source AI platform.
</think>Dify is an open source AI platform.
"""
clean_text, reasoning_content = LLMNode._split_reasoning(text_with_think, node_data.reasoning_format)

assert clean_text == text_with_think
assert reasoning_content == ""
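For context on what these tests pin down, here is a minimal sketch of the splitting behavior, assuming a regex over <think>…</think> blocks; the real LLMNode._split_reasoning may differ in its details.

import re

_THINK_PATTERN = re.compile(r"<think>(.*?)</think>", re.DOTALL)


def split_reasoning(text: str, reasoning_format: str) -> tuple[str, str]:
    # "tagged" (the default) preserves the original text, tags included,
    # which keeps existing workflows backward compatible.
    if reasoning_format == "tagged":
        return text, ""
    # "separated" strips the tags and returns their content on the side.
    reasoning = "\n".join(m.strip() for m in _THINK_PATTERN.findall(text))
    clean = _THINK_PATTERN.sub("", text).strip()
    return clean, reasoning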

+ 42
- 3
api/uv.lock View File

@@ -1318,6 +1318,7 @@ dependencies = [
{ name = "opentelemetry-semantic-conventions" },
{ name = "opentelemetry-util-http" },
{ name = "opik" },
{ name = "packaging" },
{ name = "pandas", extra = ["excel", "output-formatting", "performance"] },
{ name = "pandoc" },
{ name = "psycogreen" },
@@ -1358,6 +1359,7 @@ dev = [
{ name = "hypothesis" },
{ name = "import-linter" },
{ name = "lxml-stubs" },
{ name = "mypy" },
{ name = "pandas-stubs" },
{ name = "pytest" },
{ name = "pytest-benchmark" },
@@ -1469,7 +1471,7 @@ requires-dist = [
{ name = "flask-login", specifier = "~=0.6.3" },
{ name = "flask-migrate", specifier = "~=4.0.7" },
{ name = "flask-orjson", specifier = "~=2.0.0" },
{ name = "flask-restx", specifier = ">=1.3.0" },
{ name = "flask-restx", specifier = "~=1.3.0" },
{ name = "flask-sqlalchemy", specifier = "~=3.1.1" },
{ name = "gevent", specifier = "~=24.11.1" },
{ name = "gmpy2", specifier = "~=2.2.1" },
@@ -1481,7 +1483,7 @@ requires-dist = [
{ name = "googleapis-common-protos", specifier = "==1.63.0" },
{ name = "gunicorn", specifier = "~=23.0.0" },
{ name = "httpx", extras = ["socks"], specifier = "~=0.27.0" },
{ name = "httpx-sse", specifier = ">=0.4.0" },
{ name = "httpx-sse", specifier = "~=0.4.0" },
{ name = "jieba", specifier = "==0.42.1" },
{ name = "json-repair", specifier = ">=0.41.1" },
{ name = "langfuse", specifier = "~=2.51.3" },
@@ -1509,6 +1511,7 @@ requires-dist = [
{ name = "opentelemetry-semantic-conventions", specifier = "==0.48b0" },
{ name = "opentelemetry-util-http", specifier = "==0.48b0" },
{ name = "opik", specifier = "~=1.7.25" },
{ name = "packaging", specifier = "~=23.2" },
{ name = "pandas", extras = ["excel", "output-formatting", "performance"], specifier = "~=2.2.2" },
{ name = "pandoc", specifier = "~=2.4" },
{ name = "psycogreen", specifier = "~=1.0.2" },
@@ -1528,7 +1531,7 @@ requires-dist = [
{ name = "sendgrid", specifier = "~=6.12.3" },
{ name = "sentry-sdk", extras = ["flask"], specifier = "~=2.28.0" },
{ name = "sqlalchemy", specifier = "~=2.0.29" },
{ name = "sseclient-py", specifier = ">=1.8.0" },
{ name = "sseclient-py", specifier = "~=1.8.0" },
{ name = "starlette", specifier = "==0.47.2" },
{ name = "tiktoken", specifier = "~=0.9.0" },
{ name = "transformers", specifier = "~=4.53.0" },
@@ -1549,6 +1552,7 @@ dev = [
{ name = "hypothesis", specifier = ">=6.131.15" },
{ name = "import-linter", specifier = ">=2.3" },
{ name = "lxml-stubs", specifier = "~=0.5.1" },
{ name = "mypy", specifier = "~=1.17.1" },
{ name = "pandas-stubs", specifier = "~=2.2.3" },
{ name = "pytest", specifier = "~=8.3.2" },
{ name = "pytest-benchmark", specifier = "~=4.0.0" },
@@ -3353,6 +3357,32 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/d8/30/9aec301e9772b098c1f5c0ca0279237c9766d94b97802e9888010c64b0ed/multidict-6.6.3-py3-none-any.whl", hash = "sha256:8db10f29c7541fc5da4defd8cd697e1ca429db743fa716325f236079b96f775a", size = 12313, upload-time = "2025-06-30T15:53:45.437Z" },
]

[[package]]
name = "mypy"
version = "1.17.1"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "mypy-extensions" },
{ name = "pathspec" },
{ name = "typing-extensions" },
]
sdist = { url = "https://files.pythonhosted.org/packages/8e/22/ea637422dedf0bf36f3ef238eab4e455e2a0dcc3082b5cc067615347ab8e/mypy-1.17.1.tar.gz", hash = "sha256:25e01ec741ab5bb3eec8ba9cdb0f769230368a22c959c4937360efb89b7e9f01", size = 3352570, upload-time = "2025-07-31T07:54:19.204Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/46/cf/eadc80c4e0a70db1c08921dcc220357ba8ab2faecb4392e3cebeb10edbfa/mypy-1.17.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ad37544be07c5d7fba814eb370e006df58fed8ad1ef33ed1649cb1889ba6ff58", size = 10921009, upload-time = "2025-07-31T07:53:23.037Z" },
{ url = "https://files.pythonhosted.org/packages/5d/c1/c869d8c067829ad30d9bdae051046561552516cfb3a14f7f0347b7d973ee/mypy-1.17.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:064e2ff508e5464b4bd807a7c1625bc5047c5022b85c70f030680e18f37273a5", size = 10047482, upload-time = "2025-07-31T07:53:26.151Z" },
{ url = "https://files.pythonhosted.org/packages/98/b9/803672bab3fe03cee2e14786ca056efda4bb511ea02dadcedde6176d06d0/mypy-1.17.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:70401bbabd2fa1aa7c43bb358f54037baf0586f41e83b0ae67dd0534fc64edfd", size = 11832883, upload-time = "2025-07-31T07:53:47.948Z" },
{ url = "https://files.pythonhosted.org/packages/88/fb/fcdac695beca66800918c18697b48833a9a6701de288452b6715a98cfee1/mypy-1.17.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e92bdc656b7757c438660f775f872a669b8ff374edc4d18277d86b63edba6b8b", size = 12566215, upload-time = "2025-07-31T07:54:04.031Z" },
{ url = "https://files.pythonhosted.org/packages/7f/37/a932da3d3dace99ee8eb2043b6ab03b6768c36eb29a02f98f46c18c0da0e/mypy-1.17.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:c1fdf4abb29ed1cb091cf432979e162c208a5ac676ce35010373ff29247bcad5", size = 12751956, upload-time = "2025-07-31T07:53:36.263Z" },
{ url = "https://files.pythonhosted.org/packages/8c/cf/6438a429e0f2f5cab8bc83e53dbebfa666476f40ee322e13cac5e64b79e7/mypy-1.17.1-cp311-cp311-win_amd64.whl", hash = "sha256:ff2933428516ab63f961644bc49bc4cbe42bbffb2cd3b71cc7277c07d16b1a8b", size = 9507307, upload-time = "2025-07-31T07:53:59.734Z" },
{ url = "https://files.pythonhosted.org/packages/17/a2/7034d0d61af8098ec47902108553122baa0f438df8a713be860f7407c9e6/mypy-1.17.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:69e83ea6553a3ba79c08c6e15dbd9bfa912ec1e493bf75489ef93beb65209aeb", size = 11086295, upload-time = "2025-07-31T07:53:28.124Z" },
{ url = "https://files.pythonhosted.org/packages/14/1f/19e7e44b594d4b12f6ba8064dbe136505cec813549ca3e5191e40b1d3cc2/mypy-1.17.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1b16708a66d38abb1e6b5702f5c2c87e133289da36f6a1d15f6a5221085c6403", size = 10112355, upload-time = "2025-07-31T07:53:21.121Z" },
{ url = "https://files.pythonhosted.org/packages/5b/69/baa33927e29e6b4c55d798a9d44db5d394072eef2bdc18c3e2048c9ed1e9/mypy-1.17.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:89e972c0035e9e05823907ad5398c5a73b9f47a002b22359b177d40bdaee7056", size = 11875285, upload-time = "2025-07-31T07:53:55.293Z" },
{ url = "https://files.pythonhosted.org/packages/90/13/f3a89c76b0a41e19490b01e7069713a30949d9a6c147289ee1521bcea245/mypy-1.17.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:03b6d0ed2b188e35ee6d5c36b5580cffd6da23319991c49ab5556c023ccf1341", size = 12737895, upload-time = "2025-07-31T07:53:43.623Z" },
{ url = "https://files.pythonhosted.org/packages/23/a1/c4ee79ac484241301564072e6476c5a5be2590bc2e7bfd28220033d2ef8f/mypy-1.17.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c837b896b37cd103570d776bda106eabb8737aa6dd4f248451aecf53030cdbeb", size = 12931025, upload-time = "2025-07-31T07:54:17.125Z" },
{ url = "https://files.pythonhosted.org/packages/89/b8/7409477be7919a0608900e6320b155c72caab4fef46427c5cc75f85edadd/mypy-1.17.1-cp312-cp312-win_amd64.whl", hash = "sha256:665afab0963a4b39dff7c1fa563cc8b11ecff7910206db4b2e64dd1ba25aed19", size = 9584664, upload-time = "2025-07-31T07:54:12.842Z" },
{ url = "https://files.pythonhosted.org/packages/1d/f3/8fcd2af0f5b806f6cf463efaffd3c9548a28f84220493ecd38d127b6b66d/mypy-1.17.1-py3-none-any.whl", hash = "sha256:a9f52c0351c21fe24c21d8c0eb1f62967b262d6729393397b6f443c3b773c3b9", size = 2283411, upload-time = "2025-07-31T07:53:24.664Z" },
]

[[package]]
name = "mypy-boto3-bedrock-runtime"
version = "1.39.0"
@@ -4104,6 +4134,15 @@ dependencies = [
]
sdist = { url = "https://files.pythonhosted.org/packages/10/9a/e3186e760c57ee5f1c27ea5cea577a0ff9abfca51eefcb4d9a4cd39aff2e/pandoc-2.4.tar.gz", hash = "sha256:ecd1f8cbb7f4180c6b5db4a17a7c1a74df519995f5f186ef81ce72a9cbd0dd9a", size = 34635, upload-time = "2024-08-07T14:33:58.016Z" }

[[package]]
name = "pathspec"
version = "0.12.1"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/ca/bc/f35b8446f4531a7cb215605d100cd88b7ac6f44ab3fc94870c120ab3adbf/pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712", size = 51043, upload-time = "2023-12-10T22:30:45Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/cc/20/ff623b09d963f88bfde16306a54e12ee5ea43e9b597108672ff3a408aad6/pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08", size = 31191, upload-time = "2023-12-10T22:30:43.14Z" },
]

[[package]]
name = "pgvecto-rs"
version = "0.2.2"

+ 0
- 10
dev/mypy-check View File

@@ -1,10 +0,0 @@
#!/bin/bash

set -x

SCRIPT_DIR="$(dirname "$(realpath "$0")")"
cd "$SCRIPT_DIR/.."

# run mypy checks
uv run --directory api --dev --with pip \
python -m mypy --install-types --non-interactive --exclude venv --show-error-context --show-column-numbers ./

+ 25
- 14
sdks/python-client/dify_client/client.py View File

@@ -73,12 +73,12 @@ class CompletionClient(DifyClient):
class ChatClient(DifyClient):
def create_chat_message(
self,
inputs,
query,
user,
response_mode="blocking",
conversation_id=None,
files=None,
inputs: dict,
query: str,
user: str,
response_mode: str = "blocking",
conversation_id: str | None = None,
files: dict | None = None,
):
data = {
"inputs": inputs,
@@ -97,22 +97,33 @@ class ChatClient(DifyClient):
stream=True if response_mode == "streaming" else False,
)

def get_suggested(self, message_id, user: str):
def get_suggested(self, message_id: str, user: str):
params = {"user": user}
return self._send_request(
"GET", f"/messages/{message_id}/suggested", params=params
)

def stop_message(self, task_id, user):
def stop_message(self, task_id: str, user: str):
data = {"user": user}
return self._send_request("POST", f"/chat-messages/{task_id}/stop", data)

def get_conversations(self, user, last_id=None, limit=None, pinned=None):
params = {"user": user, "last_id": last_id, "limit": limit, "pinned": pinned}
def get_conversations(
self,
user: str,
last_id: str | None = None,
limit: int | None = None,
pinned: bool | None = None
):
params = {"user": user, "last_id": last_id,
"limit": limit, "pinned": pinned}
return self._send_request("GET", "/conversations", params=params)

def get_conversation_messages(
self, user, conversation_id=None, first_id=None, limit=None
self,
user: str,
conversation_id: str | None = None,
first_id: str | None = None,
limit: int | None = None
):
params = {"user": user}

@@ -126,18 +137,18 @@ class ChatClient(DifyClient):
return self._send_request("GET", "/messages", params=params)

def rename_conversation(
self, conversation_id, name, auto_generate: bool, user: str
self, conversation_id: str, name: str, auto_generate: bool, user: str
):
data = {"name": name, "auto_generate": auto_generate, "user": user}
return self._send_request(
"POST", f"/conversations/{conversation_id}/name", data
)

def delete_conversation(self, conversation_id, user):
def delete_conversation(self, conversation_id: str, user: str):
data = {"user": user}
return self._send_request("DELETE", f"/conversations/{conversation_id}", data)

def audio_to_text(self, audio_file, user):
def audio_to_text(self, audio_file: dict, user: str):
data = {"user": user}
files = {"audio_file": audio_file}
return self._send_request_with_files("POST", "/audio-to-text", data, files)
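A short usage sketch of the newly annotated ChatClient surface; the import path and api_key constructor argument follow the SDK's existing conventions, and the key and user values are placeholders.

from dify_client import ChatClient

client = ChatClient(api_key="your-api-key")  # placeholder key

# Blocking chat request; the annotations make the expected types explicit.
response = client.create_chat_message(
    inputs={},
    query="What can you do?",
    user="user-123",
    response_mode="blocking",
)

# Optional filters are typed as `| None`, so they can simply be omitted.
conversations = client.get_conversations(user="user-123", limit=20, pinned=False)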

+ 1
- 1
web/app/(commonLayout)/app/(appDetailLayout)/[appId]/overview/tracing/panel.tsx View File

@@ -27,7 +27,7 @@ const I18N_PREFIX = 'app.tracing'
const Panel: FC = () => {
const { t } = useTranslation()
const pathname = usePathname()
const matched = pathname.match(/\/app\/([^/]+)/)
const matched = /\/app\/([^/]+)/.exec(pathname)
const appId = (matched?.length && matched[1]) ? matched[1] : ''
const { isCurrentWorkspaceEditor } = useAppContext()
const readOnly = !isCurrentWorkspaceEditor

+ 1
- 1
web/app/components/app/type-selector/index.tsx View File

@@ -103,7 +103,7 @@ export const AppTypeIcon = React.memo(({ type, className, wrapperClassName, styl
return null
})

function AppTypeSelectTrigger({ values }: { values: AppSelectorProps['value'] }) {
function AppTypeSelectTrigger({ values }: { readonly values: AppSelectorProps['value'] }) {
const { t } = useTranslation()
if (!values || values.length === 0) {
return <div className={cn(

+ 1
- 1
web/app/components/apps/app-card.tsx View File

@@ -257,7 +257,7 @@ const AppCard = ({ app, onRefresh }: AppCardProps) => {
}
return (
<div className="relative flex w-full flex-col py-1" onMouseLeave={onMouseLeave}>
<button className='mx-1 flex h-8 cursor-pointer items-center gap-2 rounded-lg px-3 hover:bg-state-base-hover' onClick={onClickSettings}>
<button type="button" className='mx-1 flex h-8 cursor-pointer items-center gap-2 rounded-lg px-3 hover:bg-state-base-hover' onClick={onClickSettings}>
<span className='system-sm-regular text-text-secondary'>{t('app.editApp')}</span>
</button>
<Divider className="my-1" />

+ 1
- 1
web/app/components/base/icons/IconBase.tsx View File

@@ -18,7 +18,7 @@ const IconBase = (
ref,
...props
}: IconBaseProps & {
ref?: React.RefObject<React.MutableRefObject<HTMLOrSVGElement>>;
ref?: React.RefObject<React.RefObject<HTMLOrSVGElement>>;
},
) => {
const { data, className, onClick, style, ...restProps } = props

+ 1
- 1
web/app/components/base/icons/script.mjs View File

@@ -66,7 +66,7 @@ const Icon = (
ref,
...props
}: React.SVGProps<SVGSVGElement> & {
ref?: React.RefObject<React.MutableRefObject<HTMLOrSVGElement>>;
ref?: React.RefObject<React.RefObject<HTMLOrSVGElement>>;
},
) => <IconBase {...props} ref={ref} data={data as IconData} />


+ 1
- 1
web/app/components/base/icons/src/public/avatar/Robot.tsx View File

@@ -11,7 +11,7 @@ const Icon = (
ref,
...props
}: React.SVGProps<SVGSVGElement> & {
ref?: React.RefObject<React.MutableRefObject<HTMLOrSVGElement>>;
ref?: React.RefObject<React.RefObject<HTMLOrSVGElement>>;
},
) => <IconBase {...props} ref={ref} data={data as IconData} />


+ 1
- 1
web/app/components/base/icons/src/public/avatar/User.tsx View File

@@ -11,7 +11,7 @@ const Icon = (
ref,
...props
}: React.SVGProps<SVGSVGElement> & {
ref?: React.RefObject<React.MutableRefObject<HTMLOrSVGElement>>;
ref?: React.RefObject<React.RefObject<HTMLOrSVGElement>>;
},
) => <IconBase {...props} ref={ref} data={data as IconData} />


+ 1
- 1
web/app/components/base/icons/src/public/billing/ArCube1.tsx View File

@@ -11,7 +11,7 @@ const Icon = (
ref,
...props
}: React.SVGProps<SVGSVGElement> & {
ref?: React.RefObject<React.MutableRefObject<HTMLOrSVGElement>>;
ref?: React.RefObject<React.RefObject<HTMLOrSVGElement>>;
},
) => <IconBase {...props} ref={ref} data={data as IconData} />


+ 1
- 1
web/app/components/base/icons/src/public/billing/Asterisk.tsx View File

@@ -11,7 +11,7 @@ const Icon = (
ref,
...props
}: React.SVGProps<SVGSVGElement> & {
ref?: React.RefObject<React.MutableRefObject<HTMLOrSVGElement>>;
ref?: React.RefObject<React.RefObject<HTMLOrSVGElement>>;
},
) => <IconBase {...props} ref={ref} data={data as IconData} />


+ 1
- 1
web/app/components/base/icons/src/public/billing/AwsMarketplace.tsx View File

@@ -11,7 +11,7 @@ const Icon = (
ref,
...props
}: React.SVGProps<SVGSVGElement> & {
ref?: React.RefObject<React.MutableRefObject<HTMLOrSVGElement>>;
ref?: React.RefObject<React.RefObject<HTMLOrSVGElement>>;
},
) => <IconBase {...props} ref={ref} data={data as IconData} />


+ 1
- 1
web/app/components/base/icons/src/public/billing/Azure.tsx View File

@@ -11,7 +11,7 @@ const Icon = (
ref,
...props
}: React.SVGProps<SVGSVGElement> & {
ref?: React.RefObject<React.MutableRefObject<HTMLOrSVGElement>>;
ref?: React.RefObject<React.RefObject<HTMLOrSVGElement>>;
},
) => <IconBase {...props} ref={ref} data={data as IconData} />


+ 1
- 1
web/app/components/base/icons/src/public/billing/Buildings.tsx View File

@@ -11,7 +11,7 @@ const Icon = (
ref,
...props
}: React.SVGProps<SVGSVGElement> & {
ref?: React.RefObject<React.MutableRefObject<HTMLOrSVGElement>>;
ref?: React.RefObject<React.RefObject<HTMLOrSVGElement>>;
},
) => <IconBase {...props} ref={ref} data={data as IconData} />


+ 1
- 1
web/app/components/base/icons/src/public/billing/Diamond.tsx View File

@@ -11,7 +11,7 @@ const Icon = (
ref,
...props
}: React.SVGProps<SVGSVGElement> & {
ref?: React.RefObject<React.MutableRefObject<HTMLOrSVGElement>>;
ref?: React.RefObject<React.RefObject<HTMLOrSVGElement>>;
},
) => <IconBase {...props} ref={ref} data={data as IconData} />


+ 1
- 1
web/app/components/base/icons/src/public/billing/GoogleCloud.tsx View File

@@ -11,7 +11,7 @@ const Icon = (
ref,
...props
}: React.SVGProps<SVGSVGElement> & {
ref?: React.RefObject<React.MutableRefObject<HTMLOrSVGElement>>;
ref?: React.RefObject<React.RefObject<HTMLOrSVGElement>>;
},
) => <IconBase {...props} ref={ref} data={data as IconData} />


+ 1
- 1
web/app/components/base/icons/src/public/billing/Group2.tsx View File

@@ -11,7 +11,7 @@ const Icon = (
ref,
...props
}: React.SVGProps<SVGSVGElement> & {
ref?: React.RefObject<React.MutableRefObject<HTMLOrSVGElement>>;
ref?: React.RefObject<React.RefObject<HTMLOrSVGElement>>;
},
) => <IconBase {...props} ref={ref} data={data as IconData} />


+ 1
- 1
web/app/components/base/icons/src/public/billing/Keyframe.tsx View File

@@ -11,7 +11,7 @@ const Icon = (
ref,
...props
}: React.SVGProps<SVGSVGElement> & {
ref?: React.RefObject<React.MutableRefObject<HTMLOrSVGElement>>;
ref?: React.RefObject<React.RefObject<HTMLOrSVGElement>>;
},
) => <IconBase {...props} ref={ref} data={data as IconData} />


+ 1
- 1
web/app/components/base/icons/src/public/billing/Sparkles.tsx View File

@@ -11,7 +11,7 @@ const Icon = (
ref,
...props
}: React.SVGProps<SVGSVGElement> & {
ref?: React.RefObject<React.MutableRefObject<HTMLOrSVGElement>>;
ref?: React.RefObject<React.RefObject<HTMLOrSVGElement>>;
},
) => <IconBase {...props} ref={ref} data={data as IconData} />


+ 1
- 1
web/app/components/base/icons/src/public/billing/SparklesSoft.tsx View File

@@ -11,7 +11,7 @@ const Icon = (
ref,
...props
}: React.SVGProps<SVGSVGElement> & {
ref?: React.RefObject<React.MutableRefObject<HTMLOrSVGElement>>;
ref?: React.RefObject<React.RefObject<HTMLOrSVGElement>>;
},
) => <IconBase {...props} ref={ref} data={data as IconData} />


+ 1
- 1
web/app/components/base/icons/src/public/common/D.tsx View File

@@ -11,7 +11,7 @@ const Icon = (
ref,
...props
}: React.SVGProps<SVGSVGElement> & {
ref?: React.RefObject<React.MutableRefObject<HTMLOrSVGElement>>;
ref?: React.RefObject<React.RefObject<HTMLOrSVGElement>>;
},
) => <IconBase {...props} ref={ref} data={data as IconData} />


+ 1
- 1
web/app/components/base/icons/src/public/common/DiagonalDividingLine.tsx View File

@@ -11,7 +11,7 @@ const Icon = (
ref,
...props
}: React.SVGProps<SVGSVGElement> & {
ref?: React.RefObject<React.MutableRefObject<HTMLOrSVGElement>>;
ref?: React.RefObject<React.RefObject<HTMLOrSVGElement>>;
},
) => <IconBase {...props} ref={ref} data={data as IconData} />


+ 1
- 1
web/app/components/base/icons/src/public/common/Dify.tsx View File

@@ -11,7 +11,7 @@ const Icon = (
ref,
...props
}: React.SVGProps<SVGSVGElement> & {
ref?: React.RefObject<React.MutableRefObject<HTMLOrSVGElement>>;
ref?: React.RefObject<React.RefObject<HTMLOrSVGElement>>;
},
) => <IconBase {...props} ref={ref} data={data as IconData} />


+ 1
- 1
web/app/components/base/icons/src/public/common/Gdpr.tsx View File

@@ -11,7 +11,7 @@ const Icon = (
ref,
...props
}: React.SVGProps<SVGSVGElement> & {
ref?: React.RefObject<React.MutableRefObject<HTMLOrSVGElement>>;
ref?: React.RefObject<React.RefObject<HTMLOrSVGElement>>;
},
) => <IconBase {...props} ref={ref} data={data as IconData} />


+ 1
- 1
web/app/components/base/icons/src/public/common/Github.tsx View File

@@ -11,7 +11,7 @@ const Icon = (
ref,
...props
}: React.SVGProps<SVGSVGElement> & {
ref?: React.RefObject<React.MutableRefObject<HTMLOrSVGElement>>;
ref?: React.RefObject<React.RefObject<HTMLOrSVGElement>>;
},
) => <IconBase {...props} ref={ref} data={data as IconData} />


+ 1
- 1
web/app/components/base/icons/src/public/common/Highlight.tsx View File

@@ -11,7 +11,7 @@ const Icon = (
ref,
...props
}: React.SVGProps<SVGSVGElement> & {
ref?: React.RefObject<React.MutableRefObject<HTMLOrSVGElement>>;
ref?: React.RefObject<React.RefObject<HTMLOrSVGElement>>;
},
) => <IconBase {...props} ref={ref} data={data as IconData} />


+ 1
- 1
web/app/components/base/icons/src/public/common/Iso.tsx View File

@@ -11,7 +11,7 @@ const Icon = (
ref,
...props
}: React.SVGProps<SVGSVGElement> & {
ref?: React.RefObject<React.MutableRefObject<HTMLOrSVGElement>>;
ref?: React.RefObject<React.RefObject<HTMLOrSVGElement>>;
},
) => <IconBase {...props} ref={ref} data={data as IconData} />


+ 1
- 1
web/app/components/base/icons/src/public/common/Line3.tsx View File

@@ -11,7 +11,7 @@ const Icon = (
ref,
...props
}: React.SVGProps<SVGSVGElement> & {
ref?: React.RefObject<React.MutableRefObject<HTMLOrSVGElement>>;
ref?: React.RefObject<React.RefObject<HTMLOrSVGElement>>;
},
) => <IconBase {...props} ref={ref} data={data as IconData} />


+ 1
- 1
web/app/components/base/icons/src/public/common/Lock.tsx View File

@@ -11,7 +11,7 @@ const Icon = (
ref,
...props
}: React.SVGProps<SVGSVGElement> & {
ref?: React.RefObject<React.MutableRefObject<HTMLOrSVGElement>>;
ref?: React.RefObject<React.RefObject<HTMLOrSVGElement>>;
},
) => <IconBase {...props} ref={ref} data={data as IconData} />


+ 1
- 1
web/app/components/base/icons/src/public/common/MessageChatSquare.tsx View File

@@ -11,7 +11,7 @@ const Icon = (
ref,
...props
}: React.SVGProps<SVGSVGElement> & {
ref?: React.RefObject<React.MutableRefObject<HTMLOrSVGElement>>;
ref?: React.RefObject<React.RefObject<HTMLOrSVGElement>>;
},
) => <IconBase {...props} ref={ref} data={data as IconData} />


+ 1
- 1
web/app/components/base/icons/src/public/common/MultiPathRetrieval.tsx View File

@@ -11,7 +11,7 @@ const Icon = (
ref,
...props
}: React.SVGProps<SVGSVGElement> & {
ref?: React.RefObject<React.MutableRefObject<HTMLOrSVGElement>>;
ref?: React.RefObject<React.RefObject<HTMLOrSVGElement>>;
},
) => <IconBase {...props} ref={ref} data={data as IconData} />


+ 1
- 1
web/app/components/base/icons/src/public/common/NTo1Retrieval.tsx View File

@@ -11,7 +11,7 @@ const Icon = (
ref,
...props
}: React.SVGProps<SVGSVGElement> & {
ref?: React.RefObject<React.MutableRefObject<HTMLOrSVGElement>>;
ref?: React.RefObject<React.RefObject<HTMLOrSVGElement>>;
},
) => <IconBase {...props} ref={ref} data={data as IconData} />


+ 1
- 1
web/app/components/base/icons/src/public/common/Notion.tsx View File

@@ -11,7 +11,7 @@ const Icon = (
ref,
...props
}: React.SVGProps<SVGSVGElement> & {
ref?: React.RefObject<React.MutableRefObject<HTMLOrSVGElement>>;
ref?: React.RefObject<React.RefObject<HTMLOrSVGElement>>;
},
) => <IconBase {...props} ref={ref} data={data as IconData} />


+ 1
- 1
web/app/components/base/icons/src/public/common/Soc2.tsx View File

@@ -11,7 +11,7 @@ const Icon = (
ref,
...props
}: React.SVGProps<SVGSVGElement> & {
ref?: React.RefObject<React.MutableRefObject<HTMLOrSVGElement>>;
ref?: React.RefObject<React.RefObject<HTMLOrSVGElement>>;
},
) => <IconBase {...props} ref={ref} data={data as IconData} />


+ 1
- 1
web/app/components/base/icons/src/public/common/SparklesSoft.tsx View File

@@ -11,7 +11,7 @@ const Icon = (
ref,
...props
}: React.SVGProps<SVGSVGElement> & {
ref?: React.RefObject<React.MutableRefObject<HTMLOrSVGElement>>;
ref?: React.RefObject<React.RefObject<HTMLOrSVGElement>>;
},
) => <IconBase {...props} ref={ref} data={data as IconData} />


+ 1
- 1
web/app/components/base/icons/src/public/common/SparklesSoftAccent.tsx View File

@@ -11,7 +11,7 @@ const Icon = (
ref,
...props
}: React.SVGProps<SVGSVGElement> & {
ref?: React.RefObject<React.MutableRefObject<HTMLOrSVGElement>>;
ref?: React.RefObject<React.RefObject<HTMLOrSVGElement>>;
},
) => <IconBase {...props} ref={ref} data={data as IconData} />


+ 1
- 1
web/app/components/base/icons/src/public/education/Triangle.tsx View File

@@ -11,7 +11,7 @@ const Icon = (
ref,
...props
}: React.SVGProps<SVGSVGElement> & {
ref?: React.RefObject<React.MutableRefObject<HTMLOrSVGElement>>;
ref?: React.RefObject<React.RefObject<HTMLOrSVGElement>>;
},
) => <IconBase {...props} ref={ref} data={data as IconData} />


+ 1
- 1
web/app/components/base/icons/src/public/files/Csv.tsx View File

@@ -11,7 +11,7 @@ const Icon = (
ref,
...props
}: React.SVGProps<SVGSVGElement> & {
ref?: React.RefObject<React.MutableRefObject<HTMLOrSVGElement>>;
ref?: React.RefObject<React.RefObject<HTMLOrSVGElement>>;
},
) => <IconBase {...props} ref={ref} data={data as IconData} />


+ 1
- 1
web/app/components/base/icons/src/public/files/Doc.tsx View File

@@ -11,7 +11,7 @@ const Icon = (
ref,
...props
}: React.SVGProps<SVGSVGElement> & {
ref?: React.RefObject<React.MutableRefObject<HTMLOrSVGElement>>;
ref?: React.RefObject<React.RefObject<HTMLOrSVGElement>>;
},
) => <IconBase {...props} ref={ref} data={data as IconData} />


+ 1
- 1
web/app/components/base/icons/src/public/files/Docx.tsx View File

@@ -11,7 +11,7 @@ const Icon = (
ref,
...props
}: React.SVGProps<SVGSVGElement> & {
ref?: React.RefObject<React.MutableRefObject<HTMLOrSVGElement>>;
ref?: React.RefObject<React.RefObject<HTMLOrSVGElement>>;
},
) => <IconBase {...props} ref={ref} data={data as IconData} />


+ 1
- 1
web/app/components/base/icons/src/public/files/Html.tsx View File

@@ -11,7 +11,7 @@ const Icon = (
ref,
...props
}: React.SVGProps<SVGSVGElement> & {
ref?: React.RefObject<React.MutableRefObject<HTMLOrSVGElement>>;
ref?: React.RefObject<React.RefObject<HTMLOrSVGElement>>;
},
) => <IconBase {...props} ref={ref} data={data as IconData} />


+ 1
- 1
web/app/components/base/icons/src/public/files/Json.tsx View File

@@ -11,7 +11,7 @@ const Icon = (
ref,
...props
}: React.SVGProps<SVGSVGElement> & {
ref?: React.RefObject<React.MutableRefObject<HTMLOrSVGElement>>;
ref?: React.RefObject<React.RefObject<HTMLOrSVGElement>>;
},
) => <IconBase {...props} ref={ref} data={data as IconData} />


+ 1
- 1
web/app/components/base/icons/src/public/files/Md.tsx View File

@@ -11,7 +11,7 @@ const Icon = (
ref,
...props
}: React.SVGProps<SVGSVGElement> & {
ref?: React.RefObject<React.MutableRefObject<HTMLOrSVGElement>>;
ref?: React.RefObject<React.RefObject<HTMLOrSVGElement>>;
},
) => <IconBase {...props} ref={ref} data={data as IconData} />


+ 1
- 1
web/app/components/base/icons/src/public/files/Pdf.tsx View File

@@ -11,7 +11,7 @@ const Icon = (
ref,
...props
}: React.SVGProps<SVGSVGElement> & {
ref?: React.RefObject<React.MutableRefObject<HTMLOrSVGElement>>;
ref?: React.RefObject<React.RefObject<HTMLOrSVGElement>>;
},
) => <IconBase {...props} ref={ref} data={data as IconData} />


+ 1
- 1
web/app/components/base/icons/src/public/files/Txt.tsx View File

@@ -11,7 +11,7 @@ const Icon = (
ref,
...props
}: React.SVGProps<SVGSVGElement> & {
ref?: React.RefObject<React.MutableRefObject<HTMLOrSVGElement>>;
ref?: React.RefObject<React.RefObject<HTMLOrSVGElement>>;
},
) => <IconBase {...props} ref={ref} data={data as IconData} />


+ 1
- 1
web/app/components/base/icons/src/public/files/Unknown.tsx View File

@@ -11,7 +11,7 @@ const Icon = (
ref,
...props
}: React.SVGProps<SVGSVGElement> & {
ref?: React.RefObject<React.MutableRefObject<HTMLOrSVGElement>>;
ref?: React.RefObject<React.RefObject<HTMLOrSVGElement>>;
},
) => <IconBase {...props} ref={ref} data={data as IconData} />


+ 1
- 1
web/app/components/base/icons/src/public/files/Xlsx.tsx View File

@@ -11,7 +11,7 @@ const Icon = (
ref,
...props
}: React.SVGProps<SVGSVGElement> & {
ref?: React.RefObject<React.MutableRefObject<HTMLOrSVGElement>>;
ref?: React.RefObject<React.RefObject<HTMLOrSVGElement>>;
},
) => <IconBase {...props} ref={ref} data={data as IconData} />


+ 1
- 1
web/app/components/base/icons/src/public/files/Yaml.tsx View File

@@ -11,7 +11,7 @@ const Icon = (
ref,
...props
}: React.SVGProps<SVGSVGElement> & {
ref?: React.RefObject<React.MutableRefObject<HTMLOrSVGElement>>;
ref?: React.RefObject<React.RefObject<HTMLOrSVGElement>>;
},
) => <IconBase {...props} ref={ref} data={data as IconData} />


+ 1
- 1
web/app/components/base/icons/src/public/knowledge/Chunk.tsx View File

@@ -11,7 +11,7 @@ const Icon = (
ref,
...props
}: React.SVGProps<SVGSVGElement> & {
ref?: React.RefObject<React.MutableRefObject<HTMLOrSVGElement>>;
ref?: React.RefObject<React.RefObject<HTMLOrSVGElement>>;
},
) => <IconBase {...props} ref={ref} data={data as IconData} />


+ 1
- 1
web/app/components/base/icons/src/public/knowledge/Collapse.tsx View File

@@ -11,7 +11,7 @@ const Icon = (
ref,
...props
}: React.SVGProps<SVGSVGElement> & {
ref?: React.RefObject<React.MutableRefObject<HTMLOrSVGElement>>;
ref?: React.RefObject<React.RefObject<HTMLOrSVGElement>>;
},
) => <IconBase {...props} ref={ref} data={data as IconData} />


+ 1
- 1
web/app/components/base/icons/src/public/knowledge/GeneralType.tsx View File

@@ -11,7 +11,7 @@ const Icon = (
ref,
...props
}: React.SVGProps<SVGSVGElement> & {
ref?: React.RefObject<React.MutableRefObject<HTMLOrSVGElement>>;
ref?: React.RefObject<React.RefObject<HTMLOrSVGElement>>;
},
) => <IconBase {...props} ref={ref} data={data as IconData} />


+ 1
- 1
web/app/components/base/icons/src/public/knowledge/LayoutRight2LineMod.tsx View File

@@ -11,7 +11,7 @@ const Icon = (
ref,
...props
}: React.SVGProps<SVGSVGElement> & {
ref?: React.RefObject<React.MutableRefObject<HTMLOrSVGElement>>;
ref?: React.RefObject<React.RefObject<HTMLOrSVGElement>>;
},
) => <IconBase {...props} ref={ref} data={data as IconData} />


+ 1
- 1
web/app/components/base/icons/src/public/knowledge/ParentChildType.tsx View File

@@ -11,7 +11,7 @@ const Icon = (
ref,
...props
}: React.SVGProps<SVGSVGElement> & {
ref?: React.RefObject<React.MutableRefObject<HTMLOrSVGElement>>;
ref?: React.RefObject<React.RefObject<HTMLOrSVGElement>>;
},
) => <IconBase {...props} ref={ref} data={data as IconData} />


+ 1
- 1
web/app/components/base/icons/src/public/knowledge/SelectionMod.tsx View File

@@ -11,7 +11,7 @@ const Icon = (
ref,
...props
}: React.SVGProps<SVGSVGElement> & {
ref?: React.RefObject<React.MutableRefObject<HTMLOrSVGElement>>;
ref?: React.RefObject<React.RefObject<HTMLOrSVGElement>>;
},
) => <IconBase {...props} ref={ref} data={data as IconData} />


+ 1
- 1
web/app/components/base/icons/src/public/llm/Anthropic.tsx View File

@@ -11,7 +11,7 @@ const Icon = (
ref,
...props
}: React.SVGProps<SVGSVGElement> & {
ref?: React.RefObject<React.MutableRefObject<HTMLOrSVGElement>>;
ref?: React.RefObject<React.RefObject<HTMLOrSVGElement>>;
},
) => <IconBase {...props} ref={ref} data={data as IconData} />


+ 1
- 1
web/app/components/base/icons/src/public/llm/AnthropicDark.tsx View File

@@ -11,7 +11,7 @@ const Icon = (
ref,
...props
}: React.SVGProps<SVGSVGElement> & {
ref?: React.RefObject<React.MutableRefObject<HTMLOrSVGElement>>;
ref?: React.RefObject<React.RefObject<HTMLOrSVGElement>>;
},
) => <IconBase {...props} ref={ref} data={data as IconData} />


+ 1
- 1
web/app/components/base/icons/src/public/llm/AnthropicLight.tsx View File

@@ -11,7 +11,7 @@ const Icon = (
ref,
...props
}: React.SVGProps<SVGSVGElement> & {
ref?: React.RefObject<React.MutableRefObject<HTMLOrSVGElement>>;
ref?: React.RefObject<React.RefObject<HTMLOrSVGElement>>;
},
) => <IconBase {...props} ref={ref} data={data as IconData} />


+ 1
- 1
web/app/components/base/icons/src/public/llm/AnthropicText.tsx View File

@@ -11,7 +11,7 @@ const Icon = (
ref,
...props
}: React.SVGProps<SVGSVGElement> & {
ref?: React.RefObject<React.MutableRefObject<HTMLOrSVGElement>>;
ref?: React.RefObject<React.RefObject<HTMLOrSVGElement>>;
},
) => <IconBase {...props} ref={ref} data={data as IconData} />


+ 1
- 1
web/app/components/base/icons/src/public/llm/AzureOpenaiService.tsx View File

@@ -11,7 +11,7 @@ const Icon = (
ref,
...props
}: React.SVGProps<SVGSVGElement> & {
ref?: React.RefObject<React.MutableRefObject<HTMLOrSVGElement>>;
ref?: React.RefObject<React.RefObject<HTMLOrSVGElement>>;
},
) => <IconBase {...props} ref={ref} data={data as IconData} />


+ 1
- 1
web/app/components/base/icons/src/public/llm/AzureOpenaiServiceText.tsx View File

@@ -11,7 +11,7 @@ const Icon = (
ref,
...props
}: React.SVGProps<SVGSVGElement> & {
ref?: React.RefObject<React.MutableRefObject<HTMLOrSVGElement>>;
ref?: React.RefObject<React.RefObject<HTMLOrSVGElement>>;
},
) => <IconBase {...props} ref={ref} data={data as IconData} />


+ 1
- 1
web/app/components/base/icons/src/public/llm/Azureai.tsx View File

@@ -11,7 +11,7 @@ const Icon = (
ref,
...props
}: React.SVGProps<SVGSVGElement> & {
ref?: React.RefObject<React.MutableRefObject<HTMLOrSVGElement>>;
ref?: React.RefObject<React.RefObject<HTMLOrSVGElement>>;
},
) => <IconBase {...props} ref={ref} data={data as IconData} />


+ 1
- 1
web/app/components/base/icons/src/public/llm/AzureaiText.tsx View File

@@ -11,7 +11,7 @@ const Icon = (
ref,
...props
}: React.SVGProps<SVGSVGElement> & {
ref?: React.RefObject<React.MutableRefObject<HTMLOrSVGElement>>;
ref?: React.RefObject<React.RefObject<HTMLOrSVGElement>>;
},
) => <IconBase {...props} ref={ref} data={data as IconData} />


+ 1
- 1
web/app/components/base/icons/src/public/llm/Baichuan.tsx View File

@@ -11,7 +11,7 @@ const Icon = (
ref,
...props
}: React.SVGProps<SVGSVGElement> & {
ref?: React.RefObject<React.MutableRefObject<HTMLOrSVGElement>>;
ref?: React.RefObject<React.RefObject<HTMLOrSVGElement>>;
},
) => <IconBase {...props} ref={ref} data={data as IconData} />


+ 1
- 1
web/app/components/base/icons/src/public/llm/BaichuanText.tsx View File

@@ -11,7 +11,7 @@ const Icon = (
ref,
...props
}: React.SVGProps<SVGSVGElement> & {
ref?: React.RefObject<React.MutableRefObject<HTMLOrSVGElement>>;
ref?: React.RefObject<React.RefObject<HTMLOrSVGElement>>;
},
) => <IconBase {...props} ref={ref} data={data as IconData} />


+ 1
- 1
web/app/components/base/icons/src/public/llm/Chatglm.tsx View File

@@ -11,7 +11,7 @@ const Icon = (
ref,
...props
}: React.SVGProps<SVGSVGElement> & {
ref?: React.RefObject<React.MutableRefObject<HTMLOrSVGElement>>;
ref?: React.RefObject<React.RefObject<HTMLOrSVGElement>>;
},
) => <IconBase {...props} ref={ref} data={data as IconData} />


+ 1
- 1
web/app/components/base/icons/src/public/llm/ChatglmText.tsx View File

@@ -11,7 +11,7 @@ const Icon = (
ref,
...props
}: React.SVGProps<SVGSVGElement> & {
ref?: React.RefObject<React.MutableRefObject<HTMLOrSVGElement>>;
ref?: React.RefObject<React.RefObject<HTMLOrSVGElement>>;
},
) => <IconBase {...props} ref={ref} data={data as IconData} />


+ 1
- 1
web/app/components/base/icons/src/public/llm/Cohere.tsx View File

@@ -11,7 +11,7 @@ const Icon = (
ref,
...props
}: React.SVGProps<SVGSVGElement> & {
ref?: React.RefObject<React.MutableRefObject<HTMLOrSVGElement>>;
ref?: React.RefObject<React.RefObject<HTMLOrSVGElement>>;
},
) => <IconBase {...props} ref={ref} data={data as IconData} />


+ 1
- 1
web/app/components/base/icons/src/public/llm/CohereText.tsx View File

@@ -11,7 +11,7 @@ const Icon = (
ref,
...props
}: React.SVGProps<SVGSVGElement> & {
ref?: React.RefObject<React.MutableRefObject<HTMLOrSVGElement>>;
ref?: React.RefObject<React.RefObject<HTMLOrSVGElement>>;
},
) => <IconBase {...props} ref={ref} data={data as IconData} />


+ 1
- 1
web/app/components/base/icons/src/public/llm/Gpt3.tsx View File

@@ -11,7 +11,7 @@ const Icon = (
ref,
...props
}: React.SVGProps<SVGSVGElement> & {
ref?: React.RefObject<React.MutableRefObject<HTMLOrSVGElement>>;
ref?: React.RefObject<React.RefObject<HTMLOrSVGElement>>;
},
) => <IconBase {...props} ref={ref} data={data as IconData} />


+ 1
- 1
web/app/components/base/icons/src/public/llm/Gpt4.tsx View File

@@ -11,7 +11,7 @@ const Icon = (
ref,
...props
}: React.SVGProps<SVGSVGElement> & {
ref?: React.RefObject<React.MutableRefObject<HTMLOrSVGElement>>;
ref?: React.RefObject<React.RefObject<HTMLOrSVGElement>>;
},
) => <IconBase {...props} ref={ref} data={data as IconData} />


+ 1
- 1
web/app/components/base/icons/src/public/llm/Huggingface.tsx View File

@@ -11,7 +11,7 @@ const Icon = (
ref,
...props
}: React.SVGProps<SVGSVGElement> & {
ref?: React.RefObject<React.MutableRefObject<HTMLOrSVGElement>>;
ref?: React.RefObject<React.RefObject<HTMLOrSVGElement>>;
},
) => <IconBase {...props} ref={ref} data={data as IconData} />


+ 1
- 1
web/app/components/base/icons/src/public/llm/HuggingfaceText.tsx View File

@@ -11,7 +11,7 @@ const Icon = (
ref,
...props
}: React.SVGProps<SVGSVGElement> & {
ref?: React.RefObject<React.MutableRefObject<HTMLOrSVGElement>>;
ref?: React.RefObject<React.RefObject<HTMLOrSVGElement>>;
},
) => <IconBase {...props} ref={ref} data={data as IconData} />


+ 1
- 1
web/app/components/base/icons/src/public/llm/HuggingfaceTextHub.tsx View File

@@ -11,7 +11,7 @@ const Icon = (
ref,
...props
}: React.SVGProps<SVGSVGElement> & {
ref?: React.RefObject<React.MutableRefObject<HTMLOrSVGElement>>;
ref?: React.RefObject<React.RefObject<HTMLOrSVGElement>>;
},
) => <IconBase {...props} ref={ref} data={data as IconData} />


+ 1
- 1
web/app/components/base/icons/src/public/llm/IflytekSpark.tsx View File

@@ -11,7 +11,7 @@ const Icon = (
ref,
...props
}: React.SVGProps<SVGSVGElement> & {
ref?: React.RefObject<React.MutableRefObject<HTMLOrSVGElement>>;
ref?: React.RefObject<React.RefObject<HTMLOrSVGElement>>;
},
) => <IconBase {...props} ref={ref} data={data as IconData} />


+ 0
- 0
web/app/components/base/icons/src/public/llm/IflytekSparkText.tsx View File


Some files were not shown because too many files changed in this diff
