| @@ -0,0 +1,27 @@ | |||
| name: "✨ Refactor" | |||
| description: Refactor existing code for improved readability and maintainability. | |||
| title: "[Chore/Refactor] " | |||
| labels: | |||
| - refactor | |||
| body: | |||
| - type: textarea | |||
| id: description | |||
| attributes: | |||
| label: Description | |||
| placeholder: "Describe the refactor you are proposing." | |||
| validations: | |||
| required: true | |||
| - type: textarea | |||
| id: motivation | |||
| attributes: | |||
| label: Motivation | |||
| placeholder: "Explain why this refactor is necessary." | |||
| validations: | |||
| required: false | |||
| - type: textarea | |||
| id: additional-context | |||
| attributes: | |||
| label: Additional Context | |||
| placeholder: "Add any other context or screenshots about the request here." | |||
| validations: | |||
| required: false | |||
| @@ -7,6 +7,7 @@ on: | |||
| - "deploy/dev" | |||
| - "deploy/enterprise" | |||
| - "build/**" | |||
| - "release/e-*" | |||
| - "deploy/rag-dev" | |||
| tags: | |||
| - "*" | |||
| @@ -225,14 +225,15 @@ class AnnotationBatchImportApi(Resource): | |||
| raise Forbidden() | |||
| app_id = str(app_id) | |||
| # get file from request | |||
| file = request.files["file"] | |||
| # check file | |||
| if "file" not in request.files: | |||
| raise NoFileUploadedError() | |||
| if len(request.files) > 1: | |||
| raise TooManyFilesError() | |||
| # get file from request | |||
| file = request.files["file"] | |||
| # check file type | |||
| if not file.filename or not file.filename.lower().endswith(".csv"): | |||
| raise ValueError("Invalid file type. Only CSV files are allowed") | |||
| @@ -58,21 +58,38 @@ class InstalledAppsListApi(Resource): | |||
| # filter out apps that user doesn't have access to | |||
| if FeatureService.get_system_features().webapp_auth.enabled: | |||
| user_id = current_user.id | |||
| res = [] | |||
| app_ids = [installed_app["app"].id for installed_app in installed_app_list] | |||
| webapp_settings = EnterpriseService.WebAppAuth.batch_get_app_access_mode_by_id(app_ids) | |||
| # Pre-filter out apps that have no webapp setting or use sso_verified access mode | |||
| filtered_installed_apps = [] | |||
| app_id_to_app_code = {} | |||
| for installed_app in installed_app_list: | |||
| webapp_setting = webapp_settings.get(installed_app["app"].id) | |||
| if not webapp_setting: | |||
| app_id = installed_app["app"].id | |||
| webapp_setting = webapp_settings.get(app_id) | |||
| if not webapp_setting or webapp_setting.access_mode == "sso_verified": | |||
| continue | |||
| if webapp_setting.access_mode == "sso_verified": | |||
| continue | |||
| app_code = AppService.get_app_code_by_id(str(installed_app["app"].id)) | |||
| if EnterpriseService.WebAppAuth.is_user_allowed_to_access_webapp( | |||
| user_id=user_id, | |||
| app_code=app_code, | |||
| ): | |||
| app_code = AppService.get_app_code_by_id(str(app_id)) | |||
| app_id_to_app_code[app_id] = app_code | |||
| filtered_installed_apps.append(installed_app) | |||
| app_codes = list(app_id_to_app_code.values()) | |||
| # Batch permission check | |||
| permissions = EnterpriseService.WebAppAuth.batch_is_user_allowed_to_access_webapps( | |||
| user_id=user_id, | |||
| app_codes=app_codes, | |||
| ) | |||
| # Keep only allowed apps | |||
| res = [] | |||
| for installed_app in filtered_installed_apps: | |||
| app_id = installed_app["app"].id | |||
| app_code = app_id_to_app_code[app_id] | |||
| if permissions.get(app_code): | |||
| res.append(installed_app) | |||
| installed_app_list = res | |||
| logger.debug("installed_app_list: %s, user_id: %s", installed_app_list, user_id) | |||
| @@ -208,6 +208,7 @@ class BasePluginClient: | |||
| except Exception: | |||
| raise PluginDaemonInnerError(code=rep.code, message=rep.message) | |||
| logger.error("Error in stream reponse for plugin %s", rep.__dict__) | |||
| self._handle_plugin_daemon_error(error.error_type, error.message) | |||
| raise ValueError(f"plugin daemon: {rep.message}, code: {rep.code}") | |||
| if rep.data is None: | |||
| @@ -2,6 +2,8 @@ from collections.abc import Mapping | |||
| from pydantic import TypeAdapter | |||
| from extensions.ext_logging import get_request_id | |||
| class PluginDaemonError(Exception): | |||
| """Base class for all plugin daemon errors.""" | |||
| @@ -11,7 +13,7 @@ class PluginDaemonError(Exception): | |||
| def __str__(self) -> str: | |||
| # returns the request id, class name and description | |||
| return f"{self.__class__.__name__}: {self.description}" | |||
| return f"req_id: {get_request_id()} {self.__class__.__name__}: {self.description}" | |||
| class PluginDaemonInternalError(PluginDaemonError): | |||
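Prefixing the request id in `__str__` makes plugin daemon errors correlatable with request logs. A self-contained sketch of the same idea, using a `contextvars`-backed request id in place of Dify's `get_request_id` helper:

```python
import contextvars
import uuid

# Stand-in for extensions.ext_logging.get_request_id.
_request_id: contextvars.ContextVar[str] = contextvars.ContextVar("request_id", default="-")


def get_request_id() -> str:
    return _request_id.get()


class DaemonError(Exception):
    def __init__(self, description: str) -> None:
        self.description = description

    def __str__(self) -> str:
        # Same shape as PluginDaemonError.__str__ above.
        return f"req_id: {get_request_id()} {self.__class__.__name__}: {self.description}"


_request_id.set(uuid.uuid4().hex)
print(str(DaemonError("plugin timed out")))
# e.g. "req_id: 3f2c... DaemonError: plugin timed out"
```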
| @@ -5,14 +5,13 @@ from __future__ import annotations | |||
| from typing import Any, Optional | |||
| from core.model_manager import ModelInstance | |||
| from core.model_runtime.model_providers.__base.tokenizers.gpt2_tokenzier import GPT2Tokenizer | |||
| from core.model_runtime.model_providers.__base.tokenizers.gpt2_tokenizer import GPT2Tokenizer | |||
| from core.rag.splitter.text_splitter import ( | |||
| TS, | |||
| Collection, | |||
| Literal, | |||
| RecursiveCharacterTextSplitter, | |||
| Set, | |||
| TokenTextSplitter, | |||
| Union, | |||
| ) | |||
| @@ -45,14 +44,6 @@ class EnhanceRecursiveCharacterTextSplitter(RecursiveCharacterTextSplitter): | |||
| return [len(text) for text in texts] | |||
| if issubclass(cls, TokenTextSplitter): | |||
| extra_kwargs = { | |||
| "model_name": embedding_model_instance.model if embedding_model_instance else "gpt2", | |||
| "allowed_special": allowed_special, | |||
| "disallowed_special": disallowed_special, | |||
| } | |||
| kwargs = {**kwargs, **extra_kwargs} | |||
| return cls(length_function=_character_encoder, **kwargs) | |||
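After this change, `from_encoder` always returns a splitter whose `length_function` counts characters rather than GPT-2 tokens. A toy sketch (not Dify's splitter) showing how such a length function drives chunking:

```python
from collections.abc import Callable


def _character_encoder(texts: list[str]) -> list[int]:
    # Mirrors the length function above: one length per input text.
    return [len(text) for text in texts]


def split_text(text: str, chunk_size: int, length_function: Callable[[list[str]], list[int]]) -> list[str]:
    """Greedy word-based splitter driven entirely by the supplied length function."""
    chunks: list[str] = []
    current = ""
    for word in text.split():
        candidate = f"{current} {word}".strip()
        if current and length_function([candidate])[0] > chunk_size:
            chunks.append(current)
            current = word
        else:
            current = candidate
    if current:
        chunks.append(current)
    return chunks


print(split_text("one two three four five six", chunk_size=10, length_function=_character_encoder))
# ['one two', 'three four', 'five six']
```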
| @@ -20,9 +20,6 @@ class Tool(ABC): | |||
| The base class of a tool | |||
| """ | |||
| entity: ToolEntity | |||
| runtime: ToolRuntime | |||
| def __init__(self, entity: ToolEntity, runtime: ToolRuntime) -> None: | |||
| self.entity = entity | |||
| self.runtime = runtime | |||
| @@ -20,8 +20,6 @@ class BuiltinTool(Tool): | |||
| :param meta: the meta data of a tool call processing | |||
| """ | |||
| provider: str | |||
| def __init__(self, provider: str, **kwargs): | |||
| super().__init__(**kwargs) | |||
| self.provider = provider | |||
| @@ -21,9 +21,6 @@ API_TOOL_DEFAULT_TIMEOUT = ( | |||
| class ApiTool(Tool): | |||
| api_bundle: ApiToolBundle | |||
| provider_id: str | |||
| """ | |||
| Api tool | |||
| """ | |||
| @@ -8,23 +8,16 @@ from core.mcp.mcp_client import MCPClient | |||
| from core.mcp.types import ImageContent, TextContent | |||
| from core.tools.__base.tool import Tool | |||
| from core.tools.__base.tool_runtime import ToolRuntime | |||
| from core.tools.entities.tool_entities import ToolEntity, ToolInvokeMessage, ToolParameter, ToolProviderType | |||
| from core.tools.entities.tool_entities import ToolEntity, ToolInvokeMessage, ToolProviderType | |||
| class MCPTool(Tool): | |||
| tenant_id: str | |||
| icon: str | |||
| runtime_parameters: Optional[list[ToolParameter]] | |||
| server_url: str | |||
| provider_id: str | |||
| def __init__( | |||
| self, entity: ToolEntity, runtime: ToolRuntime, tenant_id: str, icon: str, server_url: str, provider_id: str | |||
| ) -> None: | |||
| super().__init__(entity, runtime) | |||
| self.tenant_id = tenant_id | |||
| self.icon = icon | |||
| self.runtime_parameters = None | |||
| self.server_url = server_url | |||
| self.provider_id = provider_id | |||
| @@ -9,11 +9,6 @@ from core.tools.entities.tool_entities import ToolEntity, ToolInvokeMessage, Too | |||
| class PluginTool(Tool): | |||
| tenant_id: str | |||
| icon: str | |||
| plugin_unique_identifier: str | |||
| runtime_parameters: Optional[list[ToolParameter]] | |||
| def __init__( | |||
| self, entity: ToolEntity, runtime: ToolRuntime, tenant_id: str, icon: str, plugin_unique_identifier: str | |||
| ) -> None: | |||
| @@ -21,7 +16,7 @@ class PluginTool(Tool): | |||
| self.tenant_id = tenant_id | |||
| self.icon = icon | |||
| self.plugin_unique_identifier = plugin_unique_identifier | |||
| self.runtime_parameters = None | |||
| self.runtime_parameters: Optional[list[ToolParameter]] = None | |||
| def tool_provider_type(self) -> ToolProviderType: | |||
| return ToolProviderType.PLUGIN | |||
| @@ -20,8 +20,6 @@ from core.tools.utils.dataset_retriever.dataset_retriever_base_tool import Datas | |||
| class DatasetRetrieverTool(Tool): | |||
| retrieval_tool: DatasetRetrieverBaseTool | |||
| def __init__(self, entity: ToolEntity, runtime: ToolRuntime, retrieval_tool: DatasetRetrieverBaseTool) -> None: | |||
| super().__init__(entity, runtime) | |||
| self.retrieval_tool = retrieval_tool | |||
| @@ -25,15 +25,6 @@ logger = logging.getLogger(__name__) | |||
| class WorkflowTool(Tool): | |||
| workflow_app_id: str | |||
| version: str | |||
| workflow_entities: dict[str, Any] | |||
| workflow_call_depth: int | |||
| thread_pool_id: Optional[str] = None | |||
| workflow_as_tool_id: str | |||
| label: str | |||
| """ | |||
| Workflow tool. | |||
| """ | |||
| @@ -136,6 +136,8 @@ def init_app(app: DifyApp): | |||
| from opentelemetry.exporter.otlp.proto.http.trace_exporter import OTLPSpanExporter as HTTPSpanExporter | |||
| from opentelemetry.instrumentation.celery import CeleryInstrumentor | |||
| from opentelemetry.instrumentation.flask import FlaskInstrumentor | |||
| from opentelemetry.instrumentation.redis import RedisInstrumentor | |||
| from opentelemetry.instrumentation.requests import RequestsInstrumentor | |||
| from opentelemetry.instrumentation.sqlalchemy import SQLAlchemyInstrumentor | |||
| from opentelemetry.metrics import get_meter, get_meter_provider, set_meter_provider | |||
| from opentelemetry.propagate import set_global_textmap | |||
| @@ -234,6 +236,8 @@ def init_app(app: DifyApp): | |||
| CeleryInstrumentor(tracer_provider=get_tracer_provider(), meter_provider=get_meter_provider()).instrument() | |||
| instrument_exception_logging() | |||
| init_sqlalchemy_instrumentor(app) | |||
| RedisInstrumentor().instrument() | |||
| RequestsInstrumentor().instrument() | |||
| atexit.register(shutdown_tracer) | |||
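The two added instrumentors auto-trace Redis commands and outgoing `requests` HTTP calls alongside the existing Celery/Flask/SQLAlchemy instrumentation. A minimal standalone setup using the same public instrumentor APIs, with a console exporter purely for illustration:

```python
from opentelemetry import trace
from opentelemetry.instrumentation.redis import RedisInstrumentor
from opentelemetry.instrumentation.requests import RequestsInstrumentor
from opentelemetry.sdk.trace import TracerProvider
from opentelemetry.sdk.trace.export import ConsoleSpanExporter, SimpleSpanProcessor

# Requires opentelemetry-instrumentation-redis/-requests plus the redis and requests packages.
provider = TracerProvider()
provider.add_span_processor(SimpleSpanProcessor(ConsoleSpanExporter()))
trace.set_tracer_provider(provider)

# Instrument globally: every redis command and every requests.* HTTP call emits a span.
RedisInstrumentor().instrument(tracer_provider=provider)
RequestsInstrumentor().instrument(tracer_provider=provider)

import requests  # calls made through requests are traced from here on

requests.get("https://example.com", timeout=5)
```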
| @@ -895,6 +895,19 @@ class WorkflowAppLog(Base): | |||
| created_by_role = CreatorUserRole(self.created_by_role) | |||
| return db.session.get(EndUser, self.created_by) if created_by_role == CreatorUserRole.END_USER else None | |||
| def to_dict(self): | |||
| return { | |||
| "id": self.id, | |||
| "tenant_id": self.tenant_id, | |||
| "app_id": self.app_id, | |||
| "workflow_id": self.workflow_id, | |||
| "workflow_run_id": self.workflow_run_id, | |||
| "created_from": self.created_from, | |||
| "created_by_role": self.created_by_role, | |||
| "created_by": self.created_by, | |||
| "created_at": self.created_at, | |||
| } | |||
| class ConversationVariable(Base): | |||
| __tablename__ = "workflow_conversation_variables" | |||
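The `to_dict` method added to `WorkflowAppLog` above exists so the log-clearing job later in this PR can serialize rows to storage before deleting them. A hedged sketch of that call site, reusing the `jsonable_encoder` import shown further down in the diff; the helper name is illustrative:

```python
import json

from core.model_runtime.utils.encoders import jsonable_encoder


def serialize_workflow_app_logs(logs: list) -> bytes:
    """Build the JSON payload written to storage before the rows are deleted."""
    # `logs` is assumed to be a batch of WorkflowAppLog rows for an expired tenant.
    return json.dumps(jsonable_encoder([log.to_dict() for log in logs])).encode("utf-8")
```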
| @@ -49,6 +49,8 @@ dependencies = [ | |||
| "opentelemetry-instrumentation==0.48b0", | |||
| "opentelemetry-instrumentation-celery==0.48b0", | |||
| "opentelemetry-instrumentation-flask==0.48b0", | |||
| "opentelemetry-instrumentation-redis==0.48b0", | |||
| "opentelemetry-instrumentation-requests==0.48b0", | |||
| "opentelemetry-instrumentation-sqlalchemy==0.48b0", | |||
| "opentelemetry-propagator-b3==1.27.0", | |||
| # opentelemetry-proto1.28.0 depends on protobuf (>=5.0,<6.0), | |||
| @@ -13,7 +13,19 @@ from core.model_runtime.utils.encoders import jsonable_encoder | |||
| from extensions.ext_database import db | |||
| from extensions.ext_storage import storage | |||
| from models.account import Tenant | |||
| from models.model import App, Conversation, Message | |||
| from models.model import ( | |||
| App, | |||
| AppAnnotationHitHistory, | |||
| Conversation, | |||
| Message, | |||
| MessageAgentThought, | |||
| MessageAnnotation, | |||
| MessageChain, | |||
| MessageFeedback, | |||
| MessageFile, | |||
| ) | |||
| from models.web import SavedMessage | |||
| from models.workflow import WorkflowAppLog | |||
| from repositories.factory import DifyAPIRepositoryFactory | |||
| from services.billing_service import BillingService | |||
| @@ -21,6 +33,85 @@ logger = logging.getLogger(__name__) | |||
| class ClearFreePlanTenantExpiredLogs: | |||
| @classmethod | |||
| def _clear_message_related_tables(cls, session: Session, tenant_id: str, batch_message_ids: list[str]) -> None: | |||
| """ | |||
| Back up and delete rows in message-related tables so that deleting expired messages leaves no orphaned data behind. | |||
| This method cleans up tables that have foreign key relationships with Message. | |||
| Args: | |||
| session: Database session; the same session used in the process_tenant method | |||
| tenant_id: Tenant ID for logging purposes | |||
| batch_message_ids: List of message IDs to clean up | |||
| """ | |||
| if not batch_message_ids: | |||
| return | |||
| # Clean up each related table | |||
| related_tables = [ | |||
| (MessageFeedback, "message_feedbacks"), | |||
| (MessageFile, "message_files"), | |||
| (MessageAnnotation, "message_annotations"), | |||
| (MessageChain, "message_chains"), | |||
| (MessageAgentThought, "message_agent_thoughts"), | |||
| (AppAnnotationHitHistory, "app_annotation_hit_histories"), | |||
| (SavedMessage, "saved_messages"), | |||
| ] | |||
| for model, table_name in related_tables: | |||
| # Query records related to expired messages | |||
| records = ( | |||
| session.query(model) | |||
| .filter( | |||
| model.message_id.in_(batch_message_ids), # type: ignore | |||
| ) | |||
| .all() | |||
| ) | |||
| if len(records) == 0: | |||
| continue | |||
| # Save records before deletion | |||
| record_ids = [record.id for record in records] | |||
| try: | |||
| record_data = [] | |||
| for record in records: | |||
| try: | |||
| if hasattr(record, "to_dict"): | |||
| record_data.append(record.to_dict()) | |||
| else: | |||
| # if the record doesn't have a to_dict method, build the dict from its table columns | |||
| record_dict = {} | |||
| for column in record.__table__.columns: | |||
| record_dict[column.name] = getattr(record, column.name) | |||
| record_data.append(record_dict) | |||
| except Exception: | |||
| logger.exception("Failed to transform %s record: %s", table_name, record.id) | |||
| continue | |||
| if record_data: | |||
| storage.save( | |||
| f"free_plan_tenant_expired_logs/" | |||
| f"{tenant_id}/{table_name}/{datetime.datetime.now().strftime('%Y-%m-%d')}" | |||
| f"-{time.time()}.json", | |||
| json.dumps( | |||
| jsonable_encoder(record_data), | |||
| ).encode("utf-8"), | |||
| ) | |||
| except Exception: | |||
| logger.exception("Failed to save %s records", table_name) | |||
| session.query(model).filter( | |||
| model.id.in_(record_ids), # type: ignore | |||
| ).delete(synchronize_session=False) | |||
| click.echo( | |||
| click.style( | |||
| f"[{datetime.datetime.now()}] Processed {len(record_ids)} " | |||
| f"{table_name} records for tenant {tenant_id}" | |||
| ) | |||
| ) | |||
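A sketch of how this helper is intended to be called: inside the same session that deletes the parent `Message` rows, before committing. The wrapper function name here is hypothetical; the real call site appears in `process_tenant` below:

```python
from sqlalchemy.orm import Session

from extensions.ext_database import db
from services.clear_free_plan_tenant_expired_logs import ClearFreePlanTenantExpiredLogs


def purge_message_batch(tenant_id: str, message_ids: list[str]) -> None:
    """Back up and delete rows related to one expired batch of messages."""
    with Session(db.engine).no_autoflush as session:
        ClearFreePlanTenantExpiredLogs._clear_message_related_tables(session, tenant_id, message_ids)
        session.commit()
```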
| @classmethod | |||
| def process_tenant(cls, flask_app: Flask, tenant_id: str, days: int, batch: int): | |||
| with flask_app.app_context(): | |||
| @@ -58,6 +149,7 @@ class ClearFreePlanTenantExpiredLogs: | |||
| Message.id.in_(message_ids), | |||
| ).delete(synchronize_session=False) | |||
| cls._clear_message_related_tables(session, tenant_id, message_ids) | |||
| session.commit() | |||
| click.echo( | |||
| @@ -199,6 +291,48 @@ class ClearFreePlanTenantExpiredLogs: | |||
| if len(workflow_runs) < batch: | |||
| break | |||
| while True: | |||
| with Session(db.engine).no_autoflush as session: | |||
| workflow_app_logs = ( | |||
| session.query(WorkflowAppLog) | |||
| .filter( | |||
| WorkflowAppLog.tenant_id == tenant_id, | |||
| WorkflowAppLog.created_at < datetime.datetime.now() - datetime.timedelta(days=days), | |||
| ) | |||
| .limit(batch) | |||
| .all() | |||
| ) | |||
| if len(workflow_app_logs) == 0: | |||
| break | |||
| # save workflow app logs | |||
| storage.save( | |||
| f"free_plan_tenant_expired_logs/" | |||
| f"{tenant_id}/workflow_app_logs/{datetime.datetime.now().strftime('%Y-%m-%d')}" | |||
| f"-{time.time()}.json", | |||
| json.dumps( | |||
| jsonable_encoder( | |||
| [workflow_app_log.to_dict() for workflow_app_log in workflow_app_logs], | |||
| ), | |||
| ).encode("utf-8"), | |||
| ) | |||
| workflow_app_log_ids = [workflow_app_log.id for workflow_app_log in workflow_app_logs] | |||
| # delete workflow app logs | |||
| session.query(WorkflowAppLog).filter( | |||
| WorkflowAppLog.id.in_(workflow_app_log_ids), | |||
| ).delete(synchronize_session=False) | |||
| session.commit() | |||
| click.echo( | |||
| click.style( | |||
| f"[{datetime.datetime.now()}] Processed {len(workflow_app_log_ids)}" | |||
| f" workflow app logs for tenant {tenant_id}" | |||
| ) | |||
| ) | |||
| @classmethod | |||
| def process(cls, days: int, batch: int, tenant_ids: list[str]): | |||
| """ | |||
| @@ -52,6 +52,16 @@ class EnterpriseService: | |||
| return data.get("result", False) | |||
| @classmethod | |||
| def batch_is_user_allowed_to_access_webapps(cls, user_id: str, app_codes: list[str]): | |||
| if not app_codes: | |||
| return {} | |||
| body = {"userId": user_id, "appCodes": app_codes} | |||
| data = EnterpriseRequest.send_request("POST", "/webapp/permission/batch", json=body) | |||
| if not data: | |||
| raise ValueError("No data found.") | |||
| return data.get("permissions", {}) | |||
| @classmethod | |||
| def get_app_access_mode_by_id(cls, app_id: str) -> WebAppSettings: | |||
| if not app_id: | |||
| @@ -0,0 +1,739 @@ | |||
| import pytest | |||
| from faker import Faker | |||
| from core.variables.segments import StringSegment | |||
| from core.workflow.constants import CONVERSATION_VARIABLE_NODE_ID, SYSTEM_VARIABLE_NODE_ID | |||
| from models import App, Workflow | |||
| from models.enums import DraftVariableType | |||
| from models.workflow import WorkflowDraftVariable | |||
| from services.workflow_draft_variable_service import ( | |||
| UpdateNotSupportedError, | |||
| WorkflowDraftVariableService, | |||
| ) | |||
| class TestWorkflowDraftVariableService: | |||
| """ | |||
| Comprehensive integration tests for WorkflowDraftVariableService using testcontainers. | |||
| This test class covers all major functionality of the WorkflowDraftVariableService: | |||
| - CRUD operations for workflow draft variables (Create, Read, Update, Delete) | |||
| - Variable listing and filtering by type (conversation, system, node) | |||
| - Variable updates and resets with proper validation | |||
| - Variable deletion operations at different scopes | |||
| - Special functionality like prefill and conversation ID retrieval | |||
| - Error handling for various edge cases and invalid operations | |||
| All tests use the testcontainers infrastructure to ensure proper database isolation | |||
| and realistic testing environment with actual database interactions. | |||
| """ | |||
| @pytest.fixture | |||
| def mock_external_service_dependencies(self): | |||
| """ | |||
| Mock setup for external service dependencies. | |||
| WorkflowDraftVariableService doesn't have external dependencies that need mocking, | |||
| so this fixture returns an empty dictionary to maintain consistency with other test classes. | |||
| This ensures the test structure remains consistent across different service test files. | |||
| """ | |||
| # WorkflowDraftVariableService doesn't have external dependencies that need mocking | |||
| return {} | |||
| def _create_test_app(self, db_session_with_containers, mock_external_service_dependencies, fake=None): | |||
| """ | |||
| Helper method to create a test app with realistic data for testing. | |||
| This method creates a complete App instance with all required fields populated | |||
| using Faker for generating realistic test data. The app is configured for | |||
| workflow mode to support workflow draft variable testing. | |||
| Args: | |||
| db_session_with_containers: Database session from testcontainers infrastructure | |||
| mock_external_service_dependencies: Mock dependencies (unused in this service) | |||
| fake: Faker instance for generating test data, creates new instance if not provided | |||
| Returns: | |||
| App: Created test app instance with all required fields populated | |||
| """ | |||
| fake = fake or Faker() | |||
| app = App() | |||
| app.id = fake.uuid4() | |||
| app.tenant_id = fake.uuid4() | |||
| app.name = fake.company() | |||
| app.description = fake.text() | |||
| app.mode = "workflow" | |||
| app.icon_type = "emoji" | |||
| app.icon = "🤖" | |||
| app.icon_background = "#FFEAD5" | |||
| app.enable_site = True | |||
| app.enable_api = True | |||
| app.created_by = fake.uuid4() | |||
| app.updated_by = app.created_by | |||
| from extensions.ext_database import db | |||
| db.session.add(app) | |||
| db.session.commit() | |||
| return app | |||
| def _create_test_workflow(self, db_session_with_containers, app, fake=None): | |||
| """ | |||
| Helper method to create a test workflow associated with an app. | |||
| This method creates a Workflow instance using the proper factory method | |||
| to ensure all required fields are set correctly. The workflow is configured | |||
| as a draft version with basic graph structure for testing workflow variables. | |||
| Args: | |||
| db_session_with_containers: Database session from testcontainers infrastructure | |||
| app: The app to associate the workflow with | |||
| fake: Faker instance for generating test data, creates new instance if not provided | |||
| Returns: | |||
| Workflow: Created test workflow instance with proper configuration | |||
| """ | |||
| fake = fake or Faker() | |||
| workflow = Workflow.new( | |||
| tenant_id=app.tenant_id, | |||
| app_id=app.id, | |||
| type="workflow", | |||
| version="draft", | |||
| graph='{"nodes": [], "edges": []}', | |||
| features="{}", | |||
| created_by=app.created_by, | |||
| environment_variables=[], | |||
| conversation_variables=[], | |||
| ) | |||
| from extensions.ext_database import db | |||
| db.session.add(workflow) | |||
| db.session.commit() | |||
| return workflow | |||
| def _create_test_variable( | |||
| self, db_session_with_containers, app_id, node_id, name, value, variable_type="conversation", fake=None | |||
| ): | |||
| """ | |||
| Helper method to create a test workflow draft variable with proper configuration. | |||
| This method creates different types of variables (conversation, system, node) using | |||
| the appropriate factory methods to ensure proper initialization. Each variable type | |||
| has specific requirements and this method handles the creation logic for all types. | |||
| Args: | |||
| db_session_with_containers: Database session from testcontainers infrastructure | |||
| app_id: ID of the app to associate the variable with | |||
| node_id: ID of the node (or special constants like CONVERSATION_VARIABLE_NODE_ID) | |||
| name: Name of the variable for identification | |||
| value: StringSegment value for the variable content | |||
| variable_type: Type of variable ("conversation", "system", "node") determining creation method | |||
| fake: Faker instance for generating test data, creates new instance if not provided | |||
| Returns: | |||
| WorkflowDraftVariable: Created test variable instance with proper type configuration | |||
| """ | |||
| fake = fake or Faker() | |||
| if variable_type == "conversation": | |||
| # Create conversation variable using the appropriate factory method | |||
| variable = WorkflowDraftVariable.new_conversation_variable( | |||
| app_id=app_id, | |||
| name=name, | |||
| value=value, | |||
| description=fake.text(max_nb_chars=20), | |||
| ) | |||
| elif variable_type == "system": | |||
| # Create system variable with editable flag and execution context | |||
| variable = WorkflowDraftVariable.new_sys_variable( | |||
| app_id=app_id, | |||
| name=name, | |||
| value=value, | |||
| node_execution_id=fake.uuid4(), | |||
| editable=True, | |||
| ) | |||
| else: # node variable | |||
| # Create node variable with visibility and editability settings | |||
| variable = WorkflowDraftVariable.new_node_variable( | |||
| app_id=app_id, | |||
| node_id=node_id, | |||
| name=name, | |||
| value=value, | |||
| node_execution_id=fake.uuid4(), | |||
| visible=True, | |||
| editable=True, | |||
| ) | |||
| from extensions.ext_database import db | |||
| db.session.add(variable) | |||
| db.session.commit() | |||
| return variable | |||
| def test_get_variable_success(self, db_session_with_containers, mock_external_service_dependencies): | |||
| """ | |||
| Test getting a single variable by ID successfully. | |||
| This test verifies that the service can retrieve a specific variable | |||
| by its ID and that the returned variable contains the correct data. | |||
| It ensures the basic CRUD read operation works correctly for workflow draft variables. | |||
| """ | |||
| fake = Faker() | |||
| app = self._create_test_app(db_session_with_containers, mock_external_service_dependencies, fake=fake) | |||
| test_value = StringSegment(value=fake.word()) | |||
| variable = self._create_test_variable( | |||
| db_session_with_containers, app.id, CONVERSATION_VARIABLE_NODE_ID, "test_var", test_value, fake=fake | |||
| ) | |||
| service = WorkflowDraftVariableService(db_session_with_containers) | |||
| retrieved_variable = service.get_variable(variable.id) | |||
| assert retrieved_variable is not None | |||
| assert retrieved_variable.id == variable.id | |||
| assert retrieved_variable.name == "test_var" | |||
| assert retrieved_variable.app_id == app.id | |||
| assert retrieved_variable.get_value().value == test_value.value | |||
| def test_get_variable_not_found(self, db_session_with_containers, mock_external_service_dependencies): | |||
| """ | |||
| Test getting a variable that doesn't exist. | |||
| This test verifies that the service returns None when trying to | |||
| retrieve a variable with a non-existent ID. This ensures proper | |||
| handling of missing data scenarios. | |||
| """ | |||
| fake = Faker() | |||
| non_existent_id = fake.uuid4() | |||
| service = WorkflowDraftVariableService(db_session_with_containers) | |||
| retrieved_variable = service.get_variable(non_existent_id) | |||
| assert retrieved_variable is None | |||
| def test_get_draft_variables_by_selectors_success( | |||
| self, db_session_with_containers, mock_external_service_dependencies | |||
| ): | |||
| """ | |||
| Test getting variables by selectors successfully. | |||
| This test verifies that the service can retrieve multiple variables | |||
| using selector pairs (node_id, variable_name) and returns the correct | |||
| variables for each selector. This is useful for bulk variable retrieval | |||
| operations in workflow execution contexts. | |||
| """ | |||
| fake = Faker() | |||
| app = self._create_test_app(db_session_with_containers, mock_external_service_dependencies, fake=fake) | |||
| var1_value = StringSegment(value=fake.word()) | |||
| var2_value = StringSegment(value=fake.word()) | |||
| var3_value = StringSegment(value=fake.word()) | |||
| var1 = self._create_test_variable( | |||
| db_session_with_containers, app.id, CONVERSATION_VARIABLE_NODE_ID, "var1", var1_value, fake=fake | |||
| ) | |||
| var2 = self._create_test_variable( | |||
| db_session_with_containers, app.id, CONVERSATION_VARIABLE_NODE_ID, "var2", var2_value, fake=fake | |||
| ) | |||
| var3 = self._create_test_variable( | |||
| db_session_with_containers, app.id, "test_node_1", "var3", var3_value, "node", fake=fake | |||
| ) | |||
| selectors = [ | |||
| [CONVERSATION_VARIABLE_NODE_ID, "var1"], | |||
| [CONVERSATION_VARIABLE_NODE_ID, "var2"], | |||
| ["test_node_1", "var3"], | |||
| ] | |||
| service = WorkflowDraftVariableService(db_session_with_containers) | |||
| retrieved_variables = service.get_draft_variables_by_selectors(app.id, selectors) | |||
| assert len(retrieved_variables) == 3 | |||
| var_names = [var.name for var in retrieved_variables] | |||
| assert "var1" in var_names | |||
| assert "var2" in var_names | |||
| assert "var3" in var_names | |||
| for var in retrieved_variables: | |||
| if var.name == "var1": | |||
| assert var.get_value().value == var1_value.value | |||
| elif var.name == "var2": | |||
| assert var.get_value().value == var2_value.value | |||
| elif var.name == "var3": | |||
| assert var.get_value().value == var3_value.value | |||
| def test_list_variables_without_values_success( | |||
| self, db_session_with_containers, mock_external_service_dependencies | |||
| ): | |||
| """ | |||
| Test listing variables without values successfully with pagination. | |||
| This test verifies that the service can list variables with pagination | |||
| and that the returned variables don't include their values (for performance). | |||
| This is important for scenarios where only variable metadata is needed | |||
| without loading the actual content. | |||
| """ | |||
| fake = Faker() | |||
| app = self._create_test_app(db_session_with_containers, mock_external_service_dependencies, fake=fake) | |||
| for i in range(5): | |||
| test_value = StringSegment(value=fake.numerify("value##")) | |||
| self._create_test_variable( | |||
| db_session_with_containers, app.id, CONVERSATION_VARIABLE_NODE_ID, fake.word(), test_value, fake=fake | |||
| ) | |||
| service = WorkflowDraftVariableService(db_session_with_containers) | |||
| result = service.list_variables_without_values(app.id, page=1, limit=3) | |||
| assert result.total == 5 | |||
| assert len(result.variables) == 3 | |||
| assert result.variables[0].created_at >= result.variables[1].created_at | |||
| assert result.variables[1].created_at >= result.variables[2].created_at | |||
| for var in result.variables: | |||
| assert var.name is not None | |||
| assert var.app_id == app.id | |||
| def test_list_node_variables_success(self, db_session_with_containers, mock_external_service_dependencies): | |||
| """ | |||
| Test listing variables for a specific node successfully. | |||
| This test verifies that the service can filter and return only | |||
| variables associated with a specific node ID. This is crucial for | |||
| workflow execution where variables need to be scoped to specific nodes. | |||
| """ | |||
| fake = Faker() | |||
| app = self._create_test_app(db_session_with_containers, mock_external_service_dependencies, fake=fake) | |||
| node_id = fake.word() | |||
| var1_value = StringSegment(value=fake.word()) | |||
| var2_value = StringSegment(value=fake.word()) | |||
| var3_value = StringSegment(value=fake.word()) | |||
| self._create_test_variable(db_session_with_containers, app.id, node_id, "var1", var1_value, "node", fake=fake) | |||
| self._create_test_variable(db_session_with_containers, app.id, node_id, "var2", var3_value, "node", fake=fake) | |||
| self._create_test_variable( | |||
| db_session_with_containers, app.id, "other_node", "var3", var2_value, "node", fake=fake | |||
| ) | |||
| service = WorkflowDraftVariableService(db_session_with_containers) | |||
| result = service.list_node_variables(app.id, node_id) | |||
| assert len(result.variables) == 2 | |||
| for var in result.variables: | |||
| assert var.node_id == node_id | |||
| assert var.app_id == app.id | |||
| var_names = [var.name for var in result.variables] | |||
| assert "var1" in var_names | |||
| assert "var2" in var_names | |||
| assert "var3" not in var_names | |||
| def test_list_conversation_variables_success(self, db_session_with_containers, mock_external_service_dependencies): | |||
| """ | |||
| Test listing conversation variables successfully. | |||
| This test verifies that the service can filter and return only | |||
| conversation variables, excluding system and node variables. | |||
| Conversation variables are user-facing variables that can be | |||
| modified during conversation flows. | |||
| """ | |||
| fake = Faker() | |||
| app = self._create_test_app(db_session_with_containers, mock_external_service_dependencies, fake=fake) | |||
| conv_var1_value = StringSegment(value=fake.word()) | |||
| conv_var2_value = StringSegment(value=fake.word()) | |||
| conv_var1 = self._create_test_variable( | |||
| db_session_with_containers, app.id, CONVERSATION_VARIABLE_NODE_ID, "conv_var1", conv_var1_value, fake=fake | |||
| ) | |||
| conv_var2 = self._create_test_variable( | |||
| db_session_with_containers, app.id, CONVERSATION_VARIABLE_NODE_ID, "conv_var2", conv_var2_value, fake=fake | |||
| ) | |||
| sys_var_value = StringSegment(value=fake.word()) | |||
| self._create_test_variable( | |||
| db_session_with_containers, app.id, SYSTEM_VARIABLE_NODE_ID, "sys_var", sys_var_value, "system", fake=fake | |||
| ) | |||
| service = WorkflowDraftVariableService(db_session_with_containers) | |||
| result = service.list_conversation_variables(app.id) | |||
| assert len(result.variables) == 2 | |||
| for var in result.variables: | |||
| assert var.node_id == CONVERSATION_VARIABLE_NODE_ID | |||
| assert var.app_id == app.id | |||
| assert var.get_variable_type() == DraftVariableType.CONVERSATION | |||
| var_names = [var.name for var in result.variables] | |||
| assert "conv_var1" in var_names | |||
| assert "conv_var2" in var_names | |||
| assert "sys_var" not in var_names | |||
| def test_update_variable_success(self, db_session_with_containers, mock_external_service_dependencies): | |||
| """ | |||
| Test updating a variable's name and value successfully. | |||
| This test verifies that the service can update both the name and value | |||
| of an editable variable and that the changes are persisted correctly. | |||
| It also checks that the last_edited_at timestamp is updated appropriately. | |||
| """ | |||
| fake = Faker() | |||
| app = self._create_test_app(db_session_with_containers, mock_external_service_dependencies, fake=fake) | |||
| original_value = StringSegment(value=fake.word()) | |||
| new_value = StringSegment(value=fake.word()) | |||
| variable = self._create_test_variable( | |||
| db_session_with_containers, | |||
| app.id, | |||
| CONVERSATION_VARIABLE_NODE_ID, | |||
| "original_name", | |||
| original_value, | |||
| fake=fake, | |||
| ) | |||
| service = WorkflowDraftVariableService(db_session_with_containers) | |||
| updated_variable = service.update_variable(variable, name="new_name", value=new_value) | |||
| assert updated_variable.name == "new_name" | |||
| assert updated_variable.get_value().value == new_value.value | |||
| assert updated_variable.last_edited_at is not None | |||
| from extensions.ext_database import db | |||
| db.session.refresh(variable) | |||
| assert variable.name == "new_name" | |||
| assert variable.get_value().value == new_value.value | |||
| assert variable.last_edited_at is not None | |||
| def test_update_variable_not_editable(self, db_session_with_containers, mock_external_service_dependencies): | |||
| """ | |||
| Test that updating a non-editable variable raises an exception. | |||
| This test verifies that the service properly prevents updates to | |||
| variables that are not marked as editable. This is important for | |||
| maintaining data integrity and preventing unauthorized modifications | |||
| to system-controlled variables. | |||
| """ | |||
| fake = Faker() | |||
| app = self._create_test_app(db_session_with_containers, mock_external_service_dependencies, fake=fake) | |||
| original_value = StringSegment(value=fake.word()) | |||
| new_value = StringSegment(value=fake.word()) | |||
| variable = WorkflowDraftVariable.new_sys_variable( | |||
| app_id=app.id, | |||
| name=fake.word(), | |||
| value=original_value, | |||
| node_execution_id=fake.uuid4(), | |||
| editable=False, # Set as non-editable | |||
| ) | |||
| from extensions.ext_database import db | |||
| db.session.add(variable) | |||
| db.session.commit() | |||
| service = WorkflowDraftVariableService(db_session_with_containers) | |||
| with pytest.raises(UpdateNotSupportedError) as exc_info: | |||
| service.update_variable(variable, name="new_name", value=new_value) | |||
| assert "variable not support updating" in str(exc_info.value) | |||
| assert variable.id in str(exc_info.value) | |||
| def test_reset_conversation_variable_success(self, db_session_with_containers, mock_external_service_dependencies): | |||
| """ | |||
| Test resetting conversation variable successfully. | |||
| This test verifies that the service can reset a conversation variable | |||
| to its default value and clear the last_edited_at timestamp. | |||
| This functionality is useful for reverting user modifications | |||
| back to the original workflow configuration. | |||
| """ | |||
| fake = Faker() | |||
| app = self._create_test_app(db_session_with_containers, mock_external_service_dependencies, fake=fake) | |||
| workflow = self._create_test_workflow(db_session_with_containers, app, fake=fake) | |||
| from core.variables.variables import StringVariable | |||
| conv_var = StringVariable( | |||
| id=fake.uuid4(), | |||
| name="test_conv_var", | |||
| value="default_value", | |||
| selector=[CONVERSATION_VARIABLE_NODE_ID, "test_conv_var"], | |||
| ) | |||
| workflow.conversation_variables = [conv_var] | |||
| from extensions.ext_database import db | |||
| db.session.commit() | |||
| modified_value = StringSegment(value=fake.word()) | |||
| variable = self._create_test_variable( | |||
| db_session_with_containers, | |||
| app.id, | |||
| CONVERSATION_VARIABLE_NODE_ID, | |||
| "test_conv_var", | |||
| modified_value, | |||
| fake=fake, | |||
| ) | |||
| variable.last_edited_at = fake.date_time() | |||
| db.session.commit() | |||
| service = WorkflowDraftVariableService(db_session_with_containers) | |||
| reset_variable = service.reset_variable(workflow, variable) | |||
| assert reset_variable is not None | |||
| assert reset_variable.get_value().value == "default_value" | |||
| assert reset_variable.last_edited_at is None | |||
| db.session.refresh(variable) | |||
| assert variable.get_value().value == "default_value" | |||
| assert variable.last_edited_at is None | |||
| def test_delete_variable_success(self, db_session_with_containers, mock_external_service_dependencies): | |||
| """ | |||
| Test deleting a single variable successfully. | |||
| This test verifies that the service can delete a specific variable | |||
| and that it's properly removed from the database. It ensures that | |||
| the deletion operation is atomic and complete. | |||
| """ | |||
| fake = Faker() | |||
| app = self._create_test_app(db_session_with_containers, mock_external_service_dependencies, fake=fake) | |||
| test_value = StringSegment(value=fake.word()) | |||
| variable = self._create_test_variable( | |||
| db_session_with_containers, app.id, CONVERSATION_VARIABLE_NODE_ID, "test_var", test_value, fake=fake | |||
| ) | |||
| from extensions.ext_database import db | |||
| assert db.session.query(WorkflowDraftVariable).filter_by(id=variable.id).first() is not None | |||
| service = WorkflowDraftVariableService(db_session_with_containers) | |||
| service.delete_variable(variable) | |||
| assert db.session.query(WorkflowDraftVariable).filter_by(id=variable.id).first() is None | |||
| def test_delete_workflow_variables_success(self, db_session_with_containers, mock_external_service_dependencies): | |||
| """ | |||
| Test deleting all variables for a workflow successfully. | |||
| This test verifies that the service can delete all variables | |||
| associated with a specific app/workflow. This is useful for | |||
| cleanup operations when workflows are deleted or reset. | |||
| """ | |||
| fake = Faker() | |||
| app = self._create_test_app(db_session_with_containers, mock_external_service_dependencies, fake=fake) | |||
| for i in range(3): | |||
| test_value = StringSegment(value=fake.numerify("value##")) | |||
| self._create_test_variable( | |||
| db_session_with_containers, app.id, CONVERSATION_VARIABLE_NODE_ID, fake.word(), test_value, fake=fake | |||
| ) | |||
| other_app = self._create_test_app(db_session_with_containers, mock_external_service_dependencies, fake=fake) | |||
| other_value = StringSegment(value=fake.word()) | |||
| self._create_test_variable( | |||
| db_session_with_containers, other_app.id, CONVERSATION_VARIABLE_NODE_ID, fake.word(), other_value, fake=fake | |||
| ) | |||
| from extensions.ext_database import db | |||
| app_variables = db.session.query(WorkflowDraftVariable).filter_by(app_id=app.id).all() | |||
| other_app_variables = db.session.query(WorkflowDraftVariable).filter_by(app_id=other_app.id).all() | |||
| assert len(app_variables) == 3 | |||
| assert len(other_app_variables) == 1 | |||
| service = WorkflowDraftVariableService(db_session_with_containers) | |||
| service.delete_workflow_variables(app.id) | |||
| app_variables_after = db.session.query(WorkflowDraftVariable).filter_by(app_id=app.id).all() | |||
| other_app_variables_after = db.session.query(WorkflowDraftVariable).filter_by(app_id=other_app.id).all() | |||
| assert len(app_variables_after) == 0 | |||
| assert len(other_app_variables_after) == 1 | |||
| def test_delete_node_variables_success(self, db_session_with_containers, mock_external_service_dependencies): | |||
| """ | |||
| Test deleting all variables for a specific node successfully. | |||
| This test verifies that the service can delete all variables | |||
| associated with a specific node while preserving variables | |||
| for other nodes and conversation variables. This is important | |||
| for node-specific cleanup operations in workflow management. | |||
| """ | |||
| fake = Faker() | |||
| app = self._create_test_app(db_session_with_containers, mock_external_service_dependencies, fake=fake) | |||
| node_id = fake.word() | |||
| for i in range(2): | |||
| test_value = StringSegment(value=fake.numerify("node_value##")) | |||
| self._create_test_variable( | |||
| db_session_with_containers, app.id, node_id, fake.word(), test_value, "node", fake=fake | |||
| ) | |||
| other_node_value = StringSegment(value=fake.word()) | |||
| self._create_test_variable( | |||
| db_session_with_containers, app.id, "other_node", fake.word(), other_node_value, "node", fake=fake | |||
| ) | |||
| conv_value = StringSegment(value=fake.word()) | |||
| self._create_test_variable( | |||
| db_session_with_containers, app.id, CONVERSATION_VARIABLE_NODE_ID, fake.word(), conv_value, fake=fake | |||
| ) | |||
| from extensions.ext_database import db | |||
| target_node_variables = db.session.query(WorkflowDraftVariable).filter_by(app_id=app.id, node_id=node_id).all() | |||
| other_node_variables = ( | |||
| db.session.query(WorkflowDraftVariable).filter_by(app_id=app.id, node_id="other_node").all() | |||
| ) | |||
| conv_variables = ( | |||
| db.session.query(WorkflowDraftVariable) | |||
| .filter_by(app_id=app.id, node_id=CONVERSATION_VARIABLE_NODE_ID) | |||
| .all() | |||
| ) | |||
| assert len(target_node_variables) == 2 | |||
| assert len(other_node_variables) == 1 | |||
| assert len(conv_variables) == 1 | |||
| service = WorkflowDraftVariableService(db_session_with_containers) | |||
| service.delete_node_variables(app.id, node_id) | |||
| target_node_variables_after = ( | |||
| db.session.query(WorkflowDraftVariable).filter_by(app_id=app.id, node_id=node_id).all() | |||
| ) | |||
| other_node_variables_after = ( | |||
| db.session.query(WorkflowDraftVariable).filter_by(app_id=app.id, node_id="other_node").all() | |||
| ) | |||
| conv_variables_after = ( | |||
| db.session.query(WorkflowDraftVariable) | |||
| .filter_by(app_id=app.id, node_id=CONVERSATION_VARIABLE_NODE_ID) | |||
| .all() | |||
| ) | |||
| assert len(target_node_variables_after) == 0 | |||
| assert len(other_node_variables_after) == 1 | |||
| assert len(conv_variables_after) == 1 | |||
| def test_prefill_conversation_variable_default_values_success( | |||
| self, db_session_with_containers, mock_external_service_dependencies | |||
| ): | |||
| """ | |||
| Test prefill conversation variable default values successfully. | |||
| This test verifies that the service can automatically create | |||
| conversation variables with default values based on the workflow | |||
| configuration when none exist. This is important for initializing | |||
| workflow variables with proper defaults from the workflow definition. | |||
| """ | |||
| fake = Faker() | |||
| app = self._create_test_app(db_session_with_containers, mock_external_service_dependencies, fake=fake) | |||
| workflow = self._create_test_workflow(db_session_with_containers, app, fake=fake) | |||
| from core.variables.variables import StringVariable | |||
| conv_var1 = StringVariable( | |||
| id=fake.uuid4(), | |||
| name="conv_var1", | |||
| value="default_value1", | |||
| selector=[CONVERSATION_VARIABLE_NODE_ID, "conv_var1"], | |||
| ) | |||
| conv_var2 = StringVariable( | |||
| id=fake.uuid4(), | |||
| name="conv_var2", | |||
| value="default_value2", | |||
| selector=[CONVERSATION_VARIABLE_NODE_ID, "conv_var2"], | |||
| ) | |||
| workflow.conversation_variables = [conv_var1, conv_var2] | |||
| from extensions.ext_database import db | |||
| db.session.commit() | |||
| service = WorkflowDraftVariableService(db_session_with_containers) | |||
| service.prefill_conversation_variable_default_values(workflow) | |||
| draft_variables = ( | |||
| db.session.query(WorkflowDraftVariable) | |||
| .filter_by(app_id=app.id, node_id=CONVERSATION_VARIABLE_NODE_ID) | |||
| .all() | |||
| ) | |||
| assert len(draft_variables) == 2 | |||
| var_names = [var.name for var in draft_variables] | |||
| assert "conv_var1" in var_names | |||
| assert "conv_var2" in var_names | |||
| for var in draft_variables: | |||
| assert var.app_id == app.id | |||
| assert var.node_id == CONVERSATION_VARIABLE_NODE_ID | |||
| assert var.editable is True | |||
| assert var.get_variable_type() == DraftVariableType.CONVERSATION | |||
| def test_get_conversation_id_from_draft_variable_success( | |||
| self, db_session_with_containers, mock_external_service_dependencies | |||
| ): | |||
| """ | |||
| Test getting conversation ID from draft variable successfully. | |||
| This test verifies that the service can extract the conversation ID | |||
| from a system variable named "conversation_id". This is important | |||
| for maintaining conversation context across workflow executions. | |||
| """ | |||
| fake = Faker() | |||
| app = self._create_test_app(db_session_with_containers, mock_external_service_dependencies, fake=fake) | |||
| conversation_id = fake.uuid4() | |||
| conv_id_value = StringSegment(value=conversation_id) | |||
| self._create_test_variable( | |||
| db_session_with_containers, | |||
| app.id, | |||
| SYSTEM_VARIABLE_NODE_ID, | |||
| "conversation_id", | |||
| conv_id_value, | |||
| "system", | |||
| fake=fake, | |||
| ) | |||
| service = WorkflowDraftVariableService(db_session_with_containers) | |||
| retrieved_conv_id = service._get_conversation_id_from_draft_variable(app.id) | |||
| assert retrieved_conv_id == conversation_id | |||
| def test_get_conversation_id_from_draft_variable_not_found( | |||
| self, db_session_with_containers, mock_external_service_dependencies | |||
| ): | |||
| """ | |||
| Test getting conversation ID when it doesn't exist. | |||
| This test verifies that the service returns None when no | |||
| conversation_id variable exists for the app. This ensures | |||
| proper handling of missing conversation context scenarios. | |||
| """ | |||
| fake = Faker() | |||
| app = self._create_test_app(db_session_with_containers, mock_external_service_dependencies, fake=fake) | |||
| service = WorkflowDraftVariableService(db_session_with_containers) | |||
| retrieved_conv_id = service._get_conversation_id_from_draft_variable(app.id) | |||
| assert retrieved_conv_id is None | |||
| def test_list_system_variables_success(self, db_session_with_containers, mock_external_service_dependencies): | |||
| """ | |||
| Test listing system variables successfully. | |||
| This test verifies that the service can filter and return only | |||
| system variables, excluding conversation and node variables. | |||
| System variables are internal variables used by the workflow | |||
| engine for maintaining state and context. | |||
| """ | |||
| fake = Faker() | |||
| app = self._create_test_app(db_session_with_containers, mock_external_service_dependencies, fake=fake) | |||
| sys_var1_value = StringSegment(value=fake.word()) | |||
| sys_var2_value = StringSegment(value=fake.word()) | |||
| sys_var1 = self._create_test_variable( | |||
| db_session_with_containers, app.id, SYSTEM_VARIABLE_NODE_ID, "sys_var1", sys_var1_value, "system", fake=fake | |||
| ) | |||
| sys_var2 = self._create_test_variable( | |||
| db_session_with_containers, app.id, SYSTEM_VARIABLE_NODE_ID, "sys_var2", sys_var2_value, "system", fake=fake | |||
| ) | |||
| conv_var_value = StringSegment(value=fake.word()) | |||
| self._create_test_variable( | |||
| db_session_with_containers, app.id, CONVERSATION_VARIABLE_NODE_ID, "conv_var", conv_var_value, fake=fake | |||
| ) | |||
| service = WorkflowDraftVariableService(db_session_with_containers) | |||
| result = service.list_system_variables(app.id) | |||
| assert len(result.variables) == 2 | |||
| for var in result.variables: | |||
| assert var.node_id == SYSTEM_VARIABLE_NODE_ID | |||
| assert var.app_id == app.id | |||
| assert var.get_variable_type() == DraftVariableType.SYS | |||
| var_names = [var.name for var in result.variables] | |||
| assert "sys_var1" in var_names | |||
| assert "sys_var2" in var_names | |||
| assert "conv_var" not in var_names | |||
| def test_get_variable_by_name_success(self, db_session_with_containers, mock_external_service_dependencies): | |||
| """ | |||
| Test getting variables by name successfully for different types. | |||
| This test verifies that the service can retrieve variables by name | |||
| for different variable types (conversation, system, node). This | |||
| functionality is important for variable lookup operations during | |||
| workflow execution and user interactions. | |||
| """ | |||
| fake = Faker() | |||
| app = self._create_test_app(db_session_with_containers, mock_external_service_dependencies, fake=fake) | |||
| test_value = StringSegment(value=fake.word()) | |||
| conv_var = self._create_test_variable( | |||
| db_session_with_containers, app.id, CONVERSATION_VARIABLE_NODE_ID, "test_conv_var", test_value, fake=fake | |||
| ) | |||
| sys_var = self._create_test_variable( | |||
| db_session_with_containers, app.id, SYSTEM_VARIABLE_NODE_ID, "test_sys_var", test_value, "system", fake=fake | |||
| ) | |||
| node_var = self._create_test_variable( | |||
| db_session_with_containers, app.id, "test_node", "test_node_var", test_value, "node", fake=fake | |||
| ) | |||
| service = WorkflowDraftVariableService(db_session_with_containers) | |||
| retrieved_conv_var = service.get_conversation_variable(app.id, "test_conv_var") | |||
| assert retrieved_conv_var is not None | |||
| assert retrieved_conv_var.name == "test_conv_var" | |||
| assert retrieved_conv_var.node_id == CONVERSATION_VARIABLE_NODE_ID | |||
| retrieved_sys_var = service.get_system_variable(app.id, "test_sys_var") | |||
| assert retrieved_sys_var is not None | |||
| assert retrieved_sys_var.name == "test_sys_var" | |||
| assert retrieved_sys_var.node_id == SYSTEM_VARIABLE_NODE_ID | |||
| retrieved_node_var = service.get_node_variable(app.id, "test_node", "test_node_var") | |||
| assert retrieved_node_var is not None | |||
| assert retrieved_node_var.name == "test_node_var" | |||
| assert retrieved_node_var.node_id == "test_node" | |||
| def test_get_variable_by_name_not_found(self, db_session_with_containers, mock_external_service_dependencies): | |||
| """ | |||
| Test getting variables by name when they don't exist. | |||
| This test verifies that the service returns None when trying to | |||
| retrieve variables by name that don't exist. This ensures proper | |||
| handling of missing variable scenarios for all variable types. | |||
| """ | |||
| fake = Faker() | |||
| app = self._create_test_app(db_session_with_containers, mock_external_service_dependencies, fake=fake) | |||
| service = WorkflowDraftVariableService(db_session_with_containers) | |||
| retrieved_conv_var = service.get_conversation_variable(app.id, "non_existent_conv_var") | |||
| assert retrieved_conv_var is None | |||
| retrieved_sys_var = service.get_system_variable(app.id, "non_existent_sys_var") | |||
| assert retrieved_sys_var is None | |||
| retrieved_node_var = service.get_node_variable(app.id, "test_node", "non_existent_node_var") | |||
| assert retrieved_node_var is None | |||
| @@ -0,0 +1,168 @@ | |||
| import datetime | |||
| from unittest.mock import Mock, patch | |||
| import pytest | |||
| from sqlalchemy.orm import Session | |||
| from services.clear_free_plan_tenant_expired_logs import ClearFreePlanTenantExpiredLogs | |||
| class TestClearFreePlanTenantExpiredLogs: | |||
| """Unit tests for ClearFreePlanTenantExpiredLogs._clear_message_related_tables method.""" | |||
| @pytest.fixture | |||
| def mock_session(self): | |||
| """Create a mock database session.""" | |||
| session = Mock(spec=Session) | |||
| session.query.return_value.filter.return_value.all.return_value = [] | |||
| session.query.return_value.filter.return_value.delete.return_value = 0 | |||
| return session | |||
| @pytest.fixture | |||
| def mock_storage(self): | |||
| """Create a mock storage object.""" | |||
| storage = Mock() | |||
| storage.save.return_value = None | |||
| return storage | |||
| @pytest.fixture | |||
| def sample_message_ids(self): | |||
| """Sample message IDs for testing.""" | |||
| return ["msg-1", "msg-2", "msg-3"] | |||
| @pytest.fixture | |||
| def sample_records(self): | |||
| """Sample records for testing.""" | |||
| records = [] | |||
| for i in range(3): | |||
| record = Mock() | |||
| record.id = f"record-{i}" | |||
| record.to_dict.return_value = { | |||
| "id": f"record-{i}", | |||
| "message_id": f"msg-{i}", | |||
| "created_at": datetime.datetime.now().isoformat(), | |||
| } | |||
| records.append(record) | |||
| return records | |||
| def test_clear_message_related_tables_empty_message_ids(self, mock_session): | |||
| """Test that method returns early when message_ids is empty.""" | |||
| with patch("services.clear_free_plan_tenant_expired_logs.storage") as mock_storage: | |||
| ClearFreePlanTenantExpiredLogs._clear_message_related_tables(mock_session, "tenant-123", []) | |||
| # Should not call any database operations | |||
| mock_session.query.assert_not_called() | |||
| mock_storage.save.assert_not_called() | |||
| def test_clear_message_related_tables_no_records_found(self, mock_session, sample_message_ids): | |||
| """Test when no related records are found.""" | |||
| with patch("services.clear_free_plan_tenant_expired_logs.storage") as mock_storage: | |||
| mock_session.query.return_value.filter.return_value.all.return_value = [] | |||
| ClearFreePlanTenantExpiredLogs._clear_message_related_tables(mock_session, "tenant-123", sample_message_ids) | |||
| # Should call query for each related table but find no records | |||
| assert mock_session.query.call_count > 0 | |||
| mock_storage.save.assert_not_called() | |||
| def test_clear_message_related_tables_with_records_and_to_dict( | |||
| self, mock_session, sample_message_ids, sample_records | |||
| ): | |||
| """Test when records are found and have to_dict method.""" | |||
| with patch("services.clear_free_plan_tenant_expired_logs.storage") as mock_storage: | |||
| mock_session.query.return_value.filter.return_value.all.return_value = sample_records | |||
| ClearFreePlanTenantExpiredLogs._clear_message_related_tables(mock_session, "tenant-123", sample_message_ids) | |||
| # The mock session returns the same records for all 7 related tables, so to_dict runs 7 times per record | |||
| for record in sample_records: | |||
| assert record.to_dict.call_count == 7 | |||
| # Should save backup data | |||
| assert mock_storage.save.call_count > 0 | |||
| def test_clear_message_related_tables_with_records_no_to_dict(self, mock_session, sample_message_ids): | |||
| """Test when records are found but don't have to_dict method.""" | |||
| with patch("services.clear_free_plan_tenant_expired_logs.storage") as mock_storage: | |||
| # Create records without to_dict method | |||
| records = [] | |||
| for i in range(2): | |||
| record = Mock() | |||
| mock_table = Mock() | |||
| mock_id_column = Mock() | |||
| mock_id_column.name = "id" | |||
| mock_message_id_column = Mock() | |||
| mock_message_id_column.name = "message_id" | |||
| mock_table.columns = [mock_id_column, mock_message_id_column] | |||
| record.__table__ = mock_table | |||
| record.id = f"record-{i}" | |||
| record.message_id = f"msg-{i}" | |||
| del record.to_dict | |||
| records.append(record) | |||
| # Mock records for first table only, empty for others | |||
| mock_session.query.return_value.filter.return_value.all.side_effect = [ | |||
| records, | |||
| [], | |||
| [], | |||
| [], | |||
| [], | |||
| [], | |||
| [], | |||
| ] | |||
| ClearFreePlanTenantExpiredLogs._clear_message_related_tables(mock_session, "tenant-123", sample_message_ids) | |||
| # Should save backup data even without to_dict | |||
| assert mock_storage.save.call_count > 0 | |||
| def test_clear_message_related_tables_storage_error_continues( | |||
| self, mock_session, sample_message_ids, sample_records | |||
| ): | |||
| """Test that method continues even when storage.save fails.""" | |||
| with patch("services.clear_free_plan_tenant_expired_logs.storage") as mock_storage: | |||
| mock_storage.save.side_effect = Exception("Storage error") | |||
| mock_session.query.return_value.filter.return_value.all.return_value = sample_records | |||
| # Should not raise exception | |||
| ClearFreePlanTenantExpiredLogs._clear_message_related_tables(mock_session, "tenant-123", sample_message_ids) | |||
| # Should still delete records even if backup fails | |||
| assert mock_session.query.return_value.filter.return_value.delete.called | |||
| def test_clear_message_related_tables_serialization_error_continues(self, mock_session, sample_message_ids): | |||
| """Test that method continues even when record serialization fails.""" | |||
| with patch("services.clear_free_plan_tenant_expired_logs.storage") as mock_storage: | |||
| record = Mock() | |||
| record.id = "record-1" | |||
| record.to_dict.side_effect = Exception("Serialization error") | |||
| mock_session.query.return_value.filter.return_value.all.return_value = [record] | |||
| # Should not raise exception | |||
| ClearFreePlanTenantExpiredLogs._clear_message_related_tables(mock_session, "tenant-123", sample_message_ids) | |||
| # Should still delete records even if serialization fails | |||
| assert mock_session.query.return_value.filter.return_value.delete.called | |||
| def test_clear_message_related_tables_deletion_called(self, mock_session, sample_message_ids, sample_records): | |||
| """Test that deletion is called for found records.""" | |||
| with patch("services.clear_free_plan_tenant_expired_logs.storage") as mock_storage: | |||
| mock_session.query.return_value.filter.return_value.all.return_value = sample_records | |||
| ClearFreePlanTenantExpiredLogs._clear_message_related_tables(mock_session, "tenant-123", sample_message_ids) | |||
| # Should call delete for each table that has records | |||
| assert mock_session.query.return_value.filter.return_value.delete.called | |||
| def test_clear_message_related_tables_logging_output( | |||
| self, mock_session, sample_message_ids, sample_records, capsys | |||
| ): | |||
| """Smoke test: the method should run to completion when records are present.""" | |||
| with patch("services.clear_free_plan_tenant_expired_logs.storage"): | |||
| mock_session.query.return_value.filter.return_value.all.return_value = sample_records | |||
| ClearFreePlanTenantExpiredLogs._clear_message_related_tables(mock_session, "tenant-123", sample_message_ids) | |||
| # Drain captured output; assertions on log content are intentionally omitted here. | |||
| capsys.readouterr() | |||
| @@ -1,5 +1,5 @@ | |||
| version = 1 | |||
| revision = 2 | |||
| revision = 3 | |||
| requires-python = ">=3.11, <3.13" | |||
| resolution-markers = [ | |||
| "python_full_version >= '3.12.4' and platform_python_implementation != 'PyPy' and sys_platform == 'linux'", | |||
| @@ -1265,6 +1265,8 @@ dependencies = [ | |||
| { name = "opentelemetry-instrumentation" }, | |||
| { name = "opentelemetry-instrumentation-celery" }, | |||
| { name = "opentelemetry-instrumentation-flask" }, | |||
| { name = "opentelemetry-instrumentation-redis" }, | |||
| { name = "opentelemetry-instrumentation-requests" }, | |||
| { name = "opentelemetry-instrumentation-sqlalchemy" }, | |||
| { name = "opentelemetry-propagator-b3" }, | |||
| { name = "opentelemetry-proto" }, | |||
| @@ -1448,6 +1450,8 @@ requires-dist = [ | |||
| { name = "opentelemetry-instrumentation", specifier = "==0.48b0" }, | |||
| { name = "opentelemetry-instrumentation-celery", specifier = "==0.48b0" }, | |||
| { name = "opentelemetry-instrumentation-flask", specifier = "==0.48b0" }, | |||
| { name = "opentelemetry-instrumentation-redis", specifier = "==0.48b0" }, | |||
| { name = "opentelemetry-instrumentation-requests", specifier = "==0.48b0" }, | |||
| { name = "opentelemetry-instrumentation-sqlalchemy", specifier = "==0.48b0" }, | |||
| { name = "opentelemetry-propagator-b3", specifier = "==1.27.0" }, | |||
| { name = "opentelemetry-proto", specifier = "==1.27.0" }, | |||
| @@ -3670,6 +3674,36 @@ wheels = [ | |||
| { url = "https://files.pythonhosted.org/packages/78/3d/fcde4f8f0bf9fa1ee73a12304fa538076fb83fe0a2ae966ab0f0b7da5109/opentelemetry_instrumentation_flask-0.48b0-py3-none-any.whl", hash = "sha256:26b045420b9d76e85493b1c23fcf27517972423480dc6cf78fd6924248ba5808", size = 14588, upload-time = "2024-08-28T21:26:58.504Z" }, | |||
| ] | |||
| [[package]] | |||
| name = "opentelemetry-instrumentation-redis" | |||
| version = "0.48b0" | |||
| source = { registry = "https://pypi.org/simple" } | |||
| dependencies = [ | |||
| { name = "opentelemetry-api" }, | |||
| { name = "opentelemetry-instrumentation" }, | |||
| { name = "opentelemetry-semantic-conventions" }, | |||
| { name = "wrapt" }, | |||
| ] | |||
| sdist = { url = "https://files.pythonhosted.org/packages/70/be/92e98e4c7f275be3d373899a41b0a7d4df64266657d985dbbdb9a54de0d5/opentelemetry_instrumentation_redis-0.48b0.tar.gz", hash = "sha256:61e33e984b4120e1b980d9fba6e9f7ca0c8d972f9970654d8f6e9f27fa115a8c", size = 10511, upload-time = "2024-08-28T21:28:15.061Z" } | |||
| wheels = [ | |||
| { url = "https://files.pythonhosted.org/packages/94/40/892f30d400091106309cc047fd3f6d76a828fedd984a953fd5386b78a2fb/opentelemetry_instrumentation_redis-0.48b0-py3-none-any.whl", hash = "sha256:48c7f2e25cbb30bde749dc0d8b9c74c404c851f554af832956b9630b27f5bcb7", size = 11610, upload-time = "2024-08-28T21:27:18.759Z" }, | |||
| ] | |||
| [[package]] | |||
| name = "opentelemetry-instrumentation-requests" | |||
| version = "0.48b0" | |||
| source = { registry = "https://pypi.org/simple" } | |||
| dependencies = [ | |||
| { name = "opentelemetry-api" }, | |||
| { name = "opentelemetry-instrumentation" }, | |||
| { name = "opentelemetry-semantic-conventions" }, | |||
| { name = "opentelemetry-util-http" }, | |||
| ] | |||
| sdist = { url = "https://files.pythonhosted.org/packages/52/ac/5eb78efde21ff21d0ad5dc8c6cc6a0f8ae482ce8a46293c2f45a628b6166/opentelemetry_instrumentation_requests-0.48b0.tar.gz", hash = "sha256:67ab9bd877a0352ee0db4616c8b4ae59736ddd700c598ed907482d44f4c9a2b3", size = 14120, upload-time = "2024-08-28T21:28:16.933Z" } | |||
| wheels = [ | |||
| { url = "https://files.pythonhosted.org/packages/43/df/0df9226d1b14f29d23c07e6194b9fd5ad50e7d987b7fd13df7dcf718aeb1/opentelemetry_instrumentation_requests-0.48b0-py3-none-any.whl", hash = "sha256:d4f01852121d0bd4c22f14f429654a735611d4f7bf3cf93f244bdf1489b2233d", size = 12366, upload-time = "2024-08-28T21:27:20.771Z" }, | |||
| ] | |||
| [[package]] | |||
| name = "opentelemetry-instrumentation-sqlalchemy" | |||
| version = "0.48b0" | |||
| @@ -538,7 +538,7 @@ services: | |||
| milvus-standalone: | |||
| container_name: milvus-standalone | |||
| image: milvusdb/milvus:v2.5.0-beta | |||
| image: milvusdb/milvus:v2.5.15 | |||
| profiles: | |||
| - milvus | |||
| command: [ 'milvus', 'run', 'standalone' ] | |||
| @@ -1087,7 +1087,7 @@ services: | |||
| milvus-standalone: | |||
| container_name: milvus-standalone | |||
| image: milvusdb/milvus:v2.5.0-beta | |||
| image: milvusdb/milvus:v2.5.15 | |||
| profiles: | |||
| - milvus | |||
| command: [ 'milvus', 'run', 'standalone' ] | |||
| @@ -0,0 +1,156 @@ | |||
| import React from 'react' | |||
| import { render } from '@testing-library/react' | |||
| import '@testing-library/jest-dom' | |||
| import { OpikIconBig } from '@/app/components/base/icons/src/public/tracing' | |||
| // Mock dependencies to isolate the SVG rendering issue | |||
| jest.mock('react-i18next', () => ({ | |||
| useTranslation: () => ({ | |||
| t: (key: string) => key, | |||
| }), | |||
| })) | |||
| describe('SVG Attribute Error Reproduction', () => { | |||
| // Capture console errors | |||
| const originalError = console.error | |||
| let errorMessages: string[] = [] | |||
| beforeEach(() => { | |||
| errorMessages = [] | |||
| console.error = jest.fn((message) => { | |||
| errorMessages.push(message) | |||
| originalError(message) | |||
| }) | |||
| }) | |||
| afterEach(() => { | |||
| console.error = originalError | |||
| }) | |||
| it('should reproduce inkscape attribute errors when rendering OpikIconBig', () => { | |||
| console.log('\n=== TESTING OpikIconBig SVG ATTRIBUTE ERRORS ===') | |||
| // Test multiple renders to check for inconsistency | |||
| for (let i = 0; i < 5; i++) { | |||
| console.log(`\nRender attempt ${i + 1}:`) | |||
| const { unmount } = render(<OpikIconBig />) | |||
| // Check for specific inkscape attribute errors | |||
| const inkscapeErrors = errorMessages.filter(msg => | |||
| typeof msg === 'string' && msg.includes('inkscape'), | |||
| ) | |||
| if (inkscapeErrors.length > 0) { | |||
| console.log(`Found ${inkscapeErrors.length} inkscape errors:`) | |||
| inkscapeErrors.forEach((error, index) => { | |||
| console.log(` ${index + 1}. ${error.substring(0, 100)}...`) | |||
| }) | |||
| } | |||
| else { | |||
| console.log('No inkscape errors found in this render') | |||
| } | |||
| unmount() | |||
| // Clear errors for next iteration | |||
| errorMessages = [] | |||
| } | |||
| }) | |||
| it('should analyze the SVG structure causing the errors', () => { | |||
| console.log('\n=== ANALYZING SVG STRUCTURE ===') | |||
| // Import the JSON data directly | |||
| const iconData = require('@/app/components/base/icons/src/public/tracing/OpikIconBig.json') | |||
| console.log('Icon structure analysis:') | |||
| console.log('- Root element:', iconData.icon.name) | |||
| console.log('- Children count:', iconData.icon.children?.length || 0) | |||
| // Find problematic elements | |||
| const findProblematicElements = (node: any, path = '') => { | |||
| const problematicElements: any[] = [] | |||
| if (node.name && (node.name.includes(':') || node.name.startsWith('sodipodi'))) { | |||
| problematicElements.push({ | |||
| path, | |||
| name: node.name, | |||
| attributes: Object.keys(node.attributes || {}), | |||
| }) | |||
| } | |||
| // Check attributes for inkscape/sodipodi properties | |||
| if (node.attributes) { | |||
| const problematicAttrs = Object.keys(node.attributes).filter(attr => | |||
| attr.startsWith('inkscape:') || attr.startsWith('sodipodi:'), | |||
| ) | |||
| if (problematicAttrs.length > 0) { | |||
| problematicElements.push({ | |||
| path, | |||
| name: node.name, | |||
| problematicAttributes: problematicAttrs, | |||
| }) | |||
| } | |||
| } | |||
| if (node.children) { | |||
| node.children.forEach((child: any, index: number) => { | |||
| problematicElements.push( | |||
| ...findProblematicElements(child, `${path}/${node.name}[${index}]`), | |||
| ) | |||
| }) | |||
| } | |||
| return problematicElements | |||
| } | |||
| const problematicElements = findProblematicElements(iconData.icon, 'root') | |||
| console.log(`\n🚨 Found ${problematicElements.length} problematic elements:`) | |||
| problematicElements.forEach((element, index) => { | |||
| console.log(`\n${index + 1}. Element: ${element.name}`) | |||
| console.log(` Path: ${element.path}`) | |||
| if (element.problematicAttributes) | |||
| console.log(` Problematic attributes: ${element.problematicAttributes.join(', ')}`) | |||
| }) | |||
| }) | |||
| it('should test the normalizeAttrs function behavior', () => { | |||
| console.log('\n=== TESTING normalizeAttrs FUNCTION ===') | |||
| const { normalizeAttrs } = require('@/app/components/base/icons/utils') | |||
| const testAttributes = { | |||
| 'inkscape:showpageshadow': '2', | |||
| 'inkscape:pageopacity': '0.0', | |||
| 'inkscape:pagecheckerboard': '0', | |||
| 'inkscape:deskcolor': '#d1d1d1', | |||
| 'sodipodi:docname': 'opik-icon-big.svg', | |||
| 'xmlns:inkscape': 'https://www.inkscape.org/namespaces/inkscape', | |||
| 'xmlns:sodipodi': 'https://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd', | |||
| 'xmlns:svg': 'https://www.w3.org/2000/svg', | |||
| 'data-name': 'Layer 1', | |||
| 'normal-attr': 'value', | |||
| 'class': 'test-class', | |||
| } | |||
| console.log('Input attributes:', Object.keys(testAttributes)) | |||
| const normalized = normalizeAttrs(testAttributes) | |||
| console.log('Normalized attributes:', Object.keys(normalized)) | |||
| console.log('Normalized values:', normalized) | |||
| // Check if problematic attributes are still present | |||
| const problematicKeys = Object.keys(normalized).filter(key => | |||
| key.toLowerCase().includes('inkscape') || key.toLowerCase().includes('sodipodi'), | |||
| ) | |||
| if (problematicKeys.length > 0) | |||
| console.log(`🚨 PROBLEM: Still found problematic attributes: ${problematicKeys.join(', ')}`) | |||
| else | |||
| console.log('✅ No problematic attributes found after normalization') | |||
| }) | |||
| }) | |||
| @@ -1,12 +1,9 @@ | |||
| 'use client' | |||
| import type { FC } from 'react' | |||
| import React, { useCallback, useEffect, useRef, useState } from 'react' | |||
| import { | |||
| RiEqualizer2Line, | |||
| } from '@remixicon/react' | |||
| import React, { useCallback, useRef, useState } from 'react' | |||
| import type { PopupProps } from './config-popup' | |||
| import ConfigPopup from './config-popup' | |||
| import cn from '@/utils/classnames' | |||
| import { | |||
| PortalToFollowElem, | |||
| PortalToFollowElemContent, | |||
| @@ -17,13 +14,13 @@ type Props = { | |||
| readOnly: boolean | |||
| className?: string | |||
| hasConfigured: boolean | |||
| controlShowPopup?: number | |||
| children?: React.ReactNode | |||
| } & PopupProps | |||
| const ConfigBtn: FC<Props> = ({ | |||
| className, | |||
| hasConfigured, | |||
| controlShowPopup, | |||
| children, | |||
| ...popupProps | |||
| }) => { | |||
| const [open, doSetOpen] = useState(false) | |||
| @@ -37,13 +34,6 @@ const ConfigBtn: FC<Props> = ({ | |||
| setOpen(!openRef.current) | |||
| }, [setOpen]) | |||
| useEffect(() => { | |||
| if (controlShowPopup) | |||
| // setOpen(!openRef.current) | |||
| setOpen(true) | |||
| // eslint-disable-next-line react-hooks/exhaustive-deps | |||
| }, [controlShowPopup]) | |||
| if (popupProps.readOnly && !hasConfigured) | |||
| return null | |||
| @@ -52,14 +42,11 @@ const ConfigBtn: FC<Props> = ({ | |||
| open={open} | |||
| onOpenChange={setOpen} | |||
| placement='bottom-end' | |||
| offset={{ | |||
| mainAxis: 12, | |||
| crossAxis: hasConfigured ? 8 : 49, | |||
| }} | |||
| offset={12} | |||
| > | |||
| <PortalToFollowElemTrigger onClick={handleTrigger}> | |||
| <div className={cn(className, 'rounded-md p-1')}> | |||
| <RiEqualizer2Line className='h-4 w-4 text-text-tertiary' /> | |||
| <div className="select-none"> | |||
| {children} | |||
| </div> | |||
| </PortalToFollowElemTrigger> | |||
| <PortalToFollowElemContent className='z-[11]'> | |||
| @@ -1,8 +1,9 @@ | |||
| 'use client' | |||
| import type { FC } from 'react' | |||
| import React, { useCallback, useEffect, useState } from 'react' | |||
| import React, { useEffect, useState } from 'react' | |||
| import { | |||
| RiArrowDownDoubleLine, | |||
| RiEqualizer2Line, | |||
| } from '@remixicon/react' | |||
| import { useTranslation } from 'react-i18next' | |||
| import { usePathname } from 'next/navigation' | |||
| @@ -180,10 +181,6 @@ const Panel: FC = () => { | |||
| })() | |||
| }, []) | |||
| const [controlShowPopup, setControlShowPopup] = useState<number>(0) | |||
| const showPopup = useCallback(() => { | |||
| setControlShowPopup(Date.now()) | |||
| }, [setControlShowPopup]) | |||
| if (!isLoaded) { | |||
| return ( | |||
| <div className='mb-3 flex items-center justify-between'> | |||
| @@ -196,46 +193,66 @@ const Panel: FC = () => { | |||
| return ( | |||
| <div className={cn('flex items-center justify-between')}> | |||
| <div | |||
| className={cn( | |||
| 'flex cursor-pointer items-center rounded-xl border-l-[0.5px] border-t border-effects-highlight bg-background-default-dodge p-2 shadow-xs hover:border-effects-highlight-lightmode-off hover:bg-background-default-lighter', | |||
| controlShowPopup && 'border-effects-highlight-lightmode-off bg-background-default-lighter', | |||
| )} | |||
| onClick={showPopup} | |||
| > | |||
| {!inUseTracingProvider && ( | |||
| <> | |||
| {!inUseTracingProvider && ( | |||
| <ConfigButton | |||
| appId={appId} | |||
| readOnly={readOnly} | |||
| hasConfigured={false} | |||
| enabled={enabled} | |||
| onStatusChange={handleTracingEnabledChange} | |||
| chosenProvider={inUseTracingProvider} | |||
| onChooseProvider={handleChooseProvider} | |||
| arizeConfig={arizeConfig} | |||
| phoenixConfig={phoenixConfig} | |||
| langSmithConfig={langSmithConfig} | |||
| langFuseConfig={langFuseConfig} | |||
| opikConfig={opikConfig} | |||
| weaveConfig={weaveConfig} | |||
| aliyunConfig={aliyunConfig} | |||
| onConfigUpdated={handleTracingConfigUpdated} | |||
| onConfigRemoved={handleTracingConfigRemoved} | |||
| > | |||
| <div | |||
| className={cn( | |||
| 'flex cursor-pointer select-none items-center rounded-xl border-l-[0.5px] border-t border-effects-highlight bg-background-default-dodge p-2 shadow-xs hover:border-effects-highlight-lightmode-off hover:bg-background-default-lighter', | |||
| )} | |||
| > | |||
| <TracingIcon size='md' /> | |||
| <div className='system-sm-semibold mx-2 text-text-secondary'>{t(`${I18N_PREFIX}.title`)}</div> | |||
| <div className='flex items-center' onClick={e => e.stopPropagation()}> | |||
| <ConfigButton | |||
| appId={appId} | |||
| readOnly={readOnly} | |||
| hasConfigured={false} | |||
| enabled={enabled} | |||
| onStatusChange={handleTracingEnabledChange} | |||
| chosenProvider={inUseTracingProvider} | |||
| onChooseProvider={handleChooseProvider} | |||
| arizeConfig={arizeConfig} | |||
| phoenixConfig={phoenixConfig} | |||
| langSmithConfig={langSmithConfig} | |||
| langFuseConfig={langFuseConfig} | |||
| opikConfig={opikConfig} | |||
| weaveConfig={weaveConfig} | |||
| aliyunConfig={aliyunConfig} | |||
| onConfigUpdated={handleTracingConfigUpdated} | |||
| onConfigRemoved={handleTracingConfigRemoved} | |||
| controlShowPopup={controlShowPopup} | |||
| /> | |||
| <div className='rounded-md p-1'> | |||
| <RiEqualizer2Line className='h-4 w-4 text-text-tertiary' /> | |||
| </div> | |||
| <Divider type='vertical' className='h-3.5' /> | |||
| <div className='rounded-md p-1'> | |||
| <RiArrowDownDoubleLine className='h-4 w-4 text-text-tertiary' /> | |||
| </div> | |||
| </> | |||
| )} | |||
| {hasConfiguredTracing && ( | |||
| <> | |||
| </div> | |||
| </ConfigButton> | |||
| )} | |||
| {hasConfiguredTracing && ( | |||
| <ConfigButton | |||
| appId={appId} | |||
| readOnly={readOnly} | |||
| hasConfigured | |||
| enabled={enabled} | |||
| onStatusChange={handleTracingEnabledChange} | |||
| chosenProvider={inUseTracingProvider} | |||
| onChooseProvider={handleChooseProvider} | |||
| arizeConfig={arizeConfig} | |||
| phoenixConfig={phoenixConfig} | |||
| langSmithConfig={langSmithConfig} | |||
| langFuseConfig={langFuseConfig} | |||
| opikConfig={opikConfig} | |||
| weaveConfig={weaveConfig} | |||
| aliyunConfig={aliyunConfig} | |||
| onConfigUpdated={handleTracingConfigUpdated} | |||
| onConfigRemoved={handleTracingConfigRemoved} | |||
| > | |||
| <div | |||
| className={cn( | |||
| 'flex cursor-pointer select-none items-center rounded-xl border-l-[0.5px] border-t border-effects-highlight bg-background-default-dodge p-2 shadow-xs hover:border-effects-highlight-lightmode-off hover:bg-background-default-lighter', | |||
| )} | |||
| > | |||
| <div className='ml-4 mr-1 flex items-center'> | |||
| <Indicator color={enabled ? 'green' : 'gray'} /> | |||
| <div className='system-xs-semibold-uppercase ml-1.5 text-text-tertiary'> | |||
| @@ -243,33 +260,14 @@ const Panel: FC = () => { | |||
| </div> | |||
| </div> | |||
| {InUseProviderIcon && <InUseProviderIcon className='ml-1 h-4' />} | |||
| <Divider type='vertical' className='h-3.5' /> | |||
| <div className='flex items-center' onClick={e => e.stopPropagation()}> | |||
| <ConfigButton | |||
| appId={appId} | |||
| readOnly={readOnly} | |||
| hasConfigured | |||
| className='ml-2' | |||
| enabled={enabled} | |||
| onStatusChange={handleTracingEnabledChange} | |||
| chosenProvider={inUseTracingProvider} | |||
| onChooseProvider={handleChooseProvider} | |||
| arizeConfig={arizeConfig} | |||
| phoenixConfig={phoenixConfig} | |||
| langSmithConfig={langSmithConfig} | |||
| langFuseConfig={langFuseConfig} | |||
| opikConfig={opikConfig} | |||
| weaveConfig={weaveConfig} | |||
| aliyunConfig={aliyunConfig} | |||
| onConfigUpdated={handleTracingConfigUpdated} | |||
| onConfigRemoved={handleTracingConfigRemoved} | |||
| controlShowPopup={controlShowPopup} | |||
| /> | |||
| <div className='ml-2 rounded-md p-1'> | |||
| <RiEqualizer2Line className='h-4 w-4 text-text-tertiary' /> | |||
| </div> | |||
| </> | |||
| )} | |||
| </div > | |||
| </div > | |||
| <Divider type='vertical' className='h-3.5' /> | |||
| </div> | |||
| </ConfigButton> | |||
| )} | |||
| </div> | |||
| ) | |||
| } | |||
| export default React.memo(Panel) | |||
| @@ -1,9 +1,7 @@ | |||
| import React from 'react' | |||
| import DatasetUpdateForm from '@/app/components/datasets/create' | |||
| type Props = {} | |||
| const DatasetCreation = async (props: Props) => { | |||
| const DatasetCreation = async () => { | |||
| return ( | |||
| <DatasetUpdateForm /> | |||
| ) | |||
| @@ -88,7 +88,8 @@ const HeaderOptions: FC<Props> = ({ | |||
| await clearAllAnnotations(appId) | |||
| onAdded() | |||
| } | |||
| catch (_) { | |||
| catch (e) { | |||
| console.error(`failed to clear all annotations, ${e}`) | |||
| } | |||
| finally { | |||
| setShowClearConfirm(false) | |||
| @@ -39,10 +39,10 @@ const Footer = () => { | |||
| <footer className='relative shrink-0 grow-0 px-12 py-2'> | |||
| <button | |||
| onClick={handleClose} | |||
| className='absolute right-2 top-2 flex h-6 w-6 cursor-pointer items-center justify-center rounded-full transition-colors duration-200 ease-in-out hover:bg-gray-100 dark:hover:bg-gray-800' | |||
| className='absolute right-2 top-2 flex h-6 w-6 cursor-pointer items-center justify-center rounded-full transition-colors duration-200 ease-in-out hover:bg-components-main-nav-nav-button-bg-active' | |||
| aria-label="Close footer" | |||
| > | |||
| <RiCloseLine className='h-4 w-4 text-text-tertiary' /> | |||
| <RiCloseLine className='h-4 w-4 text-text-tertiary hover:text-text-secondary' /> | |||
| </button> | |||
| <h3 className='text-gradient text-xl font-semibold leading-tight'>{t('app.join')}</h3> | |||
| <p className='system-sm-regular mt-1 text-text-tertiary'>{t('app.communityIntro')}</p> | |||
| @@ -115,8 +115,11 @@ export const useChatWithHistory = (installedAppInfo?: InstalledApp) => { | |||
| }, []) | |||
| useEffect(() => { | |||
| if (appData?.site.default_language) | |||
| changeLanguage(appData.site.default_language) | |||
| const setLocaleFromProps = async () => { | |||
| if (appData?.site.default_language) | |||
| await changeLanguage(appData.site.default_language) | |||
| } | |||
| setLocaleFromProps() | |||
| }, [appData]) | |||
| const [sidebarCollapseState, setSidebarCollapseState] = useState<boolean>(false) | |||
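| The hunk above wraps the awaited changeLanguage call in an inner setLocaleFromProps function because a useEffect callback cannot itself be async: React treats the callback's return value as a cleanup function, and an async callback always returns a Promise. A minimal standalone sketch of the same pattern, assuming a changeLanguage that returns a Promise (the useLocaleSync name and its parameters are illustrative, not from the diff): | |||
| ```ts | |||
| import { useEffect } from 'react' | |||
|  | |||
| // Illustrative hook: await an async locale setter from inside useEffect by | |||
| // wrapping it in an inner async function and invoking it immediately. | |||
| export function useLocaleSync( | |||
|   defaultLanguage: string | undefined, | |||
|   changeLanguage: (locale: string) => Promise<void>, | |||
| ) { | |||
|   useEffect(() => { | |||
|     const setLocaleFromProps = async () => { | |||
|       if (defaultLanguage) | |||
|         await changeLanguage(defaultLanguage) | |||
|     } | |||
|     setLocaleFromProps() | |||
|   }, [defaultLanguage, changeLanguage]) | |||
| } | |||
| ``` | |||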
| @@ -101,15 +101,15 @@ export const useEmbeddedChatbot = () => { | |||
| if (localeParam) { | |||
| // If locale parameter exists in URL, use it instead of default | |||
| changeLanguage(localeParam) | |||
| await changeLanguage(localeParam) | |||
| } | |||
| else if (localeFromSysVar) { | |||
| // If locale is set as a system variable, use that | |||
| changeLanguage(localeFromSysVar) | |||
| await changeLanguage(localeFromSysVar) | |||
| } | |||
| else if (appInfo?.site.default_language) { | |||
| // Otherwise use the default from app config | |||
| changeLanguage(appInfo.site.default_language) | |||
| await changeLanguage(appInfo.site.default_language) | |||
| } | |||
| } | |||
| @@ -68,6 +68,7 @@ export const useFile = (fileConfig: FileUpload) => { | |||
| } | |||
| return true | |||
| } | |||
| case SupportUploadFileTypes.custom: | |||
| case SupportUploadFileTypes.document: { | |||
| if (fileSize > docSizeLimit) { | |||
| notify({ | |||
| @@ -107,19 +108,6 @@ export const useFile = (fileConfig: FileUpload) => { | |||
| } | |||
| return true | |||
| } | |||
| case SupportUploadFileTypes.custom: { | |||
| if (fileSize > docSizeLimit) { | |||
| notify({ | |||
| type: 'error', | |||
| message: t('common.fileUploader.uploadFromComputerLimit', { | |||
| type: SupportUploadFileTypes.document, | |||
| size: formatFileSize(docSizeLimit), | |||
| }), | |||
| }) | |||
| return false | |||
| } | |||
| return true | |||
| } | |||
| default: { | |||
| return true | |||
| } | |||
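| With the duplicated custom branch removed above, custom file types now share the document branch through an explicit fall-through, so both are validated against the same document size limit. A condensed, illustrative reduction of that check (the function name is hypothetical; only the custom/document merge is taken from the diff): | |||
| ```ts | |||
| // Illustrative: 'custom' falls through to 'document', sharing docSizeLimit. | |||
| export function isWithinDocSizeLimit( | |||
|   supportFileType: string, | |||
|   fileSize: number, | |||
|   docSizeLimit: number, | |||
| ): boolean { | |||
|   switch (supportFileType) { | |||
|     case 'custom': // falls through to the document limit | |||
|     case 'document': | |||
|       return fileSize <= docSizeLimit | |||
|     default: | |||
|       return true | |||
|   } | |||
| } | |||
|  | |||
| // e.g. isWithinDocSizeLimit('custom', 20 * 1024 ** 2, 15 * 1024 ** 2) === false | |||
| ``` | |||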
| @@ -231,7 +219,7 @@ export const useFile = (fileConfig: FileUpload) => { | |||
| url: res.url, | |||
| } | |||
| if (!isAllowedFileExtension(res.name, res.mime_type, fileConfig.allowed_file_types || [], fileConfig.allowed_file_extensions || [])) { | |||
| notify({ type: 'error', message: `${t('common.fileUploader.fileExtensionNotSupport')} ${file.type}` }) | |||
| notify({ type: 'error', message: `${t('common.fileUploader.fileExtensionNotSupport')} ${newFile.type}` }) | |||
| handleRemoveFile(uploadingFile.id) | |||
| } | |||
| if (!checkSizeLimit(newFile.supportFileType, newFile.size)) | |||
| @@ -14,9 +14,26 @@ export type Attrs = { | |||
| export function normalizeAttrs(attrs: Attrs = {}): Attrs { | |||
| return Object.keys(attrs).reduce((acc: Attrs, key) => { | |||
| // Filter out editor metadata attributes before processing | |||
| if (key.startsWith('inkscape:') | |||
| || key.startsWith('sodipodi:') | |||
| || key.startsWith('xmlns:inkscape') | |||
| || key.startsWith('xmlns:sodipodi') | |||
| || key.startsWith('xmlns:svg') | |||
| || key === 'data-name') | |||
| return acc | |||
| const val = attrs[key] | |||
| key = key.replace(/([-]\w)/g, (g: string) => g[1].toUpperCase()) | |||
| key = key.replace(/([:]\w)/g, (g: string) => g[1].toUpperCase()) | |||
| // Additional filter after camelCase conversion | |||
| if (key === 'xmlnsInkscape' | |||
| || key === 'xmlnsSodipodi' | |||
| || key === 'xmlnsSvg' | |||
| || key === 'dataName') | |||
| return acc | |||
| switch (key) { | |||
| case 'class': | |||
| acc.className = val | |||
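| For reference, a condensed standalone version of the filtering added above (re-implemented here for illustration; the project's normalizeAttrs in the icons utils also handles further special-cased keys that this hunk truncates): | |||
| ```ts | |||
| type Attrs = Record<string, string> | |||
|  | |||
| // Illustrative re-implementation: Inkscape/Sodipodi editor metadata (and the | |||
| // matching xmlns declarations) are dropped, then the remaining keys are | |||
| // camelCased into React-compatible props, with class mapped to className. | |||
| export function normalizeAttrsSketch(attrs: Attrs = {}): Attrs { | |||
|   return Object.keys(attrs).reduce((acc: Attrs, rawKey) => { | |||
|     if ( | |||
|       rawKey.startsWith('inkscape:') | |||
|       || rawKey.startsWith('sodipodi:') | |||
|       || rawKey.startsWith('xmlns:inkscape') | |||
|       || rawKey.startsWith('xmlns:sodipodi') | |||
|       || rawKey.startsWith('xmlns:svg') | |||
|       || rawKey === 'data-name' | |||
|     ) | |||
|       return acc | |||
|     const val = attrs[rawKey] | |||
|     const key = rawKey | |||
|       .replace(/([-]\w)/g, g => g[1].toUpperCase()) | |||
|       .replace(/([:]\w)/g, g => g[1].toUpperCase()) | |||
|     if (key === 'class') | |||
|       acc.className = val | |||
|     else | |||
|       acc[key] = val | |||
|     return acc | |||
|   }, {}) | |||
| } | |||
|  | |||
| // normalizeAttrsSketch({ 'inkscape:pageopacity': '0', 'data-name': 'Layer 1', 'stroke-width': '2', class: 'icon' }) | |||
| // -> { strokeWidth: '2', className: 'icon' } | |||
| ``` | |||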
| @@ -139,7 +139,10 @@ const TagFilter: FC<TagFilterProps> = ({ | |||
| </div> | |||
| <div className='border-t-[0.5px] border-divider-regular' /> | |||
| <div className='p-1'> | |||
| <div className='flex cursor-pointer items-center gap-2 rounded-lg py-[6px] pl-3 pr-2 hover:bg-state-base-hover' onClick={() => setShowTagManagementModal(true)}> | |||
| <div className='flex cursor-pointer items-center gap-2 rounded-lg py-[6px] pl-3 pr-2 hover:bg-state-base-hover' onClick={() => { | |||
| setShowTagManagementModal(true) | |||
| setOpen(false) | |||
| }}> | |||
| <Tag03 className='h-4 w-4 text-text-tertiary' /> | |||
| <div className='grow truncate text-sm leading-5 text-text-secondary'> | |||
| {t('common.tag.manageTags')} | |||
| @@ -87,7 +87,7 @@ const Doc = ({ apiBaseUrl }: DocProps) => { | |||
| <div className={`fixed right-20 top-32 z-10 transition-all ${isTocExpanded ? 'w-64' : 'w-10'}`}> | |||
| {isTocExpanded | |||
| ? ( | |||
| <nav className='toc max-h-[calc(100vh-150px)] w-full overflow-y-auto rounded-lg bg-components-panel-bg p-4 shadow-md'> | |||
| <nav className='toc max-h-[calc(100vh-150px)] w-full overflow-y-auto rounded-lg border border-components-panel-border bg-components-panel-bg p-4 shadow-md'> | |||
| <div className='mb-4 flex items-center justify-between'> | |||
| <h3 className='text-lg font-semibold text-text-primary'>{t('appApi.develop.toc')}</h3> | |||
| <button | |||
| @@ -115,7 +115,7 @@ const Doc = ({ apiBaseUrl }: DocProps) => { | |||
| : ( | |||
| <button | |||
| onClick={() => setIsTocExpanded(true)} | |||
| className='flex h-10 w-10 items-center justify-center rounded-full bg-components-button-secondary-bg shadow-md transition-colors duration-200 hover:bg-components-button-secondary-bg-hover' | |||
| className='flex h-10 w-10 items-center justify-center rounded-full border border-components-panel-border bg-components-button-secondary-bg shadow-md transition-colors duration-200 hover:bg-components-button-secondary-bg-hover' | |||
| > | |||
| <RiListUnordered className='h-6 w-6 text-components-button-secondary-text' /> | |||
| </button> | |||
| @@ -25,7 +25,7 @@ import { Row, Col, Properties, Property, Heading, SubProperty, PropertyInstructi | |||
| </CodeGroup> | |||
| </div> | |||
| <hr className='ml-0 mr-0' /> | |||
| <hr style={{ marginLeft: 0, marginRight: 0, width: '100%', maxWidth: '100%' }} /> | |||
| <Heading | |||
| url='/datasets/{dataset_id}/document/create-by-text' | |||
| @@ -163,7 +163,7 @@ import { Row, Col, Properties, Property, Heading, SubProperty, PropertyInstructi | |||
| </Col> | |||
| </Row> | |||
| <hr className='ml-0 mr-0' /> | |||
| <hr style={{ marginLeft: 0, marginRight: 0, width: '100%', maxWidth: '100%' }} /> | |||
| <Heading | |||
| url='/datasets/{dataset_id}/document/create-by-file' | |||
| @@ -294,7 +294,7 @@ import { Row, Col, Properties, Property, Heading, SubProperty, PropertyInstructi | |||
| </Col> | |||
| </Row> | |||
| <hr className='ml-0 mr-0' /> | |||
| <hr style={{ marginLeft: 0, marginRight: 0, width: '100%', maxWidth: '100%' }} /> | |||
| <Heading | |||
| url='/datasets' | |||
| @@ -400,7 +400,7 @@ import { Row, Col, Properties, Property, Heading, SubProperty, PropertyInstructi | |||
| </Col> | |||
| </Row> | |||
| <hr className='ml-0 mr-0' /> | |||
| <hr style={{ marginLeft: 0, marginRight: 0, width: '100%', maxWidth: '100%' }} /> | |||
| <Heading | |||
| url='/datasets' | |||
| @@ -472,7 +472,7 @@ import { Row, Col, Properties, Property, Heading, SubProperty, PropertyInstructi | |||
| </Col> | |||
| </Row> | |||
| <hr className='ml-0 mr-0' /> | |||
| <hr style={{ marginLeft: 0, marginRight: 0, width: '100%', maxWidth: '100%' }} /> | |||
| <Heading | |||
| url='/datasets/{dataset_id}' | |||
| @@ -553,7 +553,7 @@ import { Row, Col, Properties, Property, Heading, SubProperty, PropertyInstructi | |||
| </Col> | |||
| </Row> | |||
| <hr className='ml-0 mr-0' /> | |||
| <hr style={{ marginLeft: 0, marginRight: 0, width: '100%', maxWidth: '100%' }} /> | |||
| <Heading | |||
| url='/datasets/{dataset_id}' | |||
| @@ -714,7 +714,7 @@ import { Row, Col, Properties, Property, Heading, SubProperty, PropertyInstructi | |||
| </Col> | |||
| </Row> | |||
| <hr className='ml-0 mr-0' /> | |||
| <hr style={{ marginLeft: 0, marginRight: 0, width: '100%', maxWidth: '100%' }} /> | |||
| <Heading | |||
| url='/datasets/{dataset_id}' | |||
| @@ -751,7 +751,7 @@ import { Row, Col, Properties, Property, Heading, SubProperty, PropertyInstructi | |||
| </Col> | |||
| </Row> | |||
| <hr className='ml-0 mr-0' /> | |||
| <hr style={{ marginLeft: 0, marginRight: 0, width: '100%', maxWidth: '100%' }} /> | |||
| <Heading | |||
| url='/datasets/{dataset_id}/documents/{document_id}/update-by-text' | |||
| @@ -853,7 +853,7 @@ import { Row, Col, Properties, Property, Heading, SubProperty, PropertyInstructi | |||
| </Col> | |||
| </Row> | |||
| <hr className='ml-0 mr-0' /> | |||
| <hr style={{ marginLeft: 0, marginRight: 0, width: '100%', maxWidth: '100%' }} /> | |||
| <Heading | |||
| url='/datasets/{dataset_id}/documents/{document_id}/update-by-file' | |||
| @@ -952,7 +952,7 @@ import { Row, Col, Properties, Property, Heading, SubProperty, PropertyInstructi | |||
| </Col> | |||
| </Row> | |||
| <hr className='ml-0 mr-0' /> | |||
| <hr style={{ marginLeft: 0, marginRight: 0, width: '100%', maxWidth: '100%' }} /> | |||
| <Heading | |||
| url='/datasets/{dataset_id}/documents/{batch}/indexing-status' | |||
| @@ -1007,7 +1007,7 @@ import { Row, Col, Properties, Property, Heading, SubProperty, PropertyInstructi | |||
| </Col> | |||
| </Row> | |||
| <hr className='ml-0 mr-0' /> | |||
| <hr style={{ marginLeft: 0, marginRight: 0, width: '100%', maxWidth: '100%' }} /> | |||
| <Heading | |||
| url='/datasets/{dataset_id}/documents/{document_id}' | |||
| @@ -1047,7 +1047,7 @@ import { Row, Col, Properties, Property, Heading, SubProperty, PropertyInstructi | |||
| </Col> | |||
| </Row> | |||
| <hr className='ml-0 mr-0' /> | |||
| <hr style={{ marginLeft: 0, marginRight: 0, width: '100%', maxWidth: '100%' }} /> | |||
| <Heading | |||
| url='/datasets/{dataset_id}/documents' | |||
| @@ -1122,7 +1122,7 @@ import { Row, Col, Properties, Property, Heading, SubProperty, PropertyInstructi | |||
| </Col> | |||
| </Row> | |||
| <hr className='ml-0 mr-0' /> | |||
| <hr style={{ marginLeft: 0, marginRight: 0, width: '100%', maxWidth: '100%' }} /> | |||
| <Heading | |||
| url='/datasets/{dataset_id}/documents/{document_id}' | |||
| @@ -1245,7 +1245,7 @@ import { Row, Col, Properties, Property, Heading, SubProperty, PropertyInstructi | |||
| </Col> | |||
| </Row> | |||
| ___ | |||
| <hr className='ml-0 mr-0' /> | |||
| <hr style={{ marginLeft: 0, marginRight: 0, width: '100%', maxWidth: '100%' }} /> | |||
| <Heading | |||
| url='/datasets/{dataset_id}/documents/status/{action}' | |||
| @@ -1302,7 +1302,7 @@ ___ | |||
| </Col> | |||
| </Row> | |||
| <hr className='ml-0 mr-0' /> | |||
| <hr style={{ marginLeft: 0, marginRight: 0, width: '100%', maxWidth: '100%' }} /> | |||
| <Heading | |||
| url='/datasets/{dataset_id}/documents/{document_id}/segments' | |||
| @@ -1388,7 +1388,7 @@ ___ | |||
| </Col> | |||
| </Row> | |||
| <hr className='ml-0 mr-0' /> | |||
| <hr style={{ marginLeft: 0, marginRight: 0, width: '100%', maxWidth: '100%' }} /> | |||
| <Heading | |||
| url='/datasets/{dataset_id}/documents/{document_id}/segments' | |||
| @@ -1476,7 +1476,7 @@ ___ | |||
| </Col> | |||
| </Row> | |||
| <hr className='ml-0 mr-0' /> | |||
| <hr style={{ marginLeft: 0, marginRight: 0, width: '100%', maxWidth: '100%' }} /> | |||
| <Heading | |||
| url='/datasets/{dataset_id}/documents/{document_id}/segments/{segment_id}' | |||
| @@ -1546,7 +1546,7 @@ ___ | |||
| </Col> | |||
| </Row> | |||
| <hr className='ml-0 mr-0' /> | |||
| <hr style={{ marginLeft: 0, marginRight: 0, width: '100%', maxWidth: '100%' }} /> | |||
| <Heading | |||
| url='/datasets/{dataset_id}/documents/{document_id}/segments/{segment_id}' | |||
| @@ -1590,7 +1590,7 @@ ___ | |||
| </Col> | |||
| </Row> | |||
| <hr className='ml-0 mr-0' /> | |||
| <hr style={{ marginLeft: 0, marginRight: 0, width: '100%', maxWidth: '100%' }} /> | |||
| <Heading | |||
| url='/datasets/{dataset_id}/documents/{document_id}/segments/{segment_id}' | |||
| @@ -1679,7 +1679,7 @@ ___ | |||
| </Col> | |||
| </Row> | |||
| <hr className='ml-0 mr-0' /> | |||
| <hr style={{ marginLeft: 0, marginRight: 0, width: '100%', maxWidth: '100%' }} /> | |||
| <Heading | |||
| url='/datasets/{dataset_id}/documents/{document_id}/segments/{segment_id}/child_chunks' | |||
| @@ -1750,7 +1750,7 @@ ___ | |||
| </Col> | |||
| </Row> | |||
| <hr className='ml-0 mr-0' /> | |||
| <hr style={{ marginLeft: 0, marginRight: 0, width: '100%', maxWidth: '100%' }} /> | |||
| <Heading | |||
| url='/datasets/{dataset_id}/documents/{document_id}/segments/{segment_id}/child_chunks' | |||
| @@ -1827,7 +1827,7 @@ ___ | |||
| </Col> | |||
| </Row> | |||
| <hr className='ml-0 mr-0' /> | |||
| <hr style={{ marginLeft: 0, marginRight: 0, width: '100%', maxWidth: '100%' }} /> | |||
| <Heading | |||
| url='/datasets/{dataset_id}/documents/{document_id}/segments/{segment_id}/child_chunks/{child_chunk_id}' | |||
| @@ -1873,7 +1873,7 @@ ___ | |||
| </Col> | |||
| </Row> | |||
| <hr className='ml-0 mr-0' /> | |||
| <hr style={{ marginLeft: 0, marginRight: 0, width: '100%', maxWidth: '100%' }} /> | |||
| <Heading | |||
| url='/datasets/{dataset_id}/documents/{document_id}/segments/{segment_id}/child_chunks/{child_chunk_id}' | |||
| @@ -1947,7 +1947,7 @@ ___ | |||
| </Col> | |||
| </Row> | |||
| <hr className='ml-0 mr-0' /> | |||
| <hr style={{ marginLeft: 0, marginRight: 0, width: '100%', maxWidth: '100%' }} /> | |||
| <Heading | |||
| url='/datasets/{dataset_id}/documents/{document_id}/upload-file' | |||
| @@ -1998,7 +1998,7 @@ ___ | |||
| </Col> | |||
| </Row> | |||
| <hr className='ml-0 mr-0' /> | |||
| <hr style={{ marginLeft: 0, marginRight: 0, width: '100%', maxWidth: '100%' }} /> | |||
| <Heading | |||
| url='/datasets/{dataset_id}/retrieve' | |||
| @@ -2177,7 +2177,7 @@ ___ | |||
| </Col> | |||
| </Row> | |||
| <hr className='ml-0 mr-0' /> | |||
| <hr style={{ marginLeft: 0, marginRight: 0, width: '100%', maxWidth: '100%' }} /> | |||
| <Heading | |||
| url='/datasets/{dataset_id}/metadata' | |||
| @@ -2224,7 +2224,7 @@ ___ | |||
| </Col> | |||
| </Row> | |||
| <hr className='ml-0 mr-0' /> | |||
| <hr style={{ marginLeft: 0, marginRight: 0, width: '100%', maxWidth: '100%' }} /> | |||
| <Heading | |||
| url='/datasets/{dataset_id}/metadata/{metadata_id}' | |||
| @@ -2273,7 +2273,7 @@ ___ | |||
| </Col> | |||
| </Row> | |||
| <hr className='ml-0 mr-0' /> | |||
| <hr style={{ marginLeft: 0, marginRight: 0, width: '100%', maxWidth: '100%' }} /> | |||
| <Heading | |||
| url='/datasets/{dataset_id}/metadata/{metadata_id}' | |||
| @@ -2306,7 +2306,7 @@ ___ | |||
| </Col> | |||
| </Row> | |||
| <hr className='ml-0 mr-0' /> | |||
| <hr style={{ marginLeft: 0, marginRight: 0, width: '100%', maxWidth: '100%' }} /> | |||
| <Heading | |||
| url='/datasets/{dataset_id}/metadata/built-in/{action}' | |||
| @@ -2339,7 +2339,7 @@ ___ | |||
| </Col> | |||
| </Row> | |||
| <hr className='ml-0 mr-0' /> | |||
| <hr style={{ marginLeft: 0, marginRight: 0, width: '100%', maxWidth: '100%' }} /> | |||
| <Heading | |||
| url='/datasets/{dataset_id}/documents/metadata' | |||
| @@ -2378,7 +2378,7 @@ ___ | |||
| </Col> | |||
| </Row> | |||
| <hr className='ml-0 mr-0' /> | |||
| <hr style={{ marginLeft: 0, marginRight: 0, width: '100%', maxWidth: '100%' }} /> | |||
| <Heading | |||
| url='/datasets/{dataset_id}/metadata' | |||
| @@ -2424,7 +2424,7 @@ ___ | |||
| </Col> | |||
| </Row> | |||
| <hr className='ml-0 mr-0' /> | |||
| <hr style={{ marginLeft: 0, marginRight: 0, width: '100%', maxWidth: '100%' }} /> | |||
| <Heading | |||
| url='/workspaces/current/models/model-types/text-embedding' | |||
| @@ -2528,7 +2528,7 @@ ___ | |||
| </Col> | |||
| </Row> | |||
| <hr className='ml-0 mr-0' /> | |||
| <hr style={{ marginLeft: 0, marginRight: 0, width: '100%', maxWidth: '100%' }} /> | |||
| <Heading | |||
| @@ -2574,7 +2574,7 @@ Okay, I will translate the Chinese text in your document while keeping all forma | |||
| </Row> | |||
| <hr className='ml-0 mr-0' /> | |||
| <hr style={{ marginLeft: 0, marginRight: 0, width: '100%', maxWidth: '100%' }} /> | |||
| <Heading | |||
| url='/datasets/tags' | |||
| @@ -2615,7 +2615,7 @@ Okay, I will translate the Chinese text in your document while keeping all forma | |||
| </Col> | |||
| </Row> | |||
| <hr className='ml-0 mr-0' /> | |||
| <hr style={{ marginLeft: 0, marginRight: 0, width: '100%', maxWidth: '100%' }} /> | |||
| <Heading | |||
| url='/datasets/tags' | |||
| @@ -2662,7 +2662,7 @@ Okay, I will translate the Chinese text in your document while keeping all forma | |||
| </Col> | |||
| </Row> | |||
| <hr className='ml-0 mr-0' /> | |||
| <hr style={{ marginLeft: 0, marginRight: 0, width: '100%', maxWidth: '100%' }} /> | |||
| <Heading | |||
| @@ -2704,7 +2704,7 @@ Okay, I will translate the Chinese text in your document while keeping all forma | |||
| </Col> | |||
| </Row> | |||
| <hr className='ml-0 mr-0' /> | |||
| <hr style={{ marginLeft: 0, marginRight: 0, width: '100%', maxWidth: '100%' }} /> | |||
| <Heading | |||
| url='/datasets/tags/binding' | |||
| @@ -2746,7 +2746,7 @@ Okay, I will translate the Chinese text in your document while keeping all forma | |||
| </Col> | |||
| </Row> | |||
| <hr className='ml-0 mr-0' /> | |||
| <hr style={{ marginLeft: 0, marginRight: 0, width: '100%', maxWidth: '100%' }} /> | |||
| <Heading | |||
| url='/datasets/tags/unbinding' | |||
| @@ -2789,7 +2789,7 @@ Okay, I will translate the Chinese text in your document while keeping all forma | |||
| </Row> | |||
| <hr className='ml-0 mr-0' /> | |||
| <hr style={{ marginLeft: 0, marginRight: 0, width: '100%', maxWidth: '100%' }} /> | |||
| <Heading | |||
| url='/datasets/<uuid:dataset_id>/tags' | |||
| @@ -2837,7 +2837,7 @@ Okay, I will translate the Chinese text in your document while keeping all forma | |||
| </Row> | |||
| <hr className='ml-0 mr-0' /> | |||
| <hr style={{ marginLeft: 0, marginRight: 0, width: '100%', maxWidth: '100%' }} /> | |||
| <Row> | |||
| @@ -25,7 +25,7 @@ import { Row, Col, Properties, Property, Heading, SubProperty, PropertyInstructi | |||
| </CodeGroup> | |||
| </div> | |||
| <hr className='ml-0 mr-0' /> | |||
| <hr style={{ marginLeft: 0, marginRight: 0, width: '100%', maxWidth: '100%' }} /> | |||
| <Heading | |||
| url='/datasets/{dataset_id}/document/create-by-text' | |||
| @@ -163,7 +163,7 @@ import { Row, Col, Properties, Property, Heading, SubProperty, PropertyInstructi | |||
| </Col> | |||
| </Row> | |||
| <hr className='ml-0 mr-0' /> | |||
| <hr style={{ marginLeft: 0, marginRight: 0, width: '100%', maxWidth: '100%' }} /> | |||
| <Heading | |||
| url='/datasets/{dataset_id}/document/create-by-file' | |||
| @@ -294,7 +294,7 @@ import { Row, Col, Properties, Property, Heading, SubProperty, PropertyInstructi | |||
| </Col> | |||
| </Row> | |||
| <hr className='ml-0 mr-0' /> | |||
| <hr style={{ marginLeft: 0, marginRight: 0, width: '100%', maxWidth: '100%' }} /> | |||
| <Heading | |||
| url='/datasets' | |||
| @@ -399,7 +399,7 @@ import { Row, Col, Properties, Property, Heading, SubProperty, PropertyInstructi | |||
| </Col> | |||
| </Row> | |||
| <hr className='ml-0 mr-0' /> | |||
| <hr style={{ marginLeft: 0, marginRight: 0, width: '100%', maxWidth: '100%' }} /> | |||
| <Heading | |||
| url='/datasets' | |||
| @@ -471,7 +471,7 @@ import { Row, Col, Properties, Property, Heading, SubProperty, PropertyInstructi | |||
| </Col> | |||
| </Row> | |||
| <hr className='ml-0 mr-0' /> | |||
| <hr style={{ marginLeft: 0, marginRight: 0, width: '100%', maxWidth: '100%' }} /> | |||
| <Heading | |||
| url='/datasets/{dataset_id}' | |||
| @@ -508,7 +508,7 @@ import { Row, Col, Properties, Property, Heading, SubProperty, PropertyInstructi | |||
| </Col> | |||
| </Row> | |||
| <hr className='ml-0 mr-0' /> | |||
| <hr style={{ marginLeft: 0, marginRight: 0, width: '100%', maxWidth: '100%' }} /> | |||
| <Heading | |||
| url='/datasets/{dataset_id}/documents/{document_id}/update-by-text' | |||
| @@ -610,7 +610,7 @@ import { Row, Col, Properties, Property, Heading, SubProperty, PropertyInstructi | |||
| </Col> | |||
| </Row> | |||
| <hr className='ml-0 mr-0' /> | |||
| <hr style={{ marginLeft: 0, marginRight: 0, width: '100%', maxWidth: '100%' }} /> | |||
| <Heading | |||
| url='/datasets/{dataset_id}/documents/{document_id}/update-by-file' | |||
| @@ -709,7 +709,7 @@ import { Row, Col, Properties, Property, Heading, SubProperty, PropertyInstructi | |||
| </Col> | |||
| </Row> | |||
| <hr className='ml-0 mr-0' /> | |||
| <hr style={{ marginLeft: 0, marginRight: 0, width: '100%', maxWidth: '100%' }} /> | |||
| <Heading | |||
| url='/datasets/{dataset_id}/documents/{batch}/indexing-status' | |||
| @@ -764,7 +764,7 @@ import { Row, Col, Properties, Property, Heading, SubProperty, PropertyInstructi | |||
| </Col> | |||
| </Row> | |||
| <hr className='ml-0 mr-0' /> | |||
| <hr style={{ marginLeft: 0, marginRight: 0, width: '100%', maxWidth: '100%' }} /> | |||
| <Heading | |||
| url='/datasets/{dataset_id}/documents/{document_id}' | |||
| @@ -804,7 +804,7 @@ import { Row, Col, Properties, Property, Heading, SubProperty, PropertyInstructi | |||
| </Col> | |||
| </Row> | |||
| <hr className='ml-0 mr-0' /> | |||
| <hr style={{ marginLeft: 0, marginRight: 0, width: '100%', maxWidth: '100%' }} /> | |||
| <Heading | |||
| url='/datasets/{dataset_id}/documents' | |||
| @@ -879,7 +879,7 @@ import { Row, Col, Properties, Property, Heading, SubProperty, PropertyInstructi | |||
| </Col> | |||
| </Row> | |||
| <hr className='ml-0 mr-0' /> | |||
| <hr style={{ marginLeft: 0, marginRight: 0, width: '100%', maxWidth: '100%' }} /> | |||
| <Heading | |||
| url='/datasets/{dataset_id}/documents/{document_id}' | |||
| @@ -1002,7 +1002,7 @@ import { Row, Col, Properties, Property, Heading, SubProperty, PropertyInstructi | |||
| </Col> | |||
| </Row> | |||
| ___ | |||
| <hr className='ml-0 mr-0' /> | |||
| <hr style={{ marginLeft: 0, marginRight: 0, width: '100%', maxWidth: '100%' }} /> | |||
| <Heading | |||
| @@ -1060,7 +1060,7 @@ ___ | |||
| </Col> | |||
| </Row> | |||
| <hr className='ml-0 mr-0' /> | |||
| <hr style={{ marginLeft: 0, marginRight: 0, width: '100%', maxWidth: '100%' }} /> | |||
| <Heading | |||
| url='/datasets/{dataset_id}/documents/{document_id}/segments' | |||
| @@ -1146,7 +1146,7 @@ ___ | |||
| </Col> | |||
| </Row> | |||
| <hr className='ml-0 mr-0' /> | |||
| <hr style={{ marginLeft: 0, marginRight: 0, width: '100%', maxWidth: '100%' }} /> | |||
| <Heading | |||
| url='/datasets/{dataset_id}/documents/{document_id}/segments' | |||
| @@ -1234,7 +1234,7 @@ ___ | |||
| </Col> | |||
| </Row> | |||
| <hr className='ml-0 mr-0' /> | |||
| <hr style={{ marginLeft: 0, marginRight: 0, width: '100%', maxWidth: '100%' }} /> | |||
| <Heading | |||
| url='/datasets/{dataset_id}/documents/{document_id}/segments/{segment_id}' | |||
| @@ -1304,7 +1304,7 @@ ___ | |||
| </Col> | |||
| </Row> | |||
| <hr className='ml-0 mr-0' /> | |||
| <hr style={{ marginLeft: 0, marginRight: 0, width: '100%', maxWidth: '100%' }} /> | |||
| <Heading | |||
| method='DELETE' | |||
| @@ -1347,7 +1347,7 @@ ___ | |||
| </Col> | |||
| </Row> | |||
| <hr className='ml-0 mr-0' /> | |||
| <hr style={{ marginLeft: 0, marginRight: 0, width: '100%', maxWidth: '100%' }} /> | |||
| <Heading | |||
| method='POST' | |||
| @@ -1435,7 +1435,7 @@ ___ | |||
| </Col> | |||
| </Row> | |||
| <hr className='ml-0 mr-0' /> | |||
| <hr style={{ marginLeft: 0, marginRight: 0, width: '100%', maxWidth: '100%' }} /> | |||
| <Heading | |||
| url='/datasets/{dataset_id}/documents/{document_id}/segments/{segment_id}/child_chunks' | |||
| @@ -1506,7 +1506,7 @@ ___ | |||
| </Col> | |||
| </Row> | |||
| <hr className='ml-0 mr-0' /> | |||
| <hr style={{ marginLeft: 0, marginRight: 0, width: '100%', maxWidth: '100%' }} /> | |||
| <Heading | |||
| url='/datasets/{dataset_id}/documents/{document_id}/segments/{segment_id}/child_chunks' | |||
| @@ -1583,7 +1583,7 @@ ___ | |||
| </Col> | |||
| </Row> | |||
| <hr className='ml-0 mr-0' /> | |||
| <hr style={{ marginLeft: 0, marginRight: 0, width: '100%', maxWidth: '100%' }} /> | |||
| <Heading | |||
| url='/datasets/{dataset_id}/documents/{document_id}/segments/{segment_id}/child_chunks/{child_chunk_id}' | |||
| @@ -1629,7 +1629,7 @@ ___ | |||
| </Col> | |||
| </Row> | |||
| <hr className='ml-0 mr-0' /> | |||
| <hr style={{ marginLeft: 0, marginRight: 0, width: '100%', maxWidth: '100%' }} /> | |||
| <Heading | |||
| url='/datasets/{dataset_id}/documents/{document_id}/segments/{segment_id}/child_chunks/{child_chunk_id}' | |||
| @@ -1703,7 +1703,7 @@ ___ | |||
| </Col> | |||
| </Row> | |||
| <hr className='ml-0 mr-0' /> | |||
| <hr style={{ marginLeft: 0, marginRight: 0, width: '100%', maxWidth: '100%' }} /> | |||
| <Heading | |||
| url='/datasets/{dataset_id}/documents/{document_id}/upload-file' | |||
| @@ -1754,7 +1754,7 @@ ___ | |||
| </Col> | |||
| </Row> | |||
| <hr className='ml-0 mr-0' /> | |||
| <hr style={{ marginLeft: 0, marginRight: 0, width: '100%', maxWidth: '100%' }} /> | |||
| <Heading | |||
| url='/datasets/{dataset_id}/retrieve' | |||
| @@ -1933,7 +1933,7 @@ ___ | |||
| </Col> | |||
| </Row> | |||
| <hr className='ml-0 mr-0' /> | |||
| <hr style={{ marginLeft: 0, marginRight: 0, width: '100%', maxWidth: '100%' }} /> | |||
| <Heading | |||
| url='/datasets/{dataset_id}/metadata' | |||
| @@ -1980,7 +1980,7 @@ ___ | |||
| </Col> | |||
| </Row> | |||
| <hr className='ml-0 mr-0' /> | |||
| <hr style={{ marginLeft: 0, marginRight: 0, width: '100%', maxWidth: '100%' }} /> | |||
| <Heading | |||
| url='/datasets/{dataset_id}/metadata/{metadata_id}' | |||
| @@ -2029,7 +2029,7 @@ ___ | |||
| </Col> | |||
| </Row> | |||
| <hr className='ml-0 mr-0' /> | |||
| <hr style={{ marginLeft: 0, marginRight: 0, width: '100%', maxWidth: '100%' }} /> | |||
| <Heading | |||
| url='/datasets/{dataset_id}/metadata/{metadata_id}' | |||
| @@ -2062,7 +2062,7 @@ ___ | |||
| </Col> | |||
| </Row> | |||
| <hr className='ml-0 mr-0' /> | |||
| <hr style={{ marginLeft: 0, marginRight: 0, width: '100%', maxWidth: '100%' }} /> | |||
| <Heading | |||
| url='/datasets/{dataset_id}/metadata/built-in/{action}' | |||
| @@ -2095,7 +2095,7 @@ ___ | |||
| </Col> | |||
| </Row> | |||
| <hr className='ml-0 mr-0' /> | |||
| <hr style={{ marginLeft: 0, marginRight: 0, width: '100%', maxWidth: '100%' }} /> | |||
| <Heading | |||
| url='/datasets/{dataset_id}/documents/metadata' | |||
| @@ -2136,7 +2136,7 @@ ___ | |||
| </Col> | |||
| </Row> | |||
| <hr className='ml-0 mr-0' /> | |||
| <hr style={{ marginLeft: 0, marginRight: 0, width: '100%', maxWidth: '100%' }} /> | |||
| <Heading | |||
| url='/datasets/{dataset_id}/metadata' | |||
| @@ -2182,7 +2182,7 @@ ___ | |||
| </Col> | |||
| </Row> | |||
| <hr className='ml-0 mr-0' /> | |||
| <hr style={{ marginLeft: 0, marginRight: 0, width: '100%', maxWidth: '100%' }} /> | |||
| <Heading | |||
| url='/datasets/tags' | |||
| method='POST' | |||
| @@ -2226,7 +2226,7 @@ ___ | |||
| </Row> | |||
| <hr className='ml-0 mr-0' /> | |||
| <hr style={{ marginLeft: 0, marginRight: 0, width: '100%', maxWidth: '100%' }} /> | |||
| <Heading | |||
| url='/datasets/tags' | |||
| @@ -2267,7 +2267,7 @@ ___ | |||
| </Col> | |||
| </Row> | |||
| <hr className='ml-0 mr-0' /> | |||
| <hr style={{ marginLeft: 0, marginRight: 0, width: '100%', maxWidth: '100%' }} /> | |||
| <Heading | |||
| url='/datasets/tags' | |||
| @@ -2314,7 +2314,7 @@ ___ | |||
| </Col> | |||
| </Row> | |||
| <hr className='ml-0 mr-0' /> | |||
| <hr style={{ marginLeft: 0, marginRight: 0, width: '100%', maxWidth: '100%' }} /> | |||
| <Heading | |||
| @@ -2356,7 +2356,7 @@ ___ | |||
| </Col> | |||
| </Row> | |||
| <hr className='ml-0 mr-0' /> | |||
| <hr style={{ marginLeft: 0, marginRight: 0, width: '100%', maxWidth: '100%' }} /> | |||
| <Heading | |||
| url='/datasets/tags/binding' | |||
| @@ -2398,7 +2398,7 @@ ___ | |||
| </Col> | |||
| </Row> | |||
| <hr className='ml-0 mr-0' /> | |||
| <hr style={{ marginLeft: 0, marginRight: 0, width: '100%', maxWidth: '100%' }} /> | |||
| <Heading | |||
| url='/datasets/tags/unbinding' | |||
| @@ -2441,7 +2441,7 @@ ___ | |||
| </Row> | |||
| <hr className='ml-0 mr-0' /> | |||
| <hr style={{ marginLeft: 0, marginRight: 0, width: '100%', maxWidth: '100%' }} /> | |||
| <Heading | |||
| url='/datasets/<uuid:dataset_id>/tags' | |||
| @@ -2489,7 +2489,7 @@ ___ | |||
| </Row> | |||
| <hr className='ml-0 mr-0' /> | |||
| <hr style={{ marginLeft: 0, marginRight: 0, width: '100%', maxWidth: '100%' }} /> | |||
| <Row> | |||
| <Col> | |||
| @@ -25,7 +25,7 @@ import { Row, Col, Properties, Property, Heading, SubProperty, PropertyInstructi | |||
| </CodeGroup> | |||
| </div> | |||
| <hr className='ml-0 mr-0' /> | |||
| <hr style={{ marginLeft: 0, marginRight: 0, width: '100%', maxWidth: '100%' }} /> | |||
| <Heading | |||
| url='/datasets/{dataset_id}/document/create-by-text' | |||
| @@ -167,7 +167,7 @@ import { Row, Col, Properties, Property, Heading, SubProperty, PropertyInstructi | |||
| </Col> | |||
| </Row> | |||
| <hr className='ml-0 mr-0' /> | |||
| <hr style={{ marginLeft: 0, marginRight: 0, width: '100%', maxWidth: '100%' }} /> | |||
| <Heading | |||
| url='/datasets/{dataset_id}/document/create-by-file' | |||
| @@ -298,7 +298,7 @@ import { Row, Col, Properties, Property, Heading, SubProperty, PropertyInstructi | |||
| </Col> | |||
| </Row> | |||
| <hr className='ml-0 mr-0' /> | |||
| <hr style={{ marginLeft: 0, marginRight: 0, width: '100%', maxWidth: '100%' }} /> | |||
| <Heading | |||
| url='/datasets' | |||
| @@ -403,7 +403,7 @@ import { Row, Col, Properties, Property, Heading, SubProperty, PropertyInstructi | |||
| </Col> | |||
| </Row> | |||
| <hr className='ml-0 mr-0' /> | |||
| <hr style={{ marginLeft: 0, marginRight: 0, width: '100%', maxWidth: '100%' }} /> | |||
| <Heading | |||
| url='/datasets' | |||
| @@ -475,7 +475,7 @@ import { Row, Col, Properties, Property, Heading, SubProperty, PropertyInstructi | |||
| </Col> | |||
| </Row> | |||
| <hr className='ml-0 mr-0' /> | |||
| <hr style={{ marginLeft: 0, marginRight: 0, width: '100%', maxWidth: '100%' }} /> | |||
| <Heading | |||
| url='/datasets/{dataset_id}' | |||
| @@ -556,7 +556,7 @@ import { Row, Col, Properties, Property, Heading, SubProperty, PropertyInstructi | |||
| </Col> | |||
| </Row> | |||
| <hr className='ml-0 mr-0' /> | |||
| <hr style={{ marginLeft: 0, marginRight: 0, width: '100%', maxWidth: '100%' }} /> | |||
| <Heading | |||
| url='/datasets/{dataset_id}' | |||
| @@ -721,7 +721,7 @@ import { Row, Col, Properties, Property, Heading, SubProperty, PropertyInstructi | |||
| </Col> | |||
| </Row> | |||
| <hr className='ml-0 mr-0' /> | |||
| <hr style={{ marginLeft: 0, marginRight: 0, width: '100%', maxWidth: '100%' }} /> | |||
| <Heading | |||
| url='/datasets/{dataset_id}' | |||
| @@ -758,7 +758,7 @@ import { Row, Col, Properties, Property, Heading, SubProperty, PropertyInstructi | |||
| </Col> | |||
| </Row> | |||
| <hr className='ml-0 mr-0' /> | |||
| <hr style={{ marginLeft: 0, marginRight: 0, width: '100%', maxWidth: '100%' }} /> | |||
| <Heading | |||
| url='/datasets/{dataset_id}/documents/{document_id}/update-by-text' | |||
| @@ -860,7 +860,7 @@ import { Row, Col, Properties, Property, Heading, SubProperty, PropertyInstructi | |||
| </Col> | |||
| </Row> | |||
| <hr className='ml-0 mr-0' /> | |||
| <hr style={{ marginLeft: 0, marginRight: 0, width: '100%', maxWidth: '100%' }} /> | |||
| <Heading | |||
| url='/datasets/{dataset_id}/documents/{document_id}/update-by-file' | |||
| @@ -959,7 +959,7 @@ import { Row, Col, Properties, Property, Heading, SubProperty, PropertyInstructi | |||
| </Col> | |||
| </Row> | |||
| <hr className='ml-0 mr-0' /> | |||
| <hr style={{ marginLeft: 0, marginRight: 0, width: '100%', maxWidth: '100%' }} /> | |||
| <Heading | |||
| url='/datasets/{dataset_id}/documents/{batch}/indexing-status' | |||
| @@ -1014,7 +1014,7 @@ import { Row, Col, Properties, Property, Heading, SubProperty, PropertyInstructi | |||
| </Col> | |||
| </Row> | |||
| <hr className='ml-0 mr-0' /> | |||
| <hr style={{ marginLeft: 0, marginRight: 0, width: '100%', maxWidth: '100%' }} /> | |||
| <Heading | |||
| url='/datasets/{dataset_id}/documents/{document_id}' | |||
| @@ -1054,7 +1054,7 @@ import { Row, Col, Properties, Property, Heading, SubProperty, PropertyInstructi | |||
| </Col> | |||
| </Row> | |||
| <hr className='ml-0 mr-0' /> | |||
| <hr style={{ marginLeft: 0, marginRight: 0, width: '100%', maxWidth: '100%' }} /> | |||
| <Heading | |||
| url='/datasets/{dataset_id}/documents' | |||
| @@ -1129,7 +1129,7 @@ import { Row, Col, Properties, Property, Heading, SubProperty, PropertyInstructi | |||
| </Col> | |||
| </Row> | |||
| <hr className='ml-0 mr-0' /> | |||
| <hr style={{ marginLeft: 0, marginRight: 0, width: '100%', maxWidth: '100%' }} /> | |||
| <Heading | |||
| url='/datasets/{dataset_id}/documents/{document_id}' | |||
| @@ -1252,7 +1252,7 @@ import { Row, Col, Properties, Property, Heading, SubProperty, PropertyInstructi | |||
| </Col> | |||
| </Row> | |||
| ___ | |||
| <hr className='ml-0 mr-0' /> | |||
| <hr style={{ marginLeft: 0, marginRight: 0, width: '100%', maxWidth: '100%' }} /> | |||
| <Heading | |||
| @@ -1310,7 +1310,7 @@ ___ | |||
| </Col> | |||
| </Row> | |||
| <hr className='ml-0 mr-0' /> | |||
| <hr style={{ marginLeft: 0, marginRight: 0, width: '100%', maxWidth: '100%' }} /> | |||
| <Heading | |||
| url='/datasets/{dataset_id}/documents/{document_id}/segments' | |||
| @@ -1396,7 +1396,7 @@ ___ | |||
| </Col> | |||
| </Row> | |||
| <hr className='ml-0 mr-0' /> | |||
| <hr style={{ marginLeft: 0, marginRight: 0, width: '100%', maxWidth: '100%' }} /> | |||
| <Heading | |||
| url='/datasets/{dataset_id}/documents/{document_id}/segments' | |||
| @@ -1484,7 +1484,7 @@ ___ | |||
| </Col> | |||
| </Row> | |||
| <hr className='ml-0 mr-0' /> | |||
| <hr style={{ marginLeft: 0, marginRight: 0, width: '100%', maxWidth: '100%' }} /> | |||
| <Heading | |||
| url='/datasets/{dataset_id}/documents/{document_id}/segments/{segment_id}' | |||
| @@ -1528,7 +1528,7 @@ ___ | |||
| </Col> | |||
| </Row> | |||
| <hr className='ml-0 mr-0' /> | |||
| <hr style={{ marginLeft: 0, marginRight: 0, width: '100%', maxWidth: '100%' }} /> | |||
| <Heading | |||
| url='/datasets/{dataset_id}/documents/{document_id}/segments/{segment_id}' | |||
| @@ -1598,7 +1598,7 @@ ___ | |||
| </Col> | |||
| </Row> | |||
| <hr className='ml-0 mr-0' /> | |||
| <hr style={{ marginLeft: 0, marginRight: 0, width: '100%', maxWidth: '100%' }} /> | |||
| <Heading | |||
| method='POST' | |||
| @@ -1687,7 +1687,7 @@ ___ | |||
| </Col> | |||
| </Row> | |||
| <hr className='ml-0 mr-0' /> | |||
| <hr style={{ marginLeft: 0, marginRight: 0, width: '100%', maxWidth: '100%' }} /> | |||
| <Heading | |||
| url='/datasets/{dataset_id}/documents/{document_id}/segments/{segment_id}/child_chunks' | |||
| @@ -1758,7 +1758,7 @@ ___ | |||
| </Col> | |||
| </Row> | |||
| <hr className='ml-0 mr-0' /> | |||
| <hr style={{ marginLeft: 0, marginRight: 0, width: '100%', maxWidth: '100%' }} /> | |||
| <Heading | |||
| url='/datasets/{dataset_id}/documents/{document_id}/segments/{segment_id}/child_chunks' | |||
| @@ -1835,7 +1835,7 @@ ___ | |||
| </Col> | |||
| </Row> | |||
| <hr className='ml-0 mr-0' /> | |||
| <hr style={{ marginLeft: 0, marginRight: 0, width: '100%', maxWidth: '100%' }} /> | |||
| <Heading | |||
| url='/datasets/{dataset_id}/documents/{document_id}/segments/{segment_id}/child_chunks/{child_chunk_id}' | |||
| @@ -1881,7 +1881,7 @@ ___ | |||
| </Col> | |||
| </Row> | |||
| <hr className='ml-0 mr-0' /> | |||
| <hr style={{ marginLeft: 0, marginRight: 0, width: '100%', maxWidth: '100%' }} /> | |||
| <Row> | |||
| <Col> | |||
| @@ -1915,7 +1915,7 @@ ___ | |||
| </Col> | |||
| </Row> | |||
| <hr className='ml-0 mr-0' /> | |||
| <hr style={{ marginLeft: 0, marginRight: 0, width: '100%', maxWidth: '100%' }} /> | |||
| <Heading | |||
| url='/datasets/{dataset_id}/documents/{document_id}/segments/{segment_id}/child_chunks/{child_chunk_id}' | |||
| @@ -1989,7 +1989,7 @@ ___ | |||
| </Col> | |||
| </Row> | |||
| <hr className='ml-0 mr-0' /> | |||
| <hr style={{ marginLeft: 0, marginRight: 0, width: '100%', maxWidth: '100%' }} /> | |||
| <Heading | |||
| url='/datasets/{dataset_id}/documents/{document_id}/upload-file' | |||
| @@ -2040,7 +2040,7 @@ ___ | |||
| </Col> | |||
| </Row> | |||
| <hr className='ml-0 mr-0' /> | |||
| <hr style={{ marginLeft: 0, marginRight: 0, width: '100%', maxWidth: '100%' }} /> | |||
| <Heading | |||
| url='/datasets/{dataset_id}/retrieve' | |||
| @@ -2219,7 +2219,7 @@ ___ | |||
| </Col> | |||
| </Row> | |||
| <hr className='ml-0 mr-0' /> | |||
| <hr style={{ marginLeft: 0, marginRight: 0, width: '100%', maxWidth: '100%' }} /> | |||
| <Heading | |||
| url='/datasets/{dataset_id}/metadata' | |||
| @@ -2266,7 +2266,7 @@ ___ | |||
| </Col> | |||
| </Row> | |||
| <hr className='ml-0 mr-0' /> | |||
| <hr style={{ marginLeft: 0, marginRight: 0, width: '100%', maxWidth: '100%' }} /> | |||
| <Heading | |||
| url='/datasets/{dataset_id}/metadata/{metadata_id}' | |||
| @@ -2315,7 +2315,7 @@ ___ | |||
| </Col> | |||
| </Row> | |||
| <hr className='ml-0 mr-0' /> | |||
| <hr style={{ marginLeft: 0, marginRight: 0, width: '100%', maxWidth: '100%' }} /> | |||
| <Heading | |||
| url='/datasets/{dataset_id}/metadata/{metadata_id}' | |||
| @@ -2348,7 +2348,7 @@ ___ | |||
| </Col> | |||
| </Row> | |||
| <hr className='ml-0 mr-0' /> | |||
| <hr style={{ marginLeft: 0, marginRight: 0, width: '100%', maxWidth: '100%' }} /> | |||
| <Heading | |||
| url='/datasets/{dataset_id}/metadata/built-in/{action}' | |||
| @@ -2381,7 +2381,7 @@ ___ | |||
| </Col> | |||
| </Row> | |||
| <hr className='ml-0 mr-0' /> | |||
| <hr style={{ marginLeft: 0, marginRight: 0, width: '100%', maxWidth: '100%' }} /> | |||
| <Heading | |||
| url='/datasets/{dataset_id}/documents/metadata' | |||
| @@ -2422,7 +2422,7 @@ ___ | |||
| </Col> | |||
| </Row> | |||
| <hr className='ml-0 mr-0' /> | |||
| <hr style={{ marginLeft: 0, marginRight: 0, width: '100%', maxWidth: '100%' }} /> | |||
| <Heading | |||
| url='/datasets/{dataset_id}/metadata' | |||
| @@ -2468,7 +2468,7 @@ ___ | |||
| </Col> | |||
| </Row> | |||
| <hr className='ml-0 mr-0' /> | |||
| <hr style={{ marginLeft: 0, marginRight: 0, width: '100%', maxWidth: '100%' }} /> | |||
| <Heading | |||
| url='/workspaces/current/models/model-types/text-embedding' | |||
| @@ -2572,7 +2572,7 @@ ___ | |||
| </Col> | |||
| </Row> | |||
| <hr className='ml-0 mr-0' /> | |||
| <hr style={{ marginLeft: 0, marginRight: 0, width: '100%', maxWidth: '100%' }} /> | |||
| <Heading | |||
| url='/datasets/tags' | |||
| @@ -2617,7 +2617,7 @@ ___ | |||
| </Row> | |||
| <hr className='ml-0 mr-0' /> | |||
| <hr style={{ marginLeft: 0, marginRight: 0, width: '100%', maxWidth: '100%' }} /> | |||
| <Heading | |||
| url='/datasets/tags' | |||
| @@ -2658,7 +2658,7 @@ ___ | |||
| </Col> | |||
| </Row> | |||
| <hr className='ml-0 mr-0' /> | |||
| <hr style={{ marginLeft: 0, marginRight: 0, width: '100%', maxWidth: '100%' }} /> | |||
| <Heading | |||
| url='/datasets/tags' | |||
| @@ -2705,7 +2705,7 @@ ___ | |||
| </Col> | |||
| </Row> | |||
| <hr className='ml-0 mr-0' /> | |||
| <hr style={{ marginLeft: 0, marginRight: 0, width: '100%', maxWidth: '100%' }} /> | |||
| <Heading | |||
| @@ -2747,7 +2747,7 @@ ___ | |||
| </Col> | |||
| </Row> | |||
| <hr className='ml-0 mr-0' /> | |||
| <hr style={{ marginLeft: 0, marginRight: 0, width: '100%', maxWidth: '100%' }} /> | |||
| <Heading | |||
| url='/datasets/tags/binding' | |||
| @@ -2789,7 +2789,7 @@ ___ | |||
| </Col> | |||
| </Row> | |||
| <hr className='ml-0 mr-0' /> | |||
| <hr style={{ marginLeft: 0, marginRight: 0, width: '100%', maxWidth: '100%' }} /> | |||
| <Heading | |||
| url='/datasets/tags/unbinding' | |||
| @@ -2832,7 +2832,7 @@ ___ | |||
| </Row> | |||
| <hr className='ml-0 mr-0' /> | |||
| <hr style={{ marginLeft: 0, marginRight: 0, width: '100%', maxWidth: '100%' }} /> | |||
| <Heading | |||
| url='/datasets/<uuid:dataset_id>/tags' | |||
| @@ -2880,7 +2880,7 @@ ___ | |||
| </Row> | |||
| <hr className='ml-0 mr-0' /> | |||
| <hr style={{ marginLeft: 0, marginRight: 0, width: '100%', maxWidth: '100%' }} /> | |||
| <Row> | |||
| <Col> | |||
| @@ -87,7 +87,7 @@ const Doc = ({ appDetail }: IDocProps) => { | |||
| <div className={`fixed right-8 top-32 z-10 transition-all ${isTocExpanded ? 'w-64' : 'w-10'}`}> | |||
| {isTocExpanded | |||
| ? ( | |||
| <nav className="toc max-h-[calc(100vh-150px)] w-full overflow-y-auto rounded-lg bg-components-panel-bg p-4 shadow-md"> | |||
| <nav className="toc max-h-[calc(100vh-150px)] w-full overflow-y-auto rounded-lg border border-components-panel-border bg-components-panel-bg p-4 shadow-md"> | |||
| <div className="mb-4 flex items-center justify-between"> | |||
| <h3 className="text-lg font-semibold text-text-primary">{t('appApi.develop.toc')}</h3> | |||
| <button | |||
| @@ -115,7 +115,7 @@ const Doc = ({ appDetail }: IDocProps) => { | |||
| : ( | |||
| <button | |||
| onClick={() => setIsTocExpanded(true)} | |||
| className="flex h-10 w-10 items-center justify-center rounded-full bg-components-button-secondary-bg shadow-md transition-colors duration-200 hover:bg-components-button-secondary-bg-hover" | |||
| className="flex h-10 w-10 items-center justify-center rounded-full border border-components-panel-border bg-components-button-secondary-bg shadow-md transition-colors duration-200 hover:bg-components-button-secondary-bg-hover" | |||
| > | |||
| <RiListUnordered className="h-6 w-6 text-components-button-secondary-text" /> | |||
| </button> | |||
| @@ -371,7 +371,7 @@ const TextGeneration: FC<IMainProps> = ({ | |||
| setAppId(appId) | |||
| setSiteInfo(siteInfo as SiteInfo) | |||
| setCustomConfig(custom_config) | |||
| changeLanguage(siteInfo.default_language) | |||
| await changeLanguage(siteInfo.default_language) | |||
| const { user_input_form, more_like_this, file_upload, text_to_speech }: any = appParams | |||
| setVisionConfig({ | |||
| @@ -134,7 +134,8 @@ const CustomEdge = ({ | |||
| style={{ | |||
| stroke, | |||
| strokeWidth: 2, | |||
| opacity: data._waitingRun ? 0.7 : 1, | |||
| opacity: data._dimmed ? 0.3 : (data._waitingRun ? 0.7 : 1), | |||
| strokeDasharray: data._isTemp ? '8 8' : undefined, | |||
| }} | |||
| /> | |||
| <EdgeLabelRenderer> | |||
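| // A minimal sketch of the visual-state mapping encoded above, pulled out as a pure | |||
| // helper for readability; the EdgeVisualData shape and getEdgeVisualStyle name are | |||
| // illustrative assumptions, not part of the actual CustomEdge component. | |||
| type EdgeVisualData = { _dimmed?: boolean; _waitingRun?: boolean; _isTemp?: boolean } | |||
| const getEdgeVisualStyle = (stroke: string, data: EdgeVisualData) => ({ | |||
| stroke, | |||
| strokeWidth: 2, | |||
| // dimmed edges fade the most, waiting-to-run edges fade slightly, others stay opaque | |||
| opacity: data._dimmed ? 0.3 : (data._waitingRun ? 0.7 : 1), | |||
| // temporary dependency edges are drawn dashed | |||
| strokeDasharray: data._isTemp ? '8 8' : undefined, | |||
| }) | |||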
| @@ -1,5 +1,5 @@ | |||
| import type { MouseEvent } from 'react' | |||
| import { useCallback, useRef } from 'react' | |||
| import { useCallback, useRef, useState } from 'react' | |||
| import { useTranslation } from 'react-i18next' | |||
| import produce from 'immer' | |||
| import type { | |||
| @@ -62,6 +62,7 @@ import { WorkflowHistoryEvent, useWorkflowHistory } from './use-workflow-history | |||
| import { useNodesMetaData } from './use-nodes-meta-data' | |||
| import type { RAGPipelineVariables } from '@/models/pipeline' | |||
| import useInspectVarsCrud from './use-inspect-vars-crud' | |||
| import { getNodeUsedVars } from '../nodes/_base/components/variable/utils' | |||
| export const useNodesInteractions = () => { | |||
| const { t } = useTranslation() | |||
| @@ -1564,6 +1565,135 @@ export const useNodesInteractions = () => { | |||
| setNodes(nodes) | |||
| }, [redo, store, workflowHistoryStore, getNodesReadOnly, getWorkflowReadOnly]) | |||
| const [isDimming, setIsDimming] = useState(false) | |||
| /** Dim every node except the selected node and its dependencies/dependents */ | |||
| const dimOtherNodes = useCallback(() => { | |||
| if (isDimming) | |||
| return | |||
| const { getNodes, setNodes, edges, setEdges } = store.getState() | |||
| const nodes = getNodes() | |||
| const selectedNode = nodes.find(n => n.data.selected) | |||
| if (!selectedNode) | |||
| return | |||
| setIsDimming(true) | |||
| const workflowNodes = nodes | |||
| const usedVars = getNodeUsedVars(selectedNode) | |||
| const dependencyNodes: Node[] = [] | |||
| usedVars.forEach((valueSelector) => { | |||
| const node = workflowNodes.find(node => node.id === valueSelector?.[0]) | |||
| if (node) { | |||
| if (!dependencyNodes.includes(node)) | |||
| dependencyNodes.push(node) | |||
| } | |||
| }) | |||
| const outgoers = getOutgoers(selectedNode as Node, nodes as Node[], edges) | |||
| for (let currIdx = 0; currIdx < outgoers.length; currIdx++) { | |||
| const node = outgoers[currIdx] | |||
| const outgoersForNode = getOutgoers(node, nodes as Node[], edges) | |||
| outgoersForNode.forEach((item) => { | |||
| const existed = outgoers.some(v => v.id === item.id) | |||
| if (!existed) | |||
| outgoers.push(item) | |||
| }) | |||
| } | |||
| const dependentNodes: Node[] = [] | |||
| outgoers.forEach((node) => { | |||
| const usedVars = getNodeUsedVars(node) | |||
| const used = usedVars.some(v => v?.[0] === selectedNode.id) | |||
| if (used) { | |||
| const existed = dependentNodes.some(v => v.id === node.id) | |||
| if (!existed) | |||
| dependentNodes.push(node) | |||
| } | |||
| }) | |||
| const dimNodes = [...dependencyNodes, ...dependentNodes, selectedNode] | |||
| const newNodes = produce(nodes, (draft) => { | |||
| draft.forEach((n) => { | |||
| const dimNode = dimNodes.find(v => v.id === n.id) | |||
| if (!dimNode) | |||
| n.data._dimmed = true | |||
| }) | |||
| }) | |||
| setNodes(newNodes) | |||
| const tempEdges: Edge[] = [] | |||
| dependencyNodes.forEach((n) => { | |||
| tempEdges.push({ | |||
| id: `tmp_${n.id}-source-${selectedNode.id}-target`, | |||
| type: CUSTOM_EDGE, | |||
| source: n.id, | |||
| sourceHandle: 'source_tmp', | |||
| target: selectedNode.id, | |||
| targetHandle: 'target_tmp', | |||
| animated: true, | |||
| data: { | |||
| sourceType: n.data.type, | |||
| targetType: selectedNode.data.type, | |||
| _isTemp: true, | |||
| _connectedNodeIsHovering: true, | |||
| }, | |||
| }) | |||
| }) | |||
| dependentNodes.forEach((n) => { | |||
| tempEdges.push({ | |||
| id: `tmp_${selectedNode.id}-source-${n.id}-target`, | |||
| type: CUSTOM_EDGE, | |||
| source: selectedNode.id, | |||
| sourceHandle: 'source_tmp', | |||
| target: n.id, | |||
| targetHandle: 'target_tmp', | |||
| animated: true, | |||
| data: { | |||
| sourceType: selectedNode.data.type, | |||
| targetType: n.data.type, | |||
| _isTemp: true, | |||
| _connectedNodeIsHovering: true, | |||
| }, | |||
| }) | |||
| }) | |||
| const newEdges = produce(edges, (draft) => { | |||
| draft.forEach((e) => { | |||
| e.data._dimmed = true | |||
| }) | |||
| draft.push(...tempEdges) | |||
| }) | |||
| setEdges(newEdges) | |||
| }, [isDimming, store]) | |||
| /** Restore all nodes to full opacity and remove the temporary dependency edges */ | |||
| const undimAllNodes = useCallback(() => { | |||
| const { getNodes, setNodes, edges, setEdges } = store.getState() | |||
| const nodes = getNodes() | |||
| setIsDimming(false) | |||
| const newNodes = produce(nodes, (draft) => { | |||
| draft.forEach((n) => { | |||
| n.data._dimmed = false | |||
| }) | |||
| }) | |||
| setNodes(newNodes) | |||
| const newEdges = produce(edges.filter(e => !e.data._isTemp), (draft) => { | |||
| draft.forEach((e) => { | |||
| e.data._dimmed = false | |||
| }) | |||
| }) | |||
| setEdges(newEdges) | |||
| }, [store]) | |||
| return { | |||
| handleNodeDragStart, | |||
| handleNodeDrag, | |||
| @@ -1588,5 +1718,7 @@ export const useNodesInteractions = () => { | |||
| handleNodeDisconnect, | |||
| handleHistoryBack, | |||
| handleHistoryForward, | |||
| dimOtherNodes, | |||
| undimAllNodes, | |||
| } | |||
| } | |||
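| // One possible consumer of the new dimOtherNodes/undimAllNodes helpers (a hypothetical | |||
| // component, not part of this change, assuming the hook is re-exported from './hooks'): | |||
| // dim unrelated nodes while hovering a preview control and restore them on leave. | |||
| import { useNodesInteractions } from './hooks' | |||
| const RelationsPreviewButton = () => { | |||
| const { dimOtherNodes, undimAllNodes } = useNodesInteractions() | |||
| return ( | |||
| <button onMouseEnter={() => dimOtherNodes()} onMouseLeave={() => undimAllNodes()}> | |||
| Highlight related nodes | |||
| </button> | |||
| ) | |||
| } | |||
| export default RelationsPreviewButton | |||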
| @@ -131,10 +131,34 @@ export const useSelectionInteractions = () => { | |||
| setEdges(newEdges) | |||
| }, [store]) | |||
| const handleSelectionContextMenu = useCallback((e: MouseEvent) => { | |||
| const target = e.target as HTMLElement | |||
| if (!target.classList.contains('react-flow__nodesselection-rect')) | |||
| return | |||
| e.preventDefault() | |||
| const container = document.querySelector('#workflow-container') | |||
| const { x, y } = container!.getBoundingClientRect() | |||
| workflowStore.setState({ | |||
| selectionMenu: { | |||
| top: e.clientY - y, | |||
| left: e.clientX - x, | |||
| }, | |||
| }) | |||
| }, [workflowStore]) | |||
| const handleSelectionContextmenuCancel = useCallback(() => { | |||
| workflowStore.setState({ | |||
| selectionMenu: undefined, | |||
| }) | |||
| }, [workflowStore]) | |||
| return { | |||
| handleSelectionStart, | |||
| handleSelectionChange, | |||
| handleSelectionDrag, | |||
| handleSelectionCancel, | |||
| handleSelectionContextMenu, | |||
| handleSelectionContextmenuCancel, | |||
| } | |||
| } | |||
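| // The selection context menu above stores pointer coordinates translated into the | |||
| // #workflow-container frame. The same translation as a stand-alone helper (the name | |||
| // toContainerCoords is illustrative only): | |||
| const toContainerCoords = (e: { clientX: number; clientY: number }, container: Element) => { | |||
| const { x, y } = container.getBoundingClientRect() | |||
| return { left: e.clientX - x, top: e.clientY - y } | |||
| } | |||
| // e.g. a right-click at (640, 410) over a container whose top-left sits at (200, 120) | |||
| // yields { left: 440, top: 290 }, which is what selectionMenu receives. | |||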
| @@ -25,6 +25,8 @@ export const useShortcuts = (): void => { | |||
| handleNodesDelete, | |||
| handleHistoryBack, | |||
| handleHistoryForward, | |||
| dimOtherNodes, | |||
| undimAllNodes, | |||
| } = useNodesInteractions() | |||
| const { handleStartWorkflowRun } = useWorkflowStartRun() | |||
| const { shortcutsEnabled: workflowHistoryShortcutsEnabled } = useWorkflowHistoryStore() | |||
| @@ -211,4 +213,35 @@ export const useShortcuts = (): void => { | |||
| exactMatch: true, | |||
| useCapture: true, | |||
| }) | |||
| // Shift key pressed: dim nodes unrelated to the current selection | |||
| useKeyPress( | |||
| 'shift', | |||
| (e) => { | |||
| if (shouldHandleShortcut(e)) | |||
| dimOtherNodes() | |||
| }, | |||
| { | |||
| exactMatch: true, | |||
| useCapture: true, | |||
| events: ['keydown'], | |||
| }, | |||
| ) | |||
| // Shift key released: restore node opacity | |||
| useKeyPress( | |||
| (e) => { | |||
| return e.key === 'Shift' | |||
| }, | |||
| (e) => { | |||
| if (shouldHandleShortcut(e)) | |||
| undimAllNodes() | |||
| }, | |||
| { | |||
| exactMatch: true, | |||
| useCapture: true, | |||
| events: ['keyup'], | |||
| }, | |||
| ) | |||
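| // Note on the asymmetric filters above: the keydown registration can use the 'shift' | |||
| // alias, while the keyup one checks e.key === 'Shift' directly, most likely because the | |||
| // alias is resolved from the event's modifier state, which is already cleared on the | |||
| // Shift keyup itself (an assumption about the key-press hook, not verified here). | |||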
| } | |||
| @@ -67,6 +67,7 @@ import HelpLine from './help-line' | |||
| import CandidateNode from './candidate-node' | |||
| import PanelContextmenu from './panel-contextmenu' | |||
| import NodeContextmenu from './node-contextmenu' | |||
| import SelectionContextmenu from './selection-contextmenu' | |||
| import SyncingDataModal from './syncing-data-modal' | |||
| import LimitTips from './limit-tips' | |||
| import { | |||
| @@ -266,6 +267,7 @@ export const Workflow: FC<WorkflowProps> = memo(({ | |||
| handleSelectionStart, | |||
| handleSelectionChange, | |||
| handleSelectionDrag, | |||
| handleSelectionContextMenu, | |||
| } = useSelectionInteractions() | |||
| const { | |||
| handlePaneContextMenu, | |||
| @@ -316,6 +318,7 @@ export const Workflow: FC<WorkflowProps> = memo(({ | |||
| <Operator handleRedo={handleHistoryForward} handleUndo={handleHistoryBack} /> | |||
| <PanelContextmenu /> | |||
| <NodeContextmenu /> | |||
| <SelectionContextmenu /> | |||
| <HelpLine /> | |||
| { | |||
| !!showConfirm && ( | |||
| @@ -352,6 +355,7 @@ export const Workflow: FC<WorkflowProps> = memo(({ | |||
| onSelectionChange={handleSelectionChange} | |||
| onSelectionDrag={handleSelectionDrag} | |||
| onPaneContextMenu={handlePaneContextMenu} | |||
| onSelectionContextMenu={handleSelectionContextMenu} | |||
| connectionLineComponent={CustomConnectionLine} | |||
| // TODO: For LOOP node, how to distinguish between ITERATION and LOOP here? Maybe both are the same? | |||
| connectionLineContainerStyle={{ zIndex: ITERATION_CHILDREN_Z_INDEX }} | |||
| @@ -7,6 +7,7 @@ import { useTranslation } from 'react-i18next' | |||
| export enum TabType { | |||
| settings = 'settings', | |||
| lastRun = 'lastRun', | |||
| relations = 'relations', | |||
| } | |||
| type Props = { | |||
| @@ -143,6 +143,7 @@ const BaseNode: FC<BaseNodeProps> = ({ | |||
| showSelectedBorder ? 'border-components-option-card-option-selected-border' : 'border-transparent', | |||
| !showSelectedBorder && data._inParallelHovering && 'border-workflow-block-border-highlight', | |||
| data._waitingRun && 'opacity-70', | |||
| data._dimmed && 'opacity-30', | |||
| )} | |||
| ref={nodeRef} | |||
| style={{ | |||
| @@ -1,5 +1,5 @@ | |||
| import type { AnswerNodeType } from './types' | |||
| export const checkNodeValid = (payload: AnswerNodeType) => { | |||
| export const checkNodeValid = (_payload: AnswerNodeType) => { | |||
| return true | |||
| } | |||
| @@ -1,7 +1,7 @@ | |||
| import type { AssignerNodeType } from './types' | |||
| import { AssignerNodeInputType, WriteMode } from './types' | |||
| export const checkNodeValid = (payload: AssignerNodeType) => { | |||
| export const checkNodeValid = (_payload: AssignerNodeType) => { | |||
| return true | |||
| } | |||
| @@ -1,5 +1,5 @@ | |||
| import type { EndNodeType } from './types' | |||
| export const checkNodeValid = (payload: EndNodeType) => { | |||
| export const checkNodeValid = (_payload: EndNodeType) => { | |||
| return true | |||
| } | |||
| @@ -202,7 +202,7 @@ const ConditionItem = ({ | |||
| onRemoveCondition?.(caseId, condition.id) | |||
| }, [caseId, condition, conditionId, isSubVariableKey, onRemoveCondition, onRemoveSubVariableCondition]) | |||
| const handleVarChange = useCallback((valueSelector: ValueSelector, varItem: Var) => { | |||
| const handleVarChange = useCallback((valueSelector: ValueSelector, _varItem: Var) => { | |||
| const resolvedVarType = getVarType({ | |||
| valueSelector, | |||
| availableNodes, | |||
| @@ -82,7 +82,7 @@ const Panel: FC<NodePanelProps<LLMNodeType>> = ({ | |||
| Toast.notify({ type: 'warning', message: `${t('common.modelProvider.parametersInvalidRemoved')}: ${keys.map(k => `${k} (${removedDetails[k]})`).join(', ')}` }) | |||
| handleCompletionParamsChange(filtered) | |||
| } | |||
| catch (e) { | |||
| catch { | |||
| Toast.notify({ type: 'error', message: t('common.error') }) | |||
| handleCompletionParamsChange({}) | |||
| } | |||
| @@ -5,7 +5,7 @@ import { Validator } from 'jsonschema' | |||
| import produce from 'immer' | |||
| import { z } from 'zod' | |||
| export const checkNodeValid = (payload: LLMNodeType) => { | |||
| export const checkNodeValid = (_payload: LLMNodeType) => { | |||
| return true | |||
| } | |||
| @@ -280,7 +280,7 @@ const validator = new Validator() | |||
| export const validateSchemaAgainstDraft7 = (schemaToValidate: any) => { | |||
| const schema = produce(schemaToValidate, (draft: any) => { | |||
| // Make sure the schema has the $schema property for draft-07 | |||
| // Make sure the schema has the $schema property for draft-07 | |||
| if (!draft.$schema) | |||
| draft.$schema = 'http://json-schema.org/draft-07/schema#' | |||
| }) | |||
| @@ -6,7 +6,6 @@ import produce from 'immer' | |||
| import { v4 as uuid4 } from 'uuid' | |||
| import { | |||
| useIsChatMode, | |||
| useIsNodeInLoop, | |||
| useNodesReadOnly, | |||
| useWorkflow, | |||
| } from '../../hooks' | |||
| @@ -20,10 +19,8 @@ import type { HandleAddCondition, HandleAddSubVariableCondition, HandleRemoveCon | |||
| import useIsVarFileAttribute from './use-is-var-file-attribute' | |||
| import { useStore } from '@/app/components/workflow/store' | |||
| const DELIMITER = '@@@@@' | |||
| const useConfig = (id: string, payload: LoopNodeType) => { | |||
| const { nodesReadOnly: readOnly } = useNodesReadOnly() | |||
| const { isNodeInLoop } = useIsNodeInLoop(id) | |||
| const isChatMode = useIsChatMode() | |||
| const conversationVariables = useStore(s => s.conversationVariables) | |||
| @@ -39,10 +36,8 @@ const useConfig = (id: string, payload: LoopNodeType) => { | |||
| }, []) | |||
| // output | |||
| const { getLoopNodeChildren, getBeforeNodesInSameBranch } = useWorkflow() | |||
| const beforeNodes = getBeforeNodesInSameBranch(id) | |||
| const { getLoopNodeChildren } = useWorkflow() | |||
| const loopChildrenNodes = [{ id, data: payload } as any, ...getLoopNodeChildren(id)] | |||
| const canChooseVarNodes = [...beforeNodes, ...loopChildrenNodes] | |||
| const childrenNodeVars = toNodeOutputVars(loopChildrenNodes, isChatMode, undefined, [], conversationVariables) | |||
| const { | |||
| @@ -1,5 +1,5 @@ | |||
| import type { StartNodeType } from './types' | |||
| export const checkNodeValid = (payload: StartNodeType) => { | |||
| export const checkNodeValid = (_payload: StartNodeType) => { | |||
| return true | |||
| } | |||
| @@ -1,5 +1,5 @@ | |||
| import type { TemplateTransformNodeType } from './types' | |||
| export const checkNodeValid = (payload: TemplateTransformNodeType) => { | |||
| export const checkNodeValid = (_payload: TemplateTransformNodeType) => { | |||
| return true | |||
| } | |||
| @@ -1,5 +1,5 @@ | |||
| import type { ToolNodeType } from './types' | |||
| export const checkNodeValid = (payload: ToolNodeType) => { | |||
| export const checkNodeValid = (_payload: ToolNodeType) => { | |||
| return true | |||
| } | |||
| @@ -0,0 +1,433 @@ | |||
| import { | |||
| memo, | |||
| useCallback, | |||
| useEffect, | |||
| useMemo, | |||
| useRef, | |||
| } from 'react' | |||
| import { useTranslation } from 'react-i18next' | |||
| import { useClickAway } from 'ahooks' | |||
| import { useStore as useReactFlowStore, useStoreApi } from 'reactflow' | |||
| import { | |||
| RiAlignBottom, | |||
| RiAlignCenter, | |||
| RiAlignJustify, | |||
| RiAlignLeft, | |||
| RiAlignRight, | |||
| RiAlignTop, | |||
| } from '@remixicon/react' | |||
| import { useNodesReadOnly, useNodesSyncDraft } from './hooks' | |||
| import produce from 'immer' | |||
| import { WorkflowHistoryEvent, useWorkflowHistory } from './hooks/use-workflow-history' | |||
| import { useStore } from './store' | |||
| import { useSelectionInteractions } from './hooks/use-selection-interactions' | |||
| import { useWorkflowStore } from './store' | |||
| enum AlignType { | |||
| Left = 'left', | |||
| Center = 'center', | |||
| Right = 'right', | |||
| Top = 'top', | |||
| Middle = 'middle', | |||
| Bottom = 'bottom', | |||
| DistributeHorizontal = 'distributeHorizontal', | |||
| DistributeVertical = 'distributeVertical', | |||
| } | |||
| const SelectionContextmenu = () => { | |||
| const { t } = useTranslation() | |||
| const ref = useRef(null) | |||
| const { getNodesReadOnly } = useNodesReadOnly() | |||
| const { handleSelectionContextmenuCancel } = useSelectionInteractions() | |||
| const selectionMenu = useStore(s => s.selectionMenu) | |||
| // Access React Flow methods | |||
| const store = useStoreApi() | |||
| const workflowStore = useWorkflowStore() | |||
| // Get selected nodes for alignment logic | |||
| const selectedNodes = useReactFlowStore(state => | |||
| state.getNodes().filter(node => node.selected), | |||
| ) | |||
| const { handleSyncWorkflowDraft } = useNodesSyncDraft() | |||
| const { saveStateToHistory } = useWorkflowHistory() | |||
| const menuRef = useRef<HTMLDivElement>(null) | |||
| const menuPosition = useMemo(() => { | |||
| if (!selectionMenu) return { left: 0, top: 0 } | |||
| let left = selectionMenu.left | |||
| let top = selectionMenu.top | |||
| const container = document.querySelector('#workflow-container') | |||
| if (container) { | |||
| const { width: containerWidth, height: containerHeight } = container.getBoundingClientRect() | |||
| const menuWidth = 240 | |||
| const estimatedMenuHeight = 380 | |||
| if (left + menuWidth > containerWidth) | |||
| left = left - menuWidth | |||
| if (top + estimatedMenuHeight > containerHeight) | |||
| top = top - estimatedMenuHeight | |||
| left = Math.max(0, left) | |||
| top = Math.max(0, top) | |||
| } | |||
| return { left, top } | |||
| }, [selectionMenu]) | |||
| useClickAway(() => { | |||
| handleSelectionContextmenuCancel() | |||
| }, ref) | |||
| useEffect(() => { | |||
| if (selectionMenu && selectedNodes.length <= 1) | |||
| handleSelectionContextmenuCancel() | |||
| }, [selectionMenu, selectedNodes.length, handleSelectionContextmenuCancel]) | |||
| // Handle align nodes logic | |||
| const handleAlignNode = useCallback((currentNode: any, nodeToAlign: any, alignType: AlignType, minX: number, maxX: number, minY: number, maxY: number) => { | |||
| const width = nodeToAlign.width | |||
| const height = nodeToAlign.height | |||
| // Calculate new positions based on alignment type | |||
| switch (alignType) { | |||
| case AlignType.Left: | |||
| // For left alignment, align left edge of each node to minX | |||
| currentNode.position.x = minX | |||
| if (currentNode.positionAbsolute) | |||
| currentNode.positionAbsolute.x = minX | |||
| break | |||
| case AlignType.Center: { | |||
| // For center alignment, center each node horizontally in the selection bounds | |||
| const centerX = minX + (maxX - minX) / 2 - width / 2 | |||
| currentNode.position.x = centerX | |||
| if (currentNode.positionAbsolute) | |||
| currentNode.positionAbsolute.x = centerX | |||
| break | |||
| } | |||
| case AlignType.Right: { | |||
| // For right alignment, align right edge of each node to maxX | |||
| const rightX = maxX - width | |||
| currentNode.position.x = rightX | |||
| if (currentNode.positionAbsolute) | |||
| currentNode.positionAbsolute.x = rightX | |||
| break | |||
| } | |||
| case AlignType.Top: { | |||
| // For top alignment, align top edge of each node to minY | |||
| currentNode.position.y = minY | |||
| if (currentNode.positionAbsolute) | |||
| currentNode.positionAbsolute.y = minY | |||
| break | |||
| } | |||
| case AlignType.Middle: { | |||
| // For middle alignment, center each node vertically in the selection bounds | |||
| const middleY = minY + (maxY - minY) / 2 - height / 2 | |||
| currentNode.position.y = middleY | |||
| if (currentNode.positionAbsolute) | |||
| currentNode.positionAbsolute.y = middleY | |||
| break | |||
| } | |||
| case AlignType.Bottom: { | |||
| // For bottom alignment, align bottom edge of each node to maxY | |||
| const newY = Math.round(maxY - height) | |||
| currentNode.position.y = newY | |||
| if (currentNode.positionAbsolute) | |||
| currentNode.positionAbsolute.y = newY | |||
| break | |||
| } | |||
| } | |||
| }, []) | |||
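| // Worked example for AlignType.Center with hypothetical numbers: selection bounds | |||
| // minX = 100 and maxX = 500, node width 120, so centerX = 100 + (500 - 100) / 2 - 120 / 2 | |||
| // = 240; the node then spans 240..360 and its midpoint (300) matches the bounds' midpoint. | |||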
| // Handle distribute nodes logic | |||
| const handleDistributeNodes = useCallback((nodesToAlign: any[], nodes: any[], alignType: AlignType) => { | |||
| // Sort nodes appropriately | |||
| const sortedNodes = [...nodesToAlign].sort((a, b) => { | |||
| if (alignType === AlignType.DistributeHorizontal) { | |||
| // Sort by left position for horizontal distribution | |||
| return a.position.x - b.position.x | |||
| } | |||
| else { | |||
| // Sort by top position for vertical distribution | |||
| return a.position.y - b.position.y | |||
| } | |||
| }) | |||
| if (sortedNodes.length < 3) | |||
| return null // Need at least 3 nodes for distribution | |||
| let totalGap = 0 | |||
| let fixedSpace = 0 | |||
| if (alignType === AlignType.DistributeHorizontal) { | |||
| // Fixed positions - first node's left edge and last node's right edge | |||
| const firstNodeLeft = sortedNodes[0].position.x | |||
| const lastNodeRight = sortedNodes[sortedNodes.length - 1].position.x + (sortedNodes[sortedNodes.length - 1].width || 0) | |||
| // Total available space | |||
| totalGap = lastNodeRight - firstNodeLeft | |||
| // Space occupied by nodes themselves | |||
| fixedSpace = sortedNodes.reduce((sum, node) => sum + (node.width || 0), 0) | |||
| } | |||
| else { | |||
| // Fixed positions - first node's top edge and last node's bottom edge | |||
| const firstNodeTop = sortedNodes[0].position.y | |||
| const lastNodeBottom = sortedNodes[sortedNodes.length - 1].position.y + (sortedNodes[sortedNodes.length - 1].height || 0) | |||
| // Total available space | |||
| totalGap = lastNodeBottom - firstNodeTop | |||
| // Space occupied by nodes themselves | |||
| fixedSpace = sortedNodes.reduce((sum, node) => sum + (node.height || 0), 0) | |||
| } | |||
| // Available space for gaps | |||
| const availableSpace = totalGap - fixedSpace | |||
| // Calculate even spacing between node edges | |||
| const spacing = availableSpace / (sortedNodes.length - 1) | |||
| if (spacing <= 0) | |||
| return null // Nodes are overlapping, can't distribute evenly | |||
| return produce(nodes, (draft) => { | |||
| // Keep first node fixed, position others with even gaps | |||
| let currentPosition | |||
| if (alignType === AlignType.DistributeHorizontal) { | |||
| // Start from first node's right edge | |||
| currentPosition = sortedNodes[0].position.x + (sortedNodes[0].width || 0) | |||
| } | |||
| else { | |||
| // Start from first node's bottom edge | |||
| currentPosition = sortedNodes[0].position.y + (sortedNodes[0].height || 0) | |||
| } | |||
| // Skip first node (index 0), it stays in place | |||
| for (let i = 1; i < sortedNodes.length - 1; i++) { | |||
| const nodeToAlign = sortedNodes[i] | |||
| const currentNode = draft.find(n => n.id === nodeToAlign.id) | |||
| if (!currentNode) continue | |||
| if (alignType === AlignType.DistributeHorizontal) { | |||
| // Position = previous right edge + spacing | |||
| const newX: number = currentPosition + spacing | |||
| currentNode.position.x = newX | |||
| if (currentNode.positionAbsolute) | |||
| currentNode.positionAbsolute.x = newX | |||
| // Update for next iteration - current node's right edge | |||
| currentPosition = newX + (nodeToAlign.width || 0) | |||
| } | |||
| else { | |||
| // Position = previous bottom edge + spacing | |||
| const newY: number = currentPosition + spacing | |||
| currentNode.position.y = newY | |||
| if (currentNode.positionAbsolute) | |||
| currentNode.positionAbsolute.y = newY | |||
| // Update for next iteration - current node's bottom edge | |||
| currentPosition = newY + (nodeToAlign.height || 0) | |||
| } | |||
| } | |||
| }) | |||
| }, []) | |||
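| // Worked example for DistributeHorizontal with hypothetical numbers: three 200px-wide | |||
| // nodes at x = 0, 300 and 800 give totalGap = (800 + 200) - 0 = 1000 and fixedSpace = 600, | |||
| // so spacing = (1000 - 600) / (3 - 1) = 200; the first and last nodes stay put and the | |||
| // middle node moves to x = 0 + 200 + 200 = 400, leaving a 200px gap on either side of it. | |||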
| const handleAlignNodes = useCallback((alignType: AlignType) => { | |||
| if (getNodesReadOnly() || selectedNodes.length <= 1) { | |||
| handleSelectionContextmenuCancel() | |||
| return | |||
| } | |||
| // Disable node animation state - same as handleNodeDragStart | |||
| workflowStore.setState({ nodeAnimation: false }) | |||
| // Get all current nodes | |||
| const nodes = store.getState().getNodes() | |||
| // Get all selected nodes | |||
| const selectedNodeIds = selectedNodes.map(node => node.id) | |||
| const nodesToAlign = nodes.filter(node => selectedNodeIds.includes(node.id)) | |||
| if (nodesToAlign.length <= 1) { | |||
| handleSelectionContextmenuCancel() | |||
| return | |||
| } | |||
| // Calculate node boundaries for alignment | |||
| let minX = Number.MAX_SAFE_INTEGER | |||
| let maxX = Number.MIN_SAFE_INTEGER | |||
| let minY = Number.MAX_SAFE_INTEGER | |||
| let maxY = Number.MIN_SAFE_INTEGER | |||
| // Calculate boundaries of selected nodes | |||
| const validNodes = nodesToAlign.filter(node => node.width && node.height) | |||
| validNodes.forEach((node) => { | |||
| const width = node.width! | |||
| const height = node.height! | |||
| minX = Math.min(minX, node.position.x) | |||
| maxX = Math.max(maxX, node.position.x + width) | |||
| minY = Math.min(minY, node.position.y) | |||
| maxY = Math.max(maxY, node.position.y + height) | |||
| }) | |||
| // Handle distribute nodes logic | |||
| if (alignType === AlignType.DistributeHorizontal || alignType === AlignType.DistributeVertical) { | |||
| const distributeNodes = handleDistributeNodes(nodesToAlign, nodes, alignType) | |||
| if (distributeNodes) { | |||
| // Apply node distribution updates | |||
| store.getState().setNodes(distributeNodes) | |||
| handleSelectionContextmenuCancel() | |||
| // Clear guide lines | |||
| const { setHelpLineHorizontal, setHelpLineVertical } = workflowStore.getState() | |||
| setHelpLineHorizontal() | |||
| setHelpLineVertical() | |||
| // Sync workflow draft | |||
| handleSyncWorkflowDraft() | |||
| // Save to history | |||
| saveStateToHistory(WorkflowHistoryEvent.NodeDragStop) | |||
| return // End function execution | |||
| } | |||
| } | |||
| const newNodes = produce(nodes, (draft) => { | |||
| // Iterate through all selected nodes | |||
| const validNodesToAlign = nodesToAlign.filter(node => node.width && node.height) | |||
| validNodesToAlign.forEach((nodeToAlign) => { | |||
| // Find the corresponding node in draft - consistent with handleNodeDrag | |||
| const currentNode = draft.find(n => n.id === nodeToAlign.id) | |||
| if (!currentNode) | |||
| return | |||
| // Use the extracted alignment function | |||
| handleAlignNode(currentNode, nodeToAlign, alignType, minX, maxX, minY, maxY) | |||
| }) | |||
| }) | |||
| // Apply node position updates - consistent with handleNodeDrag and handleNodeDragStop | |||
| try { | |||
| // Directly use setNodes to update nodes - consistent with handleNodeDrag | |||
| store.getState().setNodes(newNodes) | |||
| // Close popup | |||
| handleSelectionContextmenuCancel() | |||
| // Clear guide lines - consistent with handleNodeDragStop | |||
| const { setHelpLineHorizontal, setHelpLineVertical } = workflowStore.getState() | |||
| setHelpLineHorizontal() | |||
| setHelpLineVertical() | |||
| // Sync workflow draft - consistent with handleNodeDragStop | |||
| handleSyncWorkflowDraft() | |||
| // Save to history - consistent with handleNodeDragStop | |||
| saveStateToHistory(WorkflowHistoryEvent.NodeDragStop) | |||
| } | |||
| catch (err) { | |||
| console.error('Failed to update nodes:', err) | |||
| } | |||
| }, [store, workflowStore, selectedNodes, getNodesReadOnly, handleSyncWorkflowDraft, saveStateToHistory, handleSelectionContextmenuCancel, handleAlignNode, handleDistributeNodes]) | |||
| if (!selectionMenu) | |||
| return null | |||
| return ( | |||
| <div | |||
| className='absolute z-[9]' | |||
| style={{ | |||
| left: menuPosition.left, | |||
| top: menuPosition.top, | |||
| }} | |||
| ref={ref} | |||
| > | |||
| <div ref={menuRef} className='w-[240px] rounded-lg border-[0.5px] border-components-panel-border bg-components-panel-bg shadow-xl'> | |||
| <div className='p-1'> | |||
| <div className='system-xs-medium px-2 py-2 text-text-tertiary'> | |||
| {t('workflow.operator.vertical')} | |||
| </div> | |||
| <div | |||
| className='flex h-8 cursor-pointer items-center gap-2 rounded-lg px-3 text-sm text-text-secondary hover:bg-state-base-hover' | |||
| onClick={() => handleAlignNodes(AlignType.Top)} | |||
| > | |||
| <RiAlignTop className='h-4 w-4' /> | |||
| {t('workflow.operator.alignTop')} | |||
| </div> | |||
| <div | |||
| className='flex h-8 cursor-pointer items-center gap-2 rounded-lg px-3 text-sm text-text-secondary hover:bg-state-base-hover' | |||
| onClick={() => handleAlignNodes(AlignType.Middle)} | |||
| > | |||
| <RiAlignCenter className='h-4 w-4 rotate-90' /> | |||
| {t('workflow.operator.alignMiddle')} | |||
| </div> | |||
| <div | |||
| className='flex h-8 cursor-pointer items-center gap-2 rounded-lg px-3 text-sm text-text-secondary hover:bg-state-base-hover' | |||
| onClick={() => handleAlignNodes(AlignType.Bottom)} | |||
| > | |||
| <RiAlignBottom className='h-4 w-4' /> | |||
| {t('workflow.operator.alignBottom')} | |||
| </div> | |||
| <div | |||
| className='flex h-8 cursor-pointer items-center gap-2 rounded-lg px-3 text-sm text-text-secondary hover:bg-state-base-hover' | |||
| onClick={() => handleAlignNodes(AlignType.DistributeVertical)} | |||
| > | |||
| <RiAlignJustify className='h-4 w-4 rotate-90' /> | |||
| {t('workflow.operator.distributeVertical')} | |||
| </div> | |||
| </div> | |||
| <div className='h-[1px] bg-divider-regular'></div> | |||
| <div className='p-1'> | |||
| <div className='system-xs-medium px-2 py-2 text-text-tertiary'> | |||
| {t('workflow.operator.horizontal')} | |||
| </div> | |||
| <div | |||
| className='flex h-8 cursor-pointer items-center gap-2 rounded-lg px-3 text-sm text-text-secondary hover:bg-state-base-hover' | |||
| onClick={() => handleAlignNodes(AlignType.Left)} | |||
| > | |||
| <RiAlignLeft className='h-4 w-4' /> | |||
| {t('workflow.operator.alignLeft')} | |||
| </div> | |||
| <div | |||
| className='flex h-8 cursor-pointer items-center gap-2 rounded-lg px-3 text-sm text-text-secondary hover:bg-state-base-hover' | |||
| onClick={() => handleAlignNodes(AlignType.Center)} | |||
| > | |||
| <RiAlignCenter className='h-4 w-4' /> | |||
| {t('workflow.operator.alignCenter')} | |||
| </div> | |||
| <div | |||
| className='flex h-8 cursor-pointer items-center gap-2 rounded-lg px-3 text-sm text-text-secondary hover:bg-state-base-hover' | |||
| onClick={() => handleAlignNodes(AlignType.Right)} | |||
| > | |||
| <RiAlignRight className='h-4 w-4' /> | |||
| {t('workflow.operator.alignRight')} | |||
| </div> | |||
| <div | |||
| className='flex h-8 cursor-pointer items-center gap-2 rounded-lg px-3 text-sm text-text-secondary hover:bg-state-base-hover' | |||
| onClick={() => handleAlignNodes(AlignType.DistributeHorizontal)} | |||
| > | |||
| <RiAlignJustify className='h-4 w-4' /> | |||
| {t('workflow.operator.distributeHorizontal')} | |||
| </div> | |||
| </div> | |||
| </div> | |||
| </div> | |||
| ) | |||
| } | |||
| export default memo(SelectionContextmenu) | |||
| @@ -23,7 +23,7 @@ type InspectVarsActions = { | |||
| export type InspectVarsSliceShape = InspectVarsState & InspectVarsActions | |||
| export const createInspectVarsSlice: StateCreator<InspectVarsSliceShape> = (set, get) => { | |||
| export const createInspectVarsSlice: StateCreator<InspectVarsSliceShape> = (set) => { | |||
| return ({ | |||
| currentFocusNodeId: null, | |||
| nodesWithInspectVars: [], | |||
| @@ -75,11 +75,11 @@ export const createInspectVarsSlice: StateCreator<InspectVarsSliceShape> = (set, | |||
| if (!targetNode) | |||
| return | |||
| const targetVar = targetNode.vars.find(varItem => varItem.id === varId) | |||
| if(!targetVar) | |||
| if (!targetVar) | |||
| return | |||
| targetVar.value = value | |||
| targetVar.edited = true | |||
| }, | |||
| }, | |||
| ) | |||
| return { | |||
| nodesWithInspectVars: nodes, | |||
| @@ -93,11 +93,11 @@ export const createInspectVarsSlice: StateCreator<InspectVarsSliceShape> = (set, | |||
| if (!targetNode) | |||
| return | |||
| const targetVar = targetNode.vars.find(varItem => varItem.id === varId) | |||
| if(!targetVar) | |||
| if (!targetVar) | |||
| return | |||
| targetVar.value = value | |||
| targetVar.edited = false | |||
| }, | |||
| }, | |||
| ) | |||
| return { | |||
| nodesWithInspectVars: nodes, | |||
| @@ -111,11 +111,11 @@ export const createInspectVarsSlice: StateCreator<InspectVarsSliceShape> = (set, | |||
| if (!targetNode) | |||
| return | |||
| const targetVar = targetNode.vars.find(varItem => varItem.id === varId) | |||
| if(!targetVar) | |||
| if (!targetVar) | |||
| return | |||
| targetVar.name = selector[1] | |||
| targetVar.selector = selector | |||
| }, | |||
| }, | |||
| ) | |||
| return { | |||
| nodesWithInspectVars: nodes, | |||
| @@ -131,7 +131,7 @@ export const createInspectVarsSlice: StateCreator<InspectVarsSliceShape> = (set, | |||
| const needChangeVarIndex = targetNode.vars.findIndex(varItem => varItem.id === varId) | |||
| if (needChangeVarIndex !== -1) | |||
| targetNode.vars.splice(needChangeVarIndex, 1) | |||
| }, | |||
| }, | |||
| ) | |||
| return { | |||
| nodesWithInspectVars: nodes, | |||
| @@ -15,6 +15,11 @@ export type PanelSliceShape = { | |||
| left: number | |||
| } | |||
| setPanelMenu: (panelMenu: PanelSliceShape['panelMenu']) => void | |||
| selectionMenu?: { | |||
| top: number | |||
| left: number | |||
| } | |||
| setSelectionMenu: (selectionMenu: PanelSliceShape['selectionMenu']) => void | |||
| showVariableInspectPanel: boolean | |||
| setShowVariableInspectPanel: (showVariableInspectPanel: boolean) => void | |||
| initShowLastRunTab: boolean | |||
| @@ -33,6 +38,8 @@ export const createPanelSlice: StateCreator<PanelSliceShape> = set => ({ | |||
| setShowDebugAndPreviewPanel: showDebugAndPreviewPanel => set(() => ({ showDebugAndPreviewPanel })), | |||
| panelMenu: undefined, | |||
| setPanelMenu: panelMenu => set(() => ({ panelMenu })), | |||
| selectionMenu: undefined, | |||
| setSelectionMenu: selectionMenu => set(() => ({ selectionMenu })), | |||
| showVariableInspectPanel: false, | |||
| setShowVariableInspectPanel: showVariableInspectPanel => set(() => ({ showVariableInspectPanel })), | |||
| initShowLastRunTab: false, | |||
| @@ -103,6 +103,7 @@ export type CommonNodeType<T = {}> = { | |||
| retry_config?: WorkflowRetryConfig | |||
| default_value?: DefaultValueForm[] | |||
| credential_id?: string | |||
| _dimmed?: boolean | |||
| } & T & Partial<Pick<ToolDefaultValue, 'provider_id' | 'provider_type' | 'provider_name' | 'tool_name'>> | |||
| & Partial<Pick<DataSourceDefaultValue, 'plugin_id' | 'provider_type' | 'provider_name' | 'datasource_name'>> | |||
| @@ -119,7 +120,8 @@ export type CommonEdgeType = { | |||
| isInLoop?: boolean | |||
| loop_id?: string | |||
| sourceType: BlockEnum | |||
| targetType: BlockEnum | |||
| targetType: BlockEnum, | |||
| _isTemp?: boolean, | |||
| } | |||
| export type Node<T = {}> = ReactFlowNode<CommonNodeType<T>> | |||
| @@ -82,6 +82,7 @@ export default combine( | |||
| '**/.next/', | |||
| '**/public/*', | |||
| '**/*.json', | |||
| '**/*.js', | |||
| ], | |||
| }, | |||
| { | |||
| @@ -3,3 +3,8 @@ declare module 'lamejs/src/js/MPEGMode'; | |||
| declare module 'lamejs/src/js/Lame'; | |||
| declare module 'lamejs/src/js/BitStream'; | |||
| declare module 'react-18-input-autosize'; | |||
| declare module '*.mdx' { | |||
| let MDXComponent: (props: any) => JSX.Element | |||
| export default MDXComponent | |||
| } | |||
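| // With the '*.mdx' declaration above, MDX documents can be imported from TypeScript as | |||
| // ordinary React components; the path below is illustrative only, not a real file. | |||
| import ApiDoc from './docs/example.en.mdx' | |||
| const DocPage = () => <ApiDoc /> | |||
| export default DocPage | |||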
| @@ -89,11 +89,11 @@ if (!i18n.isInitialized) { | |||
| } | |||
| export const changeLanguage = async (lng?: string) => { | |||
| const resolvedLng = lng ?? 'en-US' | |||
| const resource = await loadLangResources(resolvedLng) | |||
| if (!i18n.hasResourceBundle(resolvedLng, 'translation')) | |||
| i18n.addResourceBundle(resolvedLng, 'translation', resource, true, true) | |||
| await i18n.changeLanguage(resolvedLng) | |||
| if (!lng) return | |||
| const resource = await loadLangResources(lng) | |||
| if (!i18n.hasResourceBundle(lng, 'translation')) | |||
| i18n.addResourceBundle(lng, 'translation', resource, true, true) | |||
| await i18n.changeLanguage(lng) | |||
| } | |||
| export default i18n | |||
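| // With the early return above, changeLanguage(undefined) becomes a no-op instead of | |||
| // falling back to 'en-US', and callers are expected to await it so the resource bundle | |||
| // is loaded before rendering. A minimal caller sketch (the siteInfo shape is assumed): | |||
| const applySiteLanguage = async (siteInfo: { default_language?: string }) => { | |||
| await changeLanguage(siteInfo.default_language) | |||
| // translated strings are safe to render from this point on | |||
| } | |||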
| @@ -968,6 +968,15 @@ const translation = { | |||
| }, | |||
| settingsTab: 'Einstellungen', | |||
| lastRunTab: 'Letzte Ausführung', | |||
| relations: { | |||
| dependents: 'Abhängige Knoten', | |||
| dependenciesDescription: 'Knoten, auf die sich dieser Knoten stützt', | |||
| dependencies: 'Abhängigkeiten', | |||
| noDependencies: 'Keine Abhängigkeiten', | |||
| dependentsDescription: 'Knoten, die auf diesem Knoten basieren', | |||
| noDependents: 'Keine abhängigen Knoten', | |||
| }, | |||
| relationsTab: 'Beziehungen', | |||
| }, | |||
| } | |||
| @@ -294,6 +294,18 @@ const translation = { | |||
| zoomTo50: 'Zoom to 50%', | |||
| zoomTo100: 'Zoom to 100%', | |||
| zoomToFit: 'Zoom to Fit', | |||
| alignNodes: 'Align Nodes', | |||
| alignLeft: 'Left', | |||
| alignCenter: 'Center', | |||
| alignRight: 'Right', | |||
| alignTop: 'Top', | |||
| alignMiddle: 'Middle', | |||
| alignBottom: 'Bottom', | |||
| vertical: 'Vertical', | |||
| horizontal: 'Horizontal', | |||
| distributeHorizontal: 'Space Horizontally', | |||
| distributeVertical: 'Space Vertically', | |||
| selectionAlignment: 'Selection Alignment', | |||
| }, | |||
| variableReference: { | |||
| noAvailableVars: 'No available variables', | |||
| @@ -965,6 +977,7 @@ const translation = { | |||
| debug: { | |||
| settingsTab: 'Settings', | |||
| lastRunTab: 'Last Run', | |||
| relationsTab: 'Relations', | |||
| noData: { | |||
| description: 'The results of the last run will be displayed here', | |||
| runThisNode: 'Run this node', | |||
| @@ -990,6 +1003,14 @@ const translation = { | |||
| chatNode: 'Conversation', | |||
| systemNode: 'System', | |||
| }, | |||
| relations: { | |||
| dependencies: 'Dependencies', | |||
| dependents: 'Dependents', | |||
| dependenciesDescription: 'Nodes that this node relies on', | |||
| dependentsDescription: 'Nodes that rely on this node', | |||
| noDependencies: 'No dependencies', | |||
| noDependents: 'No dependents', | |||
| }, | |||
| }, | |||
| } | |||
| @@ -968,6 +968,15 @@ const translation = { | |||
| }, | |||
| lastRunTab: 'Última ejecución', | |||
| settingsTab: 'Ajustes', | |||
| relations: { | |||
| dependents: 'Dependientes', | |||
| dependenciesDescription: 'Nodos en los que se basa este nodo', | |||
| dependentsDescription: 'Nodos que dependen de este nodo', | |||
| noDependencies: 'Sin dependencias', | |||
| noDependents: 'Sin dependientes', | |||
| dependencies: 'Dependencias', | |||
| }, | |||
| relationsTab: 'Relaciones', | |||
| }, | |||
| } | |||
| @@ -104,9 +104,7 @@ const translation = { | |||
| noHistory: 'بدون تاریخچه', | |||
| loadMore: 'بارگذاری گردش کار بیشتر', | |||
| exportPNG: 'صادرات به فرمت PNG', | |||
| noExist: 'هیچگونه متغیری وجود ندارد', | |||
| exitVersions: 'نسخههای خروجی', | |||
| referenceVar: 'متغیر مرجع', | |||
| exportSVG: 'صادرات به فرمت SVG', | |||
| exportJPEG: 'صادرات به فرمت JPEG', | |||
| exportImage: 'تصویر را صادر کنید', | |||
| @@ -608,7 +606,6 @@ const translation = { | |||
| }, | |||
| select: 'انتخاب', | |||
| addSubVariable: 'متغیر فرعی', | |||
| condition: 'شرط', | |||
| }, | |||
| variableAssigner: { | |||
| title: 'تخصیص متغیرها', | |||
| @@ -971,6 +968,15 @@ const translation = { | |||
| }, | |||
| settingsTab: 'تنظیمات', | |||
| lastRunTab: 'آخرین اجرا', | |||
| relations: { | |||
| dependents: 'وابستگان', | |||
| dependencies: 'وابستگی‌ها', | |||
| noDependents: 'بدون وابستگان', | |||
| noDependencies: 'بدون وابستگی', | |||
| dependenciesDescription: 'گره هایی که این گره به آنها متکی است', | |||
| dependentsDescription: 'گره هایی که به این گره متکی هستند', | |||
| }, | |||
| relationsTab: 'روابط', | |||
| }, | |||
| } | |||
| @@ -107,9 +107,7 @@ const translation = { | |||
| exitVersions: 'Versions de sortie', | |||
| exportSVG: 'Exporter en SVG', | |||
| publishUpdate: 'Publier une mise à jour', | |||
| noExist: 'Aucune variable de ce type', | |||
| versionHistory: 'Historique des versions', | |||
| referenceVar: 'Variable de référence', | |||
| exportImage: 'Exporter l\'image', | |||
| exportJPEG: 'Exporter en JPEG', | |||
| needEndNode: 'Le nœud de fin doit être ajouté', | |||
| @@ -608,7 +606,6 @@ const translation = { | |||
| }, | |||
| select: 'Choisir', | |||
| addSubVariable: 'Sous-variable', | |||
| condition: 'Condition', | |||
| }, | |||
| variableAssigner: { | |||
| title: 'Attribuer des variables', | |||
| @@ -971,6 +968,15 @@ const translation = { | |||
| }, | |||
| settingsTab: 'Paramètres', | |||
| lastRunTab: 'Dernière Exécution', | |||
| relations: { | |||
| dependencies: 'Dépendances', | |||
| dependentsDescription: 'Nœuds qui s’appuient sur ce nœud', | |||
| noDependents: 'Aucun dépendant', | |||
| dependents: 'Dépendants', | |||
| noDependencies: 'Aucune dépendance', | |||
| dependenciesDescription: 'Nœuds sur lesquels repose ce nœud', | |||
| }, | |||
| relationsTab: 'Relations', | |||
| }, | |||
| } | |||
| @@ -109,8 +109,6 @@ const translation = { | |||
| exitVersions: 'निकलने के संस्करण', | |||
| exportPNG: 'PNG के रूप में निर्यात करें', | |||
| exportJPEG: 'JPEG के रूप में निर्यात करें', | |||
| referenceVar: 'संदर्भ चर', | |||
| noExist: 'कोई ऐसा चर नहीं है', | |||
| exportImage: 'छवि निर्यात करें', | |||
| publishUpdate: 'अपडेट प्रकाशित करें', | |||
| exportSVG: 'SVG के रूप में निर्यात करें', | |||
| @@ -623,7 +621,6 @@ const translation = { | |||
| }, | |||
| select: 'चुनना', | |||
| addSubVariable: 'उप चर', | |||
| condition: 'स्थिति', | |||
| }, | |||
| variableAssigner: { | |||
| title: 'वेरिएबल्स असाइन करें', | |||
| @@ -991,6 +988,15 @@ const translation = { | |||
| }, | |||
| settingsTab: 'सेटिंग्स', | |||
| lastRunTab: 'अंतिम रन', | |||
| relations: { | |||
| dependents: 'आश्रित नोड्स', | |||
| dependentsDescription: 'इस नोड पर निर्भर नोड्स', | |||
| dependencies: 'निर्भरता', | |||
| noDependents: 'कोई आश्रित नहीं', | |||
| dependenciesDescription: 'यह नोड जिस नोड पर निर्भर करता है', | |||
| noDependencies: 'कोई निर्भरताएँ नहीं', | |||
| }, | |||
| relationsTab: 'रिश्ते', | |||
| }, | |||
| } | |||
| @@ -110,11 +110,9 @@ const translation = { | |||
| publishUpdate: 'Pubblica aggiornamento', | |||
| versionHistory: 'Cronologia delle versioni', | |||
| exitVersions: 'Uscita Versioni', | |||
| referenceVar: 'Variabile di riferimento', | |||
| exportSVG: 'Esporta come SVG', | |||
| exportImage: 'Esporta immagine', | |||
| exportJPEG: 'Esporta come JPEG', | |||
| noExist: 'Nessuna variabile del genere', | |||
| exportPNG: 'Esporta come PNG', | |||
| needEndNode: 'Deve essere aggiunto il nodo finale', | |||
| addBlock: 'Aggiungi nodo', | |||
| @@ -627,7 +625,6 @@ const translation = { | |||
| }, | |||
| addSubVariable: 'Variabile secondaria', | |||
| select: 'Selezionare', | |||
| condition: 'Condizione', | |||
| }, | |||
| variableAssigner: { | |||
| title: 'Assegna variabili', | |||
| @@ -997,6 +994,15 @@ const translation = { | |||
| }, | |||
| settingsTab: 'Impostazioni', | |||
| lastRunTab: 'Ultima corsa', | |||
| relations: { | |||
| dependents: 'Dipendenti', | |||
| noDependencies: 'Nessuna dipendenza', | |||
| dependencies: 'Dipendenze', | |||
| noDependents: 'Nessun nodo dipendente', | |||
| dependentsDescription: 'Nodi che si basano su questo nodo', | |||
| dependenciesDescription: 'Nodi su cui si basa questo nodo', | |||
| }, | |||
| relationsTab: 'Relazioni', | |||
| }, | |||
| } | |||
| @@ -968,6 +968,15 @@ const translation = { | |||
| }, | |||
| settingsTab: '設定', | |||
| lastRunTab: '最後の実行', | |||
| relationsTab: '関係', | |||
| relations: { | |||
| dependencies: '依存元', | |||
| dependents: '依存先', | |||
| dependenciesDescription: 'このノードが依存している他のノード', | |||
| dependentsDescription: 'このノードに依存している他のノード', | |||
| noDependencies: '依存元なし', | |||
| noDependents: '依存先なし', | |||
| }, | |||
| }, | |||
| } | |||
| @@ -1019,6 +1019,15 @@ const translation = { | |||
| }, | |||
| settingsTab: '설정', | |||
| lastRunTab: '마지막 실행', | |||
| relations: { | |||
| dependencies: '종속성', | |||
| dependentsDescription: '이 노드에 의존하는 노드', | |||
| noDependents: '종속 노드 없음', | |||
| noDependencies: '종속성 없음', | |||
| dependents: '종속 노드', | |||
| dependenciesDescription: '이 노드가 의존하는 노드', | |||
| }, | |||
| relationsTab: '관계', | |||
| }, | |||
| } | |||
| @@ -108,10 +108,8 @@ const translation = { | |||
| versionHistory: 'Historia wersji', | |||
| exportSVG: 'Eksportuj jako SVG', | |||
| exportJPEG: 'Eksportuj jako JPEG', | |||
| noExist: 'Nie ma takiej zmiennej', | |||
| exportPNG: 'Eksportuj jako PNG', | |||
| publishUpdate: 'Opublikuj aktualizację', | |||
| referenceVar: 'Zmienna odniesienia', | |||
| addBlock: 'Dodaj węzeł', | |||
| needEndNode: 'Należy dodać węzeł końcowy', | |||
| needAnswerNode: 'Węzeł odpowiedzi musi zostać dodany', | |||
| @@ -608,7 +606,6 @@ const translation = { | |||
| }, | |||
| addSubVariable: 'Zmienna podrzędna', | |||
| select: 'Wybrać', | |||
| condition: 'Stan', | |||
| }, | |||
| variableAssigner: { | |||
| title: 'Przypisz zmienne', | |||
| @@ -971,6 +968,15 @@ const translation = { | |||
| }, | |||
| settingsTab: 'Ustawienia', | |||
| lastRunTab: 'Ostatnie uruchomienie', | |||
| relations: { | |||
| dependencies: 'Zależności', | |||
| dependenciesDescription: 'Węzły, na których opiera się ten węzeł', | |||
| noDependents: 'Brak węzłów zależnych', | |||
| dependents: 'Węzły zależne', | |||
| dependentsDescription: 'Węzły, które opierają się na tym węźle', | |||
| noDependencies: 'Brak zależności', | |||
| }, | |||
| relationsTab: 'Relacje', | |||
| }, | |||
| } | |||
| @@ -107,8 +107,6 @@ const translation = { | |||
| publishUpdate: 'Publicar Atualização', | |||
| versionHistory: 'Histórico de Versão', | |||
| exportImage: 'Exportar Imagem', | |||
| referenceVar: 'Variável de Referência', | |||
| noExist: 'Nenhuma variável desse tipo', | |||
| exitVersions: 'Versões de Sair', | |||
| exportSVG: 'Exportar como SVG', | |||
| exportJPEG: 'Exportar como JPEG', | |||
| @@ -608,7 +606,6 @@ const translation = { | |||
| }, | |||
| addSubVariable: 'Subvariável', | |||
| select: 'Selecionar', | |||
| condition: 'Condição', | |||
| }, | |||
| variableAssigner: { | |||
| title: 'Atribuir variáveis', | |||
| @@ -971,6 +968,15 @@ const translation = { | |||
| }, | |||
| settingsTab: 'Configurações', | |||
| lastRunTab: 'Última execução', | |||
| relations: { | |||
| noDependents: 'Sem dependentes', | |||
| dependenciesDescription: 'Nós dos quais esse nó depende', | |||
| dependents: 'Dependentes', | |||
| dependencies: 'Dependências', | |||
| dependentsDescription: 'Nós que dependem desse nó', | |||
| noDependencies: 'Sem dependências', | |||
| }, | |||
| relationsTab: 'Relações', | |||
| }, | |||
| } | |||
| @@ -106,11 +106,9 @@ const translation = { | |||
| exportImage: 'Exportă imaginea', | |||
| exportSVG: 'Exportă ca SVG', | |||
| exportPNG: 'Exportă ca PNG', | |||
| noExist: 'Nu există o astfel de variabilă', | |||
| exitVersions: 'Ieșire Versiuni', | |||
| versionHistory: 'Istoricul versiunilor', | |||
| publishUpdate: 'Publicați actualizarea', | |||
| referenceVar: 'Variabilă de referință', | |||
| exportJPEG: 'Exportă ca JPEG', | |||
| addBlock: 'Adaugă nod', | |||
| needAnswerNode: 'Nodul de răspuns trebuie adăugat', | |||
| @@ -608,7 +606,6 @@ const translation = { | |||
| }, | |||
| select: 'Alege', | |||
| addSubVariable: 'Subvariabilă', | |||
| condition: 'Condiție', | |||
| }, | |||
| variableAssigner: { | |||
| title: 'Atribuie variabile', | |||
| @@ -971,6 +968,15 @@ const translation = { | |||
| }, | |||
| settingsTab: 'Setări', | |||
| lastRunTab: 'Ultima execuție', | |||
| relations: { | |||
| dependencies: 'Dependențe', | |||
| noDependencies: 'Fără dependențe', | |||
| dependents: 'Noduri dependente', | |||
| noDependents: 'Fără noduri dependente', | |||
| dependentsDescription: 'Noduri care se bazează pe acest nod', | |||
| dependenciesDescription: 'Noduri pe care se bazează acest nod', | |||
| }, | |||
| relationsTab: 'Relații', | |||
| }, | |||
| } | |||
| @@ -103,12 +103,10 @@ const translation = { | |||
| addFailureBranch: 'Добавить ветвь Fail', | |||
| noHistory: 'Без истории', | |||
| loadMore: 'Загрузите больше рабочих процессов', | |||
| noExist: 'Такой переменной не существует', | |||
| versionHistory: 'История версий', | |||
| exportPNG: 'Экспортировать как PNG', | |||
| exportImage: 'Экспортировать изображение', | |||
| exportJPEG: 'Экспортировать как JPEG', | |||
| referenceVar: 'Ссылочная переменная', | |||
| exitVersions: 'Выходные версии', | |||
| exportSVG: 'Экспортировать как SVG', | |||
| publishUpdate: 'Опубликовать обновление', | |||
| @@ -608,7 +606,6 @@ const translation = { | |||
| }, | |||
| select: 'Выбирать', | |||
| addSubVariable: 'Подпеременная', | |||
| condition: 'Условие', | |||
| }, | |||
| variableAssigner: { | |||
| title: 'Назначить переменные', | |||
| @@ -971,6 +968,15 @@ const translation = { | |||
| }, | |||
| lastRunTab: 'Последний запуск', | |||
| settingsTab: 'Настройки', | |||
| relations: { | |||
| dependencies: 'Зависимости', | |||
| dependents: 'Зависимые узлы', | |||
| noDependencies: 'Нет зависимостей', | |||
| dependentsDescription: 'Узлы, которые полагаются на этот узел', | |||
| noDependents: 'Нет зависимых', | |||
| dependenciesDescription: 'Узлы, на которые опирается этот узел', | |||
| }, | |||
| relationsTab: 'Отношения', | |||
| }, | |||
| } | |||
| @@ -968,6 +968,15 @@ const translation = { | |||
| }, | |||
| settingsTab: 'Nastavitve', | |||
| lastRunTab: 'Zadnji zagon', | |||
| relations: { | |||
| dependencies: 'Odvisnosti', | |||
| dependents: 'Odvisna vozlišča', | |||
| noDependents: 'Brez odvisnih vozlišč', | |||
| dependentsDescription: 'Vozlišča, ki se zanašajo na to vozlišče', | |||
| dependenciesDescription: 'Vozlišča, na katera se zanaša to vozlišče', | |||
| noDependencies: 'Brez odvisnosti', | |||
| }, | |||
| relationsTab: 'Odnosi', | |||
| }, | |||
| } | |||
| @@ -105,9 +105,7 @@ const translation = { | |||
| noHistory: 'ไม่มีประวัติ', | |||
| versionHistory: 'ประวัติรุ่น', | |||
| exportPNG: 'ส่งออกเป็น PNG', | |||
| noExist: 'ไม่มีตัวแปรดังกล่าว', | |||
| exportJPEG: 'ส่งออกเป็น JPEG', | |||
| referenceVar: 'ตัวแปรอ้างอิง', | |||
| publishUpdate: 'เผยแพร่การอัปเดต', | |||
| exitVersions: 'ออกเวอร์ชัน', | |||
| exportImage: 'ส่งออกภาพ', | |||
| @@ -608,7 +606,6 @@ const translation = { | |||
| selectVariable: 'เลือกตัวแปร...', | |||
| addSubVariable: 'ตัวแปรย่อย', | |||
| select: 'เลือก', | |||
| condition: 'เงื่อนไข', | |||
| }, | |||
| variableAssigner: { | |||
| title: 'กําหนดตัวแปร', | |||
| @@ -971,6 +968,15 @@ const translation = { | |||
| }, | |||
| settingsTab: 'การตั้งค่า', | |||
| lastRunTab: 'รอบสุดท้าย', | |||
| relations: { | |||
| dependents: 'ผู้อยู่ในอุปการะ', | |||
| dependencies: 'การพึ่งพา', | |||
| dependenciesDescription: 'โหนดที่โหนดนี้อาศัย', | |||
| noDependencies: 'ไม่มีการพึ่งพา', | |||
| noDependents: 'ไม่มีผู้อยู่ในอุปการะ', | |||
| dependentsDescription: 'โหนดที่อาศัยโหนดนี้', | |||
| }, | |||
| relationsTab: 'ความสัมพันธ์', | |||
| }, | |||
| } | |||
| @@ -109,9 +109,7 @@ const translation = { | |||
| exitVersions: 'Çıkış Sürümleri', | |||
| versionHistory: 'Sürüm Geçmişi', | |||
| exportJPEG: 'JPEG olarak dışa aktar', | |||
| noExist: 'Böyle bir değişken yok', | |||
| exportSVG: 'SVG olarak dışa aktar', | |||
| referenceVar: 'Referans Değişken', | |||
| addBlock: 'Düğüm Ekle', | |||
| needAnswerNode: 'Cevap düğümü eklenmelidir.', | |||
| needEndNode: 'Son düğüm eklenmelidir', | |||
| @@ -609,7 +607,6 @@ const translation = { | |||
| }, | |||
| addSubVariable: 'Alt Değişken', | |||
| select: 'Seçmek', | |||
| condition: 'Koşul', | |||
| }, | |||
| variableAssigner: { | |||
| title: 'Değişken ata', | |||
| @@ -972,6 +969,15 @@ const translation = { | |||
| }, | |||
| lastRunTab: 'Son Koşu', | |||
| settingsTab: 'Ayarlar', | |||
| relations: { | |||
| noDependents: 'Bağımlı yok', | |||
| dependentsDescription: 'Bu düğüme dayanan düğümler', | |||
| dependenciesDescription: 'Bu düğümün dayandığı düğümler', | |||
| dependencies: 'Bağımlılıklar', | |||
| dependents: 'Bağımlılar', | |||
| noDependencies: 'Bağımlılık yok', | |||
| }, | |||
| relationsTab: 'İlişkiler', | |||
| }, | |||
| } | |||
| @@ -103,9 +103,7 @@ const translation = { | |||
| addFailureBranch: 'Додано гілку помилки', | |||
| noHistory: 'Без історії', | |||
| loadMore: 'Завантажте більше робочих процесів', | |||
| referenceVar: 'Посилальна змінна', | |||
| exportPNG: 'Експортувати як PNG', | |||
| noExist: 'Такої змінної не існує', | |||
| exitVersions: 'Вихідні версії', | |||
| versionHistory: 'Історія версій', | |||
| publishUpdate: 'Опублікувати оновлення', | |||
| @@ -608,7 +606,6 @@ const translation = { | |||
| }, | |||
| select: 'Виберіть', | |||
| addSubVariable: 'Підзмінна', | |||
| condition: 'Умова', | |||
| }, | |||
| variableAssigner: { | |||
| title: 'Присвоєння змінних', | |||
| @@ -971,6 +968,15 @@ const translation = { | |||
| }, | |||
| lastRunTab: 'Останній запуск', | |||
| settingsTab: 'Налаштування', | |||
| relations: { | |||
| noDependents: 'Немає залежних', | |||
| dependents: 'Залежні', | |||
| dependencies: 'Залежності', | |||
| noDependencies: 'Немає залежностей', | |||
| dependenciesDescription: 'Вузли, на які спирається цей вузол', | |||
| dependentsDescription: 'Вузли, які спираються на цей вузол', | |||
| }, | |||
| relationsTab: 'Відносини', | |||
| }, | |||
| } | |||
| @@ -109,9 +109,7 @@ const translation = { | |||
| exitVersions: 'Phiên bản thoát', | |||
| exportImage: 'Xuất hình ảnh', | |||
| exportPNG: 'Xuất dưới dạng PNG', | |||
| noExist: 'Không có biến như vậy', | |||
| exportJPEG: 'Xuất dưới dạng JPEG', | |||
| referenceVar: 'Biến tham chiếu', | |||
| needAnswerNode: 'Nút Trả lời phải được thêm vào', | |||
| addBlock: 'Thêm Node', | |||
| needEndNode: 'Nút Kết thúc phải được thêm vào', | |||
| @@ -608,7 +606,6 @@ const translation = { | |||
| }, | |||
| addSubVariable: 'Biến phụ', | |||
| select: 'Lựa', | |||
| condition: 'Điều kiện', | |||
| }, | |||
| variableAssigner: { | |||
| title: 'Gán biến', | |||
| @@ -971,6 +968,15 @@ const translation = { | |||
| }, | |||
| settingsTab: 'Cài đặt', | |||
| lastRunTab: 'Chạy Lần Cuối', | |||
| relations: { | |||
| noDependencies: 'Không phụ thuộc', | |||
| dependenciesDescription: 'Các nút mà nút này dựa vào', | |||
| dependents: 'Các nút phụ thuộc', | |||
| dependencies: 'Phụ thuộc', | |||
| noDependents: 'Không có nút phụ thuộc', | |||
| dependentsDescription: 'Các nút dựa vào nút này', | |||
| }, | |||
| relationsTab: 'Mối quan hệ', | |||
| }, | |||
| } | |||
| @@ -294,6 +294,18 @@ const translation = { | |||
| zoomTo50: '缩放到 50%', | |||
| zoomTo100: '放大到 100%', | |||
| zoomToFit: '自适应视图', | |||
| alignNodes: '对齐节点', | |||
| alignLeft: '左对齐', | |||
| alignCenter: '居中对齐', | |||
| alignRight: '右对齐', | |||
| alignTop: '顶部对齐', | |||
| alignMiddle: '中部对齐', | |||
| alignBottom: '底部对齐', | |||
| vertical: '垂直方向', | |||
| horizontal: '水平方向', | |||
| distributeHorizontal: '水平等间距', | |||
| distributeVertical: '垂直等间距', | |||
| selectionAlignment: '选择对齐', | |||
| }, | |||
| variableReference: { | |||
| noAvailableVars: '没有可用变量', | |||
| @@ -978,6 +990,7 @@ const translation = { | |||
| debug: { | |||
| settingsTab: '设置', | |||
| lastRunTab: '上次运行', | |||
| relationsTab: '关系', | |||
| noData: { | |||
| description: '上次运行的结果将显示在这里', | |||
| runThisNode: '运行此节点', | |||
| @@ -1003,6 +1016,14 @@ const translation = { | |||
| chatNode: '会话变量', | |||
| systemNode: '系统变量', | |||
| }, | |||
| relations: { | |||
| dependencies: '依赖', | |||
| dependents: '被依赖', | |||
| dependenciesDescription: '本节点依赖的其他节点', | |||
| dependentsDescription: '依赖于本节点的其他节点', | |||
| noDependencies: '无依赖', | |||
| noDependents: '无被依赖', | |||
| }, | |||
| }, | |||
| } | |||
| @@ -941,6 +941,9 @@ const translation = { | |||
| copyId: '複製ID', | |||
| }, | |||
| debug: { | |||
| settingsTab: '設定', | |||
| lastRunTab: '最後一次運行', | |||
| relationsTab: '關係', | |||
| noData: { | |||
| runThisNode: '運行此節點', | |||
| description: '上次運行的結果將顯示在這裡', | |||
| @@ -966,8 +969,14 @@ const translation = { | |||
| emptyTip: '在畫布上逐步執行節點或逐步運行節點後,您可以在變數檢視中查看節點變數的當前值。', | |||
| resetConversationVar: '將對話變數重置為默認值', | |||
| }, | |||
| settingsTab: '設定', | |||
| lastRunTab: '最後一次運行', | |||
| relations: { | |||
| dependencies: '依賴', | |||
| dependents: '被依賴', | |||
| dependenciesDescription: '此節點所依賴的其他節點', | |||
| dependentsDescription: '依賴此節點的其他節點', | |||
| noDependencies: '無依賴', | |||
| noDependents: '無被依賴', | |||
| }, | |||
| }, | |||
| } | |||
| @@ -117,7 +117,7 @@ function unicodeToChar(text: string) { | |||
| if (!text) | |||
| return '' | |||
| return text.replace(/\\u[0-9a-f]{4}/g, (_match, p1) => { | |||
| return text.replace(/\\u([0-9a-f]{4})/g, (_match, p1) => { | |||
| return String.fromCharCode(Number.parseInt(p1, 16)) | |||
| }) | |||
| } | |||
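For context on the `unicodeToChar` change above: the original pattern `/\\u[0-9a-f]{4}/g` has no capturing group, so the replacer's second argument `p1` is `undefined`, `Number.parseInt(p1, 16)` returns `NaN`, and `String.fromCharCode(NaN)` emits U+0000 instead of the decoded character. The added parentheses in `/\\u([0-9a-f]{4})/g` capture the four hex digits the callback actually needs. A minimal standalone sketch of the corrected helper, shown here for illustration only (the surrounding module is not reproduced):

```ts
// Minimal sketch of the corrected helper, assumed standalone for illustration.
// Without the capturing group, `p1` would be undefined, parseInt would yield
// NaN, and fromCharCode(NaN) would emit U+0000 instead of the decoded char.
function unicodeToChar(text: string): string {
  if (!text)
    return ''
  return text.replace(/\\u([0-9a-f]{4})/g, (_match, p1) => {
    return String.fromCharCode(Number.parseInt(p1, 16))
  })
}

// Example: literal backslash-u escape sequences are decoded in place.
console.log(unicodeToChar('\\u0048\\u0069')) // "Hi"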