
# workflow_service.py

import json
import time
import uuid
from collections.abc import Callable, Generator, Mapping, Sequence
from typing import Any, cast

from sqlalchemy import exists, select
from sqlalchemy.orm import Session, sessionmaker

from core.app.app_config.entities import VariableEntityType
from core.app.apps.advanced_chat.app_config_manager import AdvancedChatAppConfigManager
from core.app.apps.workflow.app_config_manager import WorkflowAppConfigManager
from core.file import File
from core.repositories import DifyCoreRepositoryFactory
from core.variables import Variable
from core.variables.variables import VariableUnion
from core.workflow.entities import VariablePool, WorkflowNodeExecution
from core.workflow.enums import ErrorStrategy, WorkflowNodeExecutionMetadataKey, WorkflowNodeExecutionStatus
from core.workflow.errors import WorkflowNodeRunFailedError
from core.workflow.graph_events import GraphNodeEventBase, NodeRunFailedEvent, NodeRunSucceededEvent
from core.workflow.node_events import NodeRunResult
from core.workflow.nodes import NodeType
from core.workflow.nodes.base.node import Node
from core.workflow.nodes.node_mapping import LATEST_VERSION, NODE_TYPE_CLASSES_MAPPING
from core.workflow.nodes.start.entities import StartNodeData
from core.workflow.system_variable import SystemVariable
from core.workflow.workflow_entry import WorkflowEntry
from events.app_event import app_draft_workflow_was_synced, app_published_workflow_was_updated
from extensions.ext_database import db
from extensions.ext_storage import storage
from factories.file_factory import build_from_mapping, build_from_mappings
from libs.datetime_utils import naive_utc_now
from models.account import Account
from models.model import App, AppMode
from models.tools import WorkflowToolProvider
from models.workflow import Workflow, WorkflowNodeExecutionModel, WorkflowNodeExecutionTriggeredFrom, WorkflowType
from repositories.factory import DifyAPIRepositoryFactory
from services.enterprise.plugin_manager_service import PluginCredentialType
from services.errors.app import IsDraftWorkflowError, WorkflowHashNotEqualError
from services.workflow.workflow_converter import WorkflowConverter

from .errors.workflow_service import DraftWorkflowDeletionError, WorkflowInUseError
from .workflow_draft_variable_service import DraftVariableSaver, DraftVarLoader, WorkflowDraftVariableService


class WorkflowService:
    """
    Workflow Service
    """

    def __init__(self, session_maker: sessionmaker | None = None):
        """Initialize WorkflowService with repository dependencies."""
        if session_maker is None:
            session_maker = sessionmaker(bind=db.engine, expire_on_commit=False)
        self._node_execution_service_repo = DifyAPIRepositoryFactory.create_api_workflow_node_execution_repository(
            session_maker
        )

    def get_node_last_run(self, app_model: App, workflow: Workflow, node_id: str) -> WorkflowNodeExecutionModel | None:
        """
        Get the most recent execution for a specific node.

        Args:
            app_model: The application model
            workflow: The workflow model
            node_id: The node identifier

        Returns:
            The most recent WorkflowNodeExecutionModel for the node, or None if not found
        """
        return self._node_execution_service_repo.get_node_last_execution(
            tenant_id=app_model.tenant_id,
            app_id=app_model.id,
            workflow_id=workflow.id,
            node_id=node_id,
        )

    def is_workflow_exist(self, app_model: App) -> bool:
        stmt = select(
            exists().where(
                Workflow.tenant_id == app_model.tenant_id,
                Workflow.app_id == app_model.id,
                Workflow.version == Workflow.VERSION_DRAFT,
            )
        )
        return db.session.execute(stmt).scalar_one()

    def get_draft_workflow(self, app_model: App, workflow_id: str | None = None) -> Workflow | None:
        """
        Get draft workflow
        """
        if workflow_id:
            return self.get_published_workflow_by_id(app_model, workflow_id)

        # fetch draft workflow by app_model
        workflow = (
            db.session.query(Workflow)
            .where(
                Workflow.tenant_id == app_model.tenant_id,
                Workflow.app_id == app_model.id,
                Workflow.version == Workflow.VERSION_DRAFT,
            )
            .first()
        )

        # return draft workflow
        return workflow

    def get_published_workflow_by_id(self, app_model: App, workflow_id: str) -> Workflow | None:
        """
        Fetch published workflow by workflow_id
        """
        workflow = (
            db.session.query(Workflow)
            .where(
                Workflow.tenant_id == app_model.tenant_id,
                Workflow.app_id == app_model.id,
                Workflow.id == workflow_id,
            )
            .first()
        )
        if not workflow:
            return None
        if workflow.version == Workflow.VERSION_DRAFT:
            raise IsDraftWorkflowError(
                f"Cannot use draft workflow version. Workflow ID: {workflow_id}. "
                f"Please use a published workflow version or leave workflow_id empty."
            )
        return workflow

    def get_published_workflow(self, app_model: App) -> Workflow | None:
        """
        Get published workflow
        """
        if not app_model.workflow_id:
            return None

        # fetch published workflow by workflow_id
        workflow = (
            db.session.query(Workflow)
            .where(
                Workflow.tenant_id == app_model.tenant_id,
                Workflow.app_id == app_model.id,
                Workflow.id == app_model.workflow_id,
            )
            .first()
        )

        return workflow

    def get_all_published_workflow(
        self,
        *,
        session: Session,
        app_model: App,
        page: int,
        limit: int,
        user_id: str | None,
        named_only: bool = False,
    ) -> tuple[Sequence[Workflow], bool]:
        """
        Get published workflows with pagination
        """
        if not app_model.workflow_id:
            return [], False

        stmt = (
            select(Workflow)
            .where(Workflow.app_id == app_model.id)
            .order_by(Workflow.version.desc())
            .limit(limit + 1)
            .offset((page - 1) * limit)
        )

        if user_id:
            stmt = stmt.where(Workflow.created_by == user_id)

        if named_only:
            stmt = stmt.where(Workflow.marked_name != "")

        workflows = session.scalars(stmt).all()

        has_more = len(workflows) > limit
        if has_more:
            workflows = workflows[:-1]

        return workflows, has_more

    def sync_draft_workflow(
        self,
        *,
        app_model: App,
        graph: dict,
        features: dict,
        unique_hash: str | None,
        account: Account,
        environment_variables: Sequence[Variable],
        conversation_variables: Sequence[Variable],
    ) -> Workflow:
        """
        Sync draft workflow
        :raises WorkflowHashNotEqualError
        """
        # fetch draft workflow by app_model
        workflow = self.get_draft_workflow(app_model=app_model)

        if workflow and workflow.unique_hash != unique_hash:
            raise WorkflowHashNotEqualError()

        # validate features structure
        self.validate_features_structure(app_model=app_model, features=features)

        # create draft workflow if not found
        if not workflow:
            workflow = Workflow(
                tenant_id=app_model.tenant_id,
                app_id=app_model.id,
                type=WorkflowType.from_app_mode(app_model.mode).value,
                version=Workflow.VERSION_DRAFT,
                graph=json.dumps(graph),
                features=json.dumps(features),
                created_by=account.id,
                environment_variables=environment_variables,
                conversation_variables=conversation_variables,
            )
            db.session.add(workflow)
        # update draft workflow if found
        else:
            workflow.graph = json.dumps(graph)
            workflow.features = json.dumps(features)
            workflow.updated_by = account.id
            workflow.updated_at = naive_utc_now()
            workflow.environment_variables = environment_variables
            workflow.conversation_variables = conversation_variables

        # commit db session changes
        db.session.commit()

        # trigger app workflow events
        app_draft_workflow_was_synced.send(app_model, synced_draft_workflow=workflow)

        # return draft workflow
        return workflow

    def publish_workflow(
        self,
        *,
        session: Session,
        app_model: App,
        account: Account,
        marked_name: str = "",
        marked_comment: str = "",
    ) -> Workflow:
        draft_workflow_stmt = select(Workflow).where(
            Workflow.tenant_id == app_model.tenant_id,
            Workflow.app_id == app_model.id,
            Workflow.version == Workflow.VERSION_DRAFT,
        )
        draft_workflow = session.scalar(draft_workflow_stmt)
        if not draft_workflow:
            raise ValueError("No valid workflow found.")

        # Validate credentials before publishing, for credential policy check
        from services.feature_service import FeatureService

        if FeatureService.get_system_features().plugin_manager.enabled:
            self._validate_workflow_credentials(draft_workflow)

        # create new workflow
        workflow = Workflow.new(
            tenant_id=app_model.tenant_id,
            app_id=app_model.id,
            type=draft_workflow.type,
            version=Workflow.version_from_datetime(naive_utc_now()),
            graph=draft_workflow.graph,
            created_by=account.id,
            environment_variables=draft_workflow.environment_variables,
            conversation_variables=draft_workflow.conversation_variables,
            marked_name=marked_name,
            marked_comment=marked_comment,
            rag_pipeline_variables=draft_workflow.rag_pipeline_variables,
            features=draft_workflow.features,
        )

        # commit db session changes
        session.add(workflow)

        # trigger app workflow events
        app_published_workflow_was_updated.send(app_model, published_workflow=workflow)

        # return new workflow
        return workflow

    def _validate_workflow_credentials(self, workflow: Workflow) -> None:
        """
        Validate all credentials in workflow nodes before publishing.

        :param workflow: The workflow to validate
        :raises ValueError: If any credentials violate policy compliance
        """
        graph_dict = workflow.graph_dict
        nodes = graph_dict.get("nodes", [])

        for node in nodes:
            node_data = node.get("data", {})
            node_type = node_data.get("type")
            node_id = node.get("id", "unknown")

            try:
                # Extract and validate credentials based on node type
                if node_type == "tool":
                    credential_id = node_data.get("credential_id")
                    provider = node_data.get("provider_id")
                    if provider:
                        if credential_id:
                            # Check specific credential
                            from core.helper.credential_utils import check_credential_policy_compliance

                            check_credential_policy_compliance(
                                credential_id=credential_id,
                                provider=provider,
                                credential_type=PluginCredentialType.TOOL,
                            )
                        else:
                            # Check default workspace credential for this provider
                            self._check_default_tool_credential(workflow.tenant_id, provider)

                elif node_type == "agent":
                    agent_params = node_data.get("agent_parameters", {})
                    model_config = agent_params.get("model", {}).get("value", {})
                    if model_config.get("provider") and model_config.get("model"):
                        self._validate_llm_model_config(
                            workflow.tenant_id, model_config["provider"], model_config["model"]
                        )
                        # Validate load balancing credentials for agent model if load balancing is enabled
                        agent_model_node_data = {"model": model_config}
                        self._validate_load_balancing_credentials(workflow, agent_model_node_data, node_id)

                    # Validate agent tools
                    tools = agent_params.get("tools", {}).get("value", [])
                    for tool in tools:
                        # Agent tools store provider in provider_name field
                        provider = tool.get("provider_name")
                        credential_id = tool.get("credential_id")
                        if provider:
                            if credential_id:
                                from core.helper.credential_utils import check_credential_policy_compliance

                                check_credential_policy_compliance(credential_id, provider, PluginCredentialType.TOOL)
                            else:
                                self._check_default_tool_credential(workflow.tenant_id, provider)

                elif node_type in ["llm", "knowledge_retrieval", "parameter_extractor", "question_classifier"]:
                    model_config = node_data.get("model", {})
                    provider = model_config.get("provider")
                    model_name = model_config.get("name")
                    if provider and model_name:
                        # Validate that the provider+model combination can fetch valid credentials
                        self._validate_llm_model_config(workflow.tenant_id, provider, model_name)
                        # Validate load balancing credentials if load balancing is enabled
                        self._validate_load_balancing_credentials(workflow, node_data, node_id)
                    else:
                        raise ValueError(f"Node {node_id} ({node_type}): Missing provider or model configuration")

            except Exception as e:
                if isinstance(e, ValueError):
                    raise e
                else:
                    raise ValueError(f"Node {node_id} ({node_type}): {str(e)}")

    def _validate_llm_model_config(self, tenant_id: str, provider: str, model_name: str) -> None:
        """
        Validate that an LLM model configuration can fetch valid credentials and has active status.

        This method attempts to get the model instance and validates that:
        1. The provider exists and is configured
        2. The model exists in the provider
        3. Credentials can be fetched for the model
        4. The credentials pass policy compliance checks
        5. The model status is ACTIVE (not NO_CONFIGURE, DISABLED, etc.)

        :param tenant_id: The tenant ID
        :param provider: The provider name
        :param model_name: The model name
        :raises ValueError: If the model configuration is invalid or credentials fail policy checks
        """
        try:
            from core.model_manager import ModelManager
            from core.model_runtime.entities.model_entities import ModelType
            from core.provider_manager import ProviderManager

            # Get model instance to validate provider+model combination
            model_manager = ModelManager()
            model_manager.get_model_instance(
                tenant_id=tenant_id, provider=provider, model_type=ModelType.LLM, model=model_name
            )

            # The ModelInstance constructor will automatically check credential policy compliance
            # via ProviderConfiguration.get_current_credentials() -> _check_credential_policy_compliance()
            # If it fails, an exception will be raised

            # Additionally, check the model status to ensure it's ACTIVE
            provider_manager = ProviderManager()
            provider_configurations = provider_manager.get_configurations(tenant_id)
            models = provider_configurations.get_models(provider=provider, model_type=ModelType.LLM)

            target_model = None
            for model in models:
                if model.model == model_name and model.provider.provider == provider:
                    target_model = model
                    break

            if target_model:
                target_model.raise_for_status()
            else:
                raise ValueError(f"Model {model_name} not found for provider {provider}")

        except Exception as e:
            raise ValueError(
                f"Failed to validate LLM model configuration (provider: {provider}, model: {model_name}): {str(e)}"
            )

    def _check_default_tool_credential(self, tenant_id: str, provider: str) -> None:
        """
        Check credential policy compliance for the default workspace credential of a tool provider.

        This method finds the default credential for the given provider and validates it.
        Uses the same fallback logic as runtime to handle deauthorized credentials.

        :param tenant_id: The tenant ID
        :param provider: The tool provider name
        :raises ValueError: If no default credential exists or if it fails policy compliance
        """
        try:
            from models.tools import BuiltinToolProvider

            # Use the same fallback logic as runtime: get the first available credential
            # ordered by is_default DESC, created_at ASC (same as tool_manager.py)
            default_provider = (
                db.session.query(BuiltinToolProvider)
                .where(
                    BuiltinToolProvider.tenant_id == tenant_id,
                    BuiltinToolProvider.provider == provider,
                )
                .order_by(BuiltinToolProvider.is_default.desc(), BuiltinToolProvider.created_at.asc())
                .first()
            )

            if not default_provider:
                # plugin does not require credentials, skip
                return

            # Check credential policy compliance using the default credential ID
            from core.helper.credential_utils import check_credential_policy_compliance

            check_credential_policy_compliance(
                credential_id=default_provider.id,
                provider=provider,
                credential_type=PluginCredentialType.TOOL,
                check_existence=False,
            )
        except Exception as e:
            raise ValueError(f"Failed to validate default credential for tool provider {provider}: {str(e)}")

    def _validate_load_balancing_credentials(self, workflow: Workflow, node_data: dict, node_id: str) -> None:
        """
        Validate load balancing credentials for a workflow node.

        :param workflow: The workflow being validated
        :param node_data: The node data containing model configuration
        :param node_id: The node ID for error reporting
        :raises ValueError: If load balancing credentials violate policy compliance
        """
        # Extract model configuration
        model_config = node_data.get("model", {})
        provider = model_config.get("provider")
        model_name = model_config.get("name")

        if not provider or not model_name:
            return  # No model config to validate

        # Check if this model has load balancing enabled
        if self._is_load_balancing_enabled(workflow.tenant_id, provider, model_name):
            # Get all load balancing configurations for this model
            load_balancing_configs = self._get_load_balancing_configs(workflow.tenant_id, provider, model_name)

            # Validate each load balancing configuration
            try:
                for config in load_balancing_configs:
                    if config.get("credential_id"):
                        from core.helper.credential_utils import check_credential_policy_compliance

                        check_credential_policy_compliance(
                            config["credential_id"], provider, PluginCredentialType.MODEL
                        )
            except Exception as e:
                raise ValueError(f"Invalid load balancing credentials for {provider}/{model_name}: {str(e)}")

    def _is_load_balancing_enabled(self, tenant_id: str, provider: str, model_name: str) -> bool:
        """
        Check if load balancing is enabled for a specific model.

        :param tenant_id: The tenant ID
        :param provider: The provider name
        :param model_name: The model name
        :return: True if load balancing is enabled, False otherwise
        """
        try:
            from core.model_runtime.entities.model_entities import ModelType
            from core.provider_manager import ProviderManager

            # Get provider configurations
            provider_manager = ProviderManager()
            provider_configurations = provider_manager.get_configurations(tenant_id)
            provider_configuration = provider_configurations.get(provider)
            if not provider_configuration:
                return False

            # Get provider model setting
            provider_model_setting = provider_configuration.get_provider_model_setting(
                model_type=ModelType.LLM,
                model=model_name,
            )

            return provider_model_setting is not None and provider_model_setting.load_balancing_enabled

        except Exception:
            # If we can't determine the status, assume load balancing is not enabled
            return False

    def _get_load_balancing_configs(self, tenant_id: str, provider: str, model_name: str) -> list[dict]:
        """
        Get all load balancing configurations for a model.

        :param tenant_id: The tenant ID
        :param provider: The provider name
        :param model_name: The model name
        :return: List of load balancing configuration dictionaries
        """
        try:
            from services.model_load_balancing_service import ModelLoadBalancingService

            model_load_balancing_service = ModelLoadBalancingService()
            _, configs = model_load_balancing_service.get_load_balancing_configs(
                tenant_id=tenant_id,
                provider=provider,
                model=model_name,
                model_type="llm",  # Load balancing is primarily used for LLM models
                config_from="predefined-model",  # Check both predefined and custom models
            )

            _, custom_configs = model_load_balancing_service.get_load_balancing_configs(
                tenant_id=tenant_id, provider=provider, model=model_name, model_type="llm", config_from="custom-model"
            )

            all_configs = configs + custom_configs
            return [config for config in all_configs if config.get("credential_id")]

        except Exception:
            # If we can't get the configurations, return empty list
            # This will prevent validation errors from breaking the workflow
            return []

    def get_default_block_configs(self) -> Sequence[Mapping[str, object]]:
        """
        Get default block configs
        """
        # return default block config
        default_block_configs: list[Mapping[str, object]] = []
        for node_class_mapping in NODE_TYPE_CLASSES_MAPPING.values():
            node_class = node_class_mapping[LATEST_VERSION]
            default_config = node_class.get_default_config()
            if default_config:
                default_block_configs.append(default_config)

        return default_block_configs

    def get_default_block_config(
        self, node_type: str, filters: Mapping[str, object] | None = None
    ) -> Mapping[str, object]:
        """
        Get default config of node.
        :param node_type: node type
        :param filters: filter by node config parameters.
        :return:
        """
        node_type_enum = NodeType(node_type)

        # return default block config
        if node_type_enum not in NODE_TYPE_CLASSES_MAPPING:
            return {}

        node_class = NODE_TYPE_CLASSES_MAPPING[node_type_enum][LATEST_VERSION]
        default_config = node_class.get_default_config(filters=filters)
        if not default_config:
            return {}

        return default_config

    def run_draft_workflow_node(
        self,
        app_model: App,
        draft_workflow: Workflow,
        node_id: str,
        user_inputs: Mapping[str, Any],
        account: Account,
        query: str = "",
        files: Sequence[File] | None = None,
    ) -> WorkflowNodeExecutionModel:
        """
        Run draft workflow node
        """
        files = files or []

        with Session(bind=db.engine, expire_on_commit=False) as session, session.begin():
            draft_var_srv = WorkflowDraftVariableService(session)
            draft_var_srv.prefill_conversation_variable_default_values(draft_workflow)

        node_config = draft_workflow.get_node_config_by_id(node_id)
        node_type = Workflow.get_node_type_from_node_config(node_config)
        node_data = node_config.get("data", {})

        if node_type == NodeType.START:
            with Session(bind=db.engine) as session, session.begin():
                draft_var_srv = WorkflowDraftVariableService(session)
                conversation_id = draft_var_srv.get_or_create_conversation(
                    account_id=account.id,
                    app=app_model,
                    workflow=draft_workflow,
                )
            start_data = StartNodeData.model_validate(node_data)
            user_inputs = _rebuild_file_for_user_inputs_in_start_node(
                tenant_id=draft_workflow.tenant_id, start_node_data=start_data, user_inputs=user_inputs
            )

            # init variable pool
            variable_pool = _setup_variable_pool(
                query=query,
                files=files or [],
                user_id=account.id,
                user_inputs=user_inputs,
                workflow=draft_workflow,
                # NOTE(QuantumGhost): We rely on `DraftVarLoader` to load conversation variables.
                conversation_variables=[],
                node_type=node_type,
                conversation_id=conversation_id,
            )
        else:
            variable_pool = VariablePool(
                system_variables=SystemVariable.empty(),
                user_inputs=user_inputs,
                environment_variables=draft_workflow.environment_variables,
                conversation_variables=[],
            )

        variable_loader = DraftVarLoader(
            engine=db.engine,
            app_id=app_model.id,
            tenant_id=app_model.tenant_id,
        )

        enclosing_node_type_and_id = draft_workflow.get_enclosing_node_type_and_id(node_config)
        if enclosing_node_type_and_id:
            _, enclosing_node_id = enclosing_node_type_and_id
        else:
            enclosing_node_id = None

        run = WorkflowEntry.single_step_run(
            workflow=draft_workflow,
            node_id=node_id,
            user_inputs=user_inputs,
            user_id=account.id,
            variable_pool=variable_pool,
            variable_loader=variable_loader,
        )

        # run draft workflow node
        start_at = time.perf_counter()

        node_execution = self._handle_single_step_result(
            invoke_node_fn=lambda: run,
            start_at=start_at,
            node_id=node_id,
        )

        # Set workflow_id on the NodeExecution
        node_execution.workflow_id = draft_workflow.id

        # Create repository and save the node execution
        repository = DifyCoreRepositoryFactory.create_workflow_node_execution_repository(
            session_factory=db.engine,
            user=account,
            app_id=app_model.id,
            triggered_from=WorkflowNodeExecutionTriggeredFrom.SINGLE_STEP,
        )
        repository.save(node_execution)

        workflow_node_execution = self._node_execution_service_repo.get_execution_by_id(node_execution.id)
        if workflow_node_execution is None:
            raise ValueError(f"WorkflowNodeExecution with id {node_execution.id} not found after saving")

        with Session(db.engine) as session:
            outputs = workflow_node_execution.load_full_outputs(session, storage)

        with Session(bind=db.engine) as session, session.begin():
            draft_var_saver = DraftVariableSaver(
                session=session,
                app_id=app_model.id,
                node_id=workflow_node_execution.node_id,
                node_type=NodeType(workflow_node_execution.node_type),
                enclosing_node_id=enclosing_node_id,
                node_execution_id=node_execution.id,
                user=account,
            )
            draft_var_saver.save(process_data=node_execution.process_data, outputs=outputs)
            session.commit()
        return workflow_node_execution

    def run_free_workflow_node(
        self, node_data: dict, tenant_id: str, user_id: str, node_id: str, user_inputs: dict[str, Any]
    ) -> WorkflowNodeExecution:
        """
        Run free workflow node
        """
        # run free workflow node
        start_at = time.perf_counter()

        node_execution = self._handle_single_step_result(
            invoke_node_fn=lambda: WorkflowEntry.run_free_node(
                node_id=node_id,
                node_data=node_data,
                tenant_id=tenant_id,
                user_id=user_id,
                user_inputs=user_inputs,
            ),
            start_at=start_at,
            node_id=node_id,
        )

        return node_execution

    def _handle_single_step_result(
        self,
        invoke_node_fn: Callable[[], tuple[Node, Generator[GraphNodeEventBase, None, None]]],
        start_at: float,
        node_id: str,
    ) -> WorkflowNodeExecution:
        """
        Handle single step execution and return WorkflowNodeExecution.

        Args:
            invoke_node_fn: Function to invoke node execution
            start_at: Execution start time
            node_id: ID of the node being executed

        Returns:
            WorkflowNodeExecution: The execution result
        """
        node, node_run_result, run_succeeded, error = self._execute_node_safely(invoke_node_fn)

        # Create base node execution
        node_execution = WorkflowNodeExecution(
            id=str(uuid.uuid4()),
            workflow_id="",  # Single-step execution has no workflow ID
            index=1,
            node_id=node_id,
            node_type=node.node_type,
            title=node.title,
            elapsed_time=time.perf_counter() - start_at,
            created_at=naive_utc_now(),
            finished_at=naive_utc_now(),
        )

        # Populate execution result data
        self._populate_execution_result(node_execution, node_run_result, run_succeeded, error)

        return node_execution

    def _execute_node_safely(
        self, invoke_node_fn: Callable[[], tuple[Node, Generator[GraphNodeEventBase, None, None]]]
    ) -> tuple[Node, NodeRunResult | None, bool, str | None]:
        """
        Execute node safely and handle errors according to error strategy.

        Returns:
            Tuple of (node, node_run_result, run_succeeded, error)
        """
        try:
            node, node_events = invoke_node_fn()

            node_run_result = next(
                (
                    event.node_run_result
                    for event in node_events
                    if isinstance(event, (NodeRunSucceededEvent, NodeRunFailedEvent))
                ),
                None,
            )

            if not node_run_result:
                raise ValueError("Node execution failed - no result returned")

            # Apply error strategy if node failed
            if node_run_result.status == WorkflowNodeExecutionStatus.FAILED and node.error_strategy:
                node_run_result = self._apply_error_strategy(node, node_run_result)

            run_succeeded = node_run_result.status in (
                WorkflowNodeExecutionStatus.SUCCEEDED,
                WorkflowNodeExecutionStatus.EXCEPTION,
            )
            error = node_run_result.error if not run_succeeded else None

            return node, node_run_result, run_succeeded, error

        except WorkflowNodeRunFailedError as e:
            node = e.node
            run_succeeded = False
            node_run_result = None
            error = e.error

            return node, node_run_result, run_succeeded, error

    def _apply_error_strategy(self, node: Node, node_run_result: NodeRunResult) -> NodeRunResult:
        """Apply error strategy when node execution fails."""
        # TODO(Novice): Maybe we should apply error strategy to node level?
        error_outputs = {
            "error_message": node_run_result.error,
            "error_type": node_run_result.error_type,
        }

        # Add default values if strategy is DEFAULT_VALUE
        if node.error_strategy is ErrorStrategy.DEFAULT_VALUE:
            error_outputs.update(node.default_value_dict)

        return NodeRunResult(
            status=WorkflowNodeExecutionStatus.EXCEPTION,
            error=node_run_result.error,
            inputs=node_run_result.inputs,
            metadata={WorkflowNodeExecutionMetadataKey.ERROR_STRATEGY: node.error_strategy},
            outputs=error_outputs,
        )

    def _populate_execution_result(
        self,
        node_execution: WorkflowNodeExecution,
        node_run_result: NodeRunResult | None,
        run_succeeded: bool,
        error: str | None,
    ) -> None:
        """Populate node execution with result data."""
        if run_succeeded and node_run_result:
            node_execution.inputs = (
                WorkflowEntry.handle_special_values(node_run_result.inputs) if node_run_result.inputs else None
            )
            node_execution.process_data = (
                WorkflowEntry.handle_special_values(node_run_result.process_data)
                if node_run_result.process_data
                else None
            )
            node_execution.outputs = node_run_result.outputs
            node_execution.metadata = node_run_result.metadata

            # Set status and error based on result
            node_execution.status = node_run_result.status
            if node_run_result.status == WorkflowNodeExecutionStatus.EXCEPTION:
                node_execution.error = node_run_result.error
        else:
            node_execution.status = WorkflowNodeExecutionStatus.FAILED
            node_execution.error = error

    def convert_to_workflow(self, app_model: App, account: Account, args: dict) -> App:
        """
        Convert a basic-mode chatbot app (expert mode) or a completion app to a workflow app.

        :param app_model: App instance
        :param account: Account instance
        :param args: dict
        :return:
        """
        # chatbot convert to workflow mode
        workflow_converter = WorkflowConverter()

        if app_model.mode not in {AppMode.CHAT, AppMode.COMPLETION}:
            raise ValueError(f"Current app mode {app_model.mode} does not support conversion to workflow.")

        # convert to workflow
        new_app: App = workflow_converter.convert_to_workflow(
            app_model=app_model,
            account=account,
            name=args.get("name", "Default Name"),
            icon_type=args.get("icon_type", "emoji"),
            icon=args.get("icon", "🤖"),
            icon_background=args.get("icon_background", "#FFEAD5"),
        )

        return new_app

    def validate_features_structure(self, app_model: App, features: dict):
        if app_model.mode == AppMode.ADVANCED_CHAT:
            return AdvancedChatAppConfigManager.config_validate(
                tenant_id=app_model.tenant_id, config=features, only_structure_validate=True
            )
        elif app_model.mode == AppMode.WORKFLOW:
            return WorkflowAppConfigManager.config_validate(
                tenant_id=app_model.tenant_id, config=features, only_structure_validate=True
            )
        else:
            raise ValueError(f"Invalid app mode: {app_model.mode}")

    def update_workflow(
        self, *, session: Session, workflow_id: str, tenant_id: str, account_id: str, data: dict
    ) -> Workflow | None:
        """
        Update workflow attributes

        :param session: SQLAlchemy database session
        :param workflow_id: Workflow ID
        :param tenant_id: Tenant ID
        :param account_id: Account ID (for permission check)
        :param data: Dictionary containing fields to update
        :return: Updated workflow or None if not found
        """
        stmt = select(Workflow).where(Workflow.id == workflow_id, Workflow.tenant_id == tenant_id)
        workflow = session.scalar(stmt)

        if not workflow:
            return None

        allowed_fields = ["marked_name", "marked_comment"]

        for field, value in data.items():
            if field in allowed_fields:
                setattr(workflow, field, value)

        workflow.updated_by = account_id
        workflow.updated_at = naive_utc_now()

        return workflow

    def delete_workflow(self, *, session: Session, workflow_id: str, tenant_id: str) -> bool:
        """
        Delete a workflow

        :param session: SQLAlchemy database session
        :param workflow_id: Workflow ID
        :param tenant_id: Tenant ID
        :return: True if successful
        :raises: ValueError if workflow not found
        :raises: WorkflowInUseError if workflow is in use
        :raises: DraftWorkflowDeletionError if workflow is a draft version
        """
        stmt = select(Workflow).where(Workflow.id == workflow_id, Workflow.tenant_id == tenant_id)
        workflow = session.scalar(stmt)

        if not workflow:
            raise ValueError(f"Workflow with ID {workflow_id} not found")

        # Check if workflow is a draft version
        if workflow.version == Workflow.VERSION_DRAFT:
            raise DraftWorkflowDeletionError("Cannot delete draft workflow versions")

        # Check if this workflow is currently referenced by an app
        app_stmt = select(App).where(App.workflow_id == workflow_id)
        app = session.scalar(app_stmt)
        if app:
            # Cannot delete a workflow that's currently in use by an app
            raise WorkflowInUseError(f"Cannot delete workflow that is currently in use by app '{app.id}'")

        # Don't use workflow.tool_published as it's not accurate for specific workflow versions
        # Check if there's a tool provider using this specific workflow version
        tool_provider = (
            session.query(WorkflowToolProvider)
            .where(
                WorkflowToolProvider.tenant_id == workflow.tenant_id,
                WorkflowToolProvider.app_id == workflow.app_id,
                WorkflowToolProvider.version == workflow.version,
            )
            .first()
        )
        if tool_provider:
            # Cannot delete a workflow that's published as a tool
            raise WorkflowInUseError("Cannot delete workflow that is published as a tool")

        session.delete(workflow)

        return True


def _setup_variable_pool(
    query: str,
    files: Sequence[File],
    user_id: str,
    user_inputs: Mapping[str, Any],
    workflow: Workflow,
    node_type: NodeType,
    conversation_id: str,
    conversation_variables: list[Variable],
):
    # Only inject system variables for START node type.
    if node_type == NodeType.START:
        system_variable = SystemVariable(
            user_id=user_id,
            app_id=workflow.app_id,
            workflow_id=workflow.id,
            files=files or [],
            workflow_execution_id=str(uuid.uuid4()),
        )
        # Only add chatflow-specific variables for non-workflow types
        if workflow.type != WorkflowType.WORKFLOW.value:
            system_variable.query = query
            system_variable.conversation_id = conversation_id
            system_variable.dialogue_count = 1
    else:
        system_variable = SystemVariable.empty()

    # init variable pool
    variable_pool = VariablePool(
        system_variables=system_variable,
        user_inputs=user_inputs,
        environment_variables=workflow.environment_variables,
        # Based on the definition of `VariableUnion`,
        # `list[Variable]` can be safely used as `list[VariableUnion]` since they are compatible.
        conversation_variables=cast(list[VariableUnion], conversation_variables),
    )

    return variable_pool


def _rebuild_file_for_user_inputs_in_start_node(
    tenant_id: str, start_node_data: StartNodeData, user_inputs: Mapping[str, Any]
) -> Mapping[str, Any]:
    inputs_copy = dict(user_inputs)
    for variable in start_node_data.variables:
        if variable.type not in (VariableEntityType.FILE, VariableEntityType.FILE_LIST):
            continue
        if variable.variable not in user_inputs:
            continue
        value = user_inputs[variable.variable]
        file = _rebuild_single_file(tenant_id=tenant_id, value=value, variable_entity_type=variable.type)
        inputs_copy[variable.variable] = file
    return inputs_copy


def _rebuild_single_file(tenant_id: str, value: Any, variable_entity_type: VariableEntityType) -> File | Sequence[File]:
    if variable_entity_type == VariableEntityType.FILE:
        if not isinstance(value, dict):
            raise ValueError(f"expected dict for file object, got {type(value)}")
        return build_from_mapping(mapping=value, tenant_id=tenant_id)
    elif variable_entity_type == VariableEntityType.FILE_LIST:
        if not isinstance(value, list):
            raise ValueError(f"expected list for file list object, got {type(value)}")
        if len(value) == 0:
            return []
        if not isinstance(value[0], dict):
            raise ValueError(f"expected dict for first element in the file list, got {type(value)}")
        return build_from_mappings(mappings=value, tenant_id=tenant_id)
    else:
        raise Exception("unreachable")
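

# ---------------------------------------------------------------------------
# Illustrative usage sketch (not part of the service). It shows how the
# single-step helpers above are typically driven from an API layer: the
# concrete `app_model`, `draft_workflow`, and `account` objects are assumed to
# be loaded by the caller from the request context, and the node id below is a
# hypothetical placeholder that would normally come from the draft graph.
#
#     service = WorkflowService()
#     execution = service.run_draft_workflow_node(
#         app_model=app_model,
#         draft_workflow=draft_workflow,
#         node_id="start",  # hypothetical node id taken from the draft graph
#         user_inputs={"query": "hello"},
#         account=account,
#     )
#     print(execution.id, execution.status)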