# workflow_service.py

import json
import time
import uuid
from collections.abc import Callable, Generator, Mapping, Sequence
from typing import Any, cast
from uuid import uuid4

from sqlalchemy import exists, select
from sqlalchemy.orm import Session, sessionmaker

from core.app.app_config.entities import VariableEntityType
from core.app.apps.advanced_chat.app_config_manager import AdvancedChatAppConfigManager
from core.app.apps.workflow.app_config_manager import WorkflowAppConfigManager
from core.file import File
from core.repositories import DifyCoreRepositoryFactory
from core.variables import Variable
from core.variables.variables import VariableUnion
from core.workflow.entities.node_entities import NodeRunResult
from core.workflow.entities.variable_pool import VariablePool
from core.workflow.entities.workflow_node_execution import WorkflowNodeExecution, WorkflowNodeExecutionStatus
from core.workflow.errors import WorkflowNodeRunFailedError
from core.workflow.graph_engine.entities.event import InNodeEvent
from core.workflow.nodes import NodeType
from core.workflow.nodes.base.node import BaseNode
from core.workflow.nodes.enums import ErrorStrategy
from core.workflow.nodes.event import RunCompletedEvent
from core.workflow.nodes.event.types import NodeEvent
from core.workflow.nodes.node_mapping import LATEST_VERSION, NODE_TYPE_CLASSES_MAPPING
from core.workflow.nodes.start.entities import StartNodeData
from core.workflow.system_variable import SystemVariable
from core.workflow.workflow_entry import WorkflowEntry
from events.app_event import app_draft_workflow_was_synced, app_published_workflow_was_updated
from extensions.ext_database import db
from factories.file_factory import build_from_mapping, build_from_mappings
from libs.datetime_utils import naive_utc_now
from models.account import Account
from models.model import App, AppMode
from models.tools import WorkflowToolProvider
from models.workflow import Workflow, WorkflowNodeExecutionModel, WorkflowNodeExecutionTriggeredFrom, WorkflowType
from repositories.factory import DifyAPIRepositoryFactory
from services.enterprise.plugin_manager_service import PluginCredentialType
from services.errors.app import IsDraftWorkflowError, WorkflowHashNotEqualError
from services.workflow.workflow_converter import WorkflowConverter

from .errors.workflow_service import DraftWorkflowDeletionError, WorkflowInUseError
from .workflow_draft_variable_service import DraftVariableSaver, DraftVarLoader, WorkflowDraftVariableService

class WorkflowService:
    """
    Workflow Service
    """

    def __init__(self, session_maker: sessionmaker | None = None):
        """Initialize WorkflowService with repository dependencies."""
        if session_maker is None:
            session_maker = sessionmaker(bind=db.engine, expire_on_commit=False)
        self._node_execution_service_repo = DifyAPIRepositoryFactory.create_api_workflow_node_execution_repository(
            session_maker
        )

    def get_node_last_run(self, app_model: App, workflow: Workflow, node_id: str) -> WorkflowNodeExecutionModel | None:
        """
        Get the most recent execution for a specific node.

        Args:
            app_model: The application model
            workflow: The workflow model
            node_id: The node identifier

        Returns:
            The most recent WorkflowNodeExecutionModel for the node, or None if not found
        """
        return self._node_execution_service_repo.get_node_last_execution(
            tenant_id=app_model.tenant_id,
            app_id=app_model.id,
            workflow_id=workflow.id,
            node_id=node_id,
        )

    def is_workflow_exist(self, app_model: App) -> bool:
        stmt = select(
            exists().where(
                Workflow.tenant_id == app_model.tenant_id,
                Workflow.app_id == app_model.id,
                Workflow.version == Workflow.VERSION_DRAFT,
            )
        )
        return db.session.execute(stmt).scalar_one()

    def get_draft_workflow(self, app_model: App, workflow_id: str | None = None) -> Workflow | None:
        """
        Get draft workflow
        """
        if workflow_id:
            return self.get_published_workflow_by_id(app_model, workflow_id)

        # fetch draft workflow by app_model
        workflow = (
            db.session.query(Workflow)
            .where(
                Workflow.tenant_id == app_model.tenant_id,
                Workflow.app_id == app_model.id,
                Workflow.version == Workflow.VERSION_DRAFT,
            )
            .first()
        )

        # return draft workflow
        return workflow

    def get_published_workflow_by_id(self, app_model: App, workflow_id: str) -> Workflow | None:
        """
        fetch published workflow by workflow_id
        """
        workflow = (
            db.session.query(Workflow)
            .where(
                Workflow.tenant_id == app_model.tenant_id,
                Workflow.app_id == app_model.id,
                Workflow.id == workflow_id,
            )
            .first()
        )
        if not workflow:
            return None
        if workflow.version == Workflow.VERSION_DRAFT:
            raise IsDraftWorkflowError(
                f"Cannot use draft workflow version. Workflow ID: {workflow_id}. "
                f"Please use a published workflow version or leave workflow_id empty."
            )
        return workflow

    def get_published_workflow(self, app_model: App) -> Workflow | None:
        """
        Get published workflow
        """
        if not app_model.workflow_id:
            return None

        # fetch published workflow by workflow_id
        workflow = (
            db.session.query(Workflow)
            .where(
                Workflow.tenant_id == app_model.tenant_id,
                Workflow.app_id == app_model.id,
                Workflow.id == app_model.workflow_id,
            )
            .first()
        )
        return workflow

    def get_all_published_workflow(
        self,
        *,
        session: Session,
        app_model: App,
        page: int,
        limit: int,
        user_id: str | None,
        named_only: bool = False,
    ) -> tuple[Sequence[Workflow], bool]:
        """
        Get published workflows with pagination
        """
        if not app_model.workflow_id:
            return [], False

        stmt = (
            select(Workflow)
            .where(Workflow.app_id == app_model.id)
            .order_by(Workflow.version.desc())
            .limit(limit + 1)
            .offset((page - 1) * limit)
        )
        if user_id:
            stmt = stmt.where(Workflow.created_by == user_id)
        if named_only:
            stmt = stmt.where(Workflow.marked_name != "")

        workflows = session.scalars(stmt).all()

        has_more = len(workflows) > limit
        if has_more:
            workflows = workflows[:-1]

        return workflows, has_more

    def sync_draft_workflow(
        self,
        *,
        app_model: App,
        graph: dict,
        features: dict,
        unique_hash: str | None,
        account: Account,
        environment_variables: Sequence[Variable],
        conversation_variables: Sequence[Variable],
    ) -> Workflow:
        """
        Sync draft workflow
        :raises WorkflowHashNotEqualError
        """
        # fetch draft workflow by app_model
        workflow = self.get_draft_workflow(app_model=app_model)

        if workflow and workflow.unique_hash != unique_hash:
            raise WorkflowHashNotEqualError()

        # validate features structure
        self.validate_features_structure(app_model=app_model, features=features)

        # create draft workflow if not found
        if not workflow:
            workflow = Workflow(
                tenant_id=app_model.tenant_id,
                app_id=app_model.id,
                type=WorkflowType.from_app_mode(app_model.mode).value,
                version=Workflow.VERSION_DRAFT,
                graph=json.dumps(graph),
                features=json.dumps(features),
                created_by=account.id,
                environment_variables=environment_variables,
                conversation_variables=conversation_variables,
            )
            db.session.add(workflow)
        # update draft workflow if found
        else:
            workflow.graph = json.dumps(graph)
            workflow.features = json.dumps(features)
            workflow.updated_by = account.id
            workflow.updated_at = naive_utc_now()
            workflow.environment_variables = environment_variables
            workflow.conversation_variables = conversation_variables

        # commit db session changes
        db.session.commit()

        # trigger app workflow events
        app_draft_workflow_was_synced.send(app_model, synced_draft_workflow=workflow)

        # return draft workflow
        return workflow

    def publish_workflow(
        self,
        *,
        session: Session,
        app_model: App,
        account: Account,
        marked_name: str = "",
        marked_comment: str = "",
    ) -> Workflow:
        draft_workflow_stmt = select(Workflow).where(
            Workflow.tenant_id == app_model.tenant_id,
            Workflow.app_id == app_model.id,
            Workflow.version == Workflow.VERSION_DRAFT,
        )
        draft_workflow = session.scalar(draft_workflow_stmt)
        if not draft_workflow:
            raise ValueError("No valid workflow found.")

        # Validate credentials before publishing, for credential policy check
        from services.feature_service import FeatureService

        if FeatureService.get_system_features().plugin_manager.enabled:
            self._validate_workflow_credentials(draft_workflow)

        # create new workflow
        workflow = Workflow.new(
            tenant_id=app_model.tenant_id,
            app_id=app_model.id,
            type=draft_workflow.type,
            version=Workflow.version_from_datetime(naive_utc_now()),
            graph=draft_workflow.graph,
            features=draft_workflow.features,
            created_by=account.id,
            environment_variables=draft_workflow.environment_variables,
            conversation_variables=draft_workflow.conversation_variables,
            marked_name=marked_name,
            marked_comment=marked_comment,
        )

        # add the new workflow to the caller-managed session (the caller commits)
        session.add(workflow)

        # trigger app workflow events
        app_published_workflow_was_updated.send(app_model, published_workflow=workflow)

        # return new workflow
        return workflow

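    # Usage sketch (illustrative only, not part of the service): publishing runs inside a
    # caller-managed session, so the caller is responsible for committing. `app_model` and
    # `account` below are assumed to be loaded elsewhere.
    #
    #     with Session(db.engine) as session:
    #         workflow = WorkflowService().publish_workflow(
    #             session=session, app_model=app_model, account=account, marked_name="v1"
    #         )
    #         session.commit()
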
    def _validate_workflow_credentials(self, workflow: Workflow) -> None:
        """
        Validate all credentials in workflow nodes before publishing.

        :param workflow: The workflow to validate
        :raises ValueError: If any credentials violate policy compliance
        """
        graph_dict = workflow.graph_dict
        nodes = graph_dict.get("nodes", [])

        for node in nodes:
            node_data = node.get("data", {})
            node_type = node_data.get("type")
            node_id = node.get("id", "unknown")

            try:
                # Extract and validate credentials based on node type
                if node_type == "tool":
                    credential_id = node_data.get("credential_id")
                    provider = node_data.get("provider_id")
                    if provider:
                        if credential_id:
                            # Check specific credential
                            from core.helper.credential_utils import check_credential_policy_compliance

                            check_credential_policy_compliance(
                                credential_id=credential_id,
                                provider=provider,
                                credential_type=PluginCredentialType.TOOL,
                            )
                        else:
                            # Check default workspace credential for this provider
                            self._check_default_tool_credential(workflow.tenant_id, provider)

                elif node_type == "agent":
                    agent_params = node_data.get("agent_parameters", {})
                    model_config = agent_params.get("model", {}).get("value", {})
                    if model_config.get("provider") and model_config.get("model"):
                        self._validate_llm_model_config(
                            workflow.tenant_id, model_config["provider"], model_config["model"]
                        )
                        # Validate load balancing credentials for agent model if load balancing is enabled
                        agent_model_node_data = {"model": model_config}
                        self._validate_load_balancing_credentials(workflow, agent_model_node_data, node_id)

                    # Validate agent tools
                    tools = agent_params.get("tools", {}).get("value", [])
                    for tool in tools:
                        # Agent tools store provider in provider_name field
                        provider = tool.get("provider_name")
                        credential_id = tool.get("credential_id")
                        if provider:
                            if credential_id:
                                from core.helper.credential_utils import check_credential_policy_compliance

                                check_credential_policy_compliance(credential_id, provider, PluginCredentialType.TOOL)
                            else:
                                self._check_default_tool_credential(workflow.tenant_id, provider)

                elif node_type in ["llm", "knowledge_retrieval", "parameter_extractor", "question_classifier"]:
                    model_config = node_data.get("model", {})
                    provider = model_config.get("provider")
                    model_name = model_config.get("name")
                    if provider and model_name:
                        # Validate that the provider+model combination can fetch valid credentials
                        self._validate_llm_model_config(workflow.tenant_id, provider, model_name)
                        # Validate load balancing credentials if load balancing is enabled
                        self._validate_load_balancing_credentials(workflow, node_data, node_id)
                    else:
                        raise ValueError(f"Node {node_id} ({node_type}): Missing provider or model configuration")

            except Exception as e:
                if isinstance(e, ValueError):
                    raise e
                else:
                    raise ValueError(f"Node {node_id} ({node_type}): {str(e)}")

    def _validate_llm_model_config(self, tenant_id: str, provider: str, model_name: str) -> None:
        """
        Validate that an LLM model configuration can fetch valid credentials and has active status.

        This method attempts to get the model instance and validates that:
        1. The provider exists and is configured
        2. The model exists in the provider
        3. Credentials can be fetched for the model
        4. The credentials pass policy compliance checks
        5. The model status is ACTIVE (not NO_CONFIGURE, DISABLED, etc.)

        :param tenant_id: The tenant ID
        :param provider: The provider name
        :param model_name: The model name
        :raises ValueError: If the model configuration is invalid or credentials fail policy checks
        """
        try:
            from core.model_manager import ModelManager
            from core.model_runtime.entities.model_entities import ModelType
            from core.provider_manager import ProviderManager

            # Get model instance to validate provider+model combination
            model_manager = ModelManager()
            model_manager.get_model_instance(
                tenant_id=tenant_id, provider=provider, model_type=ModelType.LLM, model=model_name
            )

            # The ModelInstance constructor will automatically check credential policy compliance
            # via ProviderConfiguration.get_current_credentials() -> _check_credential_policy_compliance()
            # If it fails, an exception will be raised

            # Additionally, check the model status to ensure it's ACTIVE
            provider_manager = ProviderManager()
            provider_configurations = provider_manager.get_configurations(tenant_id)
            models = provider_configurations.get_models(provider=provider, model_type=ModelType.LLM)

            target_model = None
            for model in models:
                if model.model == model_name and model.provider.provider == provider:
                    target_model = model
                    break

            if target_model:
                target_model.raise_for_status()
            else:
                raise ValueError(f"Model {model_name} not found for provider {provider}")
        except Exception as e:
            raise ValueError(
                f"Failed to validate LLM model configuration (provider: {provider}, model: {model_name}): {str(e)}"
            )

    def _check_default_tool_credential(self, tenant_id: str, provider: str) -> None:
        """
        Check credential policy compliance for the default workspace credential of a tool provider.

        This method finds the default credential for the given provider and validates it.
        Uses the same fallback logic as runtime to handle deauthorized credentials.

        :param tenant_id: The tenant ID
        :param provider: The tool provider name
        :raises ValueError: If no default credential exists or if it fails policy compliance
        """
        try:
            from models.tools import BuiltinToolProvider

            # Use the same fallback logic as runtime: get the first available credential
            # ordered by is_default DESC, created_at ASC (same as tool_manager.py)
            default_provider = (
                db.session.query(BuiltinToolProvider)
                .where(
                    BuiltinToolProvider.tenant_id == tenant_id,
                    BuiltinToolProvider.provider == provider,
                )
                .order_by(BuiltinToolProvider.is_default.desc(), BuiltinToolProvider.created_at.asc())
                .first()
            )
            if not default_provider:
                raise ValueError("No default credential found")

            # Check credential policy compliance using the default credential ID
            from core.helper.credential_utils import check_credential_policy_compliance

            check_credential_policy_compliance(
                credential_id=default_provider.id,
                provider=provider,
                credential_type=PluginCredentialType.TOOL,
                check_existence=False,
            )
        except Exception as e:
            raise ValueError(f"Failed to validate default credential for tool provider {provider}: {str(e)}")

    def _validate_load_balancing_credentials(self, workflow: Workflow, node_data: dict, node_id: str) -> None:
        """
        Validate load balancing credentials for a workflow node.

        :param workflow: The workflow being validated
        :param node_data: The node data containing model configuration
        :param node_id: The node ID for error reporting
        :raises ValueError: If load balancing credentials violate policy compliance
        """
        # Extract model configuration
        model_config = node_data.get("model", {})
        provider = model_config.get("provider")
        model_name = model_config.get("name")

        if not provider or not model_name:
            return  # No model config to validate

        # Check if this model has load balancing enabled
        if self._is_load_balancing_enabled(workflow.tenant_id, provider, model_name):
            # Get all load balancing configurations for this model
            load_balancing_configs = self._get_load_balancing_configs(workflow.tenant_id, provider, model_name)

            # Validate each load balancing configuration
            try:
                for config in load_balancing_configs:
                    if config.get("credential_id"):
                        from core.helper.credential_utils import check_credential_policy_compliance

                        check_credential_policy_compliance(
                            config["credential_id"], provider, PluginCredentialType.MODEL
                        )
            except Exception as e:
                raise ValueError(f"Invalid load balancing credentials for {provider}/{model_name}: {str(e)}")

    def _is_load_balancing_enabled(self, tenant_id: str, provider: str, model_name: str) -> bool:
        """
        Check if load balancing is enabled for a specific model.

        :param tenant_id: The tenant ID
        :param provider: The provider name
        :param model_name: The model name
        :return: True if load balancing is enabled, False otherwise
        """
        try:
            from core.model_runtime.entities.model_entities import ModelType
            from core.provider_manager import ProviderManager

            # Get provider configurations
            provider_manager = ProviderManager()
            provider_configurations = provider_manager.get_configurations(tenant_id)

            provider_configuration = provider_configurations.get(provider)
            if not provider_configuration:
                return False

            # Get provider model setting
            provider_model_setting = provider_configuration.get_provider_model_setting(
                model_type=ModelType.LLM,
                model=model_name,
            )

            return provider_model_setting is not None and provider_model_setting.load_balancing_enabled
        except Exception:
            # If we can't determine the status, assume load balancing is not enabled
            return False

    def _get_load_balancing_configs(self, tenant_id: str, provider: str, model_name: str) -> list[dict]:
        """
        Get all load balancing configurations for a model.

        :param tenant_id: The tenant ID
        :param provider: The provider name
        :param model_name: The model name
        :return: List of load balancing configuration dictionaries
        """
        try:
            from services.model_load_balancing_service import ModelLoadBalancingService

            model_load_balancing_service = ModelLoadBalancingService()

            # Configs are stored separately for predefined and custom models,
            # so check both sources and merge the results.
            _, configs = model_load_balancing_service.get_load_balancing_configs(
                tenant_id=tenant_id,
                provider=provider,
                model=model_name,
                model_type="llm",  # Load balancing is primarily used for LLM models
                config_from="predefined-model",
            )
            _, custom_configs = model_load_balancing_service.get_load_balancing_configs(
                tenant_id=tenant_id, provider=provider, model=model_name, model_type="llm", config_from="custom-model"
            )

            all_configs = configs + custom_configs

            return [config for config in all_configs if config.get("credential_id")]
        except Exception:
            # If we can't get the configurations, return an empty list.
            # This prevents validation errors from breaking the workflow.
            return []

    def get_default_block_configs(self) -> list[dict]:
        """
        Get default block configs
        """
        # return default block config
        default_block_configs = []
        for node_class_mapping in NODE_TYPE_CLASSES_MAPPING.values():
            node_class = node_class_mapping[LATEST_VERSION]
            default_config = node_class.get_default_config()
            if default_config:
                default_block_configs.append(default_config)

        return default_block_configs

    def get_default_block_config(self, node_type: str, filters: dict | None = None) -> dict | None:
        """
        Get default config of node.
        :param node_type: node type
        :param filters: filter by node config parameters.
        :return:
        """
        node_type_enum = NodeType(node_type)

        # return default block config
        if node_type_enum not in NODE_TYPE_CLASSES_MAPPING:
            return None

        node_class = NODE_TYPE_CLASSES_MAPPING[node_type_enum][LATEST_VERSION]
        default_config = node_class.get_default_config(filters=filters)
        if not default_config:
            return None

        return default_config

    def run_draft_workflow_node(
        self,
        app_model: App,
        draft_workflow: Workflow,
        node_id: str,
        user_inputs: Mapping[str, Any],
        account: Account,
        query: str = "",
        files: Sequence[File] | None = None,
    ) -> WorkflowNodeExecutionModel:
        """
        Run draft workflow node
        """
        files = files or []
        with Session(bind=db.engine, expire_on_commit=False) as session, session.begin():
            draft_var_srv = WorkflowDraftVariableService(session)
            draft_var_srv.prefill_conversation_variable_default_values(draft_workflow)

        node_config = draft_workflow.get_node_config_by_id(node_id)
        node_type = Workflow.get_node_type_from_node_config(node_config)
        node_data = node_config.get("data", {})

        if node_type == NodeType.START:
            with Session(bind=db.engine) as session, session.begin():
                draft_var_srv = WorkflowDraftVariableService(session)
                conversation_id = draft_var_srv.get_or_create_conversation(
                    account_id=account.id,
                    app=app_model,
                    workflow=draft_workflow,
                )
            start_data = StartNodeData.model_validate(node_data)
            user_inputs = _rebuild_file_for_user_inputs_in_start_node(
                tenant_id=draft_workflow.tenant_id, start_node_data=start_data, user_inputs=user_inputs
            )
            # init variable pool
            variable_pool = _setup_variable_pool(
                query=query,
                files=files or [],
                user_id=account.id,
                user_inputs=user_inputs,
                workflow=draft_workflow,
                # NOTE(QuantumGhost): We rely on `DraftVarLoader` to load conversation variables.
                conversation_variables=[],
                node_type=node_type,
                conversation_id=conversation_id,
            )
        else:
            variable_pool = VariablePool(
                system_variables=SystemVariable.empty(),
                user_inputs=user_inputs,
                environment_variables=draft_workflow.environment_variables,
                conversation_variables=[],
            )

        variable_loader = DraftVarLoader(
            engine=db.engine,
            app_id=app_model.id,
            tenant_id=app_model.tenant_id,
        )
        enclosing_node_type_and_id = draft_workflow.get_enclosing_node_type_and_id(node_config)
        if enclosing_node_type_and_id:
            _, enclosing_node_id = enclosing_node_type_and_id
        else:
            enclosing_node_id = None

        run = WorkflowEntry.single_step_run(
            workflow=draft_workflow,
            node_id=node_id,
            user_inputs=user_inputs,
            user_id=account.id,
            variable_pool=variable_pool,
            variable_loader=variable_loader,
        )

        # run draft workflow node
        start_at = time.perf_counter()
        node_execution = self._handle_node_run_result(
            invoke_node_fn=lambda: run,
            start_at=start_at,
            node_id=node_id,
        )

        # Set workflow_id on the NodeExecution
        node_execution.workflow_id = draft_workflow.id

        # Create repository and save the node execution
        repository = DifyCoreRepositoryFactory.create_workflow_node_execution_repository(
            session_factory=db.engine,
            user=account,
            app_id=app_model.id,
            triggered_from=WorkflowNodeExecutionTriggeredFrom.SINGLE_STEP,
        )
        repository.save(node_execution)

        workflow_node_execution = self._node_execution_service_repo.get_execution_by_id(node_execution.id)
        if workflow_node_execution is None:
            raise ValueError(f"WorkflowNodeExecution with id {node_execution.id} not found after saving")

        with Session(bind=db.engine) as session, session.begin():
            draft_var_saver = DraftVariableSaver(
                session=session,
                app_id=app_model.id,
                node_id=workflow_node_execution.node_id,
                node_type=NodeType(workflow_node_execution.node_type),
                enclosing_node_id=enclosing_node_id,
                node_execution_id=node_execution.id,
            )
            draft_var_saver.save(process_data=node_execution.process_data, outputs=node_execution.outputs)
            session.commit()
        return workflow_node_execution

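    # Usage sketch (illustrative only): single-stepping one node of a draft workflow.
    # `app_model`, `draft_workflow`, and `account` are assumed to be fetched elsewhere,
    # and the node id is hypothetical.
    #
    #     service = WorkflowService()
    #     execution = service.run_draft_workflow_node(
    #         app_model=app_model,
    #         draft_workflow=draft_workflow,
    #         node_id="start",
    #         user_inputs={"query": "hello"},
    #         account=account,
    #     )
    #     print(execution.status, execution.outputs)
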
    def run_free_workflow_node(
        self, node_data: dict, tenant_id: str, user_id: str, node_id: str, user_inputs: dict[str, Any]
    ) -> WorkflowNodeExecution:
        """
        Run free workflow node
        """
        # run free workflow node
        start_at = time.perf_counter()

        node_execution = self._handle_node_run_result(
            invoke_node_fn=lambda: WorkflowEntry.run_free_node(
                node_id=node_id,
                node_data=node_data,
                tenant_id=tenant_id,
                user_id=user_id,
                user_inputs=user_inputs,
            ),
            start_at=start_at,
            node_id=node_id,
        )

        return node_execution

    def _handle_node_run_result(
        self,
        invoke_node_fn: Callable[[], tuple[BaseNode, Generator[NodeEvent | InNodeEvent, None, None]]],
        start_at: float,
        node_id: str,
    ) -> WorkflowNodeExecution:
        try:
            node, node_events = invoke_node_fn()

            node_run_result: NodeRunResult | None = None
            for event in node_events:
                if isinstance(event, RunCompletedEvent):
                    node_run_result = event.run_result

                    # sign output files
                    # node_run_result.outputs = WorkflowEntry.handle_special_values(node_run_result.outputs)
                    break

            if not node_run_result:
                raise ValueError("Node run failed with no run result")

            # single step debug mode error handling return
            if node_run_result.status == WorkflowNodeExecutionStatus.FAILED and node.continue_on_error:
                node_error_args: dict[str, Any] = {
                    "status": WorkflowNodeExecutionStatus.EXCEPTION,
                    "error": node_run_result.error,
                    "inputs": node_run_result.inputs,
                    "metadata": {"error_strategy": node.error_strategy},
                }
                if node.error_strategy is ErrorStrategy.DEFAULT_VALUE:
                    node_run_result = NodeRunResult(
                        **node_error_args,
                        outputs={
                            **node.default_value_dict,
                            "error_message": node_run_result.error,
                            "error_type": node_run_result.error_type,
                        },
                    )
                else:
                    node_run_result = NodeRunResult(
                        **node_error_args,
                        outputs={
                            "error_message": node_run_result.error,
                            "error_type": node_run_result.error_type,
                        },
                    )

            run_succeeded = node_run_result.status in (
                WorkflowNodeExecutionStatus.SUCCEEDED,
                WorkflowNodeExecutionStatus.EXCEPTION,
            )
            error = node_run_result.error if not run_succeeded else None
        except WorkflowNodeRunFailedError as e:
            node = e.node
            run_succeeded = False
            node_run_result = None
            error = e.error

        # Create a NodeExecution domain model
        node_execution = WorkflowNodeExecution(
            id=str(uuid4()),
            workflow_id="",  # This is a single-step execution, so no workflow ID
            index=1,
            node_id=node_id,
            node_type=node.type_,
            title=node.title,
            elapsed_time=time.perf_counter() - start_at,
            created_at=naive_utc_now(),
            finished_at=naive_utc_now(),
        )

        if run_succeeded and node_run_result:
            # Set inputs, process_data, and outputs as dictionaries (not JSON strings)
            inputs = WorkflowEntry.handle_special_values(node_run_result.inputs) if node_run_result.inputs else None
            process_data = (
                WorkflowEntry.handle_special_values(node_run_result.process_data)
                if node_run_result.process_data
                else None
            )
            outputs = node_run_result.outputs

            node_execution.inputs = inputs
            node_execution.process_data = process_data
            node_execution.outputs = outputs
            node_execution.metadata = node_run_result.metadata

            # Map status from WorkflowNodeExecutionStatus to NodeExecutionStatus
            if node_run_result.status == WorkflowNodeExecutionStatus.SUCCEEDED:
                node_execution.status = WorkflowNodeExecutionStatus.SUCCEEDED
            elif node_run_result.status == WorkflowNodeExecutionStatus.EXCEPTION:
                node_execution.status = WorkflowNodeExecutionStatus.EXCEPTION
                node_execution.error = node_run_result.error
        else:
            # Set failed status and error
            node_execution.status = WorkflowNodeExecutionStatus.FAILED
            node_execution.error = error

        return node_execution

    def convert_to_workflow(self, app_model: App, account: Account, args: dict) -> App:
        """
        Convert a basic-mode chatbot app (expert mode) or a completion app to a workflow app.

        :param app_model: App instance
        :param account: Account instance
        :param args: dict
        :return:
        """
        # chatbot convert to workflow mode
        workflow_converter = WorkflowConverter()

        if app_model.mode not in {AppMode.CHAT, AppMode.COMPLETION}:
            raise ValueError(f"Current app mode: {app_model.mode} is not supported for conversion to workflow.")

        # convert to workflow
        new_app: App = workflow_converter.convert_to_workflow(
            app_model=app_model,
            account=account,
            name=args.get("name", "Default Name"),
            icon_type=args.get("icon_type", "emoji"),
            icon=args.get("icon", "🤖"),
            icon_background=args.get("icon_background", "#FFEAD5"),
        )

        return new_app

    def validate_features_structure(self, app_model: App, features: dict):
        if app_model.mode == AppMode.ADVANCED_CHAT:
            return AdvancedChatAppConfigManager.config_validate(
                tenant_id=app_model.tenant_id, config=features, only_structure_validate=True
            )
        elif app_model.mode == AppMode.WORKFLOW:
            return WorkflowAppConfigManager.config_validate(
                tenant_id=app_model.tenant_id, config=features, only_structure_validate=True
            )
        else:
            raise ValueError(f"Invalid app mode: {app_model.mode}")

    def update_workflow(
        self, *, session: Session, workflow_id: str, tenant_id: str, account_id: str, data: dict
    ) -> Workflow | None:
        """
        Update workflow attributes

        :param session: SQLAlchemy database session
        :param workflow_id: Workflow ID
        :param tenant_id: Tenant ID
        :param account_id: Account ID (for permission check)
        :param data: Dictionary containing fields to update
        :return: Updated workflow or None if not found
        """
        stmt = select(Workflow).where(Workflow.id == workflow_id, Workflow.tenant_id == tenant_id)
        workflow = session.scalar(stmt)

        if not workflow:
            return None

        allowed_fields = ["marked_name", "marked_comment"]

        for field, value in data.items():
            if field in allowed_fields:
                setattr(workflow, field, value)

        workflow.updated_by = account_id
        workflow.updated_at = naive_utc_now()

        return workflow

    def delete_workflow(self, *, session: Session, workflow_id: str, tenant_id: str) -> bool:
        """
        Delete a workflow

        :param session: SQLAlchemy database session
        :param workflow_id: Workflow ID
        :param tenant_id: Tenant ID
        :return: True if successful
        :raises: ValueError if workflow not found
        :raises: WorkflowInUseError if workflow is in use
        :raises: DraftWorkflowDeletionError if workflow is a draft version
        """
        stmt = select(Workflow).where(Workflow.id == workflow_id, Workflow.tenant_id == tenant_id)
        workflow = session.scalar(stmt)

        if not workflow:
            raise ValueError(f"Workflow with ID {workflow_id} not found")

        # Check if workflow is a draft version
        if workflow.version == Workflow.VERSION_DRAFT:
            raise DraftWorkflowDeletionError("Cannot delete draft workflow versions")

        # Check if this workflow is currently referenced by an app
        app_stmt = select(App).where(App.workflow_id == workflow_id)
        app = session.scalar(app_stmt)
        if app:
            # Cannot delete a workflow that's currently in use by an app
            raise WorkflowInUseError(f"Cannot delete workflow that is currently in use by app '{app.id}'")

        # Don't use workflow.tool_published as it's not accurate for specific workflow versions
        # Check if there's a tool provider using this specific workflow version
        tool_provider = (
            session.query(WorkflowToolProvider)
            .where(
                WorkflowToolProvider.tenant_id == workflow.tenant_id,
                WorkflowToolProvider.app_id == workflow.app_id,
                WorkflowToolProvider.version == workflow.version,
            )
            .first()
        )

        if tool_provider:
            # Cannot delete a workflow that's published as a tool
            raise WorkflowInUseError("Cannot delete workflow that is published as a tool")

        session.delete(workflow)

        return True

def _setup_variable_pool(
    query: str,
    files: Sequence[File],
    user_id: str,
    user_inputs: Mapping[str, Any],
    workflow: Workflow,
    node_type: NodeType,
    conversation_id: str,
    conversation_variables: list[Variable],
):
    # Only inject system variables for START node type.
    if node_type == NodeType.START:
        system_variable = SystemVariable(
            user_id=user_id,
            app_id=workflow.app_id,
            workflow_id=workflow.id,
            files=files or [],
            workflow_execution_id=str(uuid.uuid4()),
        )
        # Only add chatflow-specific variables for non-workflow types
        if workflow.type != WorkflowType.WORKFLOW.value:
            system_variable.query = query
            system_variable.conversation_id = conversation_id
            system_variable.dialogue_count = 0
    else:
        system_variable = SystemVariable.empty()

    # init variable pool
    variable_pool = VariablePool(
        system_variables=system_variable,
        user_inputs=user_inputs,
        environment_variables=workflow.environment_variables,
        # Based on the definition of `VariableUnion`,
        # `list[Variable]` can be safely used as `list[VariableUnion]` since they are compatible.
        conversation_variables=cast(list[VariableUnion], conversation_variables),
    )

    return variable_pool

def _rebuild_file_for_user_inputs_in_start_node(
    tenant_id: str, start_node_data: StartNodeData, user_inputs: Mapping[str, Any]
) -> Mapping[str, Any]:
    inputs_copy = dict(user_inputs)

    for variable in start_node_data.variables:
        if variable.type not in (VariableEntityType.FILE, VariableEntityType.FILE_LIST):
            continue
        if variable.variable not in user_inputs:
            continue
        value = user_inputs[variable.variable]
        file = _rebuild_single_file(tenant_id=tenant_id, value=value, variable_entity_type=variable.type)
        inputs_copy[variable.variable] = file
    return inputs_copy

def _rebuild_single_file(tenant_id: str, value: Any, variable_entity_type: VariableEntityType) -> File | Sequence[File]:
    if variable_entity_type == VariableEntityType.FILE:
        if not isinstance(value, dict):
            raise ValueError(f"expected dict for file object, got {type(value)}")
        return build_from_mapping(mapping=value, tenant_id=tenant_id)
    elif variable_entity_type == VariableEntityType.FILE_LIST:
        if not isinstance(value, list):
            raise ValueError(f"expected list for file list object, got {type(value)}")
        if len(value) == 0:
            return []
        if not isinstance(value[0], dict):
            raise ValueError(f"expected dict for first element in the file list, got {type(value[0])}")
        return build_from_mappings(mappings=value, tenant_id=tenant_id)
    else:
        raise Exception("unreachable")
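
# Note: `build_from_mapping` / `build_from_mappings` expect file mappings in the shape produced
# by the file upload / remote-URL APIs (for example, keys such as "transfer_method" and
# "upload_file_id" or "url"); see factories.file_factory for the authoritative schema. The
# example keys here are illustrative, not exhaustive.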