
workflow_service.py

import json
import time
import uuid
from collections.abc import Callable, Generator, Mapping, Sequence
from typing import Any, Optional, cast

from sqlalchemy import exists, select
from sqlalchemy.orm import Session, sessionmaker

from core.app.app_config.entities import VariableEntityType
from core.app.apps.advanced_chat.app_config_manager import AdvancedChatAppConfigManager
from core.app.apps.workflow.app_config_manager import WorkflowAppConfigManager
from core.file import File
from core.repositories import DifyCoreRepositoryFactory
from core.variables import Variable
from core.variables.variables import VariableUnion
from core.workflow.entities import VariablePool, WorkflowNodeExecution
from core.workflow.enums import ErrorStrategy, WorkflowNodeExecutionMetadataKey, WorkflowNodeExecutionStatus
from core.workflow.errors import WorkflowNodeRunFailedError
from core.workflow.graph_events import GraphNodeEventBase, NodeRunFailedEvent, NodeRunSucceededEvent
from core.workflow.node_events import NodeRunResult
from core.workflow.nodes import NodeType
from core.workflow.nodes.base.node import Node
from core.workflow.nodes.node_mapping import LATEST_VERSION, NODE_TYPE_CLASSES_MAPPING
from core.workflow.nodes.start.entities import StartNodeData
from core.workflow.system_variable import SystemVariable
from core.workflow.workflow_entry import WorkflowEntry
from events.app_event import app_draft_workflow_was_synced, app_published_workflow_was_updated
from extensions.ext_database import db
from extensions.ext_storage import storage
from factories.file_factory import build_from_mapping, build_from_mappings
from libs.datetime_utils import naive_utc_now
from models.account import Account
from models.model import App, AppMode
from models.tools import WorkflowToolProvider
from models.workflow import (
    Workflow,
    WorkflowNodeExecutionModel,
    WorkflowNodeExecutionTriggeredFrom,
    WorkflowType,
)
from repositories.factory import DifyAPIRepositoryFactory
from services.errors.app import IsDraftWorkflowError, WorkflowHashNotEqualError
from services.workflow.workflow_converter import WorkflowConverter

from .errors.workflow_service import DraftWorkflowDeletionError, WorkflowInUseError
from .workflow_draft_variable_service import (
    DraftVariableSaver,
    DraftVarLoader,
    WorkflowDraftVariableService,
)


class WorkflowService:
    """
    Workflow Service
    """

    def __init__(self, session_maker: sessionmaker | None = None):
        """Initialize WorkflowService with repository dependencies."""
        if session_maker is None:
            session_maker = sessionmaker(bind=db.engine, expire_on_commit=False)
        self._node_execution_service_repo = DifyAPIRepositoryFactory.create_api_workflow_node_execution_repository(
            session_maker
        )

    def get_node_last_run(self, app_model: App, workflow: Workflow, node_id: str) -> WorkflowNodeExecutionModel | None:
        """
        Get the most recent execution for a specific node.

        Args:
            app_model: The application model
            workflow: The workflow model
            node_id: The node identifier

        Returns:
            The most recent WorkflowNodeExecutionModel for the node, or None if not found
        """
        return self._node_execution_service_repo.get_node_last_execution(
            tenant_id=app_model.tenant_id,
            app_id=app_model.id,
            workflow_id=workflow.id,
            node_id=node_id,
        )

    def is_workflow_exist(self, app_model: App) -> bool:
        stmt = select(
            exists().where(
                Workflow.tenant_id == app_model.tenant_id,
                Workflow.app_id == app_model.id,
                Workflow.version == Workflow.VERSION_DRAFT,
            )
        )
        return db.session.execute(stmt).scalar_one()

    def get_draft_workflow(self, app_model: App, workflow_id: Optional[str] = None) -> Optional[Workflow]:
        """
        Get the draft workflow, or a published workflow if workflow_id is given.
        """
        if workflow_id:
            return self.get_published_workflow_by_id(app_model, workflow_id)

        # fetch draft workflow by app_model
        workflow = (
            db.session.query(Workflow)
            .where(
                Workflow.tenant_id == app_model.tenant_id,
                Workflow.app_id == app_model.id,
                Workflow.version == Workflow.VERSION_DRAFT,
            )
            .first()
        )

        # return draft workflow
        return workflow

    def get_published_workflow_by_id(self, app_model: App, workflow_id: str) -> Optional[Workflow]:
        """
        Fetch a published workflow by workflow_id.
        """
        workflow = (
            db.session.query(Workflow)
            .where(
                Workflow.tenant_id == app_model.tenant_id,
                Workflow.app_id == app_model.id,
                Workflow.id == workflow_id,
            )
            .first()
        )
        if not workflow:
            return None
        if workflow.version == Workflow.VERSION_DRAFT:
            raise IsDraftWorkflowError(
                f"Cannot use draft workflow version. Workflow ID: {workflow_id}. "
                f"Please use a published workflow version or leave workflow_id empty."
            )
        return workflow

    def get_published_workflow(self, app_model: App) -> Optional[Workflow]:
        """
        Get published workflow
        """
        if not app_model.workflow_id:
            return None

        # fetch published workflow by workflow_id
        workflow = (
            db.session.query(Workflow)
            .where(
                Workflow.tenant_id == app_model.tenant_id,
                Workflow.app_id == app_model.id,
                Workflow.id == app_model.workflow_id,
            )
            .first()
        )

        return workflow

    def get_all_published_workflow(
        self,
        *,
        session: Session,
        app_model: App,
        page: int,
        limit: int,
        user_id: str | None,
        named_only: bool = False,
    ) -> tuple[Sequence[Workflow], bool]:
        """
        Get published workflows with pagination.
        """
        if not app_model.workflow_id:
            return [], False

        stmt = (
            select(Workflow)
            .where(Workflow.app_id == app_model.id)
            .order_by(Workflow.version.desc())
            .limit(limit + 1)
            .offset((page - 1) * limit)
        )

        if user_id:
            stmt = stmt.where(Workflow.created_by == user_id)
        if named_only:
            stmt = stmt.where(Workflow.marked_name != "")

        workflows = session.scalars(stmt).all()

        has_more = len(workflows) > limit
        if has_more:
            workflows = workflows[:-1]

        return workflows, has_more

    def sync_draft_workflow(
        self,
        *,
        app_model: App,
        graph: dict,
        features: dict,
        unique_hash: Optional[str],
        account: Account,
        environment_variables: Sequence[Variable],
        conversation_variables: Sequence[Variable],
    ) -> Workflow:
        """
        Sync draft workflow
        :raises WorkflowHashNotEqualError
        """
        # fetch draft workflow by app_model
        workflow = self.get_draft_workflow(app_model=app_model)

        if workflow and workflow.unique_hash != unique_hash:
            raise WorkflowHashNotEqualError()

        # validate features structure
        self.validate_features_structure(app_model=app_model, features=features)

        # create draft workflow if not found
        if not workflow:
            workflow = Workflow(
                tenant_id=app_model.tenant_id,
                app_id=app_model.id,
                type=WorkflowType.from_app_mode(app_model.mode).value,
                version=Workflow.VERSION_DRAFT,
                graph=json.dumps(graph),
                features=json.dumps(features),
                created_by=account.id,
                environment_variables=environment_variables,
                conversation_variables=conversation_variables,
            )
            db.session.add(workflow)
        # update draft workflow if found
        else:
            workflow.graph = json.dumps(graph)
            workflow.features = json.dumps(features)
            workflow.updated_by = account.id
            workflow.updated_at = naive_utc_now()
            workflow.environment_variables = environment_variables
            workflow.conversation_variables = conversation_variables

        # commit db session changes
        db.session.commit()

        # trigger app workflow events
        app_draft_workflow_was_synced.send(app_model, synced_draft_workflow=workflow)

        # return draft workflow
        return workflow

    def publish_workflow(
        self,
        *,
        session: Session,
        app_model: App,
        account: Account,
        marked_name: str = "",
        marked_comment: str = "",
    ) -> Workflow:
        draft_workflow_stmt = select(Workflow).where(
            Workflow.tenant_id == app_model.tenant_id,
            Workflow.app_id == app_model.id,
            Workflow.version == Workflow.VERSION_DRAFT,
        )
        draft_workflow = session.scalar(draft_workflow_stmt)
        if not draft_workflow:
            raise ValueError("No valid workflow found.")

        # create new workflow
        workflow = Workflow.new(
            tenant_id=app_model.tenant_id,
            app_id=app_model.id,
            type=draft_workflow.type,
            version=Workflow.version_from_datetime(naive_utc_now()),
            graph=draft_workflow.graph,
            created_by=account.id,
            environment_variables=draft_workflow.environment_variables,
            conversation_variables=draft_workflow.conversation_variables,
            marked_name=marked_name,
            marked_comment=marked_comment,
            rag_pipeline_variables=draft_workflow.rag_pipeline_variables,
            features=draft_workflow.features,
        )

        # commit db session changes
        session.add(workflow)

        # trigger app workflow events
        app_published_workflow_was_updated.send(app_model, published_workflow=workflow)

        # return new workflow
        return workflow

    def get_default_block_configs(self) -> list[dict]:
        """
        Get default block configs
        """
        # return default block config
        default_block_configs = []
        for node_class_mapping in NODE_TYPE_CLASSES_MAPPING.values():
            node_class = node_class_mapping[LATEST_VERSION]
            default_config = node_class.get_default_config()
            if default_config:
                default_block_configs.append(default_config)

        return default_block_configs

    def get_default_block_config(self, node_type: str, filters: Optional[dict] = None) -> Optional[dict]:
        """
        Get default config of node.
        :param node_type: node type
        :param filters: filter by node config parameters.
        :return:
        """
        node_type_enum = NodeType(node_type)

        # return default block config
        if node_type_enum not in NODE_TYPE_CLASSES_MAPPING:
            return None

        node_class = NODE_TYPE_CLASSES_MAPPING[node_type_enum][LATEST_VERSION]
        default_config = node_class.get_default_config(filters=filters)
        if not default_config:
            return None

        return default_config

    def run_draft_workflow_node(
        self,
        app_model: App,
        draft_workflow: Workflow,
        node_id: str,
        user_inputs: Mapping[str, Any],
        account: Account,
        query: str = "",
        files: Sequence[File] | None = None,
    ) -> WorkflowNodeExecutionModel:
        """
        Run draft workflow node
        """
        files = files or []
        with Session(bind=db.engine, expire_on_commit=False) as session, session.begin():
            draft_var_srv = WorkflowDraftVariableService(session)
            draft_var_srv.prefill_conversation_variable_default_values(draft_workflow)

        node_config = draft_workflow.get_node_config_by_id(node_id)
        node_type = Workflow.get_node_type_from_node_config(node_config)
        node_data = node_config.get("data", {})

        if node_type == NodeType.START:
            with Session(bind=db.engine) as session, session.begin():
                draft_var_srv = WorkflowDraftVariableService(session)
                conversation_id = draft_var_srv.get_or_create_conversation(
                    account_id=account.id,
                    app=app_model,
                    workflow=draft_workflow,
                )
            start_data = StartNodeData.model_validate(node_data)
            user_inputs = _rebuild_file_for_user_inputs_in_start_node(
                tenant_id=draft_workflow.tenant_id, start_node_data=start_data, user_inputs=user_inputs
            )
            # init variable pool
            variable_pool = _setup_variable_pool(
                query=query,
                files=files or [],
                user_id=account.id,
                user_inputs=user_inputs,
                workflow=draft_workflow,
                # NOTE(QuantumGhost): We rely on `DraftVarLoader` to load conversation variables.
                conversation_variables=[],
                node_type=node_type,
                conversation_id=conversation_id,
            )
        else:
            variable_pool = VariablePool(
                system_variables=SystemVariable.empty(),
                user_inputs=user_inputs,
                environment_variables=draft_workflow.environment_variables,
                conversation_variables=[],
            )

        variable_loader = DraftVarLoader(
            engine=db.engine,
            app_id=app_model.id,
            tenant_id=app_model.tenant_id,
        )

        enclosing_node_type_and_id = draft_workflow.get_enclosing_node_type_and_id(node_config)
        if enclosing_node_type_and_id:
            _, enclosing_node_id = enclosing_node_type_and_id
        else:
            enclosing_node_id = None

        run = WorkflowEntry.single_step_run(
            workflow=draft_workflow,
            node_id=node_id,
            user_inputs=user_inputs,
            user_id=account.id,
            variable_pool=variable_pool,
            variable_loader=variable_loader,
        )

        # run draft workflow node
        start_at = time.perf_counter()
        node_execution = self._handle_single_step_result(
            invoke_node_fn=lambda: run,
            start_at=start_at,
            node_id=node_id,
        )

        # Set workflow_id on the NodeExecution
        node_execution.workflow_id = draft_workflow.id

        # Create repository and save the node execution
        repository = DifyCoreRepositoryFactory.create_workflow_node_execution_repository(
            session_factory=db.engine,
            user=account,
            app_id=app_model.id,
            triggered_from=WorkflowNodeExecutionTriggeredFrom.SINGLE_STEP,
        )
        repository.save(node_execution)

        workflow_node_execution = self._node_execution_service_repo.get_execution_by_id(node_execution.id)
        if workflow_node_execution is None:
            raise ValueError(f"WorkflowNodeExecution with id {node_execution.id} not found after saving")

        with Session(db.engine) as session:
            outputs = workflow_node_execution.load_full_outputs(session, storage)

        with Session(bind=db.engine) as session, session.begin():
            draft_var_saver = DraftVariableSaver(
                session=session,
                app_id=app_model.id,
                node_id=workflow_node_execution.node_id,
                node_type=NodeType(workflow_node_execution.node_type),
                enclosing_node_id=enclosing_node_id,
                node_execution_id=node_execution.id,
                user=account,
            )
            draft_var_saver.save(process_data=node_execution.process_data, outputs=outputs)
            session.commit()
        return workflow_node_execution

    def run_free_workflow_node(
        self, node_data: dict, tenant_id: str, user_id: str, node_id: str, user_inputs: dict[str, Any]
    ) -> WorkflowNodeExecution:
        """
        Run free workflow node
        """
        # run free workflow node
        start_at = time.perf_counter()

        node_execution = self._handle_single_step_result(
            invoke_node_fn=lambda: WorkflowEntry.run_free_node(
                node_id=node_id,
                node_data=node_data,
                tenant_id=tenant_id,
                user_id=user_id,
                user_inputs=user_inputs,
            ),
            start_at=start_at,
            node_id=node_id,
        )

        return node_execution

    def _handle_single_step_result(
        self,
        invoke_node_fn: Callable[[], tuple[Node, Generator[GraphNodeEventBase, None, None]]],
        start_at: float,
        node_id: str,
    ) -> WorkflowNodeExecution:
        """
        Handle single step execution and return WorkflowNodeExecution.

        Args:
            invoke_node_fn: Function to invoke node execution
            start_at: Execution start time
            node_id: ID of the node being executed

        Returns:
            WorkflowNodeExecution: The execution result
        """
        node, node_run_result, run_succeeded, error = self._execute_node_safely(invoke_node_fn)

        # Create base node execution
        node_execution = WorkflowNodeExecution(
            id=str(uuid.uuid4()),
            workflow_id="",  # Single-step execution has no workflow ID
            index=1,
            node_id=node_id,
            node_type=node.node_type,
            title=node.title,
            elapsed_time=time.perf_counter() - start_at,
            created_at=naive_utc_now(),
            finished_at=naive_utc_now(),
        )

        # Populate execution result data
        self._populate_execution_result(node_execution, node_run_result, run_succeeded, error)

        return node_execution

    def _execute_node_safely(
        self, invoke_node_fn: Callable[[], tuple[Node, Generator[GraphNodeEventBase, None, None]]]
    ) -> tuple[Node, NodeRunResult | None, bool, str | None]:
        """
        Execute node safely and handle errors according to error strategy.

        Returns:
            Tuple of (node, node_run_result, run_succeeded, error)
        """
        try:
            node, node_events = invoke_node_fn()

            node_run_result = next(
                (
                    event.node_run_result
                    for event in node_events
                    if isinstance(event, (NodeRunSucceededEvent, NodeRunFailedEvent))
                ),
                None,
            )
            if not node_run_result:
                raise ValueError("Node execution failed - no result returned")

            # Apply error strategy if node failed
            if node_run_result.status == WorkflowNodeExecutionStatus.FAILED and node.error_strategy:
                node_run_result = self._apply_error_strategy(node, node_run_result)

            run_succeeded = node_run_result.status in (
                WorkflowNodeExecutionStatus.SUCCEEDED,
                WorkflowNodeExecutionStatus.EXCEPTION,
            )
            error = node_run_result.error if not run_succeeded else None

            return node, node_run_result, run_succeeded, error
        except WorkflowNodeRunFailedError as e:
            return e._node, None, False, e._error

    def _apply_error_strategy(self, node: Node, node_run_result: NodeRunResult) -> NodeRunResult:
        """Apply error strategy when node execution fails."""
        # TODO(Novice): Maybe we should apply error strategy to node level?
        error_outputs = {
            "error_message": node_run_result.error,
            "error_type": node_run_result.error_type,
        }

        # Add default values if strategy is DEFAULT_VALUE
        if node.error_strategy is ErrorStrategy.DEFAULT_VALUE:
            error_outputs.update(node.default_value_dict)

        return NodeRunResult(
            status=WorkflowNodeExecutionStatus.EXCEPTION,
            error=node_run_result.error,
            inputs=node_run_result.inputs,
            metadata={WorkflowNodeExecutionMetadataKey.ERROR_STRATEGY: node.error_strategy},
            outputs=error_outputs,
        )

    def _populate_execution_result(
        self,
        node_execution: WorkflowNodeExecution,
        node_run_result: NodeRunResult | None,
        run_succeeded: bool,
        error: str | None,
    ) -> None:
        """Populate node execution with result data."""
        if run_succeeded and node_run_result:
            node_execution.inputs = (
                WorkflowEntry.handle_special_values(node_run_result.inputs) if node_run_result.inputs else None
            )
            node_execution.process_data = (
                WorkflowEntry.handle_special_values(node_run_result.process_data)
                if node_run_result.process_data
                else None
            )
            node_execution.outputs = node_run_result.outputs
            node_execution.metadata = node_run_result.metadata

            # Set status and error based on result
            node_execution.status = node_run_result.status
            if node_run_result.status == WorkflowNodeExecutionStatus.EXCEPTION:
                node_execution.error = node_run_result.error
        else:
            node_execution.status = WorkflowNodeExecutionStatus.FAILED
            node_execution.error = error

    def convert_to_workflow(self, app_model: App, account: Account, args: dict) -> App:
        """
        Convert a basic-mode chatbot app (expert mode) or a completion app to a workflow app.
        :param app_model: App instance
        :param account: Account instance
        :param args: dict
        :return:
        """
        # chatbot convert to workflow mode
        workflow_converter = WorkflowConverter()

        if app_model.mode not in {AppMode.CHAT.value, AppMode.COMPLETION.value}:
            raise ValueError(f"Current app mode: {app_model.mode} does not support conversion to a workflow.")

        # convert to workflow
        new_app: App = workflow_converter.convert_to_workflow(
            app_model=app_model,
            account=account,
            name=args.get("name", "Default Name"),
            icon_type=args.get("icon_type", "emoji"),
            icon=args.get("icon", "🤖"),
            icon_background=args.get("icon_background", "#FFEAD5"),
        )

        return new_app

    def validate_features_structure(self, app_model: App, features: dict):
        if app_model.mode == AppMode.ADVANCED_CHAT.value:
            return AdvancedChatAppConfigManager.config_validate(
                tenant_id=app_model.tenant_id, config=features, only_structure_validate=True
            )
        elif app_model.mode == AppMode.WORKFLOW.value:
            return WorkflowAppConfigManager.config_validate(
                tenant_id=app_model.tenant_id, config=features, only_structure_validate=True
            )
        else:
            raise ValueError(f"Invalid app mode: {app_model.mode}")

    def update_workflow(
        self, *, session: Session, workflow_id: str, tenant_id: str, account_id: str, data: dict
    ) -> Optional[Workflow]:
        """
        Update workflow attributes

        :param session: SQLAlchemy database session
        :param workflow_id: Workflow ID
        :param tenant_id: Tenant ID
        :param account_id: Account ID (for permission check)
        :param data: Dictionary containing fields to update
        :return: Updated workflow or None if not found
        """
        stmt = select(Workflow).where(Workflow.id == workflow_id, Workflow.tenant_id == tenant_id)
        workflow = session.scalar(stmt)

        if not workflow:
            return None

        allowed_fields = ["marked_name", "marked_comment"]

        for field, value in data.items():
            if field in allowed_fields:
                setattr(workflow, field, value)

        workflow.updated_by = account_id
        workflow.updated_at = naive_utc_now()

        return workflow

    def delete_workflow(self, *, session: Session, workflow_id: str, tenant_id: str) -> bool:
        """
        Delete a workflow

        :param session: SQLAlchemy database session
        :param workflow_id: Workflow ID
        :param tenant_id: Tenant ID
        :return: True if successful
        :raises: ValueError if workflow not found
        :raises: WorkflowInUseError if workflow is in use
        :raises: DraftWorkflowDeletionError if workflow is a draft version
        """
        stmt = select(Workflow).where(Workflow.id == workflow_id, Workflow.tenant_id == tenant_id)
        workflow = session.scalar(stmt)

        if not workflow:
            raise ValueError(f"Workflow with ID {workflow_id} not found")

        # Check if workflow is a draft version
        if workflow.version == Workflow.VERSION_DRAFT:
            raise DraftWorkflowDeletionError("Cannot delete draft workflow versions")

        # Check if this workflow is currently referenced by an app
        app_stmt = select(App).where(App.workflow_id == workflow_id)
        app = session.scalar(app_stmt)
        if app:
            # Cannot delete a workflow that's currently in use by an app
            raise WorkflowInUseError(f"Cannot delete workflow that is currently in use by app '{app.id}'")

        # Don't use workflow.tool_published as it's not accurate for specific workflow versions
        # Check if there's a tool provider using this specific workflow version
        tool_provider = (
            session.query(WorkflowToolProvider)
            .where(
                WorkflowToolProvider.tenant_id == workflow.tenant_id,
                WorkflowToolProvider.app_id == workflow.app_id,
                WorkflowToolProvider.version == workflow.version,
            )
            .first()
        )
        if tool_provider:
            # Cannot delete a workflow that's published as a tool
            raise WorkflowInUseError("Cannot delete workflow that is published as a tool")

        session.delete(workflow)

        return True


def _setup_variable_pool(
    query: str,
    files: Sequence[File],
    user_id: str,
    user_inputs: Mapping[str, Any],
    workflow: Workflow,
    node_type: NodeType,
    conversation_id: str,
    conversation_variables: list[Variable],
) -> VariablePool:
    # Only inject system variables for START node type.
    if node_type == NodeType.START:
        system_variable = SystemVariable(
            user_id=user_id,
            app_id=workflow.app_id,
            workflow_id=workflow.id,
            files=files or [],
            workflow_execution_id=str(uuid.uuid4()),
        )
        # Only add chatflow-specific variables for non-workflow types
        if workflow.type != WorkflowType.WORKFLOW.value:
            system_variable.query = query
            system_variable.conversation_id = conversation_id
            system_variable.dialogue_count = 0
    else:
        system_variable = SystemVariable.empty()

    # init variable pool
    variable_pool = VariablePool(
        system_variables=system_variable,
        user_inputs=user_inputs,
        environment_variables=workflow.environment_variables,
        # Based on the definition of `VariableUnion`,
        # `list[Variable]` can be safely used as `list[VariableUnion]` since they are compatible.
        conversation_variables=cast(list[VariableUnion], conversation_variables),
    )
    return variable_pool


def _rebuild_file_for_user_inputs_in_start_node(
    tenant_id: str, start_node_data: StartNodeData, user_inputs: Mapping[str, Any]
) -> Mapping[str, Any]:
    inputs_copy = dict(user_inputs)

    for variable in start_node_data.variables:
        if variable.type not in (VariableEntityType.FILE, VariableEntityType.FILE_LIST):
            continue
        if variable.variable not in user_inputs:
            continue
        value = user_inputs[variable.variable]
        file = _rebuild_single_file(tenant_id=tenant_id, value=value, variable_entity_type=variable.type)
        inputs_copy[variable.variable] = file

    return inputs_copy


def _rebuild_single_file(tenant_id: str, value: Any, variable_entity_type: VariableEntityType) -> File | Sequence[File]:
    if variable_entity_type == VariableEntityType.FILE:
        if not isinstance(value, dict):
            raise ValueError(f"expected dict for file object, got {type(value)}")
        return build_from_mapping(mapping=value, tenant_id=tenant_id)
    elif variable_entity_type == VariableEntityType.FILE_LIST:
        if not isinstance(value, list):
            raise ValueError(f"expected list for file list object, got {type(value)}")
        if len(value) == 0:
            return []
        if not isinstance(value[0], dict):
            raise ValueError(f"expected dict for first element in the file list, got {type(value[0])}")
        return build_from_mappings(mappings=value, tenant_id=tenant_id)
    else:
        raise Exception("unreachable")
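

A minimal usage sketch, not part of the original module: it shows how a caller might publish the current draft and then page through published versions using the service above. It assumes the module is importable as services.workflow_service, that db.engine is configured as it is elsewhere in this file, and that "app-id" and "account-id" are hypothetical placeholders for existing records.

from sqlalchemy.orm import Session

from extensions.ext_database import db
from models.account import Account
from models.model import App
from services.workflow_service import WorkflowService  # assumed import path

service = WorkflowService()

with Session(bind=db.engine, expire_on_commit=False) as session, session.begin():
    app_model = session.get(App, "app-id")        # hypothetical existing app record
    account = session.get(Account, "account-id")  # hypothetical existing account record

    # Publish the current draft as a new immutable workflow version.
    published = service.publish_workflow(session=session, app_model=app_model, account=account)

    # Page through published versions; the service fetches limit + 1 rows to derive has_more.
    workflows, has_more = service.get_all_published_workflow(
        session=session, app_model=app_model, page=1, limit=10, user_id=None
    )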