rag_pipeline.py

import json
import threading
import time
from collections.abc import Callable, Generator, Sequence
from datetime import UTC, datetime
from typing import Any, Optional, cast
from uuid import uuid4

from flask_login import current_user
from sqlalchemy import or_, select
from sqlalchemy.orm import Session

import contexts
from configs import dify_config
from core.datasource.entities.datasource_entities import (
    DatasourceProviderType,
    GetOnlineDocumentPagesResponse,
    GetWebsiteCrawlResponse,
)
from core.datasource.online_document.online_document_plugin import OnlineDocumentDatasourcePlugin
from core.datasource.website_crawl.website_crawl_plugin import WebsiteCrawlDatasourcePlugin
from core.model_runtime.utils.encoders import jsonable_encoder
from core.repositories.sqlalchemy_workflow_node_execution_repository import SQLAlchemyWorkflowNodeExecutionRepository
from core.variables.variables import Variable
from core.workflow.entities.node_entities import NodeRunResult
from core.workflow.errors import WorkflowNodeRunFailedError
from core.workflow.graph_engine.entities.event import InNodeEvent
from core.workflow.nodes.base.node import BaseNode
from core.workflow.nodes.enums import ErrorStrategy, NodeType
from core.workflow.nodes.event.event import RunCompletedEvent
from core.workflow.nodes.event.types import NodeEvent
from core.workflow.nodes.node_mapping import LATEST_VERSION, NODE_TYPE_CLASSES_MAPPING
from core.workflow.repository.workflow_node_execution_repository import OrderConfig
from core.workflow.workflow_entry import WorkflowEntry
from extensions.ext_database import db
from libs.infinite_scroll_pagination import InfiniteScrollPagination
from models.account import Account
from models.dataset import Pipeline, PipelineBuiltInTemplate, PipelineCustomizedTemplate  # type: ignore
from models.enums import CreatorUserRole, WorkflowRunTriggeredFrom
from models.model import EndUser
from models.workflow import (
    Workflow,
    WorkflowNodeExecution,
    WorkflowNodeExecutionStatus,
    WorkflowNodeExecutionTriggeredFrom,
    WorkflowRun,
    WorkflowType,
)
from services.dataset_service import DatasetService
from services.entities.knowledge_entities.rag_pipeline_entities import (
    KnowledgeConfiguration,
    PipelineTemplateInfoEntity,
)
from services.errors.app import WorkflowHashNotEqualError
from services.rag_pipeline.pipeline_template.pipeline_template_factory import PipelineTemplateRetrievalFactory


class RagPipelineService:
    @classmethod
    def get_pipeline_templates(cls, type: str = "built-in", language: str = "en-US") -> dict:
        if type == "built-in":
            mode = dify_config.HOSTED_FETCH_PIPELINE_TEMPLATES_MODE
            retrieval_instance = PipelineTemplateRetrievalFactory.get_pipeline_template_factory(mode)()
            result = retrieval_instance.get_pipeline_templates(language)
            if not result.get("pipeline_templates") and language != "en-US":
                template_retrieval = PipelineTemplateRetrievalFactory.get_built_in_pipeline_template_retrieval()
                result = template_retrieval.fetch_pipeline_templates_from_builtin("en-US")
            return result
        else:
            mode = "customized"
            retrieval_instance = PipelineTemplateRetrievalFactory.get_pipeline_template_factory(mode)()
            result = retrieval_instance.get_pipeline_templates(language)
            return result

    @classmethod
    def get_pipeline_template_detail(cls, template_id: str) -> Optional[dict]:
        """
        Get pipeline template detail.
        :param template_id: template id
        :return:
        """
        mode = dify_config.HOSTED_FETCH_PIPELINE_TEMPLATES_MODE
        retrieval_instance = PipelineTemplateRetrievalFactory.get_pipeline_template_factory(mode)()
        result: Optional[dict] = retrieval_instance.get_pipeline_template_detail(template_id)
        return result

    @classmethod
    def update_customized_pipeline_template(cls, template_id: str, template_info: PipelineTemplateInfoEntity):
        """
        Update pipeline template.
        :param template_id: template id
        :param template_info: template info
        """
        customized_template: PipelineCustomizedTemplate | None = (
            db.session.query(PipelineCustomizedTemplate)
            .filter(
                PipelineCustomizedTemplate.id == template_id,
                PipelineCustomizedTemplate.tenant_id == current_user.current_tenant_id,
            )
            .first()
        )
        if not customized_template:
            raise ValueError("Customized pipeline template not found.")
        customized_template.name = template_info.name
        customized_template.description = template_info.description
        customized_template.icon = template_info.icon_info.model_dump()
        db.session.commit()
        return customized_template

    @classmethod
    def delete_customized_pipeline_template(cls, template_id: str):
        """
        Delete customized pipeline template.
        """
        customized_template: PipelineCustomizedTemplate | None = (
            db.session.query(PipelineCustomizedTemplate)
            .filter(
                PipelineCustomizedTemplate.id == template_id,
                PipelineCustomizedTemplate.tenant_id == current_user.current_tenant_id,
            )
            .first()
        )
        if not customized_template:
            raise ValueError("Customized pipeline template not found.")
        db.session.delete(customized_template)
        db.session.commit()

    def get_draft_workflow(self, pipeline: Pipeline) -> Optional[Workflow]:
        """
        Get draft workflow
        """
        # fetch draft workflow by rag pipeline
        workflow = (
            db.session.query(Workflow)
            .filter(
                Workflow.tenant_id == pipeline.tenant_id,
                Workflow.app_id == pipeline.id,
                Workflow.version == "draft",
            )
            .first()
        )
        # return draft workflow
        return workflow

    def get_published_workflow(self, pipeline: Pipeline) -> Optional[Workflow]:
        """
        Get published workflow
        """
        if not pipeline.workflow_id:
            return None
        # fetch published workflow by workflow_id
        workflow = (
            db.session.query(Workflow)
            .filter(
                Workflow.tenant_id == pipeline.tenant_id,
                Workflow.app_id == pipeline.id,
                Workflow.id == pipeline.workflow_id,
            )
            .first()
        )
        return workflow

    def get_all_published_workflow(
        self,
        *,
        session: Session,
        pipeline: Pipeline,
        page: int,
        limit: int,
        user_id: str | None,
        named_only: bool = False,
    ) -> tuple[Sequence[Workflow], bool]:
        """
        Get published workflow with pagination
        """
        if not pipeline.workflow_id:
            return [], False
        stmt = (
            select(Workflow)
            .where(Workflow.app_id == pipeline.id)
            .order_by(Workflow.version.desc())
            .limit(limit + 1)
            .offset((page - 1) * limit)
        )
        if user_id:
            stmt = stmt.where(Workflow.created_by == user_id)
        if named_only:
            stmt = stmt.where(Workflow.marked_name != "")
        workflows = session.scalars(stmt).all()
        has_more = len(workflows) > limit
        if has_more:
            workflows = workflows[:-1]
        return workflows, has_more

    def sync_draft_workflow(
        self,
        *,
        pipeline: Pipeline,
        graph: dict,
        unique_hash: Optional[str],
        account: Account,
        environment_variables: Sequence[Variable],
        conversation_variables: Sequence[Variable],
        rag_pipeline_variables: list,
    ) -> Workflow:
        """
        Sync draft workflow
        :raises WorkflowHashNotEqualError
        """
        # fetch draft workflow by app_model
        workflow = self.get_draft_workflow(pipeline=pipeline)
        if workflow and workflow.unique_hash != unique_hash:
            raise WorkflowHashNotEqualError()
        # create draft workflow if not found
        if not workflow:
            workflow = Workflow(
                tenant_id=pipeline.tenant_id,
                app_id=pipeline.id,
                features="{}",
                type=WorkflowType.RAG_PIPELINE.value,
                version="draft",
                graph=json.dumps(graph),
                created_by=account.id,
                environment_variables=environment_variables,
                conversation_variables=conversation_variables,
                rag_pipeline_variables=rag_pipeline_variables,
            )
            db.session.add(workflow)
            db.session.flush()
            pipeline.workflow_id = workflow.id
        # update draft workflow if found
        else:
            workflow.graph = json.dumps(graph)
            workflow.updated_by = account.id
            workflow.updated_at = datetime.now(UTC).replace(tzinfo=None)
            workflow.environment_variables = environment_variables
            workflow.conversation_variables = conversation_variables
            workflow.rag_pipeline_variables = rag_pipeline_variables
        # commit db session changes
        db.session.commit()
        # trigger workflow events TODO
        # app_draft_workflow_was_synced.send(pipeline, synced_draft_workflow=workflow)
        # return draft workflow
        return workflow

    def publish_workflow(
        self,
        *,
        session: Session,
        pipeline: Pipeline,
        account: Account,
    ) -> Workflow:
        draft_workflow_stmt = select(Workflow).where(
            Workflow.tenant_id == pipeline.tenant_id,
            Workflow.app_id == pipeline.id,
            Workflow.version == "draft",
        )
        draft_workflow = session.scalar(draft_workflow_stmt)
        if not draft_workflow:
            raise ValueError("No valid workflow found.")
        # create new workflow
        workflow = Workflow.new(
            tenant_id=pipeline.tenant_id,
            app_id=pipeline.id,
            type=draft_workflow.type,
            version=str(datetime.now(UTC).replace(tzinfo=None)),
            graph=draft_workflow.graph,
            features=draft_workflow.features,
            created_by=account.id,
            environment_variables=draft_workflow.environment_variables,
            conversation_variables=draft_workflow.conversation_variables,
            rag_pipeline_variables=draft_workflow.rag_pipeline_variables,
            marked_name="",
            marked_comment="",
        )
        # commit db session changes
        session.add(workflow)
        graph = workflow.graph_dict
        nodes = graph.get("nodes", [])
        for node in nodes:
            if node.get("data", {}).get("type") == "knowledge-index":
                knowledge_configuration = node.get("data", {})
                knowledge_configuration = KnowledgeConfiguration(**knowledge_configuration)
                # update dataset
                dataset = pipeline.dataset
                if not dataset:
                    raise ValueError("Dataset not found")
                DatasetService.update_rag_pipeline_dataset_settings(
                    session=session,
                    dataset=dataset,
                    knowledge_configuration=knowledge_configuration,
                    has_published=pipeline.is_published,
                )
        # return new workflow
        return workflow

    def get_default_block_configs(self) -> list[dict]:
        """
        Get default block configs
        """
        # return default block config
        default_block_configs = []
        for node_class_mapping in NODE_TYPE_CLASSES_MAPPING.values():
            node_class = node_class_mapping[LATEST_VERSION]
            default_config = node_class.get_default_config()
            if default_config:
                default_block_configs.append(default_config)
        return default_block_configs

    def get_default_block_config(self, node_type: str, filters: Optional[dict] = None) -> Optional[dict]:
        """
        Get default config of node.
        :param node_type: node type
        :param filters: filter by node config parameters.
        :return:
        """
        node_type_enum = NodeType(node_type)
        # return default block config
        if node_type_enum not in NODE_TYPE_CLASSES_MAPPING:
            return None
        node_class = NODE_TYPE_CLASSES_MAPPING[node_type_enum][LATEST_VERSION]
        default_config = node_class.get_default_config(filters=filters)
        if not default_config:
            return None
        return default_config

    def run_draft_workflow_node(
        self, pipeline: Pipeline, node_id: str, user_inputs: dict, account: Account
    ) -> WorkflowNodeExecution:
        """
        Run draft workflow node
        """
        # fetch draft workflow by app_model
        draft_workflow = self.get_draft_workflow(pipeline=pipeline)
        if not draft_workflow:
            raise ValueError("Workflow not initialized")
        # run draft workflow node
        start_at = time.perf_counter()
        workflow_node_execution = self._handle_node_run_result(
            getter=lambda: WorkflowEntry.single_step_run(
                workflow=draft_workflow,
                node_id=node_id,
                user_inputs=user_inputs,
                user_id=account.id,
            ),
            start_at=start_at,
            tenant_id=pipeline.tenant_id,
            node_id=node_id,
        )
        workflow_node_execution.app_id = pipeline.id
        workflow_node_execution.created_by = account.id
        workflow_node_execution.workflow_id = draft_workflow.id
        db.session.add(workflow_node_execution)
        db.session.commit()
        return workflow_node_execution

    def run_published_workflow_node(
        self, pipeline: Pipeline, node_id: str, user_inputs: dict, account: Account
    ) -> WorkflowNodeExecution:
        """
        Run published workflow node
        """
        # fetch published workflow by app_model
        published_workflow = self.get_published_workflow(pipeline=pipeline)
        if not published_workflow:
            raise ValueError("Workflow not initialized")
        # run published workflow node
        start_at = time.perf_counter()
        workflow_node_execution = self._handle_node_run_result(
            getter=lambda: WorkflowEntry.single_step_run(
                workflow=published_workflow,
                node_id=node_id,
                user_inputs=user_inputs,
                user_id=account.id,
            ),
            start_at=start_at,
            tenant_id=pipeline.tenant_id,
            node_id=node_id,
        )
        workflow_node_execution.app_id = pipeline.id
        workflow_node_execution.created_by = account.id
        workflow_node_execution.workflow_id = published_workflow.id
        db.session.add(workflow_node_execution)
        db.session.commit()
        return workflow_node_execution

    def run_datasource_workflow_node(
        self, pipeline: Pipeline, node_id: str, user_inputs: dict, account: Account, datasource_type: str
    ) -> dict:
        """
        Run published workflow datasource node
        """
        # fetch published workflow by app_model
        published_workflow = self.get_published_workflow(pipeline=pipeline)
        if not published_workflow:
            raise ValueError("Workflow not initialized")
        # run datasource node
        start_at = time.perf_counter()
        # locate the datasource node's data in the published workflow graph
        nodes = published_workflow.graph_dict.get("nodes", [])
        datasource_node_data = next(
            (node.get("data", {}) for node in nodes if node.get("id") == node_id),
            {},
        )
        if not datasource_node_data:
            raise ValueError("Datasource node data not found")

        from core.datasource.datasource_manager import DatasourceManager

        datasource_runtime = DatasourceManager.get_datasource_runtime(
            provider_id=datasource_node_data.get("provider_id"),
            datasource_name=datasource_node_data.get("datasource_name"),
            tenant_id=pipeline.tenant_id,
            datasource_type=DatasourceProviderType(datasource_type),
        )
        if datasource_runtime.datasource_provider_type() == DatasourceProviderType.ONLINE_DOCUMENT:
            datasource_runtime = cast(OnlineDocumentDatasourcePlugin, datasource_runtime)
            online_document_result: GetOnlineDocumentPagesResponse = datasource_runtime._get_online_document_pages(
                user_id=account.id,
                datasource_parameters=user_inputs,
                provider_type=datasource_runtime.datasource_provider_type(),
            )
            return {
                "result": [page.model_dump() for page in online_document_result.result],
                "provider_type": datasource_node_data.get("provider_type"),
            }
        elif datasource_runtime.datasource_provider_type() == DatasourceProviderType.WEBSITE_CRAWL:
            datasource_runtime = cast(WebsiteCrawlDatasourcePlugin, datasource_runtime)
            website_crawl_result: GetWebsiteCrawlResponse = datasource_runtime._get_website_crawl(
                user_id=account.id,
                datasource_parameters=user_inputs,
                provider_type=datasource_runtime.datasource_provider_type(),
            )
            return {
                "result": [result.model_dump() for result in website_crawl_result.result],
                "provider_type": datasource_node_data.get("provider_type"),
            }
        else:
            raise ValueError(f"Unsupported datasource provider: {datasource_runtime.datasource_provider_type()}")

    def run_free_workflow_node(
        self, node_data: dict, tenant_id: str, user_id: str, node_id: str, user_inputs: dict[str, Any]
    ) -> WorkflowNodeExecution:
        """
        Run free workflow node
        """
        # run free workflow node
        start_at = time.perf_counter()
        workflow_node_execution = self._handle_node_run_result(
            getter=lambda: WorkflowEntry.run_free_node(
                node_id=node_id,
                node_data=node_data,
                tenant_id=tenant_id,
                user_id=user_id,
                user_inputs=user_inputs,
            ),
            start_at=start_at,
            tenant_id=tenant_id,
            node_id=node_id,
        )
        return workflow_node_execution

    def _handle_node_run_result(
        self,
        getter: Callable[[], tuple[BaseNode, Generator[NodeEvent | InNodeEvent, None, None]]],
        start_at: float,
        tenant_id: str,
        node_id: str,
    ) -> WorkflowNodeExecution:
        """
        Handle node run result
        :param getter: Callable[[], tuple[BaseNode, Generator[NodeEvent | InNodeEvent, None, None]]]
        :param start_at: float
        :param tenant_id: str
        :param node_id: str
        """
        try:
            node_instance, generator = getter()
            node_run_result: NodeRunResult | None = None
            for event in generator:
                if isinstance(event, RunCompletedEvent):
                    node_run_result = event.run_result
                    # sign output files
                    node_run_result.outputs = WorkflowEntry.handle_special_values(node_run_result.outputs)
                    break
            if not node_run_result:
                raise ValueError("Node run failed with no run result")
            # single step debug mode error handling return
            if node_run_result.status == WorkflowNodeExecutionStatus.FAILED and node_instance.should_continue_on_error:
                node_error_args: dict[str, Any] = {
                    "status": WorkflowNodeExecutionStatus.EXCEPTION,
                    "error": node_run_result.error,
                    "inputs": node_run_result.inputs,
                    "metadata": {"error_strategy": node_instance.node_data.error_strategy},
                }
                if node_instance.node_data.error_strategy is ErrorStrategy.DEFAULT_VALUE:
                    node_run_result = NodeRunResult(
                        **node_error_args,
                        outputs={
                            **node_instance.node_data.default_value_dict,
                            "error_message": node_run_result.error,
                            "error_type": node_run_result.error_type,
                        },
                    )
                else:
                    node_run_result = NodeRunResult(
                        **node_error_args,
                        outputs={
                            "error_message": node_run_result.error,
                            "error_type": node_run_result.error_type,
                        },
                    )
            run_succeeded = node_run_result.status in (
                WorkflowNodeExecutionStatus.SUCCEEDED,
                WorkflowNodeExecutionStatus.EXCEPTION,
            )
            error = node_run_result.error if not run_succeeded else None
        except WorkflowNodeRunFailedError as e:
            node_instance = e.node_instance
            run_succeeded = False
            node_run_result = None
            error = e.error

        workflow_node_execution = WorkflowNodeExecution()
        workflow_node_execution.id = str(uuid4())
        workflow_node_execution.tenant_id = tenant_id
        workflow_node_execution.triggered_from = WorkflowNodeExecutionTriggeredFrom.SINGLE_STEP.value
        workflow_node_execution.index = 1
        workflow_node_execution.node_id = node_id
        workflow_node_execution.node_type = node_instance.node_type
        workflow_node_execution.title = node_instance.node_data.title
        workflow_node_execution.elapsed_time = time.perf_counter() - start_at
        workflow_node_execution.created_by_role = CreatorUserRole.ACCOUNT.value
        workflow_node_execution.created_at = datetime.now(UTC).replace(tzinfo=None)
        workflow_node_execution.finished_at = datetime.now(UTC).replace(tzinfo=None)
        if run_succeeded and node_run_result:
            # create workflow node execution
            inputs = WorkflowEntry.handle_special_values(node_run_result.inputs) if node_run_result.inputs else None
            process_data = (
                WorkflowEntry.handle_special_values(node_run_result.process_data)
                if node_run_result.process_data
                else None
            )
            outputs = WorkflowEntry.handle_special_values(node_run_result.outputs) if node_run_result.outputs else None
            workflow_node_execution.inputs = json.dumps(inputs)
            workflow_node_execution.process_data = json.dumps(process_data)
            workflow_node_execution.outputs = json.dumps(outputs)
            workflow_node_execution.execution_metadata = (
                json.dumps(jsonable_encoder(node_run_result.metadata)) if node_run_result.metadata else None
            )
            if node_run_result.status == WorkflowNodeExecutionStatus.SUCCEEDED:
                workflow_node_execution.status = WorkflowNodeExecutionStatus.SUCCEEDED.value
            elif node_run_result.status == WorkflowNodeExecutionStatus.EXCEPTION:
                workflow_node_execution.status = WorkflowNodeExecutionStatus.EXCEPTION.value
                workflow_node_execution.error = node_run_result.error
        else:
            # create workflow node execution
            workflow_node_execution.status = WorkflowNodeExecutionStatus.FAILED.value
            workflow_node_execution.error = error
        return workflow_node_execution

    def update_workflow(
        self, *, session: Session, workflow_id: str, tenant_id: str, account_id: str, data: dict
    ) -> Optional[Workflow]:
        """
        Update workflow attributes
        :param session: SQLAlchemy database session
        :param workflow_id: Workflow ID
        :param tenant_id: Tenant ID
        :param account_id: Account ID (for permission check)
        :param data: Dictionary containing fields to update
        :return: Updated workflow or None if not found
        """
        stmt = select(Workflow).where(Workflow.id == workflow_id, Workflow.tenant_id == tenant_id)
        workflow = session.scalar(stmt)
        if not workflow:
            return None
        allowed_fields = ["marked_name", "marked_comment"]
        for field, value in data.items():
            if field in allowed_fields:
                setattr(workflow, field, value)
        workflow.updated_by = account_id
        workflow.updated_at = datetime.now(UTC).replace(tzinfo=None)
        return workflow

    def get_published_second_step_parameters(self, pipeline: Pipeline, node_id: str) -> list[dict]:
        """
        Get second step parameters of rag pipeline
        """
        workflow = self.get_published_workflow(pipeline=pipeline)
        if not workflow:
            raise ValueError("Workflow not initialized")
        # get second step node
        rag_pipeline_variables = workflow.rag_pipeline_variables
        if not rag_pipeline_variables:
            return []
        # get datasource provider
        datasource_provider_variables = [
            item
            for item in rag_pipeline_variables
            if item.get("belong_to_node_id") == node_id or item.get("belong_to_node_id") == "shared"
        ]
        return datasource_provider_variables

    def get_draft_second_step_parameters(self, pipeline: Pipeline, node_id: str) -> list[dict]:
        """
        Get second step parameters of rag pipeline
        """
        workflow = self.get_draft_workflow(pipeline=pipeline)
        if not workflow:
            raise ValueError("Workflow not initialized")
        # get second step node
        rag_pipeline_variables = workflow.rag_pipeline_variables
        if not rag_pipeline_variables:
            return []
        # get datasource provider
        datasource_provider_variables = [
            item
            for item in rag_pipeline_variables
            if item.get("belong_to_node_id") == node_id or item.get("belong_to_node_id") == "shared"
        ]
        return datasource_provider_variables

    def get_rag_pipeline_paginate_workflow_runs(self, pipeline: Pipeline, args: dict) -> InfiniteScrollPagination:
        """
        Get debug workflow run list
        Only return triggered_from == debugging
        :param pipeline: pipeline
        :param args: request args
        """
        limit = int(args.get("limit", 20))
        base_query = db.session.query(WorkflowRun).filter(
            WorkflowRun.tenant_id == pipeline.tenant_id,
            WorkflowRun.app_id == pipeline.id,
            or_(
                WorkflowRun.triggered_from == WorkflowRunTriggeredFrom.RAG_PIPELINE_RUN.value,
                WorkflowRun.triggered_from == WorkflowRunTriggeredFrom.RAG_PIPELINE_DEBUGGING.value,
            ),
        )
        if args.get("last_id"):
            last_workflow_run = base_query.filter(
                WorkflowRun.id == args.get("last_id"),
            ).first()
            if not last_workflow_run:
                raise ValueError("Last workflow run not exists")
            workflow_runs = (
                base_query.filter(
                    WorkflowRun.created_at < last_workflow_run.created_at, WorkflowRun.id != last_workflow_run.id
                )
                .order_by(WorkflowRun.created_at.desc())
                .limit(limit)
                .all()
            )
        else:
            workflow_runs = base_query.order_by(WorkflowRun.created_at.desc()).limit(limit).all()
        has_more = False
        if len(workflow_runs) == limit:
            current_page_first_workflow_run = workflow_runs[-1]
            rest_count = base_query.filter(
                WorkflowRun.created_at < current_page_first_workflow_run.created_at,
                WorkflowRun.id != current_page_first_workflow_run.id,
            ).count()
            if rest_count > 0:
                has_more = True
        return InfiniteScrollPagination(data=workflow_runs, limit=limit, has_more=has_more)

    def get_rag_pipeline_workflow_run(self, pipeline: Pipeline, run_id: str) -> Optional[WorkflowRun]:
        """
        Get workflow run detail
        :param pipeline: pipeline
        :param run_id: workflow run id
        """
        workflow_run = (
            db.session.query(WorkflowRun)
            .filter(
                WorkflowRun.tenant_id == pipeline.tenant_id,
                WorkflowRun.app_id == pipeline.id,
                WorkflowRun.id == run_id,
            )
            .first()
        )
        return workflow_run

    def get_rag_pipeline_workflow_run_node_executions(
        self,
        pipeline: Pipeline,
        run_id: str,
        user: Account | EndUser,
    ) -> list[WorkflowNodeExecution]:
        """
        Get workflow run node execution list
        """
        workflow_run = self.get_rag_pipeline_workflow_run(pipeline, run_id)
        contexts.plugin_tool_providers.set({})
        contexts.plugin_tool_providers_lock.set(threading.Lock())
        if not workflow_run:
            return []
        # Use the repository to get the node executions
        repository = SQLAlchemyWorkflowNodeExecutionRepository(
            session_factory=db.engine,
            app_id=pipeline.id,
            user=user,
            triggered_from=None,
        )
        # Use the repository to get the node executions with ordering
        order_config = OrderConfig(order_by=["index"], order_direction="desc")
        node_executions = repository.get_by_workflow_run(
            workflow_run_id=run_id,
            order_config=order_config,
            triggered_from=WorkflowNodeExecutionTriggeredFrom.RAG_PIPELINE_RUN,
        )
        # Convert domain models to database models
        workflow_node_executions = [repository.to_db_model(node_execution) for node_execution in node_executions]
        return workflow_node_executions

    @classmethod
    def publish_customized_pipeline_template(cls, pipeline_id: str, args: dict):
        """
        Publish customized pipeline template
        """
        pipeline = db.session.query(Pipeline).filter(Pipeline.id == pipeline_id).first()
        if not pipeline:
            raise ValueError("Pipeline not found")
        if not pipeline.workflow_id:
            raise ValueError("Pipeline workflow not found")
        workflow = db.session.query(Workflow).filter(Workflow.id == pipeline.workflow_id).first()
        if not workflow:
            raise ValueError("Workflow not found")
        db.session.commit()
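

# ---------------------------------------------------------------------------
# Usage sketch (illustrative only, not part of the service API).
# A minimal example of how the service above might be driven, assuming a Flask
# application context, a logged-in `account` (Account) and an existing
# `pipeline` (Pipeline) record; the module path in the import and the variable
# names are hypothetical.
#
#   from extensions.ext_database import db
#   from services.rag_pipeline.rag_pipeline import RagPipelineService
#
#   service = RagPipelineService()
#   draft = service.get_draft_workflow(pipeline=pipeline)
#   if draft:
#       # promote the current draft to a published version
#       published = service.publish_workflow(
#           session=db.session, pipeline=pipeline, account=account
#       )
#       db.session.commit()
# ---------------------------------------------------------------------------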