Browse Source

fix(workflow): Take back LLM streaming output after IF-ELSE (#9875)

tags/0.10.2
-LAN- 1 year ago
parent
commit
72ea3d6b98
No account linked to committer's email address

+ 6
- 7
api/core/workflow/graph_engine/graph_engine.py View File

yield GraphRunStartedEvent()

try:
stream_processor_cls: type[AnswerStreamProcessor | EndStreamProcessor]
if self.init_params.workflow_type == WorkflowType.CHAT:
stream_processor_cls = AnswerStreamProcessor
stream_processor = AnswerStreamProcessor(
graph=self.graph, variable_pool=self.graph_runtime_state.variable_pool
)
else:
stream_processor_cls = EndStreamProcessor

stream_processor = stream_processor_cls(
graph=self.graph, variable_pool=self.graph_runtime_state.variable_pool
)
stream_processor = EndStreamProcessor(
graph=self.graph, variable_pool=self.graph_runtime_state.variable_pool
)

# run graph
generator = stream_processor.process(self._run(start_node_id=self.graph.root_node_id))

+ 4
- 4
api/core/workflow/nodes/answer/answer_stream_generate_router.py View File

source_node_id = edge.source_node_id
source_node_type = node_id_config_mapping[source_node_id].get("data", {}).get("type")
if source_node_type in {
NodeType.ANSWER.value,
NodeType.IF_ELSE.value,
NodeType.QUESTION_CLASSIFIER.value,
NodeType.ITERATION.value,
NodeType.ANSWER,
NodeType.IF_ELSE,
NodeType.QUESTION_CLASSIFIER,
NodeType.ITERATION,
}:
answer_dependencies[answer_node_id].append(source_node_id)
else:

+ 1
- 1
api/core/workflow/nodes/answer/answer_stream_processor.py View File

super().__init__(graph, variable_pool)
self.generate_routes = graph.answer_stream_generate_routes
self.route_position = {}
for answer_node_id, route_chunks in self.generate_routes.answer_generate_route.items():
for answer_node_id in self.generate_routes.answer_generate_route:
self.route_position[answer_node_id] = 0
self.current_stream_chunk_generating_node_ids: dict[str, list[str]] = {}



+ 0
- 1
api/core/workflow/nodes/answer/base_stream_processor.py View File

continue
else:
unreachable_first_node_ids.append(edge.target_node_id)
unreachable_first_node_ids.extend(self._fetch_node_ids_in_reachable_branch(edge.target_node_id))


for node_id in unreachable_first_node_ids:
self._remove_node_ids_in_unreachable_branch(node_id, reachable_node_ids)

+ 2
- 1
api/core/workflow/nodes/answer/entities.py View File

from collections.abc import Sequence
from enum import Enum


from pydantic import BaseModel, Field


type: GenerateRouteChunk.ChunkType = GenerateRouteChunk.ChunkType.VAR
"""generate route chunk type"""
value_selector: list[str] = Field(..., description="value selector")
value_selector: Sequence[str] = Field(..., description="value selector")




class TextGenerateRouteChunk(GenerateRouteChunk):

Loading…
Cancel
Save