
refactor: Change _queue_manager to public attribute queue_manager in task pipelines (#23747)

tags/1.8.0
-LAN- 2 months ago
parent
commit 332e8e68ee
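
The whole change is a rename of the attribute under which the base task pipeline stores its queue manager: the leading underscore is dropped, so the pipelines that compose it (and reach it through _base_task_pipeline) no longer touch a name that is private by convention. A minimal sketch of the pattern, using simplified stand-in classes rather than the real Dify types (SimpleQueueManager, BasePipeline and ChatPipeline below are illustrative only):

from queue import Queue


class SimpleQueueManager:
    """Stand-in for a queue manager: a thin wrapper around a queue of events."""

    def __init__(self) -> None:
        self._queue: Queue = Queue()

    def publish(self, event: object) -> None:
        self._queue.put(event)

    def listen(self):
        while not self._queue.empty():
            yield self._queue.get()


class BasePipeline:
    """Stand-in for the base task pipeline after the refactor."""

    def __init__(self, queue_manager: SimpleQueueManager) -> None:
        # Public attribute: collaborating pipelines publish and listen through it directly.
        self.queue_manager = queue_manager


class ChatPipeline:
    """Stand-in for a pipeline that composes the base pipeline."""

    def __init__(self, base: BasePipeline) -> None:
        self._base_task_pipeline = base

    def stop(self) -> None:
        # Before the refactor this call site read `_queue_manager`, reaching
        # across the class boundary into a conventionally private name.
        self._base_task_pipeline.queue_manager.publish("stop")


if __name__ == "__main__":
    base = BasePipeline(SimpleQueueManager())
    ChatPipeline(base).stop()
    print(list(base.queue_manager.listen()))  # ['stop']

The hunks below apply the same rename at the definition site and at every call site.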

api/core/app/apps/advanced_chat/generate_task_pipeline.py (+5 -5)

        )

        yield workflow_finish_resp
-       self._base_task_pipeline._queue_manager.publish(QueueAdvancedChatMessageEndEvent(), PublishFrom.TASK_PIPELINE)
+       self._base_task_pipeline.queue_manager.publish(QueueAdvancedChatMessageEndEvent(), PublishFrom.TASK_PIPELINE)

    def _handle_workflow_partial_success_event(
        self,

        )

        yield workflow_finish_resp
-       self._base_task_pipeline._queue_manager.publish(QueueAdvancedChatMessageEndEvent(), PublishFrom.TASK_PIPELINE)
+       self._base_task_pipeline.queue_manager.publish(QueueAdvancedChatMessageEndEvent(), PublishFrom.TASK_PIPELINE)

    def _handle_workflow_failed_event(
        self,

        # Initialize graph runtime state
        graph_runtime_state: Optional[GraphRuntimeState] = None

-       for queue_message in self._base_task_pipeline._queue_manager.listen():
+       for queue_message in self._base_task_pipeline.queue_manager.listen():
            event = queue_message.event

            match event:

        if self._base_task_pipeline._output_moderation_handler:
            if self._base_task_pipeline._output_moderation_handler.should_direct_output():
                self._task_state.answer = self._base_task_pipeline._output_moderation_handler.get_final_output()
-               self._base_task_pipeline._queue_manager.publish(
+               self._base_task_pipeline.queue_manager.publish(
                    QueueTextChunkEvent(text=self._task_state.answer), PublishFrom.TASK_PIPELINE
                )

-               self._base_task_pipeline._queue_manager.publish(
+               self._base_task_pipeline.queue_manager.publish(
                    QueueStopEvent(stopped_by=QueueStopEvent.StopBy.OUTPUT_MODERATION), PublishFrom.TASK_PIPELINE
                )
                return True

api/core/app/apps/workflow/generate_task_pipeline.py (+1 -1)

        # Initialize graph runtime state
        graph_runtime_state = None

-       for queue_message in self._base_task_pipeline._queue_manager.listen():
+       for queue_message in self._base_task_pipeline.queue_manager.listen():
            event = queue_message.event

            match event:

api/core/app/task_pipeline/based_generate_task_pipeline.py (+2 -2)

        stream: bool,
    ) -> None:
        self._application_generate_entity = application_generate_entity
-       self._queue_manager = queue_manager
+       self.queue_manager = queue_manager
        self._start_at = time.perf_counter()
        self._output_moderation_handler = self._init_output_moderation()
        self._stream = stream

                tenant_id=app_config.tenant_id,
                app_id=app_config.app_id,
                rule=ModerationRule(type=sensitive_word_avoidance.type, config=sensitive_word_avoidance.config),
-               queue_manager=self._queue_manager,
+               queue_manager=self.queue_manager,
            )
        return None



api/core/app/task_pipeline/easy_ui_based_generate_task_pipeline.py (+3 -3)

        Process stream response.
        :return:
        """
-       for message in self._queue_manager.listen():
+       for message in self.queue_manager.listen():
            if publisher:
                publisher.publish(message)
            event = message.event

            if self._output_moderation_handler.should_direct_output():
                # stop subscribe new token when output moderation should direct output
                self._task_state.llm_result.message.content = self._output_moderation_handler.get_final_output()
-               self._queue_manager.publish(
+               self.queue_manager.publish(
                    QueueLLMChunkEvent(
                        chunk=LLMResultChunk(
                            model=self._task_state.llm_result.model,

                    PublishFrom.TASK_PIPELINE,
                )

-               self._queue_manager.publish(
+               self.queue_manager.publish(
                    QueueStopEvent(stopped_by=QueueStopEvent.StopBy.OUTPUT_MODERATION), PublishFrom.TASK_PIPELINE
                )
                return True
