
Chore: correct inconsistent logging and typo (#25945)

tags/1.9.0
Yongtao Huang · 1 month ago
Parent
Current commit
b3dafd913b

api/core/repositories/sqlalchemy_workflow_node_execution_repository.py  +1 -3

@@ -417,12 +417,10 @@ class SQLAlchemyWorkflowNodeExecutionRepository(WorkflowNodeExecutionRepository)

        if db_model is not None:
            offload_data = db_model.offload_data

        else:
            db_model = self._to_db_model(domain_model)
            offload_data = []
            offload_data = db_model.offload_data

        offload_data = db_model.offload_data
        if domain_model.inputs is not None:
            result = self._truncate_and_upload(
                domain_model.inputs,
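The hunk above drops the redundant branch-specific offload_data assignments (the else branch set [] and then immediately overwrote it) so the value is read from db_model in one place. A minimal, self-contained sketch of that pattern; FakeDBModel and resolve_offload_data below are placeholders for illustration, not Dify code:

# Illustrative sketch only: reuse an existing record when one is found,
# otherwise build it, then read offload_data from a single place.
from dataclasses import dataclass, field


@dataclass
class FakeDBModel:
    offload_data: list = field(default_factory=list)


def resolve_offload_data(existing: FakeDBModel | None) -> list:
    db_model = existing if existing is not None else FakeDBModel()
    # One assignment after the branch, instead of one copy per branch.
    return db_model.offload_data


print(resolve_offload_data(None))                      # []
print(resolve_offload_data(FakeDBModel(["chunk-1"])))  # ['chunk-1']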

api/core/workflow/graph_engine/layers/execution_limits.py  +1 -1

@@ -147,4 +147,4 @@ class ExecutionLimitsLayer(GraphEngineLayer):
             self.logger.debug("Abort command sent to engine")
 
         except Exception:
-            self.logger.exception("Failed to send abort command: %s")
+            self.logger.exception("Failed to send abort command")

api/services/rag_pipeline/rag_pipeline.py  +7 -7

@@ -1327,14 +1327,14 @@ class RagPipelineService:
         """
         Retry error document
         """
-        document_pipeline_excution_log = (
+        document_pipeline_execution_log = (
             db.session.query(DocumentPipelineExecutionLog)
             .where(DocumentPipelineExecutionLog.document_id == document.id)
             .first()
         )
-        if not document_pipeline_excution_log:
+        if not document_pipeline_execution_log:
             raise ValueError("Document pipeline execution log not found")
-        pipeline = db.session.query(Pipeline).where(Pipeline.id == document_pipeline_excution_log.pipeline_id).first()
+        pipeline = db.session.query(Pipeline).where(Pipeline.id == document_pipeline_execution_log.pipeline_id).first()
         if not pipeline:
             raise ValueError("Pipeline not found")
         # convert to app config
@@ -1346,10 +1346,10 @@ class RagPipelineService:
                 workflow=workflow,
                 user=user,
                 args={
-                    "inputs": document_pipeline_excution_log.input_data,
-                    "start_node_id": document_pipeline_excution_log.datasource_node_id,
-                    "datasource_type": document_pipeline_excution_log.datasource_type,
-                    "datasource_info_list": [json.loads(document_pipeline_excution_log.datasource_info)],
+                    "inputs": document_pipeline_execution_log.input_data,
+                    "start_node_id": document_pipeline_execution_log.datasource_node_id,
+                    "datasource_type": document_pipeline_execution_log.datasource_type,
+                    "datasource_info_list": [json.loads(document_pipeline_execution_log.datasource_info)],
                     "original_document_id": document.id,
                 },
                 invoke_from=InvokeFrom.PUBLISHED,

api/services/rag_pipeline/rag_pipeline_transform_service.py  +3 -3

@@ -38,11 +38,11 @@ class RagPipelineTransformService:
         indexing_technique = dataset.indexing_technique
 
         if not datasource_type and not indexing_technique:
-            return self._transfrom_to_empty_pipeline(dataset)
+            return self._transform_to_empty_pipeline(dataset)
 
         doc_form = dataset.doc_form
         if not doc_form:
-            return self._transfrom_to_empty_pipeline(dataset)
+            return self._transform_to_empty_pipeline(dataset)
         retrieval_model = dataset.retrieval_model
         pipeline_yaml = self._get_transform_yaml(doc_form, datasource_type, indexing_technique)
         # deal dependencies
@@ -263,7 +263,7 @@ class RagPipelineTransformService:
             logger.debug("Installing missing pipeline plugins %s", need_install_plugin_unique_identifiers)
             PluginService.install_from_marketplace_pkg(tenant_id, need_install_plugin_unique_identifiers)
 
-    def _transfrom_to_empty_pipeline(self, dataset: Dataset):
+    def _transform_to_empty_pipeline(self, dataset: Dataset):
         pipeline = Pipeline(
             tenant_id=dataset.tenant_id,
             name=dataset.name,
