|
|
|
|
|
|
|
|
# get all metadata fields
|
|
# get all metadata fields
|
|
metadata_fields = db.session.query(DatasetMetadata).filter(DatasetMetadata.dataset_id.in_(dataset_ids)).all() |
|
|
metadata_fields = db.session.query(DatasetMetadata).filter(DatasetMetadata.dataset_id.in_(dataset_ids)).all() |
|
|
all_metadata_fields = [metadata_field.name for metadata_field in metadata_fields] |
|
|
all_metadata_fields = [metadata_field.name for metadata_field in metadata_fields] |
|
|
|
|
|
if node_data.metadata_model_config is None: |
|
|
|
|
|
raise ValueError("metadata_model_config is required") |
|
|
# get metadata model instance and fetch model config |
|
|
# get metadata model instance and fetch model config |
|
|
model_instance, model_config = self.get_model_config(node_data.metadata_model_config) |
|
|
model_instance, model_config = self.get_model_config(node_data.metadata_model_config) |
|
|
# fetch prompt messages |
|
|
# fetch prompt messages |
|
|
|
|
|
|
|
|
) |
|
|
) |
|
|
|
|
|
|
|
|
def _get_prompt_template(self, node_data: KnowledgeRetrievalNodeData, metadata_fields: list, query: str): |
|
|
def _get_prompt_template(self, node_data: KnowledgeRetrievalNodeData, metadata_fields: list, query: str): |
|
|
model_mode = ModelMode(node_data.metadata_model_config.mode) |
|
|
|
|
|
|
|
|
model_mode = ModelMode(node_data.metadata_model_config.mode) # type: ignore |
|
|
input_text = query |
|
|
input_text = query |
|
|
|
|
|
|
|
|
prompt_messages: list[LLMNodeChatModelMessage] = [] |
|
|
prompt_messages: list[LLMNodeChatModelMessage] = [] |