@@ -3,7 +3,7 @@ import io
 import json
 import logging
 from collections.abc import Generator, Mapping, Sequence
-from typing import TYPE_CHECKING, Any, Optional, cast
+from typing import TYPE_CHECKING, Any, Optional
 
 from core.app.entities.app_invoke_entities import ModelConfigWithCredentialsEntity
 from core.file import FileType, file_manager
@@ -33,12 +33,10 @@ from core.model_runtime.entities.message_entities import (
     UserPromptMessage,
 )
 from core.model_runtime.entities.model_entities import (
-    AIModelEntity,
     ModelFeature,
     ModelPropertyKey,
     ModelType,
 )
-from core.model_runtime.model_providers.__base.large_language_model import LargeLanguageModel
 from core.model_runtime.utils.encoders import jsonable_encoder
 from core.prompt.entities.advanced_prompt_entities import CompletionModelPromptTemplate, MemoryConfig
 from core.prompt.utils.prompt_message_util import PromptMessageUtil
@@ -1006,21 +1004,6 @@ class LLMNode(BaseNode):
             )
         return saved_file
 
-    def _fetch_model_schema(self, provider: str) -> AIModelEntity | None:
-        """
-        Fetch model schema
-        """
-        model_name = self._node_data.model.name
-        model_manager = ModelManager()
-        model_instance = model_manager.get_model_instance(
-            tenant_id=self.tenant_id, model_type=ModelType.LLM, provider=provider, model=model_name
-        )
-        model_type_instance = model_instance.model_type_instance
-        model_type_instance = cast(LargeLanguageModel, model_type_instance)
-        model_credentials = model_instance.credentials
-        model_schema = model_type_instance.get_model_schema(model_name, model_credentials)
-        return model_schema
-
     @staticmethod
     def fetch_structured_output_schema(
         *,