
Enhance LLM model configuration validation to include active status c… (#25759)

Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
tags/1.9.0
Xiyuan Chen, 1 month ago
Commit 8635aacb46
1 changed file with 19 additions and 1 deletion

api/services/workflow_service.py (+19, -1)

@@ -375,13 +375,14 @@ class WorkflowService:
 
     def _validate_llm_model_config(self, tenant_id: str, provider: str, model_name: str) -> None:
         """
-        Validate that an LLM model configuration can fetch valid credentials.
+        Validate that an LLM model configuration can fetch valid credentials and has active status.
 
         This method attempts to get the model instance and validates that:
         1. The provider exists and is configured
         2. The model exists in the provider
         3. Credentials can be fetched for the model
         4. The credentials pass policy compliance checks
+        5. The model status is ACTIVE (not NO_CONFIGURE, DISABLED, etc.)
 
         :param tenant_id: The tenant ID
         :param provider: The provider name
@@ -391,6 +392,7 @@ class WorkflowService:
         try:
             from core.model_manager import ModelManager
             from core.model_runtime.entities.model_entities import ModelType
+            from core.provider_manager import ProviderManager
 
             # Get model instance to validate provider+model combination
             model_manager = ModelManager()
@@ -402,6 +404,22 @@ class WorkflowService:
             # via ProviderConfiguration.get_current_credentials() -> _check_credential_policy_compliance()
             # If it fails, an exception will be raised
 
+            # Additionally, check the model status to ensure it's ACTIVE
+            provider_manager = ProviderManager()
+            provider_configurations = provider_manager.get_configurations(tenant_id)
+            models = provider_configurations.get_models(provider=provider, model_type=ModelType.LLM)
+
+            target_model = None
+            for model in models:
+                if model.model == model_name and model.provider.provider == provider:
+                    target_model = model
+                    break
+
+            if target_model:
+                target_model.raise_for_status()
+            else:
+                raise ValueError(f"Model {model_name} not found for provider {provider}")
+
         except Exception as e:
             raise ValueError(
                 f"Failed to validate LLM model configuration (provider: {provider}, model: {model_name}): {str(e)}"
