@@ -7,6 +7,7 @@ def append_variables_recursively(
 ):
     """
     Append variables recursively
+    :param pool: variable pool to append variables to
     :param node_id: node id
     :param variable_key_list: variable key list
     :param variable_value: variable value
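The docstring now covers all four parameters. As a rough illustration of what a recursive append over a nested value looks like, here is a hypothetical sketch; the pool type, names, and behaviour are assumptions for illustration, not the project's implementation:

```python
from typing import Any


def append_recursively(pool: dict[tuple[str, ...], Any], node_id: str,
                       variable_key_list: list[str], variable_value: Any) -> None:
    # Hypothetical sketch: walk dict values one key at a time and record leaf
    # values in the pool under (node_id, *key_path). Not the project's code.
    if isinstance(variable_value, dict):
        for child_key, child_value in variable_value.items():
            append_recursively(pool, node_id, [*variable_key_list, child_key], child_value)
    else:
        pool[(node_id, *variable_key_list)] = variable_value


pool: dict[tuple[str, ...], Any] = {}
append_recursively(pool, "node-1", ["output"], {"text": "hi", "meta": {"tokens": 3}})
# pool == {("node-1", "output", "text"): "hi", ("node-1", "output", "meta", "tokens"): 3}
```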
@@ -300,7 +300,7 @@ class WorkflowEntry:
             return node_instance, generator
         except Exception as e:
             logger.exception(
-                "error while running node_instance, workflow_id=%s, node_id=%s, type=%s, version=%s",
+                "error while running node_instance, node_id=%s, type=%s, version=%s",
                 node_instance.id,
                 node_instance.node_type,
                 node_instance.version(),
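The old format string expected four values but only three are passed, so the logging module's lazy %-formatting fails at emit time and the original message is lost. A minimal standalone sketch of the failure mode (names and values are illustrative):

```python
import logging

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)

node_id, node_type, version = "node-1", "llm", "1"  # illustrative values

try:
    raise RuntimeError("boom")
except Exception:
    # Four %s placeholders but only three arguments: the handler prints
    # "--- Logging error ---" with a TypeError instead of the intended message.
    logger.exception(
        "error while running node_instance, workflow_id=%s, node_id=%s, type=%s, version=%s",
        node_id, node_type, version,
    )
    # With the placeholder count matched to the arguments, the record formats cleanly.
    logger.exception(
        "error while running node_instance, node_id=%s, type=%s, version=%s",
        node_id, node_type, version,
    )
```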
@@ -384,7 +384,7 @@ def get_file_type_by_mime_type(mime_type: str) -> FileType:
 class StorageKeyLoader:
     """FileKeyLoader load the storage key from database for a list of files.
-    This loader is batched, the
+    This loader is batched, the database query count is constant regardless of the input size.
     """

     def __init__(self, session: Session, tenant_id: str) -> None:
@@ -445,10 +445,10 @@ class StorageKeyLoader:
             if file.transfer_method in (FileTransferMethod.LOCAL_FILE, FileTransferMethod.REMOTE_URL):
                 upload_file_row = upload_files.get(model_id)
                 if upload_file_row is None:
-                    raise ValueError(...)
+                    raise ValueError(f"Upload file not found for id: {model_id}")
                 file._storage_key = upload_file_row.key
             elif file.transfer_method == FileTransferMethod.TOOL_FILE:
                 tool_file_row = tool_files.get(model_id)
                 if tool_file_row is None:
-                    raise ValueError(...)
+                    raise ValueError(f"Tool file not found for id: {model_id}")
                 file._storage_key = tool_file_row.file_key
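The `upload_files` and `tool_files` dicts used above are the batched lookups the class docstring promises: ids are collected first, then resolved with one `IN (...)` query per table. A minimal sketch of that pattern, using a stand-in SQLAlchemy model rather than the project's real upload-file model:

```python
from sqlalchemy import String, select
from sqlalchemy.orm import DeclarativeBase, Mapped, Session, mapped_column


class Base(DeclarativeBase):
    pass


class UploadFileRow(Base):  # hypothetical stand-in for the real upload file model
    __tablename__ = "upload_files"
    id: Mapped[str] = mapped_column(String, primary_key=True)
    key: Mapped[str] = mapped_column(String)


def load_upload_file_rows(session: Session, file_ids: list[str]) -> dict[str, UploadFileRow]:
    # One SELECT ... WHERE id IN (...) regardless of how many ids are passed in,
    # returning a dict keyed by id for O(1) lookups in the per-file loop.
    stmt = select(UploadFileRow).where(UploadFileRow.id.in_(file_ids))
    return {row.id: row for row in session.scalars(stmt)}
```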
@@ -586,6 +586,10 @@ class DatasetService:
                 )
             except ProviderTokenNotInitError:
                 # If we can't get the embedding model, preserve existing settings
+                logging.warning(
+                    f"Failed to initialize embedding model {data['embedding_model_provider']}/{data['embedding_model']}, "
+                    f"preserving existing settings"
+                )
                 if dataset.embedding_model_provider and dataset.embedding_model:
                     filtered_data["embedding_model_provider"] = dataset.embedding_model_provider
                     filtered_data["embedding_model"] = dataset.embedding_model