@@ -945,7 +945,7 @@ class DocumentRetryApi(DocumentResource):
                     raise DocumentAlreadyFinishedError()
                 retry_documents.append(document)
             except Exception as e:
-                logging.error(f"Document {document_id} retry failed: {str(e)}")
+                logging.exception(f"Document {document_id} retry failed: {str(e)}")
                 continue
         # retry document
         DocumentService.retry_document(dataset_id, retry_documents)
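
Note: every hunk in this change applies the same pattern: `logging.error(...)` / `logger.error(...)` calls inside `except` blocks become `logging.exception(...)` / `logger.exception(...)`. Both log at ERROR level, but `exception()` also records the active exception's traceback (it is shorthand for `error(..., exc_info=True)` and is only meaningful inside an exception handler). A minimal sketch of the difference, using a hypothetical `parse` helper:

    import logging

    logging.basicConfig(level=logging.INFO)

    def parse(raw: str) -> int:
        # Hypothetical helper; raises ValueError on bad input.
        return int(raw)

    try:
        parse("not-a-number")
    except ValueError as e:
        # Message only; the traceback is lost:
        logging.error(f"parse failed: {e}")
        # Same message at ERROR level, plus the full traceback:
        logging.exception(f"parse failed: {e}")
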
@@ -242,7 +242,7 @@ class AdvancedChatAppGenerateTaskPipeline(BasedGenerateTaskPipeline, WorkflowCyc
                     start_listener_time = time.time()
                     yield MessageAudioStreamResponse(audio=audio_trunk.audio, task_id=task_id)
             except Exception as e:
-                logger.error(e)
+                logger.exception(e)
                 break
         if tts_publisher:
             yield MessageAudioEndStreamResponse(audio="", task_id=task_id)
@@ -216,7 +216,7 @@ class WorkflowAppGenerateTaskPipeline(BasedGenerateTaskPipeline, WorkflowCycleMa
                 else:
                     yield MessageAudioStreamResponse(audio=audio_trunk.audio, task_id=task_id)
             except Exception as e:
-                logger.error(e)
+                logger.exception(e)
                 break
         if tts_publisher:
             yield MessageAudioEndStreamResponse(audio="", task_id=task_id)
@@ -47,9 +47,9 @@ class AzureRerankModel(RerankModel):
             result = response.read()
             return json.loads(result)
         except urllib.error.HTTPError as error:
-            logger.error(f"The request failed with status code: {error.code}")
-            logger.error(error.info())
-            logger.error(error.read().decode("utf8", "ignore"))
+            logger.exception(f"The request failed with status code: {error.code}")
+            logger.exception(error.info())
+            logger.exception(error.read().decode("utf8", "ignore"))
             raise
 
     def _invoke(
@@ -126,6 +126,6 @@ class OutputModeration(BaseModel):
             result: ModerationOutputsResult = moderation_factory.moderation_for_outputs(moderation_buffer)
             return result
         except Exception as e:
-            logger.error("Moderation Output error: %s", e)
+            logger.exception("Moderation Output error: %s", e)
 
         return None
@@ -708,7 +708,7 @@ class TraceQueueManager:
                 trace_task.app_id = self.app_id
                 trace_manager_queue.put(trace_task)
         except Exception as e:
-            logging.error(f"Error adding trace task: {e}")
+            logging.exception(f"Error adding trace task: {e}")
         finally:
             self.start_timer()
 
@@ -727,7 +727,7 @@ class TraceQueueManager:
             if tasks:
                 self.send_to_celery(tasks)
         except Exception as e:
-            logging.error(f"Error processing trace tasks: {e}")
+            logging.exception(f"Error processing trace tasks: {e}")
 
     def start_timer(self):
         global trace_manager_timer
@@ -242,7 +242,7 @@ class CouchbaseVector(BaseVector):
         try:
             self._cluster.query(query, named_parameters={"doc_ids": ids}).execute()
         except Exception as e:
-            logger.error(e)
+            logger.exception(e)
 
     def delete_by_document_id(self, document_id: str):
         query = f"""
@@ -79,7 +79,7 @@ class LindormVectorStore(BaseVector):
                 existing_docs = self._client.mget(index=self._collection_name, body={"ids": batch_ids}, _source=False)
                 return {doc["_id"] for doc in existing_docs["docs"] if doc["found"]}
             except Exception as e:
-                logger.error(f"Error fetching batch {batch_ids}: {e}")
+                logger.exception(f"Error fetching batch {batch_ids}: {e}")
                 return set()
 
         @retry(stop=stop_after_attempt(3), wait=wait_fixed(60))
@@ -96,7 +96,7 @@ class LindormVectorStore(BaseVector):
                 )
                 return {doc["_id"] for doc in existing_docs["docs"] if doc["found"]}
             except Exception as e:
-                logger.error(f"Error fetching batch {batch_ids}: {e}")
+                logger.exception(f"Error fetching batch {batch_ids}: {e}")
                 return set()
 
         if ids is None:
@@ -177,7 +177,7 @@ class LindormVectorStore(BaseVector):
             else:
                 logger.warning(f"Index '{self._collection_name}' does not exist. No deletion performed.")
         except Exception as e:
-            logger.error(f"Error occurred while deleting the index: {e}")
+            logger.exception(f"Error occurred while deleting the index: {e}")
             raise e
 
     def text_exists(self, id: str) -> bool:
@@ -201,7 +201,7 @@ class LindormVectorStore(BaseVector):
         try:
             response = self._client.search(index=self._collection_name, body=query)
         except Exception as e:
-            logger.error(f"Error executing search: {e}")
+            logger.exception(f"Error executing search: {e}")
             raise
 
         docs_and_scores = []
@@ -86,7 +86,7 @@ class MilvusVector(BaseVector):
                 ids = self._client.insert(collection_name=self._collection_name, data=batch_insert_list)
                 pks.extend(ids)
             except MilvusException as e:
-                logger.error("Failed to insert batch starting at entity: %s/%s", i, total_count)
+                logger.exception("Failed to insert batch starting at entity: %s/%s", i, total_count)
                 raise e
 
         return pks
@@ -142,7 +142,7 @@ class MyScaleVector(BaseVector):
                 for r in self._client.query(sql).named_results()
             ]
         except Exception as e:
-            logging.error(f"\033[91m\033[1m{type(e)}\033[0m \033[95m{str(e)}\033[0m")
+            logging.exception(f"\033[91m\033[1m{type(e)}\033[0m \033[95m{str(e)}\033[0m")
             return []
 
     def delete(self) -> None:
@@ -129,7 +129,7 @@ class OpenSearchVector(BaseVector):
             if status == 404:
                 logger.warning(f"Document not found for deletion: {doc_id}")
             else:
-                logger.error(f"Error deleting document: {error}")
+                logger.exception(f"Error deleting document: {error}")
 
     def delete(self) -> None:
         self._client.indices.delete(index=self._collection_name.lower())
@@ -158,7 +158,7 @@ class OpenSearchVector(BaseVector):
         try:
             response = self._client.search(index=self._collection_name.lower(), body=query)
         except Exception as e:
-            logger.error(f"Error executing search: {e}")
+            logger.exception(f"Error executing search: {e}")
             raise
 
         docs = []
@@ -89,7 +89,7 @@ class CacheEmbedding(Embeddings):
                     db.session.rollback()
                 except Exception as ex:
                     db.session.rollback()
-                    logger.error("Failed to embed documents: %s", ex)
+                    logger.exception("Failed to embed documents: %s", ex)
                     raise ex
 
         return text_embeddings
@@ -230,7 +230,7 @@ class WordExtractor(BaseExtractor):
                         for i in url_pattern.findall(x.text):
                             hyperlinks_url = str(i)
                 except Exception as e:
-                    logger.error(e)
+                    logger.exception(e)
 
         def parse_paragraph(paragraph):
             paragraph_content = []
@@ -98,7 +98,7 @@ class ToolFileManager:
             response.raise_for_status()
             blob = response.content
         except Exception as e:
-            logger.error(f"Failed to download file from {file_url}: {e}")
+            logger.exception(f"Failed to download file from {file_url}: {e}")
             raise
 
         mimetype = guess_type(file_url)[0] or "octet/stream"
@@ -388,7 +388,7 @@ class ToolManager:
                 yield provider
             except Exception as e:
-                logger.error(f"load builtin provider {provider} error: {e}")
+                logger.exception(f"load builtin provider {provider} error: {e}")
                 continue
 
         # set builtin providers loaded
         cls._builtin_providers_loaded = True
@@ -69,7 +69,7 @@ class BaseNode(Generic[GenericNodeData]):
         try:
             result = self._run()
         except Exception as e:
-            logger.error(f"Node {self.node_id} failed to run: {e}")
+            logger.exception(f"Node {self.node_id} failed to run: {e}")
             result = NodeRunResult(
                 status=WorkflowNodeExecutionStatus.FAILED,
                 error=str(e),
@@ -127,7 +127,7 @@ class QuestionClassifierNode(LLMNode):
                     category_id = category_id_result
 
             except OutputParserError:
-                logging.error(f"Failed to parse result text: {result_text}")
+                logging.exception(f"Failed to parse result text: {result_text}")
             try:
                 process_data = {
                     "model_mode": model_config.mode,
@@ -39,13 +39,13 @@ class SMTPClient:
 
             smtp.sendmail(self._from, mail["to"], msg.as_string())
         except smtplib.SMTPException as e:
-            logging.error(f"SMTP error occurred: {str(e)}")
+            logging.exception(f"SMTP error occurred: {str(e)}")
             raise
         except TimeoutError as e:
-            logging.error(f"Timeout occurred while sending email: {str(e)}")
+            logging.exception(f"Timeout occurred while sending email: {str(e)}")
             raise
         except Exception as e:
-            logging.error(f"Unexpected error occurred while sending email: {str(e)}")
+            logging.exception(f"Unexpected error occurred while sending email: {str(e)}")
             raise
         finally:
             if smtp:
@@ -34,6 +34,7 @@ select = [
     "RUF101", # redirected-noqa
     "S506", # unsafe-yaml-load
     "SIM", # flake8-simplify rules
+    "TRY400", # error-instead-of-exception
     "UP", # pyupgrade rules
     "W191", # tab-indentation
     "W605", # invalid-escape-sequence
@@ -821,7 +821,7 @@ class RegisterService:
             db.session.rollback()
         except Exception as e:
             db.session.rollback()
-            logging.error(f"Register failed: {e}")
+            logging.exception(f"Register failed: {e}")
             raise AccountRegisterError(f"Registration failed: {e}") from e
 
         return account
@@ -193,7 +193,7 @@ class ApiToolManageService:
             # try to parse schema, avoid SSRF attack
             ApiToolManageService.parser_api_schema(schema)
         except Exception as e:
-            logger.error(f"parse api schema error: {str(e)}")
+            logger.exception(f"parse api schema error: {str(e)}")
             raise ValueError("invalid schema, please check the url you provided")
 
         return {"schema": schema}
@@ -183,7 +183,7 @@ class ToolTransformService:
         try:
             username = db_provider.user.name
         except Exception as e:
-            logger.error(f"failed to get user name for api provider {db_provider.id}: {str(e)}")
+            logger.exception(f"failed to get user name for api provider {db_provider.id}: {str(e)}")
         # add provider into providers
         credentials = db_provider.credentials
         result = UserToolProvider(