  raise DocumentAlreadyFinishedError()
  retry_documents.append(document)
  except Exception as e:
- logging.error(f"Document {document_id} retry failed: {str(e)}")
+ logging.exception(f"Document {document_id} retry failed: {str(e)}")
  continue
  # retry document
  DocumentService.retry_document(dataset_id, retry_documents)

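The recurring change across these hunks replaces `logging.error`/`logger.error` with `logging.exception`/`logger.exception` inside `except` blocks, so the message is logged at ERROR level together with the active traceback instead of losing the stack trace. A minimal sketch of the difference, with an illustrative function name:

    import json
    import logging

    def parse_payload(raw: str) -> dict:
        try:
            return json.loads(raw)
        except json.JSONDecodeError:
            # logging.error(...) would record only the message;
            # logging.exception(...) logs at ERROR level and appends the traceback.
            logging.exception("Failed to parse payload: %s", raw)
            raise
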
  start_listener_time = time.time()
  yield MessageAudioStreamResponse(audio=audio_trunk.audio, task_id=task_id)
  except Exception as e:
- logger.error(e)
+ logger.exception(e)
  break
  if tts_publisher:
  yield MessageAudioEndStreamResponse(audio="", task_id=task_id)

  else:
  yield MessageAudioStreamResponse(audio=audio_trunk.audio, task_id=task_id)
  except Exception as e:
- logger.error(e)
+ logger.exception(e)
  break
  if tts_publisher:
  yield MessageAudioEndStreamResponse(audio="", task_id=task_id)

  result = response.read()
  return json.loads(result)
  except urllib.error.HTTPError as error:
- logger.error(f"The request failed with status code: {error.code}")
- logger.error(error.info())
- logger.error(error.read().decode("utf8", "ignore"))
+ logger.exception(f"The request failed with status code: {error.code}")
+ logger.exception(error.info())
+ logger.exception(error.read().decode("utf8", "ignore"))
  raise
  def _invoke(

  result: ModerationOutputsResult = moderation_factory.moderation_for_outputs(moderation_buffer)
  return result
  except Exception as e:
- logger.error("Moderation Output error: %s", e)
+ logger.exception("Moderation Output error: %s", e)
  return None

  trace_task.app_id = self.app_id
  trace_manager_queue.put(trace_task)
  except Exception as e:
- logging.error(f"Error adding trace task: {e}")
+ logging.exception(f"Error adding trace task: {e}")
  finally:
  self.start_timer()
  if tasks:
  self.send_to_celery(tasks)
  except Exception as e:
- logging.error(f"Error processing trace tasks: {e}")
+ logging.exception(f"Error processing trace tasks: {e}")
  def start_timer(self):
  global trace_manager_timer

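One caveat that applies to hunks like the trace-manager one above: `logging.exception` only has a traceback to attach while an exception is actually being handled; called outside an `except` block it still logs the message, but the traceback portion reads `NoneType: None`. A small illustrative check:

    import logging

    def demo() -> None:
        try:
            1 / 0
        except ZeroDivisionError:
            logging.exception("inside handler")   # message plus ZeroDivisionError traceback
        logging.exception("outside handler")      # message plus "NoneType: None"

    demo()
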
  try:
  self._cluster.query(query, named_parameters={"doc_ids": ids}).execute()
  except Exception as e:
- logger.error(e)
+ logger.exception(e)
  def delete_by_document_id(self, document_id: str):
  query = f"""

  existing_docs = self._client.mget(index=self._collection_name, body={"ids": batch_ids}, _source=False)
  return {doc["_id"] for doc in existing_docs["docs"] if doc["found"]}
  except Exception as e:
- logger.error(f"Error fetching batch {batch_ids}: {e}")
+ logger.exception(f"Error fetching batch {batch_ids}: {e}")
  return set()
  @retry(stop=stop_after_attempt(3), wait=wait_fixed(60))

  )
  return {doc["_id"] for doc in existing_docs["docs"] if doc["found"]}
  except Exception as e:
- logger.error(f"Error fetching batch {batch_ids}: {e}")
+ logger.exception(f"Error fetching batch {batch_ids}: {e}")
  return set()

  if ids is None:
  else:
  logger.warning(f"Index '{self._collection_name}' does not exist. No deletion performed.")
  except Exception as e:
- logger.error(f"Error occurred while deleting the index: {e}")
+ logger.exception(f"Error occurred while deleting the index: {e}")
  raise e
  def text_exists(self, id: str) -> bool:

  try:
  response = self._client.search(index=self._collection_name, body=query)
  except Exception as e:
- logger.error(f"Error executing search: {e}")
+ logger.exception(f"Error executing search: {e}")
  raise
  docs_and_scores = []

  ids = self._client.insert(collection_name=self._collection_name, data=batch_insert_list)
  pks.extend(ids)
  except MilvusException as e:
- logger.error("Failed to insert batch starting at entity: %s/%s", i, total_count)
+ logger.exception("Failed to insert batch starting at entity: %s/%s", i, total_count)
  raise e
  return pks

  for r in self._client.query(sql).named_results()
  ]
  except Exception as e:
- logging.error(f"\033[91m\033[1m{type(e)}\033[0m \033[95m{str(e)}\033[0m")
+ logging.exception(f"\033[91m\033[1m{type(e)}\033[0m \033[95m{str(e)}\033[0m")
  return []
  def delete(self) -> None:

  if status == 404:
  logger.warning(f"Document not found for deletion: {doc_id}")
  else:
- logger.error(f"Error deleting document: {error}")
+ logger.exception(f"Error deleting document: {error}")
  def delete(self) -> None:
  self._client.indices.delete(index=self._collection_name.lower())
  try:
  response = self._client.search(index=self._collection_name.lower(), body=query)
  except Exception as e:
- logger.error(f"Error executing search: {e}")
+ logger.exception(f"Error executing search: {e}")
  raise
  docs = []

  db.session.rollback()
  except Exception as ex:
  db.session.rollback()
- logger.error("Failed to embed documents: %s", ex)
+ logger.exception("Failed to embed documents: %s", ex)
  raise ex
  return text_embeddings

  for i in url_pattern.findall(x.text):
  hyperlinks_url = str(i)
  except Exception as e:
- logger.error(e)
+ logger.exception(e)
  def parse_paragraph(paragraph):
  paragraph_content = []

  response.raise_for_status()
  blob = response.content
  except Exception as e:
- logger.error(f"Failed to download file from {file_url}: {e}")
+ logger.exception(f"Failed to download file from {file_url}: {e}")
  raise
  mimetype = guess_type(file_url)[0] or "octet/stream"

  yield provider
  except Exception as e:
- logger.error(f"load builtin provider {provider} error: {e}")
+ logger.exception(f"load builtin provider {provider} error: {e}")
  continue
  # set builtin providers loaded
  cls._builtin_providers_loaded = True

  try:
  result = self._run()
  except Exception as e:
- logger.error(f"Node {self.node_id} failed to run: {e}")
+ logger.exception(f"Node {self.node_id} failed to run: {e}")
  result = NodeRunResult(
  status=WorkflowNodeExecutionStatus.FAILED,
  error=str(e),

  category_id = category_id_result
  except OutputParserError:
- logging.error(f"Failed to parse result text: {result_text}")
+ logging.exception(f"Failed to parse result text: {result_text}")
  try:
  process_data = {
  "model_mode": model_config.mode,

  smtp.sendmail(self._from, mail["to"], msg.as_string())
  except smtplib.SMTPException as e:
- logging.error(f"SMTP error occurred: {str(e)}")
+ logging.exception(f"SMTP error occurred: {str(e)}")
  raise
  except TimeoutError as e:
- logging.error(f"Timeout occurred while sending email: {str(e)}")
+ logging.exception(f"Timeout occurred while sending email: {str(e)}")
  raise
  except Exception as e:
- logging.error(f"Unexpected error occurred while sending email: {str(e)}")
+ logging.exception(f"Unexpected error occurred while sending email: {str(e)}")
  raise
  finally:
  if smtp:

| "RUF101", # redirected-noqa | "RUF101", # redirected-noqa | ||||
| "S506", # unsafe-yaml-load | "S506", # unsafe-yaml-load | ||||
| "SIM", # flake8-simplify rules | "SIM", # flake8-simplify rules | ||||
| "TRY400", # error-instead-of-exception | |||||
| "UP", # pyupgrade rules | "UP", # pyupgrade rules | ||||
| "W191", # tab-indentation | "W191", # tab-indentation | ||||
| "W605", # invalid-escape-sequence | "W605", # invalid-escape-sequence |
  db.session.rollback()
  except Exception as e:
  db.session.rollback()
- logging.error(f"Register failed: {e}")
+ logging.exception(f"Register failed: {e}")
  raise AccountRegisterError(f"Registration failed: {e}") from e
  return account

  # try to parse schema, avoid SSRF attack
  ApiToolManageService.parser_api_schema(schema)
  except Exception as e:
- logger.error(f"parse api schema error: {str(e)}")
+ logger.exception(f"parse api schema error: {str(e)}")
  raise ValueError("invalid schema, please check the url you provided")
  return {"schema": schema}

  try:
  username = db_provider.user.name
  except Exception as e:
- logger.error(f"failed to get user name for api provider {db_provider.id}: {str(e)}")
+ logger.exception(f"failed to get user name for api provider {db_provider.id}: {str(e)}")
  # add provider into providers
  credentials = db_provider.credentials
  result = UserToolProvider(