diff --git a/api/controllers/console/datasets/datasets_document.py b/api/controllers/console/datasets/datasets_document.py
index 8e784dc70b..6d6886fcb8 100644
--- a/api/controllers/console/datasets/datasets_document.py
+++ b/api/controllers/console/datasets/datasets_document.py
@@ -945,7 +945,7 @@ class DocumentRetryApi(DocumentResource):
                     raise DocumentAlreadyFinishedError()
                 retry_documents.append(document)
             except Exception as e:
-                logging.error(f"Document {document_id} retry failed: {str(e)}")
+                logging.exception(f"Document {document_id} retry failed: {str(e)}")
                 continue
         # retry document
         DocumentService.retry_document(dataset_id, retry_documents)
diff --git a/api/core/app/apps/advanced_chat/generate_task_pipeline.py b/api/core/app/apps/advanced_chat/generate_task_pipeline.py
index 1fc7ffe2c7..1d4c0ea0fa 100644
--- a/api/core/app/apps/advanced_chat/generate_task_pipeline.py
+++ b/api/core/app/apps/advanced_chat/generate_task_pipeline.py
@@ -242,7 +242,7 @@ class AdvancedChatAppGenerateTaskPipeline(BasedGenerateTaskPipeline, WorkflowCyc
                     start_listener_time = time.time()
                     yield MessageAudioStreamResponse(audio=audio_trunk.audio, task_id=task_id)
             except Exception as e:
-                logger.error(e)
+                logger.exception(e)
                 break
         if tts_publisher:
             yield MessageAudioEndStreamResponse(audio="", task_id=task_id)
diff --git a/api/core/app/apps/workflow/generate_task_pipeline.py b/api/core/app/apps/workflow/generate_task_pipeline.py
index d119d94a61..aaa4824fe8 100644
--- a/api/core/app/apps/workflow/generate_task_pipeline.py
+++ b/api/core/app/apps/workflow/generate_task_pipeline.py
@@ -216,7 +216,7 @@ class WorkflowAppGenerateTaskPipeline(BasedGenerateTaskPipeline, WorkflowCycleMa
                 else:
                     yield MessageAudioStreamResponse(audio=audio_trunk.audio, task_id=task_id)
             except Exception as e:
-                logger.error(e)
+                logger.exception(e)
                 break
         if tts_publisher:
             yield MessageAudioEndStreamResponse(audio="", task_id=task_id)
diff --git a/api/core/model_runtime/model_providers/azure_ai_studio/rerank/rerank.py b/api/core/model_runtime/model_providers/azure_ai_studio/rerank/rerank.py
index b6dfb20b66..84672520e0 100644
--- a/api/core/model_runtime/model_providers/azure_ai_studio/rerank/rerank.py
+++ b/api/core/model_runtime/model_providers/azure_ai_studio/rerank/rerank.py
@@ -47,9 +47,9 @@ class AzureRerankModel(RerankModel):
                 result = response.read()
                 return json.loads(result)
         except urllib.error.HTTPError as error:
-            logger.error(f"The request failed with status code: {error.code}")
-            logger.error(error.info())
-            logger.error(error.read().decode("utf8", "ignore"))
+            logger.exception(f"The request failed with status code: {error.code}")
+            logger.exception(error.info())
+            logger.exception(error.read().decode("utf8", "ignore"))
             raise

     def _invoke(
diff --git a/api/core/moderation/output_moderation.py b/api/core/moderation/output_moderation.py
index d8d794be18..83f4d2d57d 100644
--- a/api/core/moderation/output_moderation.py
+++ b/api/core/moderation/output_moderation.py
@@ -126,6 +126,6 @@ class OutputModeration(BaseModel):
             result: ModerationOutputsResult = moderation_factory.moderation_for_outputs(moderation_buffer)
             return result
         except Exception as e:
-            logger.error("Moderation Output error: %s", e)
+            logger.exception("Moderation Output error: %s", e)

         return None
diff --git a/api/core/ops/ops_trace_manager.py b/api/core/ops/ops_trace_manager.py
index 764944f799..986749f056 100644
--- a/api/core/ops/ops_trace_manager.py
+++ b/api/core/ops/ops_trace_manager.py
@@ -708,7 +708,7 @@ class TraceQueueManager:
                 trace_task.app_id = self.app_id
                 trace_manager_queue.put(trace_task)
         except Exception as e:
-            logging.error(f"Error adding trace task: {e}")
+            logging.exception(f"Error adding trace task: {e}")
         finally:
             self.start_timer()

@@ -727,7 +727,7 @@ class TraceQueueManager:
             if tasks:
                 self.send_to_celery(tasks)
         except Exception as e:
-            logging.error(f"Error processing trace tasks: {e}")
+            logging.exception(f"Error processing trace tasks: {e}")

     def start_timer(self):
         global trace_manager_timer
diff --git a/api/core/rag/datasource/vdb/couchbase/couchbase_vector.py b/api/core/rag/datasource/vdb/couchbase/couchbase_vector.py
index 3f88d2ca2b..98da5e3d5e 100644
--- a/api/core/rag/datasource/vdb/couchbase/couchbase_vector.py
+++ b/api/core/rag/datasource/vdb/couchbase/couchbase_vector.py
@@ -242,7 +242,7 @@ class CouchbaseVector(BaseVector):
         try:
             self._cluster.query(query, named_parameters={"doc_ids": ids}).execute()
         except Exception as e:
-            logger.error(e)
+            logger.exception(e)

     def delete_by_document_id(self, document_id: str):
         query = f"""
diff --git a/api/core/rag/datasource/vdb/lindorm/lindorm_vector.py b/api/core/rag/datasource/vdb/lindorm/lindorm_vector.py
index abd8261a69..30d7f09ec2 100644
--- a/api/core/rag/datasource/vdb/lindorm/lindorm_vector.py
+++ b/api/core/rag/datasource/vdb/lindorm/lindorm_vector.py
@@ -79,7 +79,7 @@ class LindormVectorStore(BaseVector):
                 existing_docs = self._client.mget(index=self._collection_name, body={"ids": batch_ids}, _source=False)
                 return {doc["_id"] for doc in existing_docs["docs"] if doc["found"]}
             except Exception as e:
-                logger.error(f"Error fetching batch {batch_ids}: {e}")
+                logger.exception(f"Error fetching batch {batch_ids}: {e}")
                 return set()

         @retry(stop=stop_after_attempt(3), wait=wait_fixed(60))
@@ -96,7 +96,7 @@
                 )
                 return {doc["_id"] for doc in existing_docs["docs"] if doc["found"]}
             except Exception as e:
-                logger.error(f"Error fetching batch {batch_ids}: {e}")
+                logger.exception(f"Error fetching batch {batch_ids}: {e}")
                 return set()

         if ids is None:
@@ -177,7 +177,7 @@
             else:
                 logger.warning(f"Index '{self._collection_name}' does not exist. No deletion performed.")
         except Exception as e:
-            logger.error(f"Error occurred while deleting the index: {e}")
+            logger.exception(f"Error occurred while deleting the index: {e}")
             raise e

     def text_exists(self, id: str) -> bool:
@@ -201,7 +201,7 @@
         try:
             response = self._client.search(index=self._collection_name, body=query)
         except Exception as e:
-            logger.error(f"Error executing search: {e}")
+            logger.exception(f"Error executing search: {e}")
             raise

         docs_and_scores = []
diff --git a/api/core/rag/datasource/vdb/milvus/milvus_vector.py b/api/core/rag/datasource/vdb/milvus/milvus_vector.py
index 080a1ef567..5a263d6e78 100644
--- a/api/core/rag/datasource/vdb/milvus/milvus_vector.py
+++ b/api/core/rag/datasource/vdb/milvus/milvus_vector.py
@@ -86,7 +86,7 @@ class MilvusVector(BaseVector):
                     ids = self._client.insert(collection_name=self._collection_name, data=batch_insert_list)
                     pks.extend(ids)
                 except MilvusException as e:
-                    logger.error("Failed to insert batch starting at entity: %s/%s", i, total_count)
+                    logger.exception("Failed to insert batch starting at entity: %s/%s", i, total_count)
                     raise e
         return pks

diff --git a/api/core/rag/datasource/vdb/myscale/myscale_vector.py b/api/core/rag/datasource/vdb/myscale/myscale_vector.py
index 1fca926a2d..2610b60a77 100644
--- a/api/core/rag/datasource/vdb/myscale/myscale_vector.py
+++ b/api/core/rag/datasource/vdb/myscale/myscale_vector.py
@@ -142,7 +142,7 @@ class MyScaleVector(BaseVector):
                 for r in self._client.query(sql).named_results()
             ]
         except Exception as e:
-            logging.error(f"\033[91m\033[1m{type(e)}\033[0m \033[95m{str(e)}\033[0m")
+            logging.exception(f"\033[91m\033[1m{type(e)}\033[0m \033[95m{str(e)}\033[0m")
             return []

     def delete(self) -> None:
diff --git a/api/core/rag/datasource/vdb/opensearch/opensearch_vector.py b/api/core/rag/datasource/vdb/opensearch/opensearch_vector.py
index 0e0f107268..49eb00f140 100644
--- a/api/core/rag/datasource/vdb/opensearch/opensearch_vector.py
+++ b/api/core/rag/datasource/vdb/opensearch/opensearch_vector.py
@@ -129,7 +129,7 @@ class OpenSearchVector(BaseVector):
             if status == 404:
                 logger.warning(f"Document not found for deletion: {doc_id}")
             else:
-                logger.error(f"Error deleting document: {error}")
+                logger.exception(f"Error deleting document: {error}")

     def delete(self) -> None:
         self._client.indices.delete(index=self._collection_name.lower())
@@ -158,7 +158,7 @@
         try:
             response = self._client.search(index=self._collection_name.lower(), body=query)
         except Exception as e:
-            logger.error(f"Error executing search: {e}")
+            logger.exception(f"Error executing search: {e}")
             raise

         docs = []
diff --git a/api/core/rag/embedding/cached_embedding.py b/api/core/rag/embedding/cached_embedding.py
index b3e93ce760..3ac65b88bb 100644
--- a/api/core/rag/embedding/cached_embedding.py
+++ b/api/core/rag/embedding/cached_embedding.py
@@ -89,7 +89,7 @@ class CacheEmbedding(Embeddings):
                 db.session.rollback()
         except Exception as ex:
             db.session.rollback()
-            logger.error("Failed to embed documents: %s", ex)
+            logger.exception("Failed to embed documents: %s", ex)
             raise ex

         return text_embeddings
diff --git a/api/core/rag/extractor/word_extractor.py b/api/core/rag/extractor/word_extractor.py
index d4434ea28f..576bba0142 100644
--- a/api/core/rag/extractor/word_extractor.py
+++ b/api/core/rag/extractor/word_extractor.py
@@ -230,7 +230,7 @@ class WordExtractor(BaseExtractor):
                         for i in url_pattern.findall(x.text):
                             hyperlinks_url = str(i)
                 except Exception as e:
-                    logger.error(e)
+                    logger.exception(e)

         def parse_paragraph(paragraph):
             paragraph_content = []
diff --git a/api/core/tools/tool_file_manager.py b/api/core/tools/tool_file_manager.py
index 1a28df31bc..ff56e20e87 100644
--- a/api/core/tools/tool_file_manager.py
+++ b/api/core/tools/tool_file_manager.py
@@ -98,7 +98,7 @@ class ToolFileManager:
             response.raise_for_status()
             blob = response.content
         except Exception as e:
-            logger.error(f"Failed to download file from {file_url}: {e}")
+            logger.exception(f"Failed to download file from {file_url}: {e}")
             raise

         mimetype = guess_type(file_url)[0] or "octet/stream"
diff --git a/api/core/tools/tool_manager.py b/api/core/tools/tool_manager.py
index 6abe0a9cba..bf2ad13620 100644
--- a/api/core/tools/tool_manager.py
+++ b/api/core/tools/tool_manager.py
@@ -388,7 +388,7 @@ class ToolManager:
                 yield provider

             except Exception as e:
-                logger.error(f"load builtin provider {provider} error: {e}")
+                logger.exception(f"load builtin provider {provider} error: {e}")
                 continue
         # set builtin providers loaded
         cls._builtin_providers_loaded = True
diff --git a/api/core/workflow/nodes/base/node.py b/api/core/workflow/nodes/base/node.py
index 053a339ba7..1433c8eaed 100644
--- a/api/core/workflow/nodes/base/node.py
+++ b/api/core/workflow/nodes/base/node.py
@@ -69,7 +69,7 @@ class BaseNode(Generic[GenericNodeData]):
         try:
             result = self._run()
         except Exception as e:
-            logger.error(f"Node {self.node_id} failed to run: {e}")
+            logger.exception(f"Node {self.node_id} failed to run: {e}")
             result = NodeRunResult(
                 status=WorkflowNodeExecutionStatus.FAILED,
                 error=str(e),
diff --git a/api/core/workflow/nodes/question_classifier/question_classifier_node.py b/api/core/workflow/nodes/question_classifier/question_classifier_node.py
index 0489020e5e..744dfd3d8d 100644
--- a/api/core/workflow/nodes/question_classifier/question_classifier_node.py
+++ b/api/core/workflow/nodes/question_classifier/question_classifier_node.py
@@ -127,7 +127,7 @@ class QuestionClassifierNode(LLMNode):
                     category_id = category_id_result

         except OutputParserError:
-            logging.error(f"Failed to parse result text: {result_text}")
+            logging.exception(f"Failed to parse result text: {result_text}")
         try:
             process_data = {
                 "model_mode": model_config.mode,
diff --git a/api/libs/smtp.py b/api/libs/smtp.py
index bd7de7dd68..d57d99f3b7 100644
--- a/api/libs/smtp.py
+++ b/api/libs/smtp.py
@@ -39,13 +39,13 @@ class SMTPClient:

             smtp.sendmail(self._from, mail["to"], msg.as_string())
         except smtplib.SMTPException as e:
-            logging.error(f"SMTP error occurred: {str(e)}")
+            logging.exception(f"SMTP error occurred: {str(e)}")
             raise
         except TimeoutError as e:
-            logging.error(f"Timeout occurred while sending email: {str(e)}")
+            logging.exception(f"Timeout occurred while sending email: {str(e)}")
             raise
         except Exception as e:
-            logging.error(f"Unexpected error occurred while sending email: {str(e)}")
+            logging.exception(f"Unexpected error occurred while sending email: {str(e)}")
             raise
         finally:
             if smtp:
diff --git a/api/pyproject.toml b/api/pyproject.toml
index 4438cf61db..928dee975b 100644
--- a/api/pyproject.toml
+++ b/api/pyproject.toml
@@ -34,6 +34,7 @@ select = [
     "RUF101", # redirected-noqa
     "S506", # unsafe-yaml-load
     "SIM", # flake8-simplify rules
+    "TRY400", # error-instead-of-exception
     "UP", # pyupgrade rules
     "W191", # tab-indentation
     "W605", # invalid-escape-sequence
diff --git a/api/services/account_service.py b/api/services/account_service.py
index dceca06185..963a055948 100644
--- a/api/services/account_service.py
+++ b/api/services/account_service.py
@@ -821,7 +821,7 @@ class RegisterService:
             db.session.rollback()
         except Exception as e:
             db.session.rollback()
-            logging.error(f"Register failed: {e}")
+            logging.exception(f"Register failed: {e}")
             raise AccountRegisterError(f"Registration failed: {e}") from e

         return account
diff --git a/api/services/tools/api_tools_manage_service.py b/api/services/tools/api_tools_manage_service.py
index 257c6cf52b..4a93891855 100644
--- a/api/services/tools/api_tools_manage_service.py
+++ b/api/services/tools/api_tools_manage_service.py
@@ -193,7 +193,7 @@ class ApiToolManageService:
             # try to parse schema, avoid SSRF attack
             ApiToolManageService.parser_api_schema(schema)
         except Exception as e:
-            logger.error(f"parse api schema error: {str(e)}")
+            logger.exception(f"parse api schema error: {str(e)}")
             raise ValueError("invalid schema, please check the url you provided")

         return {"schema": schema}
diff --git a/api/services/tools/tools_transform_service.py b/api/services/tools/tools_transform_service.py
index 4af73d5063..e535ddb575 100644
--- a/api/services/tools/tools_transform_service.py
+++ b/api/services/tools/tools_transform_service.py
@@ -183,7 +183,7 @@ class ToolTransformService:
         try:
            username = db_provider.user.name
         except Exception as e:
-            logger.error(f"failed to get user name for api provider {db_provider.id}: {str(e)}")
+            logger.exception(f"failed to get user name for api provider {db_provider.id}: {str(e)}")
         # add provider into providers
         credentials = db_provider.credentials
         result = UserToolProvider(
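
Note on the change: `logging.exception(...)` logs at the same ERROR level as `logging.error(...)`, but additionally records the traceback of the exception currently being handled (it is shorthand for `logging.error(..., exc_info=True)` and is meant to be called from inside an `except` block). Ruff's `TRY400` rule ("error-instead-of-exception"), enabled above in `pyproject.toml`, flags `logging.error` calls inside exception handlers for exactly this reason. A minimal, self-contained sketch of the behavioral difference; the logger name and message are illustrative only:

```python
import logging

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger("demo")  # hypothetical logger name, for illustration

try:
    1 / 0
except ZeroDivisionError as e:
    # Before this patch: message only -- the traceback is lost.
    logger.error("division failed: %s", e)
    # After this patch: same message at ERROR level, followed by the full traceback.
    logger.exception("division failed: %s", e)
```

One side effect worth noting in review: the appended traceback already ends with the exception type and message, so interpolating `{str(e)}` into these messages becomes redundant (though harmless); the messages themselves are left unchanged by this patch.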