Mirror of https://git.mirrors.martin98.com/https://github.com/langgenius/dify.git, synced 2025-08-11 10:08:58 +08:00
chore(lint): Use logging.exception instead of logging.error (#10415)

parent dd5ffaf058
commit 574c4a264f
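For context: logging.exception(msg) is shorthand for logging.error(msg, exc_info=True), so it emits the same ERROR-level record but also attaches the traceback of the exception currently being handled, which plain logging.error drops. A minimal standalone sketch (not code from this repo):

    import logging

    try:
        1 / 0
    except ZeroDivisionError:
        logging.error("division failed")      # message only; the traceback is lost
        logging.exception("division failed")  # same level, plus the full traceback

Because logging.exception relies on the active exception, it should only be called from inside an except block, which is exactly where every call site touched below already sits.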
@@ -945,7 +945,7 @@ class DocumentRetryApi(DocumentResource):
                     raise DocumentAlreadyFinishedError()
                 retry_documents.append(document)
             except Exception as e:
-                logging.error(f"Document {document_id} retry failed: {str(e)}")
+                logging.exception(f"Document {document_id} retry failed: {str(e)}")
                 continue
         # retry document
         DocumentService.retry_document(dataset_id, retry_documents)
@@ -242,7 +242,7 @@ class AdvancedChatAppGenerateTaskPipeline(BasedGenerateTaskPipeline, WorkflowCyc
                     start_listener_time = time.time()
                     yield MessageAudioStreamResponse(audio=audio_trunk.audio, task_id=task_id)
             except Exception as e:
-                logger.error(e)
+                logger.exception(e)
                 break
         if tts_publisher:
             yield MessageAudioEndStreamResponse(audio="", task_id=task_id)
@@ -216,7 +216,7 @@ class WorkflowAppGenerateTaskPipeline(BasedGenerateTaskPipeline, WorkflowCycleMa
                 else:
                     yield MessageAudioStreamResponse(audio=audio_trunk.audio, task_id=task_id)
             except Exception as e:
-                logger.error(e)
+                logger.exception(e)
                 break
         if tts_publisher:
             yield MessageAudioEndStreamResponse(audio="", task_id=task_id)
@@ -47,9 +47,9 @@ class AzureRerankModel(RerankModel):
                 result = response.read()
                 return json.loads(result)
         except urllib.error.HTTPError as error:
-            logger.error(f"The request failed with status code: {error.code}")
-            logger.error(error.info())
-            logger.error(error.read().decode("utf8", "ignore"))
+            logger.exception(f"The request failed with status code: {error.code}")
+            logger.exception(error.info())
+            logger.exception(error.read().decode("utf8", "ignore"))
             raise

     def _invoke(
@@ -126,6 +126,6 @@ class OutputModeration(BaseModel):
             result: ModerationOutputsResult = moderation_factory.moderation_for_outputs(moderation_buffer)
             return result
         except Exception as e:
-            logger.error("Moderation Output error: %s", e)
+            logger.exception("Moderation Output error: %s", e)

         return None
@@ -708,7 +708,7 @@ class TraceQueueManager:
                 trace_task.app_id = self.app_id
                 trace_manager_queue.put(trace_task)
         except Exception as e:
-            logging.error(f"Error adding trace task: {e}")
+            logging.exception(f"Error adding trace task: {e}")
         finally:
             self.start_timer()

@@ -727,7 +727,7 @@ class TraceQueueManager:
             if tasks:
                 self.send_to_celery(tasks)
         except Exception as e:
-            logging.error(f"Error processing trace tasks: {e}")
+            logging.exception(f"Error processing trace tasks: {e}")

     def start_timer(self):
         global trace_manager_timer
@@ -242,7 +242,7 @@ class CouchbaseVector(BaseVector):
         try:
             self._cluster.query(query, named_parameters={"doc_ids": ids}).execute()
         except Exception as e:
-            logger.error(e)
+            logger.exception(e)

     def delete_by_document_id(self, document_id: str):
         query = f"""
@@ -79,7 +79,7 @@ class LindormVectorStore(BaseVector):
                 existing_docs = self._client.mget(index=self._collection_name, body={"ids": batch_ids}, _source=False)
                 return {doc["_id"] for doc in existing_docs["docs"] if doc["found"]}
             except Exception as e:
-                logger.error(f"Error fetching batch {batch_ids}: {e}")
+                logger.exception(f"Error fetching batch {batch_ids}: {e}")
                 return set()

         @retry(stop=stop_after_attempt(3), wait=wait_fixed(60))
@@ -96,7 +96,7 @@ class LindormVectorStore(BaseVector):
                 )
                 return {doc["_id"] for doc in existing_docs["docs"] if doc["found"]}
             except Exception as e:
-                logger.error(f"Error fetching batch {batch_ids}: {e}")
+                logger.exception(f"Error fetching batch {batch_ids}: {e}")
                 return set()

         if ids is None:
@@ -177,7 +177,7 @@ class LindormVectorStore(BaseVector):
             else:
                 logger.warning(f"Index '{self._collection_name}' does not exist. No deletion performed.")
         except Exception as e:
-            logger.error(f"Error occurred while deleting the index: {e}")
+            logger.exception(f"Error occurred while deleting the index: {e}")
             raise e

     def text_exists(self, id: str) -> bool:
@@ -201,7 +201,7 @@ class LindormVectorStore(BaseVector):
         try:
             response = self._client.search(index=self._collection_name, body=query)
         except Exception as e:
-            logger.error(f"Error executing search: {e}")
+            logger.exception(f"Error executing search: {e}")
             raise

         docs_and_scores = []
@@ -86,7 +86,7 @@ class MilvusVector(BaseVector):
                 ids = self._client.insert(collection_name=self._collection_name, data=batch_insert_list)
                 pks.extend(ids)
             except MilvusException as e:
-                logger.error("Failed to insert batch starting at entity: %s/%s", i, total_count)
+                logger.exception("Failed to insert batch starting at entity: %s/%s", i, total_count)
                 raise e
         return pks

@@ -142,7 +142,7 @@ class MyScaleVector(BaseVector):
                 for r in self._client.query(sql).named_results()
             ]
         except Exception as e:
-            logging.error(f"\033[91m\033[1m{type(e)}\033[0m \033[95m{str(e)}\033[0m")
+            logging.exception(f"\033[91m\033[1m{type(e)}\033[0m \033[95m{str(e)}\033[0m")
             return []

     def delete(self) -> None:
@@ -129,7 +129,7 @@ class OpenSearchVector(BaseVector):
             if status == 404:
                 logger.warning(f"Document not found for deletion: {doc_id}")
             else:
-                logger.error(f"Error deleting document: {error}")
+                logger.exception(f"Error deleting document: {error}")

     def delete(self) -> None:
         self._client.indices.delete(index=self._collection_name.lower())
@@ -158,7 +158,7 @@ class OpenSearchVector(BaseVector):
         try:
             response = self._client.search(index=self._collection_name.lower(), body=query)
         except Exception as e:
-            logger.error(f"Error executing search: {e}")
+            logger.exception(f"Error executing search: {e}")
             raise

         docs = []
@@ -89,7 +89,7 @@ class CacheEmbedding(Embeddings):
                 db.session.rollback()
             except Exception as ex:
                 db.session.rollback()
-                logger.error("Failed to embed documents: %s", ex)
+                logger.exception("Failed to embed documents: %s", ex)
                 raise ex

         return text_embeddings
@@ -230,7 +230,7 @@ class WordExtractor(BaseExtractor):
                 for i in url_pattern.findall(x.text):
                     hyperlinks_url = str(i)
             except Exception as e:
-                logger.error(e)
+                logger.exception(e)

         def parse_paragraph(paragraph):
             paragraph_content = []
@@ -98,7 +98,7 @@ class ToolFileManager:
             response.raise_for_status()
             blob = response.content
         except Exception as e:
-            logger.error(f"Failed to download file from {file_url}: {e}")
+            logger.exception(f"Failed to download file from {file_url}: {e}")
             raise

         mimetype = guess_type(file_url)[0] or "octet/stream"
@@ -388,7 +388,7 @@ class ToolManager:
                     yield provider

                 except Exception as e:
-                    logger.error(f"load builtin provider {provider} error: {e}")
+                    logger.exception(f"load builtin provider {provider} error: {e}")
                     continue
         # set builtin providers loaded
         cls._builtin_providers_loaded = True
@@ -69,7 +69,7 @@ class BaseNode(Generic[GenericNodeData]):
         try:
             result = self._run()
         except Exception as e:
-            logger.error(f"Node {self.node_id} failed to run: {e}")
+            logger.exception(f"Node {self.node_id} failed to run: {e}")
             result = NodeRunResult(
                 status=WorkflowNodeExecutionStatus.FAILED,
                 error=str(e),
@@ -127,7 +127,7 @@ class QuestionClassifierNode(LLMNode):
                 category_id = category_id_result

         except OutputParserError:
-            logging.error(f"Failed to parse result text: {result_text}")
+            logging.exception(f"Failed to parse result text: {result_text}")
             try:
                 process_data = {
                     "model_mode": model_config.mode,
@@ -39,13 +39,13 @@ class SMTPClient:

             smtp.sendmail(self._from, mail["to"], msg.as_string())
         except smtplib.SMTPException as e:
-            logging.error(f"SMTP error occurred: {str(e)}")
+            logging.exception(f"SMTP error occurred: {str(e)}")
             raise
         except TimeoutError as e:
-            logging.error(f"Timeout occurred while sending email: {str(e)}")
+            logging.exception(f"Timeout occurred while sending email: {str(e)}")
             raise
         except Exception as e:
-            logging.error(f"Unexpected error occurred while sending email: {str(e)}")
+            logging.exception(f"Unexpected error occurred while sending email: {str(e)}")
             raise
         finally:
             if smtp:
@@ -34,6 +34,7 @@ select = [
     "RUF101", # redirected-noqa
     "S506", # unsafe-yaml-load
     "SIM", # flake8-simplify rules
+    "TRY400", # error-instead-of-exception
     "UP", # pyupgrade rules
     "W191", # tab-indentation
     "W605", # invalid-escape-sequence
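The TRY400 selector added above is ruff's error-instead-of-exception rule (from the tryceratops rule set); enabling it makes the linter flag logging.error calls inside exception handlers, so the pattern fixed in this commit cannot silently come back. A hypothetical snippet it would flag (risky() is a made-up placeholder):

    import logging

    try:
        risky()  # hypothetical call that may raise
    except Exception:
        logging.error("risky failed")  # TRY400: use logging.exception instead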
@@ -821,7 +821,7 @@ class RegisterService:
             db.session.rollback()
         except Exception as e:
             db.session.rollback()
-            logging.error(f"Register failed: {e}")
+            logging.exception(f"Register failed: {e}")
             raise AccountRegisterError(f"Registration failed: {e}") from e

         return account
@@ -193,7 +193,7 @@ class ApiToolManageService:
             # try to parse schema, avoid SSRF attack
             ApiToolManageService.parser_api_schema(schema)
         except Exception as e:
-            logger.error(f"parse api schema error: {str(e)}")
+            logger.exception(f"parse api schema error: {str(e)}")
             raise ValueError("invalid schema, please check the url you provided")

         return {"schema": schema}
@@ -183,7 +183,7 @@ class ToolTransformService:
         try:
             username = db_provider.user.name
         except Exception as e:
-            logger.error(f"failed to get user name for api provider {db_provider.id}: {str(e)}")
+            logger.exception(f"failed to get user name for api provider {db_provider.id}: {str(e)}")
         # add provider into providers
         credentials = db_provider.credentials
         result = UserToolProvider(