feat: Deprecate datetime.utcnow() in favor of datetime.now(timezone.utc).replace(tzinfo=None) for better timezone handling (#3408) (#3416)

LIU HONGWEI authored 2024-04-12 16:22:24 +08:00, committed by GitHub
parent 4d54637921
commit c227f3d985
32 changed files with 112 additions and 112 deletions
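Every change in this diff follows the same pattern: the deprecated datetime.utcnow() call is replaced by datetime.now(timezone.utc).replace(tzinfo=None), which computes the current time as a timezone-aware UTC datetime and then strips the tzinfo so the stored value stays naive and remains compatible with the existing DateTime columns. A minimal sketch of the equivalence (illustration only, not part of the diff):

from datetime import datetime, timezone

# Deprecated since Python 3.12: returns a naive datetime holding UTC wall-clock time.
legacy = datetime.utcnow()

# Replacement used throughout this commit: aware UTC time with tzinfo stripped,
# so callers and DB columns that expect naive datetimes keep working unchanged.
replacement = datetime.now(timezone.utc).replace(tzinfo=None)

# Both values are naive and represent the current UTC time.
assert legacy.tzinfo is None and replacement.tzinfo is None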

View File

@@ -1,4 +1,4 @@
-from datetime import datetime
+from datetime import datetime, timezone

 import pytz
 from flask_login import current_user
@@ -262,7 +262,7 @@ def _get_conversation(app_model, conversation_id):
         raise NotFound("Conversation Not Exists.")

     if not conversation.read_at:
-        conversation.read_at = datetime.utcnow()
+        conversation.read_at = datetime.now(timezone.utc).replace(tzinfo=None)
         conversation.read_account_id = current_user.id
         db.session.commit()

View File

@@ -1,6 +1,6 @@
 import base64
+import datetime
 import secrets
-from datetime import datetime

 from flask_restful import Resource, reqparse
@@ -66,7 +66,7 @@ class ActivateApi(Resource):
         account.timezone = args['timezone']
         account.interface_theme = 'light'
         account.status = AccountStatus.ACTIVE.value
-        account.initialized_at = datetime.utcnow()
+        account.initialized_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
         db.session.commit()

         return {'result': 'success'}

View File

@@ -1,5 +1,5 @@
 import logging
-from datetime import datetime
+from datetime import datetime, timezone
 from typing import Optional

 import requests
@@ -73,7 +73,7 @@ class OAuthCallback(Resource):
         if account.status == AccountStatus.PENDING.value:
             account.status = AccountStatus.ACTIVE.value
-            account.initialized_at = datetime.utcnow()
+            account.initialized_at = datetime.now(timezone.utc).replace(tzinfo=None)
             db.session.commit()

         TenantService.create_owner_tenant_if_not_exist(account)

View File

@@ -80,7 +80,7 @@ class DataSourceApi(Resource):
         if action == 'enable':
             if data_source_binding.disabled:
                 data_source_binding.disabled = False
-                data_source_binding.updated_at = datetime.datetime.utcnow()
+                data_source_binding.updated_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
                 db.session.add(data_source_binding)
                 db.session.commit()
             else:
@@ -89,7 +89,7 @@ class DataSourceApi(Resource):
         if action == 'disable':
             if not data_source_binding.disabled:
                 data_source_binding.disabled = True
-                data_source_binding.updated_at = datetime.datetime.utcnow()
+                data_source_binding.updated_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
                 db.session.add(data_source_binding)
                 db.session.commit()
             else:

View File

@@ -1,4 +1,4 @@
-from datetime import datetime
+from datetime import datetime, timezone

 from flask import request
 from flask_login import current_user
@@ -637,7 +637,7 @@ class DocumentProcessingApi(DocumentResource):
                 raise InvalidActionError('Document not in indexing state.')

             document.paused_by = current_user.id
-            document.paused_at = datetime.utcnow()
+            document.paused_at = datetime.now(timezone.utc).replace(tzinfo=None)
             document.is_paused = True
             db.session.commit()
@@ -717,7 +717,7 @@ class DocumentMetadataApi(DocumentResource):
                     document.doc_metadata[key] = value

         document.doc_type = doc_type
-        document.updated_at = datetime.utcnow()
+        document.updated_at = datetime.now(timezone.utc).replace(tzinfo=None)
         db.session.commit()

         return {'result': 'success', 'message': 'Document metadata updated.'}, 200
@@ -755,7 +755,7 @@ class DocumentStatusApi(DocumentResource):
             document.enabled = True
             document.disabled_at = None
             document.disabled_by = None
-            document.updated_at = datetime.utcnow()
+            document.updated_at = datetime.now(timezone.utc).replace(tzinfo=None)
             db.session.commit()

             # Set cache to prevent indexing the same document multiple times
@@ -772,9 +772,9 @@ class DocumentStatusApi(DocumentResource):
                 raise InvalidActionError('Document already disabled.')

             document.enabled = False
-            document.disabled_at = datetime.utcnow()
+            document.disabled_at = datetime.now(timezone.utc).replace(tzinfo=None)
             document.disabled_by = current_user.id
-            document.updated_at = datetime.utcnow()
+            document.updated_at = datetime.now(timezone.utc).replace(tzinfo=None)
             db.session.commit()

             # Set cache to prevent indexing the same document multiple times
@@ -789,9 +789,9 @@ class DocumentStatusApi(DocumentResource):
                 raise InvalidActionError('Document already archived.')

             document.archived = True
-            document.archived_at = datetime.utcnow()
+            document.archived_at = datetime.now(timezone.utc).replace(tzinfo=None)
             document.archived_by = current_user.id
-            document.updated_at = datetime.utcnow()
+            document.updated_at = datetime.now(timezone.utc).replace(tzinfo=None)
             db.session.commit()

             if document.enabled:
@@ -808,7 +808,7 @@ class DocumentStatusApi(DocumentResource):
             document.archived = False
             document.archived_at = None
             document.archived_by = None
-            document.updated_at = datetime.utcnow()
+            document.updated_at = datetime.now(timezone.utc).replace(tzinfo=None)
             db.session.commit()

             # Set cache to prevent indexing the same document multiple times

View File

@@ -1,5 +1,5 @@
 import uuid
-from datetime import datetime
+from datetime import datetime, timezone

 import pandas as pd
 from flask import request
@@ -192,7 +192,7 @@ class DatasetDocumentSegmentApi(Resource):
                 raise InvalidActionError("Segment is already disabled.")

             segment.enabled = False
-            segment.disabled_at = datetime.utcnow()
+            segment.disabled_at = datetime.now(timezone.utc).replace(tzinfo=None)
             segment.disabled_by = current_user.id
             db.session.commit()

View File

@@ -1,5 +1,5 @@
 import logging
-from datetime import datetime
+from datetime import datetime, timezone

 from flask_login import current_user
 from flask_restful import reqparse
@@ -47,7 +47,7 @@ class CompletionApi(InstalledAppResource):
         streaming = args['response_mode'] == 'streaming'
         args['auto_generate_name'] = False

-        installed_app.last_used_at = datetime.utcnow()
+        installed_app.last_used_at = datetime.now(timezone.utc).replace(tzinfo=None)
         db.session.commit()

         try:
@@ -110,7 +110,7 @@ class ChatApi(InstalledAppResource):
         args['auto_generate_name'] = False

-        installed_app.last_used_at = datetime.utcnow()
+        installed_app.last_used_at = datetime.now(timezone.utc).replace(tzinfo=None)
         db.session.commit()

         try:

View File

@@ -1,4 +1,4 @@
-from datetime import datetime
+from datetime import datetime, timezone

 from flask_login import current_user
 from flask_restful import Resource, inputs, marshal_with, reqparse
@@ -81,7 +81,7 @@ class InstalledAppsListApi(Resource):
                 tenant_id=current_tenant_id,
                 app_owner_tenant_id=app.tenant_id,
                 is_pinned=False,
-                last_used_at=datetime.utcnow()
+                last_used_at=datetime.now(timezone.utc).replace(tzinfo=None)
             )
             db.session.add(new_installed_app)
             db.session.commit()

View File

@@ -1,4 +1,4 @@
-from datetime import datetime
+import datetime

 import pytz
 from flask import current_app, request
@@ -59,7 +59,7 @@ class AccountInitApi(Resource):
                 raise InvalidInvitationCodeError()

             invitation_code.status = 'used'
-            invitation_code.used_at = datetime.utcnow()
+            invitation_code.used_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
             invitation_code.used_by_tenant_id = account.current_tenant_id
             invitation_code.used_by_account_id = account.id
@@ -67,7 +67,7 @@ class AccountInitApi(Resource):
         account.timezone = args['timezone']
         account.interface_theme = 'light'
         account.status = 'active'
-        account.initialized_at = datetime.utcnow()
+        account.initialized_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
         db.session.commit()

         return {'result': 'success'}

View File

@@ -1,5 +1,5 @@
 from collections.abc import Callable
-from datetime import datetime
+from datetime import datetime, timezone
 from enum import Enum
 from functools import wraps
 from typing import Optional
@@ -183,7 +183,7 @@ def validate_and_get_api_token(scope=None):
     if not api_token:
         raise Unauthorized("Access token is invalid")

-    api_token.last_used_at = datetime.utcnow()
+    api_token.last_used_at = datetime.now(timezone.utc).replace(tzinfo=None)
     db.session.commit()

     return api_token

View File

@@ -1,7 +1,7 @@
 import json
 import logging
 import uuid
-from datetime import datetime
+from datetime import datetime, timezone
 from typing import Optional, Union, cast

 from core.agent.entities import AgentEntity, AgentToolEntity
@@ -440,7 +440,7 @@ class BaseAgentRunner(AppRunner):
             ToolConversationVariables.conversation_id == self.message.conversation_id,
         ).first()

-        db_variables.updated_at = datetime.utcnow()
+        db_variables.updated_at = datetime.now(timezone.utc).replace(tzinfo=None)
         db_variables.variables_str = json.dumps(jsonable_encoder(tool_variables.pool))
         db.session.commit()
         db.session.close()

View File

@@ -1,6 +1,6 @@
 import json
 import time
-from datetime import datetime
+from datetime import datetime, timezone
 from typing import Any, Optional, Union, cast

 from core.app.entities.app_invoke_entities import AdvancedChatAppGenerateEntity, InvokeFrom, WorkflowAppGenerateEntity
@@ -120,7 +120,7 @@ class WorkflowCycleManage:
         workflow_run.elapsed_time = time.perf_counter() - start_at
         workflow_run.total_tokens = total_tokens
         workflow_run.total_steps = total_steps
-        workflow_run.finished_at = datetime.utcnow()
+        workflow_run.finished_at = datetime.now(timezone.utc).replace(tzinfo=None)

         db.session.commit()
         db.session.refresh(workflow_run)
@@ -149,7 +149,7 @@ class WorkflowCycleManage:
         workflow_run.elapsed_time = time.perf_counter() - start_at
         workflow_run.total_tokens = total_tokens
         workflow_run.total_steps = total_steps
-        workflow_run.finished_at = datetime.utcnow()
+        workflow_run.finished_at = datetime.now(timezone.utc).replace(tzinfo=None)

         db.session.commit()
         db.session.refresh(workflow_run)
@@ -223,7 +223,7 @@ class WorkflowCycleManage:
         workflow_node_execution.outputs = json.dumps(outputs) if outputs else None
         workflow_node_execution.execution_metadata = json.dumps(jsonable_encoder(execution_metadata)) \
             if execution_metadata else None
-        workflow_node_execution.finished_at = datetime.utcnow()
+        workflow_node_execution.finished_at = datetime.now(timezone.utc).replace(tzinfo=None)

         db.session.commit()
         db.session.refresh(workflow_node_execution)
@@ -251,7 +251,7 @@ class WorkflowCycleManage:
         workflow_node_execution.status = WorkflowNodeExecutionStatus.FAILED.value
         workflow_node_execution.error = error
         workflow_node_execution.elapsed_time = time.perf_counter() - start_at
-        workflow_node_execution.finished_at = datetime.utcnow()
+        workflow_node_execution.finished_at = datetime.now(timezone.utc).replace(tzinfo=None)
         workflow_node_execution.inputs = json.dumps(inputs) if inputs else None
         workflow_node_execution.process_data = json.dumps(process_data) if process_data else None
         workflow_node_execution.outputs = json.dumps(outputs) if outputs else None

View File

@@ -203,7 +203,7 @@ class ProviderConfiguration(BaseModel):
         if provider_record:
             provider_record.encrypted_config = json.dumps(credentials)
             provider_record.is_valid = True
-            provider_record.updated_at = datetime.datetime.utcnow()
+            provider_record.updated_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
             db.session.commit()
         else:
             provider_record = Provider(
@@ -351,7 +351,7 @@ class ProviderConfiguration(BaseModel):
         if provider_model_record:
             provider_model_record.encrypted_config = json.dumps(credentials)
             provider_model_record.is_valid = True
-            provider_model_record.updated_at = datetime.datetime.utcnow()
+            provider_model_record.updated_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
             db.session.commit()
         else:
             provider_model_record = ProviderModel(

View File

@@ -81,7 +81,7 @@ class IndexingRunner:
         except ProviderTokenNotInitError as e:
             dataset_document.indexing_status = 'error'
             dataset_document.error = str(e.description)
-            dataset_document.stopped_at = datetime.datetime.utcnow()
+            dataset_document.stopped_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
             db.session.commit()
         except ObjectDeletedError:
             logging.warning('Document deleted, document id: {}'.format(dataset_document.id))
@@ -89,7 +89,7 @@
             logging.exception("consume document failed")
             dataset_document.indexing_status = 'error'
             dataset_document.error = str(e)
-            dataset_document.stopped_at = datetime.datetime.utcnow()
+            dataset_document.stopped_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
             db.session.commit()

     def run_in_splitting_status(self, dataset_document: DatasetDocument):
@@ -140,13 +140,13 @@
         except ProviderTokenNotInitError as e:
             dataset_document.indexing_status = 'error'
             dataset_document.error = str(e.description)
-            dataset_document.stopped_at = datetime.datetime.utcnow()
+            dataset_document.stopped_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
             db.session.commit()
         except Exception as e:
             logging.exception("consume document failed")
             dataset_document.indexing_status = 'error'
             dataset_document.error = str(e)
-            dataset_document.stopped_at = datetime.datetime.utcnow()
+            dataset_document.stopped_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
             db.session.commit()

     def run_in_indexing_status(self, dataset_document: DatasetDocument):
@@ -202,13 +202,13 @@
         except ProviderTokenNotInitError as e:
             dataset_document.indexing_status = 'error'
             dataset_document.error = str(e.description)
-            dataset_document.stopped_at = datetime.datetime.utcnow()
+            dataset_document.stopped_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
             db.session.commit()
         except Exception as e:
             logging.exception("consume document failed")
             dataset_document.indexing_status = 'error'
             dataset_document.error = str(e)
-            dataset_document.stopped_at = datetime.datetime.utcnow()
+            dataset_document.stopped_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
             db.session.commit()

     def indexing_estimate(self, tenant_id: str, extract_settings: list[ExtractSetting], tmp_processing_rule: dict,
@@ -382,7 +382,7 @@
             after_indexing_status="splitting",
             extra_update_params={
                 DatasetDocument.word_count: sum([len(text_doc.page_content) for text_doc in text_docs]),
-                DatasetDocument.parsing_completed_at: datetime.datetime.utcnow()
+                DatasetDocument.parsing_completed_at: datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
             }
         )
@@ -467,7 +467,7 @@
         doc_store.add_documents(documents)

         # update document status to indexing
-        cur_time = datetime.datetime.utcnow()
+        cur_time = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
         self._update_document_index_status(
             document_id=dataset_document.id,
             after_indexing_status="indexing",
@@ -482,7 +482,7 @@
             dataset_document_id=dataset_document.id,
             update_params={
                 DocumentSegment.status: "indexing",
-                DocumentSegment.indexing_at: datetime.datetime.utcnow()
+                DocumentSegment.indexing_at: datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
             }
         )
@@ -685,7 +685,7 @@
             after_indexing_status="completed",
             extra_update_params={
                 DatasetDocument.tokens: tokens,
-                DatasetDocument.completed_at: datetime.datetime.utcnow(),
+                DatasetDocument.completed_at: datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None),
                 DatasetDocument.indexing_latency: indexing_end_at - indexing_start_at,
             }
         )
@@ -706,7 +706,7 @@
         ).update({
             DocumentSegment.status: "completed",
             DocumentSegment.enabled: True,
-            DocumentSegment.completed_at: datetime.datetime.utcnow()
+            DocumentSegment.completed_at: datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
         })

         db.session.commit()
@@ -739,7 +739,7 @@
         ).update({
             DocumentSegment.status: "completed",
             DocumentSegment.enabled: True,
-            DocumentSegment.completed_at: datetime.datetime.utcnow()
+            DocumentSegment.completed_at: datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
         })

         db.session.commit()
@@ -838,7 +838,7 @@
         doc_store.add_documents(documents)

         # update document status to indexing
-        cur_time = datetime.datetime.utcnow()
+        cur_time = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
         self._update_document_index_status(
             document_id=dataset_document.id,
             after_indexing_status="indexing",
@@ -853,7 +853,7 @@
             dataset_document_id=dataset_document.id,
             update_params={
                 DocumentSegment.status: "indexing",
-                DocumentSegment.indexing_at: datetime.datetime.utcnow()
+                DocumentSegment.indexing_at: datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
             }
         )
         pass

View File

@@ -29,7 +29,7 @@ def handle(sender, **kwargs):
             raise NotFound('Document not found')

         document.indexing_status = 'parsing'
-        document.processing_started_at = datetime.datetime.utcnow()
+        document.processing_started_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
         documents.append(document)
         db.session.add(document)
     db.session.commit()

View File

@@ -1,4 +1,4 @@
-from datetime import datetime
+from datetime import datetime, timezone

 from core.app.entities.app_invoke_entities import AgentChatAppGenerateEntity, ChatAppGenerateEntity
 from events.message_event import message_was_created
@@ -17,5 +17,5 @@ def handle(sender, **kwargs):
     db.session.query(Provider).filter(
         Provider.tenant_id == application_generate_entity.app_config.tenant_id,
         Provider.provider_name == application_generate_entity.model_config.provider
-    ).update({'last_used': datetime.utcnow()})
+    ).update({'last_used': datetime.now(timezone.utc).replace(tzinfo=None)})
     db.session.commit()

View File

@@ -2,7 +2,7 @@ import os
 import shutil
 from collections.abc import Generator
 from contextlib import closing
-from datetime import datetime, timedelta
+from datetime import datetime, timedelta, timezone
 from typing import Union

 import boto3
@@ -38,7 +38,7 @@ class Storage:
                 account_key=app.config.get('AZURE_BLOB_ACCOUNT_KEY'),
                 resource_types=ResourceTypes(service=True, container=True, object=True),
                 permission=AccountSasPermissions(read=True, write=True, delete=True, list=True, add=True, create=True),
-                expiry=datetime.utcnow() + timedelta(hours=1)
+                expiry=datetime.now(timezone.utc).replace(tzinfo=None) + timedelta(hours=1)
             )
             self.client = BlobServiceClient(account_url=app.config.get('AZURE_BLOB_ACCOUNT_URL'),
                                             credential=sas_token)

View File

@@ -1,4 +1,4 @@
-from datetime import datetime
+from datetime import datetime, timezone

 from celery import states
@@ -15,8 +15,8 @@ class CeleryTask(db.Model):
     task_id = db.Column(db.String(155), unique=True)
     status = db.Column(db.String(50), default=states.PENDING)
     result = db.Column(db.PickleType, nullable=True)
-    date_done = db.Column(db.DateTime, default=datetime.utcnow,
-                          onupdate=datetime.utcnow, nullable=True)
+    date_done = db.Column(db.DateTime, default=lambda: datetime.now(timezone.utc).replace(tzinfo=None),
+                          onupdate=lambda: datetime.now(timezone.utc).replace(tzinfo=None), nullable=True)
     traceback = db.Column(db.Text, nullable=True)
     name = db.Column(db.String(155), nullable=True)
     args = db.Column(db.LargeBinary, nullable=True)
@@ -35,5 +35,5 @@ class CeleryTaskSet(db.Model):
                    autoincrement=True, primary_key=True)
     taskset_id = db.Column(db.String(155), unique=True)
     result = db.Column(db.PickleType, nullable=True)
-    date_done = db.Column(db.DateTime, default=datetime.utcnow,
+    date_done = db.Column(db.DateTime, default=lambda: datetime.now(timezone.utc).replace(tzinfo=None),
                           nullable=True)
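Note the lambda wrappers in the column defaults above: datetime.utcnow is itself a callable and could be passed directly, but datetime.now(timezone.utc).replace(tzinfo=None) is an expression that would be evaluated only once at import time, so it is wrapped in a lambda to keep the default evaluated on every insert or update. A minimal sketch of the distinction (illustration only, not part of the diff):

from datetime import datetime, timezone

# Evaluated once, when the module is imported -- every row would share this timestamp.
frozen_default = datetime.now(timezone.utc).replace(tzinfo=None)

# Evaluated each time SQLAlchemy needs a default value.
lazy_default = lambda: datetime.now(timezone.utc).replace(tzinfo=None)

print(frozen_default)   # fixed value
print(lazy_default())   # fresh value on every call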

View File

@@ -2,7 +2,7 @@ import base64
 import logging
 import secrets
 import uuid
-from datetime import datetime, timedelta
+from datetime import datetime, timedelta, timezone
 from hashlib import sha256
 from typing import Any, Optional
@@ -59,8 +59,8 @@ class AccountService:
             available_ta.current = True
             db.session.commit()

-        if datetime.utcnow() - account.last_active_at > timedelta(minutes=10):
-            account.last_active_at = datetime.utcnow()
+        if datetime.now(timezone.utc).replace(tzinfo=None) - account.last_active_at > timedelta(minutes=10):
+            account.last_active_at = datetime.now(timezone.utc).replace(tzinfo=None)
             db.session.commit()

         return account
@@ -70,7 +70,7 @@ class AccountService:
     def get_account_jwt_token(account):
         payload = {
             "user_id": account.id,
-            "exp": datetime.utcnow() + timedelta(days=30),
+            "exp": datetime.now(timezone.utc).replace(tzinfo=None) + timedelta(days=30),
             "iss": current_app.config['EDITION'],
             "sub": 'Console API Passport',
         }
@@ -91,7 +91,7 @@ class AccountService:
         if account.status == AccountStatus.PENDING.value:
             account.status = AccountStatus.ACTIVE.value
-            account.initialized_at = datetime.utcnow()
+            account.initialized_at = datetime.now(timezone.utc).replace(tzinfo=None)
             db.session.commit()

         if account.password is None or not compare_password(password, account.password, account.password_salt):
@@ -163,7 +163,7 @@ class AccountService:
             # If it exists, update the record
             account_integrate.open_id = open_id
             account_integrate.encrypted_token = ""  # todo
-            account_integrate.updated_at = datetime.utcnow()
+            account_integrate.updated_at = datetime.now(timezone.utc).replace(tzinfo=None)
         else:
             # If it does not exist, create a new record
             account_integrate = AccountIntegrate(account_id=account.id, provider=provider, open_id=open_id,
@@ -197,7 +197,7 @@ class AccountService:
     @staticmethod
     def update_last_login(account: Account, request) -> None:
         """Update last login time and ip"""
-        account.last_login_at = datetime.utcnow()
+        account.last_login_at = datetime.now(timezone.utc).replace(tzinfo=None)
         account.last_login_ip = get_remote_ip(request)
         db.session.add(account)
         db.session.commit()
@@ -431,7 +431,7 @@ class RegisterService:
                 password=password
             )
             account.status = AccountStatus.ACTIVE.value if not status else status.value
-            account.initialized_at = datetime.utcnow()
+            account.initialized_at = datetime.now(timezone.utc).replace(tzinfo=None)

             if open_id is not None or provider is not None:
                 AccountService.link_account_integrate(provider, open_id, account)

View File

@@ -415,7 +415,7 @@ class AppAnnotationService:
             raise NotFound("App annotation not found")

         annotation_setting.score_threshold = args['score_threshold']
         annotation_setting.updated_user_id = current_user.id
-        annotation_setting.updated_at = datetime.datetime.utcnow()
+        annotation_setting.updated_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
         db.session.add(annotation_setting)
         db.session.commit()

View File

@@ -1,6 +1,6 @@
 import json
 import logging
-from datetime import datetime
+from datetime import datetime, timezone
 from typing import cast

 import yaml
@@ -251,7 +251,7 @@ class AppService:
         app.description = args.get('description', '')
         app.icon = args.get('icon')
         app.icon_background = args.get('icon_background')
-        app.updated_at = datetime.utcnow()
+        app.updated_at = datetime.now(timezone.utc).replace(tzinfo=None)
         db.session.commit()

         return app
@@ -264,7 +264,7 @@ class AppService:
         :return: App instance
         """
         app.name = name
-        app.updated_at = datetime.utcnow()
+        app.updated_at = datetime.now(timezone.utc).replace(tzinfo=None)
         db.session.commit()

         return app
@@ -279,7 +279,7 @@ class AppService:
         """
         app.icon = icon
         app.icon_background = icon_background
-        app.updated_at = datetime.utcnow()
+        app.updated_at = datetime.now(timezone.utc).replace(tzinfo=None)
         db.session.commit()

         return app
@@ -295,7 +295,7 @@ class AppService:
             return app

         app.enable_site = enable_site
-        app.updated_at = datetime.utcnow()
+        app.updated_at = datetime.now(timezone.utc).replace(tzinfo=None)
         db.session.commit()

         return app
@@ -311,7 +311,7 @@ class AppService:
             return app

         app.enable_api = enable_api
-        app.updated_at = datetime.utcnow()
+        app.updated_at = datetime.now(timezone.utc).replace(tzinfo=None)
         db.session.commit()

         return app

View File

@@ -415,7 +415,7 @@ class DocumentService:
         # update document to be paused
         document.is_paused = True
         document.paused_by = current_user.id
-        document.paused_at = datetime.datetime.utcnow()
+        document.paused_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)

         db.session.add(document)
         db.session.commit()
@@ -739,7 +739,7 @@ class DocumentService:
             document.parsing_completed_at = None
             document.cleaning_completed_at = None
             document.splitting_completed_at = None
-            document.updated_at = datetime.datetime.utcnow()
+            document.updated_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
             document.created_from = created_from
             document.doc_form = document_data['doc_form']
             db.session.add(document)
@@ -1062,8 +1062,8 @@ class SegmentService:
             word_count=len(content),
             tokens=tokens,
             status='completed',
-            indexing_at=datetime.datetime.utcnow(),
-            completed_at=datetime.datetime.utcnow(),
+            indexing_at=datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None),
+            completed_at=datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None),
             created_by=current_user.id
         )
         if document.doc_form == 'qa_model':
@@ -1078,7 +1078,7 @@
         except Exception as e:
             logging.exception("create segment index failed")
             segment_document.enabled = False
-            segment_document.disabled_at = datetime.datetime.utcnow()
+            segment_document.disabled_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
             segment_document.status = 'error'
             segment_document.error = str(e)
         db.session.commit()
@@ -1128,8 +1128,8 @@
                 word_count=len(content),
                 tokens=tokens,
                 status='completed',
-                indexing_at=datetime.datetime.utcnow(),
-                completed_at=datetime.datetime.utcnow(),
+                indexing_at=datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None),
+                completed_at=datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None),
                 created_by=current_user.id
             )
             if document.doc_form == 'qa_model':
@@ -1147,7 +1147,7 @@
             logging.exception("create segment index failed")
             for segment_document in segment_data_list:
                 segment_document.enabled = False
-                segment_document.disabled_at = datetime.datetime.utcnow()
+                segment_document.disabled_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
                 segment_document.status = 'error'
                 segment_document.error = str(e)
         db.session.commit()
@@ -1208,10 +1208,10 @@
                 segment.word_count = len(content)
                 segment.tokens = tokens
                 segment.status = 'completed'
-                segment.indexing_at = datetime.datetime.utcnow()
-                segment.completed_at = datetime.datetime.utcnow()
+                segment.indexing_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
+                segment.completed_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
                 segment.updated_by = current_user.id
-                segment.updated_at = datetime.datetime.utcnow()
+                segment.updated_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
                 if document.doc_form == 'qa_model':
                     segment.answer = args['answer']
                 db.session.add(segment)
@@ -1221,7 +1221,7 @@
         except Exception as e:
             logging.exception("update segment index failed")
             segment.enabled = False
-            segment.disabled_at = datetime.datetime.utcnow()
+            segment.disabled_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
             segment.status = 'error'
             segment.error = str(e)
             db.session.commit()

View File

@@ -81,7 +81,7 @@ class FileService:
             mime_type=file.mimetype,
             created_by_role=('account' if isinstance(user, Account) else 'end_user'),
             created_by=user.id,
-            created_at=datetime.datetime.utcnow(),
+            created_at=datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None),
             used=False,
             hash=hashlib.sha3_256(file_content).hexdigest()
         )
@@ -111,10 +111,10 @@ class FileService:
             extension='txt',
             mime_type='text/plain',
             created_by=current_user.id,
-            created_at=datetime.datetime.utcnow(),
+            created_at=datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None),
             used=True,
             used_by=current_user.id,
-            used_at=datetime.datetime.utcnow()
+            used_at=datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
         )

         db.session.add(upload_file)

View File

@@ -1,6 +1,6 @@
 import json
 import time
-from datetime import datetime
+from datetime import datetime, timezone
 from typing import Optional

 from core.app.apps.advanced_chat.app_config_manager import AdvancedChatAppConfigManager
@@ -93,7 +93,7 @@ class WorkflowService:
             workflow.graph = json.dumps(graph)
             workflow.features = json.dumps(features)
             workflow.updated_by = account.id
-            workflow.updated_at = datetime.utcnow()
+            workflow.updated_at = datetime.now(timezone.utc).replace(tzinfo=None)

         # commit db session changes
         db.session.commit()
@@ -123,7 +123,7 @@ class WorkflowService:
             tenant_id=app_model.tenant_id,
             app_id=app_model.id,
             type=draft_workflow.type,
-            version=str(datetime.utcnow()),
+            version=str(datetime.now(timezone.utc).replace(tzinfo=None)),
             graph=draft_workflow.graph,
             features=draft_workflow.features,
             created_by=account.id
@@ -202,8 +202,8 @@ class WorkflowService:
                 elapsed_time=time.perf_counter() - start_at,
                 created_by_role=CreatedByRole.ACCOUNT.value,
                 created_by=account.id,
-                created_at=datetime.utcnow(),
-                finished_at=datetime.utcnow()
+                created_at=datetime.now(timezone.utc).replace(tzinfo=None),
+                finished_at=datetime.now(timezone.utc).replace(tzinfo=None)
             )
             db.session.add(workflow_node_execution)
             db.session.commit()
@@ -230,8 +230,8 @@ class WorkflowService:
                 elapsed_time=time.perf_counter() - start_at,
                 created_by_role=CreatedByRole.ACCOUNT.value,
                 created_by=account.id,
-                created_at=datetime.utcnow(),
-                finished_at=datetime.utcnow()
+                created_at=datetime.now(timezone.utc).replace(tzinfo=None),
+                finished_at=datetime.now(timezone.utc).replace(tzinfo=None)
             )
         else:
             # create workflow node execution
@@ -249,8 +249,8 @@ class WorkflowService:
                 elapsed_time=time.perf_counter() - start_at,
                 created_by_role=CreatedByRole.ACCOUNT.value,
                 created_by=account.id,
-                created_at=datetime.utcnow(),
-                finished_at=datetime.utcnow()
+                created_at=datetime.now(timezone.utc).replace(tzinfo=None),
+                finished_at=datetime.now(timezone.utc).replace(tzinfo=None)
             )
             db.session.add(workflow_node_execution)

View File

@@ -70,7 +70,7 @@ def add_document_to_index_task(dataset_document_id: str):
     except Exception as e:
         logging.exception("add document to index failed")
         dataset_document.enabled = False
-        dataset_document.disabled_at = datetime.datetime.utcnow()
+        dataset_document.disabled_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
         dataset_document.status = 'error'
         dataset_document.error = str(e)
         db.session.commit()

View File

@@ -50,7 +50,7 @@ def enable_annotation_reply_task(job_id: str, app_id: str, user_id: str, tenant_
             annotation_setting.score_threshold = score_threshold
             annotation_setting.collection_binding_id = dataset_collection_binding.id
             annotation_setting.updated_user_id = user_id
-            annotation_setting.updated_at = datetime.datetime.utcnow()
+            annotation_setting.updated_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
             db.session.add(annotation_setting)
         else:
             new_app_annotation_setting = AppAnnotationSetting(

View File

@@ -85,9 +85,9 @@ def batch_create_segment_to_index_task(job_id: str, content: list, dataset_id: s
             word_count=len(content),
             tokens=tokens,
             created_by=user_id,
-            indexing_at=datetime.datetime.utcnow(),
+            indexing_at=datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None),
             status='completed',
-            completed_at=datetime.datetime.utcnow()
+            completed_at=datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
         )
         if dataset_document.doc_form == 'qa_model':
             segment_document.answer = segment['answer']

View File

@@ -38,7 +38,7 @@ def create_segment_to_index_task(segment_id: str, keywords: Optional[list[str]]
     # update segment status to indexing
     update_params = {
         DocumentSegment.status: "indexing",
-        DocumentSegment.indexing_at: datetime.datetime.utcnow()
+        DocumentSegment.indexing_at: datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
     }
     DocumentSegment.query.filter_by(id=segment.id).update(update_params)
     db.session.commit()
@@ -75,7 +75,7 @@ def create_segment_to_index_task(segment_id: str, keywords: Optional[list[str]]
         # update segment to completed
         update_params = {
             DocumentSegment.status: "completed",
-            DocumentSegment.completed_at: datetime.datetime.utcnow()
+            DocumentSegment.completed_at: datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
         }
         DocumentSegment.query.filter_by(id=segment.id).update(update_params)
         db.session.commit()
@@ -85,7 +85,7 @@ def create_segment_to_index_task(segment_id: str, keywords: Optional[list[str]]
     except Exception as e:
         logging.exception("create segment to index failed")
         segment.enabled = False
-        segment.disabled_at = datetime.datetime.utcnow()
+        segment.disabled_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
         segment.status = 'error'
         segment.error = str(e)
         db.session.commit()

View File

@@ -67,7 +67,7 @@ def document_indexing_sync_task(dataset_id: str, document_id: str):
     # check the page is updated
     if last_edited_time != page_edited_time:
         document.indexing_status = 'parsing'
-        document.processing_started_at = datetime.datetime.utcnow()
+        document.processing_started_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
         db.session.commit()

         # delete all document segment and index

View File

@@ -47,7 +47,7 @@ def document_indexing_task(dataset_id: str, document_ids: list):
             if document:
                 document.indexing_status = 'error'
                 document.error = str(e)
-                document.stopped_at = datetime.datetime.utcnow()
+                document.stopped_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
                 db.session.add(document)
                 db.session.commit()
         return
@@ -62,7 +62,7 @@ def document_indexing_task(dataset_id: str, document_ids: list):
         if document:
             document.indexing_status = 'parsing'
-            document.processing_started_at = datetime.datetime.utcnow()
+            document.processing_started_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
             documents.append(document)
             db.session.add(document)
     db.session.commit()

View File

@@ -33,7 +33,7 @@ def document_indexing_update_task(dataset_id: str, document_id: str):
         raise NotFound('Document not found')

     document.indexing_status = 'parsing'
-    document.processing_started_at = datetime.datetime.utcnow()
+    document.processing_started_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
     db.session.commit()

     # delete all document segment and index

View File

@@ -69,7 +69,7 @@ def enable_segment_to_index_task(segment_id: str):
     except Exception as e:
         logging.exception("enable segment to index failed")
         segment.enabled = False
-        segment.disabled_at = datetime.datetime.utcnow()
+        segment.disabled_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
         segment.status = 'error'
         segment.error = str(e)
         db.session.commit()