diff --git a/api/schedule/clean_unused_datasets_task.py b/api/schedule/clean_unused_datasets_task.py
index eb73cc285d..4e7e443c2c 100644
--- a/api/schedule/clean_unused_datasets_task.py
+++ b/api/schedule/clean_unused_datasets_task.py
@@ -168,23 +168,6 @@ def clean_unused_datasets_task():
                     else:
                         plan = plan_cache.decode()
                     if plan == "sandbox":
-                        # add auto disable log
-                        documents = (
-                            db.session.query(Document)
-                            .filter(
-                                Document.dataset_id == dataset.id,
-                                Document.enabled == True,
-                                Document.archived == False,
-                            )
-                            .all()
-                        )
-                        for document in documents:
-                            dataset_auto_disable_log = DatasetAutoDisableLog(
-                                tenant_id=dataset.tenant_id,
-                                dataset_id=dataset.id,
-                                document_id=document.id,
-                            )
-                            db.session.add(dataset_auto_disable_log)
                         # remove index
                         index_processor = IndexProcessorFactory(dataset.doc_form).init_index_processor()
                         index_processor.clean(dataset, None)
diff --git a/api/services/dataset_service.py b/api/services/dataset_service.py
index 4821eb6696..701261d7cb 100644
--- a/api/services/dataset_service.py
+++ b/api/services/dataset_service.py
@@ -434,6 +434,12 @@ class DatasetService:
 
     @staticmethod
     def get_dataset_auto_disable_logs(dataset_id: str) -> dict:
+        features = FeatureService.get_features(current_user.current_tenant_id)
+        if not features.billing.enabled or features.billing.subscription.plan == "sandbox":
+            return {
+                "document_ids": [],
+                "count": 0,
+            }
         # get recent 30 days auto disable logs
         start_date = datetime.datetime.now() - datetime.timedelta(days=30)
         dataset_auto_disable_logs = DatasetAutoDisableLog.query.filter(