diff --git a/api/events/event_handlers/update_app_dataset_join_when_app_model_config_updated.py b/api/events/event_handlers/update_app_dataset_join_when_app_model_config_updated.py
index 408ed31096..14396e9920 100644
--- a/api/events/event_handlers/update_app_dataset_join_when_app_model_config_updated.py
+++ b/api/events/event_handlers/update_app_dataset_join_when_app_model_config_updated.py
@@ -15,11 +15,11 @@ def handle(sender, **kwargs):

     app_dataset_joins = db.session.query(AppDatasetJoin).filter(AppDatasetJoin.app_id == app.id).all()

-    removed_dataset_ids: set[int] = set()
+    removed_dataset_ids: set[str] = set()
     if not app_dataset_joins:
         added_dataset_ids = dataset_ids
     else:
-        old_dataset_ids: set[int] = set()
+        old_dataset_ids: set[str] = set()
         old_dataset_ids.update(app_dataset_join.dataset_id for app_dataset_join in app_dataset_joins)

         added_dataset_ids = dataset_ids - old_dataset_ids
@@ -39,8 +39,8 @@ def handle(sender, **kwargs):
     db.session.commit()


-def get_dataset_ids_from_model_config(app_model_config: AppModelConfig) -> set[int]:
-    dataset_ids: set[int] = set()
+def get_dataset_ids_from_model_config(app_model_config: AppModelConfig) -> set[str]:
+    dataset_ids: set[str] = set()
     if not app_model_config:
         return dataset_ids

diff --git a/api/events/event_handlers/update_app_dataset_join_when_app_published_workflow_updated.py b/api/events/event_handlers/update_app_dataset_join_when_app_published_workflow_updated.py
index 7a31c82f6a..dd2efed94b 100644
--- a/api/events/event_handlers/update_app_dataset_join_when_app_published_workflow_updated.py
+++ b/api/events/event_handlers/update_app_dataset_join_when_app_published_workflow_updated.py
@@ -17,11 +17,11 @@ def handle(sender, **kwargs):
     dataset_ids = get_dataset_ids_from_workflow(published_workflow)
     app_dataset_joins = db.session.query(AppDatasetJoin).filter(AppDatasetJoin.app_id == app.id).all()

-    removed_dataset_ids: set[int] = set()
+    removed_dataset_ids: set[str] = set()
     if not app_dataset_joins:
         added_dataset_ids = dataset_ids
     else:
-        old_dataset_ids: set[int] = set()
+        old_dataset_ids: set[str] = set()
         old_dataset_ids.update(app_dataset_join.dataset_id for app_dataset_join in app_dataset_joins)

         added_dataset_ids = dataset_ids - old_dataset_ids
@@ -41,8 +41,8 @@ def handle(sender, **kwargs):
     db.session.commit()


-def get_dataset_ids_from_workflow(published_workflow: Workflow) -> set[int]:
-    dataset_ids: set[int] = set()
+def get_dataset_ids_from_workflow(published_workflow: Workflow) -> set[str]:
+    dataset_ids: set[str] = set()
     graph = published_workflow.graph_dict
     if not graph:
         return dataset_ids
@@ -60,7 +60,7 @@ def get_dataset_ids_from_workflow(published_workflow: Workflow) -> set[int]:
     for node in knowledge_retrieval_nodes:
         try:
             node_data = KnowledgeRetrievalNodeData(**node.get("data", {}))
-            dataset_ids.update(int(dataset_id) for dataset_id in node_data.dataset_ids)
+            dataset_ids.update(dataset_id for dataset_id in node_data.dataset_ids)
         except Exception as e:
             continue

diff --git a/api/services/audio_service.py b/api/services/audio_service.py
index ef52301c0a..f4178a69a4 100644
--- a/api/services/audio_service.py
+++ b/api/services/audio_service.py
@@ -139,7 +139,7 @@ class AudioService:
                 return Response(stream_with_context(response), content_type="audio/mpeg")
             return response
         else:
-            if not text:
+            if text is None:
                 raise ValueError("Text is required")
             response = invoke_tts(text, app_model, voice)
             if isinstance(response, Generator):
diff --git a/api/services/dataset_service.py b/api/services/dataset_service.py
index b146179c3a..4821eb6696 100644
--- a/api/services/dataset_service.py
+++ b/api/services/dataset_service.py
@@ -452,7 +452,7 @@ class DatasetService:


 class DocumentService:
-    DEFAULT_RULES = {
+    DEFAULT_RULES: dict[str, Any] = {
         "mode": "custom",
         "rules": {
             "pre_processing_rules": [
@@ -466,7 +466,7 @@ class DocumentService:
         },
     }

-    DOCUMENT_METADATA_SCHEMA = {
+    DOCUMENT_METADATA_SCHEMA: dict[str, Any] = {
         "book": {
             "title": str,
             "language": str,