From 781d294f49e6f51f6d979545ef9aebdc351b5673 Mon Sep 17 00:00:00 2001
From: Bowen Liang
Date: Wed, 11 Sep 2024 18:55:00 +0800
Subject: [PATCH] chore: cleanup pycodestyle E rules (#8269)

---
 .../legacy/volc_sdk/base/auth.py | 2 +-
 .../legacy/volc_sdk/base/service.py | 6 +++---
 .../volcengine_maas/legacy/volc_sdk/maas.py | 2 +-
 .../builtin/bing/tools/bing_web_search.py | 2 +-
 api/core/tools/provider/builtin/did/did_appx.py | 2 +-
 .../builtin/hap/tools/list_worksheet_records.py | 2 +-
 .../builtin/hap/tools/list_worksheets.py | 2 +-
 .../workflow/graph_engine/entities/graph.py | 2 +-
 api/core/workflow/graph_engine/graph_engine.py | 4 ++--
 ...355ba0e_add_workflow_tool_label_and_tool_.py | 1 +
 .../9e98fbaffb88_add_workflow_tool_version.py | 1 +
 .../de95f5c77138_migration_serpapi_api_key.py | 2 +-
 api/pyproject.toml | 17 ++++++++---------
 .../core/workflow/graph_engine/test_graph.py | 4 ++--
 14 files changed, 25 insertions(+), 24 deletions(-)

diff --git a/api/core/model_runtime/model_providers/volcengine_maas/legacy/volc_sdk/base/auth.py b/api/core/model_runtime/model_providers/volcengine_maas/legacy/volc_sdk/base/auth.py
index 8f8139426c..7435720252 100644
--- a/api/core/model_runtime/model_providers/volcengine_maas/legacy/volc_sdk/base/auth.py
+++ b/api/core/model_runtime/model_providers/volcengine_maas/legacy/volc_sdk/base/auth.py
@@ -74,7 +74,7 @@ class Signer:
     def sign(request, credentials):
         if request.path == "":
             request.path = "/"
-        if request.method != "GET" and not ("Content-Type" in request.headers):
+        if request.method != "GET" and "Content-Type" not in request.headers:
             request.headers["Content-Type"] = "application/x-www-form-urlencoded; charset=utf-8"
 
         format_date = Signer.get_current_format_date()
diff --git a/api/core/model_runtime/model_providers/volcengine_maas/legacy/volc_sdk/base/service.py b/api/core/model_runtime/model_providers/volcengine_maas/legacy/volc_sdk/base/service.py
index 096339b3c7..33c41f3eb3 100644
--- a/api/core/model_runtime/model_providers/volcengine_maas/legacy/volc_sdk/base/service.py
+++ b/api/core/model_runtime/model_providers/volcengine_maas/legacy/volc_sdk/base/service.py
@@ -31,7 +31,7 @@ class Service:
         self.service_info.scheme = scheme
 
     def get(self, api, params, doseq=0):
-        if not (api in self.api_info):
+        if api not in self.api_info:
             raise Exception("no such api")
 
         api_info = self.api_info[api]
@@ -49,7 +49,7 @@ class Service:
         raise Exception(resp.text)
 
     def post(self, api, params, form):
-        if not (api in self.api_info):
+        if api not in self.api_info:
             raise Exception("no such api")
         api_info = self.api_info[api]
         r = self.prepare_request(api_info, params)
@@ -72,7 +72,7 @@ class Service:
         raise Exception(resp.text)
 
     def json(self, api, params, body):
-        if not (api in self.api_info):
+        if api not in self.api_info:
             raise Exception("no such api")
         api_info = self.api_info[api]
         r = self.prepare_request(api_info, params)
diff --git a/api/core/model_runtime/model_providers/volcengine_maas/legacy/volc_sdk/maas.py b/api/core/model_runtime/model_providers/volcengine_maas/legacy/volc_sdk/maas.py
index 29c5c3c2d2..a3836685f1 100644
--- a/api/core/model_runtime/model_providers/volcengine_maas/legacy/volc_sdk/maas.py
+++ b/api/core/model_runtime/model_providers/volcengine_maas/legacy/volc_sdk/maas.py
@@ -109,7 +109,7 @@ class MaasService(Service):
         if not self._apikey and not credentials_exist:
             raise new_client_sdk_request_error("no valid credential", req_id)
 
-        if not (api in self.api_info):
+        if api not in self.api_info:
             raise new_client_sdk_request_error("no such api", req_id)
 
     def _call(self, endpoint_id, api, req_id, params, body, apikey=None, stream=False):
diff --git a/api/core/tools/provider/builtin/bing/tools/bing_web_search.py b/api/core/tools/provider/builtin/bing/tools/bing_web_search.py
index 0d9613c0cf..8bed2c556c 100644
--- a/api/core/tools/provider/builtin/bing/tools/bing_web_search.py
+++ b/api/core/tools/provider/builtin/bing/tools/bing_web_search.py
@@ -71,7 +71,7 @@ class BingSearchTool(BuiltinTool):
         text = ""
         if search_results:
             for i, result in enumerate(search_results):
-                text += f'{i+1}: {result.get("name", "")} - {result.get("snippet", "")}\n'
+                text += f'{i + 1}: {result.get("name", "")} - {result.get("snippet", "")}\n'
 
         if computation and "expression" in computation and "value" in computation:
             text += "\nComputation:\n"
diff --git a/api/core/tools/provider/builtin/did/did_appx.py b/api/core/tools/provider/builtin/did/did_appx.py
index 4cad12e4ee..c68878630d 100644
--- a/api/core/tools/provider/builtin/did/did_appx.py
+++ b/api/core/tools/provider/builtin/did/did_appx.py
@@ -83,5 +83,5 @@ class DIDApp:
             if status["status"] == "done":
                 return status
             elif status["status"] == "error" or status["status"] == "rejected":
-                raise HTTPError(f'Talks {id} failed: {status["status"]} {status.get("error",{}).get("description")}')
+                raise HTTPError(f'Talks {id} failed: {status["status"]} {status.get("error", {}).get("description")}')
             time.sleep(poll_interval)
diff --git a/api/core/tools/provider/builtin/hap/tools/list_worksheet_records.py b/api/core/tools/provider/builtin/hap/tools/list_worksheet_records.py
index 7e9f70f8e5..592fa230cf 100644
--- a/api/core/tools/provider/builtin/hap/tools/list_worksheet_records.py
+++ b/api/core/tools/provider/builtin/hap/tools/list_worksheet_records.py
@@ -142,7 +142,7 @@ class ListWorksheetRecordsTool(BuiltinTool):
         for control in controls:
             control_type_id = self.get_real_type_id(control)
             if (control_type_id in self._get_ignore_types()) or (
-                allow_fields and not control["controlId"] in allow_fields
+                allow_fields and control["controlId"] not in allow_fields
             ):
                 continue
             else:
diff --git a/api/core/tools/provider/builtin/hap/tools/list_worksheets.py b/api/core/tools/provider/builtin/hap/tools/list_worksheets.py
index b4193f00bf..4dba2df1f1 100644
--- a/api/core/tools/provider/builtin/hap/tools/list_worksheets.py
+++ b/api/core/tools/provider/builtin/hap/tools/list_worksheets.py
@@ -67,7 +67,7 @@ class ListWorksheetsTool(BuiltinTool):
         items = []
         tables = ""
         for item in section.get("items", []):
-            if item.get("type") == 0 and (not "notes" in item or item.get("notes") != "NO"):
+            if item.get("type") == 0 and ("notes" not in item or item.get("notes") != "NO"):
                 if type == "json":
                     filtered_item = {"id": item["id"], "name": item["name"], "notes": item.get("notes", "")}
                     items.append(filtered_item)
diff --git a/api/core/workflow/graph_engine/entities/graph.py b/api/core/workflow/graph_engine/entities/graph.py
index 0b83ee10cd..f1f677b8c1 100644
--- a/api/core/workflow/graph_engine/entities/graph.py
+++ b/api/core/workflow/graph_engine/entities/graph.py
@@ -310,7 +310,7 @@ class Graph(BaseModel):
                     parallel_branch_node_ids["default"].append(graph_edge.target_node_id)
                 else:
                     condition_hash = graph_edge.run_condition.hash
-                    if not condition_hash in condition_edge_mappings:
+                    if condition_hash not in condition_edge_mappings:
                         condition_edge_mappings[condition_hash] = []
 
                     condition_edge_mappings[condition_hash].append(graph_edge)
diff --git a/api/core/workflow/graph_engine/graph_engine.py b/api/core/workflow/graph_engine/graph_engine.py
index f4e87a42a7..1db9b690ab 100644
--- a/api/core/workflow/graph_engine/graph_engine.py
+++ b/api/core/workflow/graph_engine/graph_engine.py
@@ -90,9 +90,9 @@ class GraphEngine:
         thread_pool_max_submit_count = 100
         thread_pool_max_workers = 10
 
-        ## init thread pool
+        # init thread pool
         if thread_pool_id:
-            if not thread_pool_id in GraphEngine.workflow_thread_pool_mapping:
+            if thread_pool_id not in GraphEngine.workflow_thread_pool_mapping:
                 raise ValueError(f"Max submit count {thread_pool_max_submit_count} of workflow thread pool reached.")
 
             self.thread_pool_id = thread_pool_id
diff --git a/api/migrations/versions/03f98355ba0e_add_workflow_tool_label_and_tool_.py b/api/migrations/versions/03f98355ba0e_add_workflow_tool_label_and_tool_.py
index 0fba6a87eb..8cd4ec552b 100644
--- a/api/migrations/versions/03f98355ba0e_add_workflow_tool_label_and_tool_.py
+++ b/api/migrations/versions/03f98355ba0e_add_workflow_tool_label_and_tool_.py
@@ -24,6 +24,7 @@ def upgrade():
     with op.batch_alter_table('tool_workflow_providers', schema=None) as batch_op:
         batch_op.add_column(sa.Column('label', sa.String(length=255), server_default='', nullable=False))
 
+
 def downgrade():
     with op.batch_alter_table('tool_workflow_providers', schema=None) as batch_op:
         batch_op.drop_column('label')
diff --git a/api/migrations/versions/9e98fbaffb88_add_workflow_tool_version.py b/api/migrations/versions/9e98fbaffb88_add_workflow_tool_version.py
index bfda7d619c..92f41f0abd 100644
--- a/api/migrations/versions/9e98fbaffb88_add_workflow_tool_version.py
+++ b/api/migrations/versions/9e98fbaffb88_add_workflow_tool_version.py
@@ -21,6 +21,7 @@ def upgrade():
     with op.batch_alter_table('tool_workflow_providers', schema=None) as batch_op:
         batch_op.add_column(sa.Column('version', sa.String(length=255), server_default='', nullable=False))
 
+
 def downgrade():
     with op.batch_alter_table('tool_workflow_providers', schema=None) as batch_op:
         batch_op.drop_column('version')
diff --git a/api/migrations/versions/de95f5c77138_migration_serpapi_api_key.py b/api/migrations/versions/de95f5c77138_migration_serpapi_api_key.py
index 2365766837..fcca705d21 100644
--- a/api/migrations/versions/de95f5c77138_migration_serpapi_api_key.py
+++ b/api/migrations/versions/de95f5c77138_migration_serpapi_api_key.py
@@ -99,7 +99,7 @@ def upgrade():
                 id=id,
                 tenant_id=tenant_id,
                 user_id=user_id,
-                provider='google',
+                provider='google',
                 encrypted_credentials=encrypted_credentials,
                 created_at=created_at,
                 updated_at=updated_at
diff --git a/api/pyproject.toml b/api/pyproject.toml
index 45bd7e00d8..23e2b5c549 100644
--- a/api/pyproject.toml
+++ b/api/pyproject.toml
@@ -13,17 +13,11 @@ preview = true
 select = [
     "B", # flake8-bugbear rules
     "C4", # flake8-comprehensions
+    "E", # pycodestyle E rules
     "F", # pyflakes rules
     "I", # isort rules
     "N", # pep8-naming
     "UP", # pyupgrade rules
-    "E101", # mixed-spaces-and-tabs
-    "E111", # indentation-with-invalid-multiple
-    "E112", # no-indented-block
-    "E113", # unexpected-indentation
-    "E115", # no-indented-block-comment
-    "E116", # unexpected-indentation-comment
-    "E117", # over-indented
     "RUF019", # unnecessary-key-check
     "RUF100", # unused-noqa
     "RUF101", # redirected-noqa
@@ -33,10 +27,15 @@ select = [
     "SIM910", # dict-get-with-none-default
     "W191", # tab-indentation
     "W605", # invalid-escape-sequence
-    "F601", # multi-value-repeated-key-literal
-    "F602", # multi-value-repeated-key-variable
 ]
 ignore = [
+    "E501", # line-too-long
+    "E402", # module-import-not-at-top-of-file
+    "E711", # none-comparison
+    "E712", # true-false-comparison
+    "E721", # type-comparison
+    "E722", # bare-except
+    "E731", # lambda-assignment
     "F403", # undefined-local-with-import-star
     "F405", # undefined-local-with-import-star-usage
     "F821", # undefined-name
diff --git a/api/tests/unit_tests/core/workflow/graph_engine/test_graph.py b/api/tests/unit_tests/core/workflow/graph_engine/test_graph.py
index 65757cd604..13ba11016a 100644
--- a/api/tests/unit_tests/core/workflow/graph_engine/test_graph.py
+++ b/api/tests/unit_tests/core/workflow/graph_engine/test_graph.py
@@ -247,9 +247,9 @@ def test_parallels_graph():
     for i in range(3):
         start_edges = graph.edge_mapping.get("start")
         assert start_edges is not None
-        assert start_edges[i].target_node_id == f"llm{i+1}"
+        assert start_edges[i].target_node_id == f"llm{i + 1}"
 
-        llm_edges = graph.edge_mapping.get(f"llm{i+1}")
+        llm_edges = graph.edge_mapping.get(f"llm{i + 1}")
         assert llm_edges is not None
         assert llm_edges[0].target_node_id == "answer"
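
For quick reference, below is a minimal, self-contained sketch (not part of the patch itself) of the kinds of rewrites that enabling the full pycodestyle "E" rule group drives, mirroring the membership-test, operator-spacing, and block-comment fixes in the hunks above. All identifiers in it are hypothetical examples, not code from the repository.

    # Illustrative sketch only; identifiers are hypothetical, not from the Dify codebase.

    api_info = {"chat": {"path": "/api/v1/chat"}}


    def lookup(api: str) -> dict:
        # E713 (not-in-test): write "x not in y" instead of "not (x in y)"
        if api not in api_info:
            raise KeyError("no such api")
        return api_info[api]


    # E225/E226 (whitespace around operators): "i + 1" rather than "i+1"
    labels = [f"llm{i + 1}" for i in range(3)]

    # E265/E266 (block comment formatting): a single "# " prefix instead of "##"
    # init thread pool
    print(lookup("chat"), labels)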