@@ -74,7 +74,7 @@ class Signer:
     def sign(request, credentials):
         if request.path == "":
             request.path = "/"
-        if request.method != "GET" and not ("Content-Type" in request.headers):
+        if request.method != "GET" and "Content-Type" not in request.headers:
             request.headers["Content-Type"] = "application/x-www-form-urlencoded; charset=utf-8"

         format_date = Signer.get_current_format_date()
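These `not (x in y)` → `x not in y` rewrites recur throughout the diff; under ruff they map to pycodestyle's E713 (not-in test), which the pyproject change below turns on via the blanket "E" select. A minimal sketch of the equivalence, using a stand-in headers dict:

```python
# `not x in y` parses as `not (x in y)`, so all three spellings are
# equivalent; `x not in y` is the idiomatic one and the only form that
# passes ruff's E713 check.
headers = {"Accept": "application/json"}

assert ("Content-Type" not in headers) == (not ("Content-Type" in headers))

if "Content-Type" not in headers:
    headers["Content-Type"] = "application/x-www-form-urlencoded; charset=utf-8"
print(headers)
```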
@@ -31,7 +31,7 @@ class Service:
         self.service_info.scheme = scheme

     def get(self, api, params, doseq=0):
-        if not (api in self.api_info):
+        if api not in self.api_info:
             raise Exception("no such api")

         api_info = self.api_info[api]
@@ -49,7 +49,7 @@ class Service:
             raise Exception(resp.text)

     def post(self, api, params, form):
-        if not (api in self.api_info):
+        if api not in self.api_info:
             raise Exception("no such api")
         api_info = self.api_info[api]
         r = self.prepare_request(api_info, params)
@@ -72,7 +72,7 @@ class Service:
             raise Exception(resp.text)

     def json(self, api, params, body):
-        if not (api in self.api_info):
+        if api not in self.api_info:
             raise Exception("no such api")
         api_info = self.api_info[api]
         r = self.prepare_request(api_info, params)
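`get`, `post`, and `json` all repeat the same guard. A hypothetical `_require_api` helper (not part of this diff; the name and toy data are invented for illustration) would centralize it:

```python
class Service:
    """Toy stand-in for the SDK's Service class; api_info is assumed to be
    a dict mapping api names to request metadata."""

    def __init__(self, api_info):
        self.api_info = api_info

    def _require_api(self, api):
        # Hypothetical helper: the guard duplicated across get/post/json.
        if api not in self.api_info:
            raise Exception("no such api")
        return self.api_info[api]


service = Service({"chat": {"path": "/chat"}})
print(service._require_api("chat"))  # {'path': '/chat'}
```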
@@ -109,7 +109,7 @@ class MaasService(Service):
         if not self._apikey and not credentials_exist:
             raise new_client_sdk_request_error("no valid credential", req_id)

-        if not (api in self.api_info):
+        if api not in self.api_info:
             raise new_client_sdk_request_error("no such api", req_id)

     def _call(self, endpoint_id, api, req_id, params, body, apikey=None, stream=False):
@@ -71,7 +71,7 @@ class BingSearchTool(BuiltinTool):
         text = ""
         if search_results:
             for i, result in enumerate(search_results):
-                text += f'{i+1}: {result.get("name", "")} - {result.get("snippet", "")}\n'
+                text += f'{i + 1}: {result.get("name", "")} - {result.get("snippet", "")}\n'

         if computation and "expression" in computation and "value" in computation:
             text += "\nComputation:\n"
@@ -83,5 +83,5 @@ class DIDApp:
             if status["status"] == "done":
                 return status
             elif status["status"] == "error" or status["status"] == "rejected":
-                raise HTTPError(f'Talks {id} failed: {status["status"]} {status.get("error",{}).get("description")}')
+                raise HTTPError(f'Talks {id} failed: {status["status"]} {status.get("error", {}).get("description")}')
             time.sleep(poll_interval)
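Beyond the comma-spacing fix (E231, missing whitespace after comma), the raise line shows the chained-`.get()` pattern worth noting: the `{}` default keeps the second lookup safe when "error" is absent.

```python
status = {"status": "rejected"}  # no "error" key present

# .get("error", {}) falls back to an empty dict, so the chained
# .get("description") returns None instead of raising AttributeError.
description = status.get("error", {}).get("description")
print(description)  # None
```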
@@ -142,7 +142,7 @@ class ListWorksheetRecordsTool(BuiltinTool):
         for control in controls:
             control_type_id = self.get_real_type_id(control)
             if (control_type_id in self._get_ignore_types()) or (
-                allow_fields and not control["controlId"] in allow_fields
+                allow_fields and control["controlId"] not in allow_fields
             ):
                 continue
             else:
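Because `not` binds more loosely than `in`, the old `not control["controlId"] in allow_fields` already meant membership negation; the rewrite changes readability, not behavior. A toy version of the filter:

```python
allow_fields = {"name", "email"}
controls = [{"controlId": "name"}, {"controlId": "age"}]

# Keep only controls whose id is allow-listed, mirroring the loop above.
kept = [c for c in controls if c["controlId"] in allow_fields]
print(kept)  # [{'controlId': 'name'}]
```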
@@ -67,7 +67,7 @@ class ListWorksheetsTool(BuiltinTool):
         items = []
         tables = ""
         for item in section.get("items", []):
-            if item.get("type") == 0 and (not "notes" in item or item.get("notes") != "NO"):
+            if item.get("type") == 0 and ("notes" not in item or item.get("notes") != "NO"):
                 if type == "json":
                     filtered_item = {"id": item["id"], "name": item["name"], "notes": item.get("notes", "")}
                     items.append(filtered_item)
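Same precedence story here, with one subtlety: `not "notes" in item or ...` groups as `(not ("notes" in item)) or ...`, so the added parentheses are clarifying rather than behavior-changing. A sketch with an invented worksheet item:

```python
item = {"type": 0, "id": "ws1", "name": "Sheet", "notes": "NO"}

old_style = item.get("type") == 0 and (not "notes" in item or item.get("notes") != "NO")  # noqa: E713
new_style = item.get("type") == 0 and ("notes" not in item or item.get("notes") != "NO")
assert old_style == new_style  # both False: notes == "NO" filters the item out
```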
@@ -310,7 +310,7 @@ class Graph(BaseModel):
                     parallel_branch_node_ids["default"].append(graph_edge.target_node_id)
                 else:
                     condition_hash = graph_edge.run_condition.hash
-                    if not condition_hash in condition_edge_mappings:
+                    if condition_hash not in condition_edge_mappings:
                         condition_edge_mappings[condition_hash] = []

                     condition_edge_mappings[condition_hash].append(graph_edge)
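The check-then-init pattern the diff touches is also commonly written with `dict.setdefault`, which folds the membership test and the empty-list init into one call; a sketch with toy edges:

```python
condition_edge_mappings: dict[str, list[str]] = {}
edges = [("hash-a", "edge-1"), ("hash-a", "edge-2"), ("hash-b", "edge-3")]

for condition_hash, edge in edges:
    # setdefault returns the existing list, or inserts [] and returns it.
    condition_edge_mappings.setdefault(condition_hash, []).append(edge)

print(condition_edge_mappings)
# {'hash-a': ['edge-1', 'edge-2'], 'hash-b': ['edge-3']}
```

RUF019 (unnecessary-key-check), kept in the select list below, polices a closely related key-check pattern.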
@@ -90,9 +90,9 @@ class GraphEngine:
         thread_pool_max_submit_count = 100
         thread_pool_max_workers = 10

-        ## init thread pool
+        # init thread pool
         if thread_pool_id:
-            if not thread_pool_id in GraphEngine.workflow_thread_pool_mapping:
+            if thread_pool_id not in GraphEngine.workflow_thread_pool_mapping:
                 raise ValueError(f"Max submit count {thread_pool_max_submit_count} of workflow thread pool reached.")

             self.thread_pool_id = thread_pool_id
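The `##` → `#` change corresponds to pycodestyle's E266 (too many leading `#` for a block comment):

```python
## double-hash block comments trip E266
# a single hash like this satisfies the rule
thread_pool_max_workers = 10
```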
@@ -24,6 +24,7 @@ def upgrade():
     with op.batch_alter_table('tool_workflow_providers', schema=None) as batch_op:
         batch_op.add_column(sa.Column('label', sa.String(length=255), server_default='', nullable=False))

+
 def downgrade():
     with op.batch_alter_table('tool_workflow_providers', schema=None) as batch_op:
         batch_op.drop_column('label')
@@ -21,6 +21,7 @@ def upgrade():
     with op.batch_alter_table('tool_workflow_providers', schema=None) as batch_op:
         batch_op.add_column(sa.Column('version', sa.String(length=255), server_default='', nullable=False))

+
 def downgrade():
     with op.batch_alter_table('tool_workflow_providers', schema=None) as batch_op:
         batch_op.drop_column('version')
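Both migration hunks grow by exactly one line with no visible text change, which fits pycodestyle's E302 (expected two blank lines before a top-level def): the likely fix, reconstructed as the `+` blank line above, is a second blank line before `def downgrade():` that the extracted diff could not show. The rule in miniature:

```python
def upgrade():
    pass


def downgrade():  # two blank lines above keep E302 happy
    pass
```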
@@ -99,7 +99,7 @@ def upgrade():
             id=id,
             tenant_id=tenant_id,
             user_id=user_id,
-            provider='google',
+            provider='google',
             encrypted_credentials=encrypted_credentials,
             created_at=created_at,
             updated_at=updated_at
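The two `provider='google',` rows read identically because the change is whitespace-only and invisible in the extracted text; a tab-indented line (W191, which stays selected) or stray trailing or misaligned spaces would produce exactly this kind of hunk. For example:

```python
old = "\tprovider='google',"    # tab indentation -> W191 territory
new = "    provider='google',"  # four spaces instead
print(old == new)  # False: the strings differ even where a diff view shows the same glyphs
```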
@@ -13,17 +13,11 @@ preview = true
 select = [
     "B", # flake8-bugbear rules
     "C4", # flake8-comprehensions
+    "E", # pycodestyle E rules
     "F", # pyflakes rules
     "I", # isort rules
     "N", # pep8-naming
     "UP", # pyupgrade rules
-    "E101", # mixed-spaces-and-tabs
-    "E111", # indentation-with-invalid-multiple
-    "E112", # no-indented-block
-    "E113", # unexpected-indentation
-    "E115", # no-indented-block-comment
-    "E116", # unexpected-indentation-comment
-    "E117", # over-indented
     "RUF019", # unnecessary-key-check
     "RUF100", # unused-noqa
     "RUF101", # redirected-noqa
@@ -33,10 +27,15 @@ select = [
     "SIM910", # dict-get-with-none-default
     "W191", # tab-indentation
     "W605", # invalid-escape-sequence
-    "F601", # multi-value-repeated-key-literal
-    "F602", # multi-value-repeated-key-variable
 ]
 ignore = [
+    "E501", # line-too-long
+    "E402", # module-import-not-at-top-of-file
+    "E711", # none-comparison
+    "E712", # true-false-comparison
+    "E721", # type-comparison
+    "E722", # bare-except
+    "E731", # lambda-assignment
     "F403", # undefined-local-with-import-star
     "F405", # undefined-local-with-import-star-usage
     "F821", # undefined-name
@@ -247,9 +247,9 @@ def test_parallels_graph():
     for i in range(3):
         start_edges = graph.edge_mapping.get("start")
         assert start_edges is not None
-        assert start_edges[i].target_node_id == f"llm{i+1}"
+        assert start_edges[i].target_node_id == f"llm{i + 1}"

-        llm_edges = graph.edge_mapping.get(f"llm{i+1}")
+        llm_edges = graph.edge_mapping.get(f"llm{i + 1}")
         assert llm_edges is not None
         assert llm_edges[0].target_node_id == "answer"