diff --git a/api/apps/llm_app.py b/api/apps/llm_app.py
index 4eb60db1a..f59cf2f5a 100644
--- a/api/apps/llm_app.py
+++ b/api/apps/llm_app.py
@@ -338,8 +338,6 @@ def list_app():
     llm_set = set([m["llm_name"] + "@" + m["fid"] for m in llms])
     for o in objs:
-        if not o.api_key:
-            continue
         if o.llm_name + "@" + o.llm_factory in llm_set:
             continue
         llms.append({"llm_name": o.llm_name, "model_type": o.model_type, "fid": o.llm_factory, "available": True})
diff --git a/api/apps/sdk/session.py b/api/apps/sdk/session.py
index aeca09aaf..d29666adf 100644
--- a/api/apps/sdk/session.py
+++ b/api/apps/sdk/session.py
@@ -287,12 +287,6 @@ def chat_completion_openai_like(tenant_id, chat_id):
                 answer = ans["answer"]
                 incremental = answer[should_split_index:]
                 token_used += len(incremental)
-
-                """
-                bugfix: When calling the Create chat completion API, the response data is incoherent.
-                bug code: token_used += len(incremental)
-                fix author: 任奇
-                """
                 if incremental.endswith("</think>"):
                     response_data_len = len(incremental.rstrip("</think>"))
                 else: