From 2dbcc0a1bfe85258965a07745547c9984856f1e1 Mon Sep 17 00:00:00 2001
From: Stephen Hu
Date: Wed, 30 Apr 2025 14:55:21 +0800
Subject: [PATCH] Fix: fid mismatch in some cases (#7426)

### What problem does this PR solve?

https://github.com/infiniflow/ragflow/issues/7407

Based on that issue, it appears that in some cases an LLM ends up with a mismatched fid (the wrong "@xxx" factory suffix is attached to the model name). When the lookup that includes the fid fails to find the LLM, retrying with just the model name should succeed, so this PR adds that fallback.

### Type of change

- [x] Bug Fix (non-breaking change which fixes an issue)
---
 api/db/services/llm_service.py | 4 ++++
 1 file changed, 4 insertions(+)

diff --git a/api/db/services/llm_service.py b/api/db/services/llm_service.py
index c368807f9..def50f9c1 100644
--- a/api/db/services/llm_service.py
+++ b/api/db/services/llm_service.py
@@ -100,9 +100,13 @@ class TenantLLMService(CommonService):
         model_config = cls.get_api_key(tenant_id, mdlnm)
         mdlnm, fid = TenantLLMService.split_model_name_and_factory(mdlnm)
+        if not model_config:  # in some cases the fid is mismatched; fall back to the bare model name
+            model_config = cls.get_api_key(tenant_id, mdlnm)
         if model_config:
             model_config = model_config.to_dict()
             llm = LLMService.query(llm_name=mdlnm) if not fid else LLMService.query(llm_name=mdlnm, fid=fid)
+            if not llm and fid:  # in some cases the fid is mismatched; retry the lookup by name only
+                llm = LLMService.query(llm_name=mdlnm)
             if llm:
                 model_config["is_tools"] = llm[0].is_tools
         if not model_config:
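
For reviewers, a minimal sketch of the fallback behaviour this patch introduces. The dicts and standalone functions below are illustrative stand-ins for the real `TenantLLMService` / `LLMService` database lookups in `api/db/services/llm_service.py`, not the actual implementation:

```python
# Illustrative sketch only: TENANT_MODELS, get_api_key and
# split_model_name_and_factory here are simplified stand-ins.

# Configured models, keyed the way the lookup effectively resolves them:
# some entries may be stored as "name@factory", others as a bare "name".
TENANT_MODELS = {
    "qwen2.5:7b": {"api_key": "sk-xxx", "llm_factory": "Ollama"},
}


def split_model_name_and_factory(mdlnm: str):
    """Split 'model@factory' into (model, factory); factory is None if absent."""
    if "@" in mdlnm:
        name, fid = mdlnm.rsplit("@", 1)
        return name, fid
    return mdlnm, None


def get_api_key(mdlnm: str):
    """Stand-in for the exact-key config lookup."""
    return TENANT_MODELS.get(mdlnm)


def get_model_config(mdlnm: str):
    # First try the name exactly as passed in (possibly "name@factory").
    model_config = get_api_key(mdlnm)
    name, fid = split_model_name_and_factory(mdlnm)
    if not model_config:
        # Fallback added by the patch: if the "@factory" suffix was wrong,
        # retry with the bare model name.
        model_config = get_api_key(name)
    return model_config


if __name__ == "__main__":
    # A model name with a spurious "@VLLM" suffix previously missed the config;
    # with the fallback, the bare-name lookup still finds it.
    print(get_model_config("qwen2.5:7b@VLLM"))  # -> {'api_key': 'sk-xxx', ...}
    print(get_model_config("qwen2.5:7b"))       # -> {'api_key': 'sk-xxx', ...}
```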