fix LIGHTEN issue (#2806)
### What problem does this PR solve?

### Type of change

- [x] Bug Fix (non-breaking change which fixes an issue)
This commit is contained in:
parent: 3f065c75da
commit: 2d1c83da59
@@ -332,7 +332,7 @@ def my_llms():
 @login_required
 def list_app():
     self_deploied = ["Youdao","FastEmbed", "BAAI", "Ollama", "Xinference", "LocalAI", "LM-Studio"]
-    weighted = ["Youdao","FastEmbed", "BAAI"] if LIGHTEN else []
+    weighted = ["Youdao","FastEmbed", "BAAI"] if not LIGHTEN else []
     model_type = request.args.get("model_type")
     try:
         objs = TenantLLMService.query(tenant_id=current_user.id)
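The first hunk flips the guard on the built-in factory list. A minimal sketch of the intended behaviour, assuming LIGHTEN marks RAGFlow's slim build that ships without the bundled local models (the flag value, list, and output below are illustrative, not the repository's code):

```python
# Sketch only: LIGHTEN is assumed to mark the slim build without bundled local models,
# so the built-in factories should be advertised only when LIGHTEN is off.
LIGHTEN = False  # assumed flag; in the real code it comes from the settings module

builtin_factories = ["Youdao", "FastEmbed", "BAAI"]

# Before the fix the condition was `if LIGHTEN`, which emptied the list exactly when
# the bundled models were available; `if not LIGHTEN` restores the intended behaviour.
weighted = builtin_factories if not LIGHTEN else []
print(weighted)  # full build -> ['Youdao', 'FastEmbed', 'BAAI']; slim build -> []
```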
@@ -123,7 +123,7 @@ if not LIGHTEN:
     CHAT_MDL = default_llm[LLM_FACTORY]["chat_model"]
     EMBEDDING_MDL = default_llm["BAAI"]["embedding_model"]
-    RERANK_MDL = default_llm["BAAI"]["rerank_model"] if not LIGHTEN else ""
+    RERANK_MDL = default_llm["BAAI"]["rerank_model"]
     ASR_MDL = default_llm[LLM_FACTORY]["asr_model"]
     IMAGE2TEXT_MDL = default_llm[LLM_FACTORY]["image2text_model"]
 else:
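The second hunk drops a conditional that can never be false: the assignment already sits inside an `if not LIGHTEN:` block. A small sketch of the redundancy, with an assumed flag value and a hypothetical `default_llm` entry:

```python
# Sketch only: the flag value and the default_llm entry below are hypothetical.
LIGHTEN = False
default_llm = {"BAAI": {"rerank_model": "example-rerank-model"}}

if not LIGHTEN:
    # Before: RERANK_MDL = default_llm["BAAI"]["rerank_model"] if not LIGHTEN else ""
    # Inside this branch `not LIGHTEN` is always True, so the inner check was dead weight.
    RERANK_MDL = default_llm["BAAI"]["rerank_model"]
else:
    RERANK_MDL = ""

print(RERANK_MDL)  # example-rerank-model
```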
@@ -501,17 +501,6 @@ def naive_merge(sections, chunk_token_num=128, delimiter="\n。;!?"):
 
     for sec, pos in sections:
         add_chunk(sec, pos)
-        continue
-        s, e = 0, 1
-        while e < len(sec):
-            if sec[e] in delimiter:
-                add_chunk(sec[s: e + 1], pos)
-                s = e + 1
-                e = s + 1
-            else:
-                e += 1
-        if s < e:
-            add_chunk(sec[s: e], pos)
 
     return cks
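The third hunk removes dead code in naive_merge: the unconditional `continue` right after `add_chunk(sec, pos)` meant the delimiter-splitting loop below it never executed, so deleting those lines changes nothing at runtime. A self-contained sketch (names and data are simplified stand-ins, not the repository's implementation) showing why the removed block was unreachable:

```python
# Sketch only: `chunks` and the tuple format stand in for the real cks/add_chunk.
def naive_merge_sketch(sections, delimiter="\n。;!?"):
    chunks = []
    for sec, pos in sections:
        chunks.append((sec, pos))  # stands in for add_chunk(sec, pos)
        continue
        # Everything from here to the end of the loop body comes after `continue`,
        # so it never runs; this is the dead delimiter-splitting code the PR deletes.
        for ch in sec:
            if ch in delimiter:
                pass
    return chunks

print(naive_merge_sketch([("第一段。第二段", "p0")]))  # [('第一段。第二段', 'p0')]
```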