Extract model dir from model's full name (#3368)
### What problem does this PR solve?

When a model's group name contains digits (0-9), the downloaded model cannot be found, because the model dir's name is not correctly extracted from the model's full name.

### Type of change

- [ ] Bug Fix (non-breaking change which fixes an issue)

Co-authored-by: 王志鹏 <zhipeng3.wang@midea.com>
Co-authored-by: Kevin Hu <kevinhu.sh@gmail.com>
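A minimal sketch of the regex change, assuming an illustrative model name whose group prefix contains digits (the specific name below is only an example, not necessarily the one that triggered the bug):

```python
import re

# Illustrative model name; "maidalun1020" stands in for any group name containing digits.
model_name = "maidalun1020/bce-embedding-base_v1"

# Old pattern: only strips letter-only prefixes, so the group name is left in place
# and the resulting cache path does not match the downloaded directory.
print(re.sub(r"^[a-zA-Z]+/", "", model_name))     # maidalun1020/bce-embedding-base_v1

# New pattern: also matches digits in the group name, yielding the bare model dir.
print(re.sub(r"^[a-zA-Z0-9]+/", "", model_name))  # bce-embedding-base_v1
```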
This commit is contained in:
parent 667d0e5537
commit fa54cd5f5c
```diff
@@ -66,12 +66,12 @@ class DefaultEmbedding(Base):
                 import torch
                 if not DefaultEmbedding._model:
                     try:
-                        DefaultEmbedding._model = FlagModel(os.path.join(get_home_cache_dir(), re.sub(r"^[a-zA-Z]+/", "", model_name)),
+                        DefaultEmbedding._model = FlagModel(os.path.join(get_home_cache_dir(), re.sub(r"^[a-zA-Z0-9]+/", "", model_name)),
                                                             query_instruction_for_retrieval="为这个句子生成表示以用于检索相关文章:",
                                                             use_fp16=torch.cuda.is_available())
                     except Exception:
                         model_dir = snapshot_download(repo_id="BAAI/bge-large-zh-v1.5",
-                                                      local_dir=os.path.join(get_home_cache_dir(), re.sub(r"^[a-zA-Z]+/", "", model_name)),
+                                                      local_dir=os.path.join(get_home_cache_dir(), re.sub(r"^[a-zA-Z0-9]+/", "", model_name)),
                                                       local_dir_use_symlinks=False)
                         DefaultEmbedding._model = FlagModel(model_dir,
                                                             query_instruction_for_retrieval="为这个句子生成表示以用于检索相关文章:",
```
```diff
@@ -65,12 +65,12 @@ class DefaultRerank(Base):
         if not DefaultRerank._model:
             try:
                 DefaultRerank._model = FlagReranker(
-                    os.path.join(get_home_cache_dir(), re.sub(r"^[a-zA-Z]+/", "", model_name)),
+                    os.path.join(get_home_cache_dir(), re.sub(r"^[a-zA-Z0-9]+/", "", model_name)),
                     use_fp16=torch.cuda.is_available())
             except Exception:
                 model_dir = snapshot_download(repo_id=model_name,
                                               local_dir=os.path.join(get_home_cache_dir(),
-                                                                     re.sub(r"^[a-zA-Z]+/", "", model_name)),
+                                                                     re.sub(r"^[a-zA-Z0-9]+/", "", model_name)),
                                               local_dir_use_symlinks=False)
                 DefaultRerank._model = FlagReranker(model_dir, use_fp16=torch.cuda.is_available())
         self._model = DefaultRerank._model
```
```diff
@@ -130,7 +130,7 @@ class YoudaoRerank(DefaultRerank):
                 logger.info("LOADING BCE...")
                 YoudaoRerank._model = RerankerModel(model_name_or_path=os.path.join(
                     get_home_cache_dir(),
-                    re.sub(r"^[a-zA-Z]+/", "", model_name)))
+                    re.sub(r"^[a-zA-Z0-9]+/", "", model_name)))
             except Exception:
                 YoudaoRerank._model = RerankerModel(
                     model_name_or_path=model_name.replace(
```
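As a side observation (not part of this PR), a sketch that strips everything up to the first `/` would handle group prefixes containing any characters, not just letters and digits; the helper name below is hypothetical:

```python
# Alternative sketch: drop everything up to the first "/", so any group prefix
# (letters, digits, "-", "_", etc.) maps to the same local model dir name.
def model_dir_name(model_name: str) -> str:
    return model_name.split("/", 1)[-1]

assert model_dir_name("BAAI/bge-large-zh-v1.5") == "bge-large-zh-v1.5"
assert model_dir_name("maidalun1020/bce-reranker-base_v1") == "bce-reranker-base_v1"
assert model_dir_name("bge-large-zh-v1.5") == "bge-large-zh-v1.5"  # no group prefix
```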