mv service_conf.yaml to conf/ and fix: add 'answer' as a parameter to 'generate' (#3379)
### What problem does this PR solve?

#3373

### Type of change

- [x] Refactoring
- [x] Bug fix
parent 1fe9a2e6fd
commit ccf189cb7f
@@ -399,7 +399,7 @@ class ComponentBase(ABC):
         self._param.check()

     def get_dependent_components(self):
-        cpnts = [para["component_id"] for para in self._param.query]
+        cpnts = [para["component_id"] for para in self._param.query if para["component_id"].lower().find("answer") < 0]
         return cpnts

     def run(self, history, **kwargs):
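A minimal sketch of what the new filter does, using made-up component ids (the `query` entries below are illustrative, not from the repository): any upstream component whose id contains "answer" is no longer reported as a dependency.

```python
# Hypothetical `query` entries; only the filtering expression comes from the diff.
query = [
    {"component_id": "Answer:0"},     # contains "answer" -> excluded
    {"component_id": "Retrieval:0"},  # kept
]

cpnts = [para["component_id"] for para in query
         if para["component_id"].lower().find("answer") < 0]
print(cpnts)  # ['Retrieval:0']
```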
@@ -63,7 +63,7 @@ class Generate(ComponentBase):
     component_name = "Generate"

     def get_dependent_components(self):
-        cpnts = [para["component_id"] for para in self._param.parameters]
+        cpnts = [para["component_id"] for para in self._param.parameters if para["component_id"].lower().find("answer") < 0]
         return cpnts

     def set_cite(self, retrieval_res, answer):
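The same exclusion is applied to `Generate`'s `parameters`. For reference on the idiom: `str.find` returns -1 when the substring is absent, so the `< 0` test reads "does not contain 'answer'". A tiny self-contained check (ids are hypothetical):

```python
# str.find returns -1 when the needle is absent, so "< 0" means "not present".
for cid in ("Answer:0", "Generate:0"):
    print(cid, cid.lower().find("answer") < 0)
# Answer:0 False    -> filtered out
# Generate:0 True   -> kept as a dependency
```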
@@ -1082,7 +1082,7 @@ def rm_chunk(tenant_id, dataset_id, document_id):
     if chunk_number != 0:
         DocumentService.decrement_chunk_num(document_id, dataset_id, 1, chunk_number, 0)
     if "chunk_ids" in req and chunk_number != len(req["chunk_ids"]):
-        return get_error_data_result(message=f"rm_chunk deleted chunks {chunk_number}, expect {len(req["chunk_ids"])}")
+        return get_error_data_result(message=f"rm_chunk deleted chunks {chunk_number}, expect {len(req['chunk_ids'])}")
     return get_result(message=f"deleted {chunk_number} chunks")
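The original message reused double quotes for the `req["chunk_ids"]` key inside a double-quoted f-string, which is a syntax error on Python versions before 3.12; switching the inner quotes to single quotes avoids it. A small sketch with a made-up request payload:

```python
# Hypothetical payload; the f-string quoting is the only point here.
req = {"chunk_ids": ["c1", "c2", "c3"]}
chunk_number = 2

# On Python < 3.12 this line would not even parse if the inner quotes
# were double quotes like the outer ones.
msg = f"rm_chunk deleted chunks {chunk_number}, expect {len(req['chunk_ids'])}"
print(msg)  # rm_chunk deleted chunks 2, expect 3
```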
@@ -191,7 +191,7 @@ AUTHENTICATION_DEFAULT_TIMEOUT = 7 * 24 * 60 * 60  # s
 PRIVILEGE_COMMAND_WHITELIST = []
 CHECK_NODES_IDENTITY = False

-if 'username' in get_base_config("es", {}):
+if 'hosts' in get_base_config("es", {}):
     docStoreConn = rag.utils.es_conn.ESConnection()
 else:
     docStoreConn = rag.utils.infinity_conn.InfinityConnection()
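The document-store backend is now selected by whether the `es` section defines `hosts` rather than `username`, so an Elasticsearch configured without credentials still selects `ESConnection`. A sketch of that decision, assuming `get_base_config("es", {})` returns the parsed `es` mapping from service_conf.yaml (the dicts below are illustrative):

```python
def pick_doc_store(es_cfg: dict) -> str:
    # Keying on 'hosts' (the new check) rather than 'username' (the old one).
    return "ESConnection" if "hosts" in es_cfg else "InfinityConnection"

# Elasticsearch without authentication: the old 'username' check would
# have silently fallen back to Infinity.
print(pick_doc_store({"hosts": "http://es01:9200"}))  # ESConnection
print(pick_doc_store({}))                             # InfinityConnection
```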
@@ -1 +0,0 @@
-../docker/service_conf.yaml

conf/service_conf.yaml (new file, 74 lines)
@@ -0,0 +1,74 @@
ragflow:
  host: 0.0.0.0
  http_port: 9380
mysql:
  name: 'rag_flow'
  user: 'root'
  password: 'infini_rag_flow'
  host: 'mysql'
  port: 3306
  max_connections: 100
  stale_timeout: 30
minio:
  user: 'rag_flow'
  password: 'infini_rag_flow'
  host: 'minio:9000'
es:
  hosts: 'http://es01:9200'
  username: 'elastic'
  password: 'infini_rag_flow'
redis:
  db: 1
  password: 'infini_rag_flow'
  host: 'redis:6379'

# postgres:
#   name: 'rag_flow'
#   user: 'rag_flow'
#   password: 'infini_rag_flow'
#   host: 'postgres'
#   port: 5432
#   max_connections: 100
#   stale_timeout: 30
# s3:
#   endpoint: 'endpoint'
#   access_key: 'access_key'
#   secret_key: 'secret_key'
#   region: 'region'
# azure:
#   auth_type: 'sas'
#   container_url: 'container_url'
#   sas_token: 'sas_token'
# azure:
#   auth_type: 'spn'
#   account_url: 'account_url'
#   client_id: 'client_id'
#   secret: 'secret'
#   tenant_id: 'tenant_id'
#   container_name: 'container_name'
# user_default_llm:
#   factory: 'Tongyi-Qianwen'
#   api_key: 'sk-xxxxxxxxxxxxx'
#   base_url: ''
# oauth:
#   github:
#     client_id: xxxxxxxxxxxxxxxxxxxxxxxxx
#     secret_key: xxxxxxxxxxxxxxxxxxxxxxxxxxxx
#     url: https://github.com/login/oauth/access_token
#   feishu:
#     app_id: cli_xxxxxxxxxxxxxxxxxxx
#     app_secret: xxxxxxxxxxxxxxxxxxxxxxxxxxxx
#     app_access_token_url: https://open.feishu.cn/open-apis/auth/v3/app_access_token/internal
#     user_access_token_url: https://open.feishu.cn/open-apis/authen/v1/oidc/access_token
#     grant_type: 'authorization_code'
# authentication:
#   client:
#     switch: false
#     http_app_key:
#     http_secret_key:
#   site:
#     switch: false
# permission:
#   switch: false
#   component: false
#   dataset: false
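With the file now shipped at conf/service_conf.yaml, it can be read like any other YAML config. A minimal sketch (PyYAML and the relative path are assumptions; the keys come from the file above):

```python
import yaml

# Assumes the working directory is the repository root.
with open("conf/service_conf.yaml") as f:
    conf = yaml.safe_load(f)

print(conf["es"]["hosts"])    # http://es01:9200
print(conf["mysql"]["port"])  # 3306
```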
@@ -15,7 +15,6 @@ services:
       - 80:80
       - 443:443
     volumes:
-      - ./service_conf.yaml:/ragflow/conf/service_conf.yaml
       - ./ragflow-logs:/ragflow/logs
       - ./nginx/ragflow.conf:/etc/nginx/conf.d/ragflow.conf
       - ./nginx/proxy.conf:/etc/nginx/proxy.conf
@@ -1,74 +0,0 @@
(deleted file, 74 lines: the removed content is identical to the new conf/service_conf.yaml shown above)
@@ -33,6 +33,7 @@ from tqdm import tqdm
 global max_docs
 max_docs = sys.maxsize

+
 class Benchmark:
     def __init__(self, kb_id):
         self.kb_id = kb_id
@@ -312,7 +312,7 @@ class InfinityConnection(DocStoreConnection):
             for k, v in d.items():
                 if k.endswith("_kwd") and isinstance(v, list):
                     d[k] = " ".join(v)
-        ids = [f"'{d["id"]}'" for d in documents]
+        ids = [f"{d['id']}" for d in documents]
         str_ids = ", ".join(ids)
         str_filter = f"id IN ({str_ids})"
         table_instance.delete(str_filter)
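Besides removing the illegal nested double quotes, the new line no longer wraps each id in single quotes when building the delete filter. A sketch with made-up documents, showing the filter string that ends up in `table_instance.delete`:

```python
# Hypothetical documents; shows the filter string the new code produces.
documents = [{"id": "chunk_1"}, {"id": "chunk_2"}]

ids = [f"{d['id']}" for d in documents]
str_ids = ", ".join(ids)
str_filter = f"id IN ({str_ids})"
print(str_filter)  # id IN (chunk_1, chunk_2)
```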