Mirror of https://git.mirrors.martin98.com/https://github.com/infiniflow/ragflow.git, synced 2025-08-14 13:06:00 +08:00
add interpreter to graph (#1347)

### What problem does this PR solve?

#918

### Type of change

- [x] New Feature (non-breaking change which adds functionality)

This commit is contained in:
parent f6159ee4d3
commit 7c9ea5cad9
@@ -19,6 +19,7 @@ from functools import partial
 from flask import request, Response
 from flask_login import login_required, current_user
+from api.db.db_models import UserCanvas
 from api.db.services.canvas_service import CanvasTemplateService, UserCanvasService
 from api.utils import get_uuid
 from api.utils.api_utils import get_json_result, server_error_response, validate_request
@@ -34,8 +35,9 @@ def templates():
 @manager.route('/list', methods=['GET'])
 @login_required
 def canvas_list():
-    return get_json_result(data=[c.to_dict() for c in UserCanvasService.query(user_id=current_user.id)])
+    return get_json_result(data=sorted([c.to_dict() for c in \
+                           UserCanvasService.query(user_id=current_user.id)], key=lambda x: x["update_time"])
+    )
 
 
 @manager.route('/rm', methods=['POST'])
@@ -53,7 +55,7 @@ def rm():
 def save():
     req = request.json
     req["user_id"] = current_user.id
-    if not isinstance(req["dsl"], str):req["dsl"] = json.dumps(req["dsl"], ensure_ascii=False)
+    if not isinstance(req["dsl"], str): req["dsl"] = json.dumps(req["dsl"], ensure_ascii=False)
 
     req["dsl"] = json.loads(req["dsl"])
     if "id" not in req:
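For context: `save()` accepts `dsl` either as a JSON string or as an already-parsed object; the handler serializes it when needed and then parses it back so the rest of the function can work on a dict. A standalone round-trip sketch mirroring just those two lines (the helper name is illustrative, not part of the PR):

```python
import json


def normalize_dsl(dsl):
    """Accept a dict or a JSON string and return (json_string, parsed_dict)."""
    if not isinstance(dsl, str):
        dsl = json.dumps(dsl, ensure_ascii=False)
    return dsl, json.loads(dsl)


assert normalize_dsl('{"components": {}}')[1] == {"components": {}}
assert normalize_dsl({"components": {}})[1] == {"components": {}}
```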
@@ -111,7 +113,7 @@ def run():
             for k in ans.keys():
                 final_ans[k] = ans[k]
             ans = {"answer": ans["content"], "reference": ans.get("reference", [])}
-            yield "data:" + json.dumps({"retcode": 0, "retmsg": "", "data": ans}, ensure_ascii=False) +"\n\n"
+            yield "data:" + json.dumps({"retcode": 0, "retmsg": "", "data": ans}, ensure_ascii=False) + "\n\n"
 
         canvas.messages.append({"role": "assistant", "content": final_ans["content"]})
         if "reference" in final_ans:
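The yield above frames each intermediate answer as a Server-Sent Events message: one `data:` line carrying JSON, terminated by a blank line (this hunk only adds the missing space before `"\n\n"`). A minimal sketch of that framing, detached from the canvas code; the payload keys are just the ones visible in the hunk:

```python
import json


def sse_event(payload: dict) -> str:
    # One SSE message: a "data:" line with a JSON body, terminated by an empty line.
    return "data:" + json.dumps(payload, ensure_ascii=False) + "\n\n"


if __name__ == "__main__":
    ans = {"answer": "Hello!", "reference": []}
    print(sse_event({"retcode": 0, "retmsg": "", "data": ans}), end="")
```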
@@ -153,5 +155,3 @@ def reset():
         return get_json_result(data=req["dsl"])
     except Exception as e:
         return server_error_response(e)
-
-
@@ -91,4 +91,9 @@ class FileSource(StrEnum):
     KNOWLEDGEBASE = "knowledgebase"
     S3 = "s3"
 
+
+class CanvasType(StrEnum):
+    ChatBot = "chatbot"
+    DocBot = "docbot"
+
 KNOWLEDGEBASE_FOLDER_NAME=".knowledgebase"
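CanvasType joins FileSource as a StrEnum, so its members behave like their string values wherever the code compares or serializes them. A small illustration, assuming the project's StrEnum is the usual `str` + `Enum` mixin (the StrEnum below is a stand-in, not the project's definition):

```python
from enum import Enum


class StrEnum(str, Enum):
    """Stand-in for the project's StrEnum: members double as plain strings."""


class CanvasType(StrEnum):
    ChatBot = "chatbot"
    DocBot = "docbot"


# Members compare equal to their values and serialize cleanly.
assert CanvasType.ChatBot == "chatbot"
assert sorted(c.value for c in CanvasType) == ["chatbot", "docbot"]
```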
@@ -13,6 +13,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 #
+import json
 import os
 import time
 import uuid
@@ -21,11 +22,13 @@ from copy import deepcopy
 from api.db import LLMType, UserTenantRole
 from api.db.db_models import init_database_tables as init_web_db, LLMFactories, LLM, TenantLLM
 from api.db.services import UserService
+from api.db.services.canvas_service import CanvasTemplateService
 from api.db.services.document_service import DocumentService
 from api.db.services.knowledgebase_service import KnowledgebaseService
 from api.db.services.llm_service import LLMFactoriesService, LLMService, TenantLLMService, LLMBundle
 from api.db.services.user_service import TenantService, UserTenantService
 from api.settings import CHAT_MDL, EMBEDDING_MDL, ASR_MDL, IMAGE2TEXT_MDL, PARSERS, LLM_FACTORY, API_KEY, LLM_BASE_URL
+from api.utils.file_utils import get_project_base_directory
 
 
 def init_superuser():
@@ -694,6 +697,20 @@ def init_llm_factory():
     """
 
 
+def add_graph_templates():
+    dir = os.path.join(get_project_base_directory(), "graph", "templates")
+    for fnm in os.listdir(dir):
+        try:
+            cnvs = json.load(open(os.path.join(dir, fnm), "r"))
+            try:
+                CanvasTemplateService.save(**cnvs)
+            except:
+                CanvasTemplateService.update_by_id(cnvs["id"], cnvs)
+        except Exception as e:
+            print("Add graph templates error: ", e)
+            print("------------", flush=True)
+
+
 def init_web_data():
     start_time = time.time()
 
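The new `add_graph_templates()` walks graph/templates at startup and upserts every JSON file: it tries to insert first and falls back to an update when the insert fails (for instance because the template id already exists). A standalone sketch of that load-and-upsert pattern, using an in-memory dict in place of CanvasTemplateService (the names below are illustrative, not the project API):

```python
import json
import os

# In-memory stand-in for the template table managed by CanvasTemplateService.
TEMPLATES: dict[str, dict] = {}


def upsert_template(path: str) -> str:
    """Load one template JSON and insert it, updating the existing entry if the id is taken."""
    with open(path, "r") as f:
        cnvs = json.load(f)
    if cnvs["id"] in TEMPLATES:
        TEMPLATES[cnvs["id"]].update(cnvs)  # the real code catches the failed save() instead
    else:
        TEMPLATES[cnvs["id"]] = cnvs
    return cnvs["id"]


def load_graph_templates(template_dir: str) -> None:
    """Best-effort loop over every file, mirroring add_graph_templates()."""
    for fnm in os.listdir(template_dir):
        try:
            upsert_template(os.path.join(template_dir, fnm))
        except Exception as e:
            print("Add graph templates error:", e)
```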
@@ -701,6 +718,7 @@ def init_web_data():
     if not UserService.get_all().count():
         init_superuser()
 
+    add_graph_templates()
     print("init web data success:{}".format(time.time() - start_time))
 
 
graph/templates/HR_callout_zh.json (new file, 782 lines). File diff suppressed because one or more lines are too long.
graph/templates/customer_service.json (new file, 9 lines). File diff suppressed because one or more lines are too long.
graph/templates/interpreter.json (new file, 8 lines). File diff suppressed because one or more lines are too long.
@@ -158,7 +158,7 @@
             }
         },
         "downstream": ["answer:1"],
-        "upstream": ["relevant:0"]
+        "upstream": ["retrieval:0"]
     },
     "generate:get_wechat": {
         "obj": {
graph/test/dsl_examples/intergreper.json (new file, 39 lines)
@@ -0,0 +1,39 @@
+{
+    "components": {
+        "begin": {
+            "obj":{
+                "component_name": "Begin",
+                "params": {
+                    "prologue": "Hi there! Please enter the text you want to translate in a format like: 'text you want to translate' => target language. For example: 您好! => English"
+                }
+            },
+            "downstream": ["answer:0"],
+            "upstream": []
+        },
+        "answer:0": {
+            "obj": {
+                "component_name": "Answer",
+                "params": {}
+            },
+            "downstream": ["generate:0"],
+            "upstream": ["begin", "generate:0"]
+        },
+        "generate:0": {
+            "obj": {
+                "component_name": "Generate",
+                "params": {
+                    "llm_id": "deepseek-chat",
+                    "prompt": "You are a professional interpreter.\n- Role: a professional interpreter.\n- Input format: content to be translated => target language.\n- Answer format: => translated content in target language.\n- Examples:\n - user: 您好! => English. assistant: => How are you doing!\n - user: You look good today. => Japanese. assistant: => 今日は調子がいいですね。\n",
+                    "temperature": 0.5
+                }
+            },
+            "downstream": ["answer:0"],
+            "upstream": ["answer:0"]
+        }
+    },
+    "history": [],
+    "messages": [],
+    "reference": {},
+    "path": [],
+    "answer": []
+}
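The template above wires three components into a loop: begin feeds answer:0, and answer:0 and generate:0 feed each other, so every user turn is routed through the Generate prompt and back to the Answer component. A self-contained check (not part of the PR) that such a DSL's downstream/upstream edges only reference components it actually defines:

```python
import json


def check_dsl_edges(dsl: dict) -> list[str]:
    """Return dangling references: edges that name a component the DSL never defines."""
    components = dsl["components"]
    problems = []
    for name, spec in components.items():
        for direction in ("downstream", "upstream"):
            for target in spec.get(direction, []):
                if target not in components:
                    problems.append(f"{name}.{direction} -> {target}")
    return problems


if __name__ == "__main__":
    with open("graph/test/dsl_examples/intergreper.json", "r") as f:
        dsl = json.load(f)
    print(check_dsl_edges(dsl) or "all edges resolve")
```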
@@ -587,5 +587,6 @@ You're a question analyzer.
 Answer format: (in language of user's question)
  - keyword:
 """
-    kwd, _ = chat_mdl.chat(prompt, [{"role": "user", "content": content}], {"temperature": 0.2})
+    kwd = chat_mdl.chat(prompt, [{"role": "user", "content": content}], {"temperature": 0.2})
+    if isinstance(kwd, tuple): return kwd[0]
     return kwd
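The last hunk makes the keyword helper tolerate both return shapes of `chat_mdl.chat(...)`: a plain string or a tuple whose first element is the text (the tuple shape is inferred from the removed `kwd, _ = ...` unpacking, not from a documented contract). The same normalization in isolation:

```python
def chat_text(result):
    """Collapse a chat-model result to its text, whether it is a str or a (text, extra) tuple."""
    if isinstance(result, tuple):
        return result[0]
    return result


assert chat_text("keyword: ragflow") == "keyword: ragflow"
assert chat_text(("keyword: ragflow", 128)) == "keyword: ragflow"
```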