mirror of
https://git.mirrors.martin98.com/https://github.com/infiniflow/ragflow.git
synced 2025-06-04 11:24:00 +08:00
feat: docs for api endpoints to generate openapi specification (#3109)
### What problem does this PR solve? **Added openapi specification for API routes. This creates swagger UI similar to FastAPI to better use the API.** Using python package `flasgger` ### Type of change - [x] New Feature (non-breaking change which adds functionality) Not all routes are included since this is a work in progress. Docs can be accessed on: `{host}:{port}/apidocs`
This commit is contained in:
parent
07c453500b
commit
dd1146ec64
@ -21,6 +21,7 @@ from pathlib import Path
|
|||||||
from flask import Blueprint, Flask
|
from flask import Blueprint, Flask
|
||||||
from werkzeug.wrappers.request import Request
|
from werkzeug.wrappers.request import Request
|
||||||
from flask_cors import CORS
|
from flask_cors import CORS
|
||||||
|
from flasgger import Swagger
|
||||||
|
|
||||||
from api.db import StatusEnum
|
from api.db import StatusEnum
|
||||||
from api.db.db_models import close_connection
|
from api.db.db_models import close_connection
|
||||||
@ -34,27 +35,62 @@ from api.settings import API_VERSION, access_logger
|
|||||||
from api.utils.api_utils import server_error_response
|
from api.utils.api_utils import server_error_response
|
||||||
from itsdangerous.url_safe import URLSafeTimedSerializer as Serializer
|
from itsdangerous.url_safe import URLSafeTimedSerializer as Serializer
|
||||||
|
|
||||||
__all__ = ['app']
|
__all__ = ["app"]
|
||||||
|
|
||||||
|
|
||||||
logger = logging.getLogger('flask.app')
|
logger = logging.getLogger("flask.app")
|
||||||
for h in access_logger.handlers:
|
for h in access_logger.handlers:
|
||||||
logger.addHandler(h)
|
logger.addHandler(h)
|
||||||
|
|
||||||
Request.json = property(lambda self: self.get_json(force=True, silent=True))
|
Request.json = property(lambda self: self.get_json(force=True, silent=True))
|
||||||
|
|
||||||
app = Flask(__name__)
|
app = Flask(__name__)
|
||||||
CORS(app, supports_credentials=True,max_age=2592000)
|
|
||||||
|
# Add this at the beginning of your file to configure Swagger UI
|
||||||
|
swagger_config = {
|
||||||
|
"headers": [],
|
||||||
|
"specs": [
|
||||||
|
{
|
||||||
|
"endpoint": "apispec",
|
||||||
|
"route": "/apispec.json",
|
||||||
|
"rule_filter": lambda rule: True, # Include all endpoints
|
||||||
|
"model_filter": lambda tag: True, # Include all models
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"static_url_path": "/flasgger_static",
|
||||||
|
"swagger_ui": True,
|
||||||
|
"specs_route": "/apidocs/",
|
||||||
|
}
|
||||||
|
|
||||||
|
swagger = Swagger(
|
||||||
|
app,
|
||||||
|
config=swagger_config,
|
||||||
|
template={
|
||||||
|
"swagger": "2.0",
|
||||||
|
"info": {
|
||||||
|
"title": "RAGFlow API",
|
||||||
|
"description": "",
|
||||||
|
"version": "1.0.0",
|
||||||
|
},
|
||||||
|
"securityDefinitions": {
|
||||||
|
"ApiKeyAuth": {"type": "apiKey", "name": "Authorization", "in": "header"}
|
||||||
|
},
|
||||||
|
},
|
||||||
|
)
|
||||||
|
|
||||||
|
CORS(app, supports_credentials=True, max_age=2592000)
|
||||||
app.url_map.strict_slashes = False
|
app.url_map.strict_slashes = False
|
||||||
app.json_encoder = CustomJSONEncoder
|
app.json_encoder = CustomJSONEncoder
|
||||||
app.errorhandler(Exception)(server_error_response)
|
app.errorhandler(Exception)(server_error_response)
|
||||||
|
|
||||||
|
|
||||||
## convince for dev and debug
|
## convince for dev and debug
|
||||||
#app.config["LOGIN_DISABLED"] = True
|
# app.config["LOGIN_DISABLED"] = True
|
||||||
app.config["SESSION_PERMANENT"] = False
|
app.config["SESSION_PERMANENT"] = False
|
||||||
app.config["SESSION_TYPE"] = "filesystem"
|
app.config["SESSION_TYPE"] = "filesystem"
|
||||||
app.config['MAX_CONTENT_LENGTH'] = int(os.environ.get("MAX_CONTENT_LENGTH", 128 * 1024 * 1024))
|
app.config["MAX_CONTENT_LENGTH"] = int(
|
||||||
|
os.environ.get("MAX_CONTENT_LENGTH", 128 * 1024 * 1024)
|
||||||
|
)
|
||||||
|
|
||||||
Session(app)
|
Session(app)
|
||||||
login_manager = LoginManager()
|
login_manager = LoginManager()
|
||||||
@ -64,17 +100,23 @@ commands.register_commands(app)
|
|||||||
|
|
||||||
|
|
||||||
def search_pages_path(pages_dir):
|
def search_pages_path(pages_dir):
|
||||||
app_path_list = [path for path in pages_dir.glob('*_app.py') if not path.name.startswith('.')]
|
app_path_list = [
|
||||||
api_path_list = [path for path in pages_dir.glob('*sdk/*.py') if not path.name.startswith('.')]
|
path for path in pages_dir.glob("*_app.py") if not path.name.startswith(".")
|
||||||
|
]
|
||||||
|
api_path_list = [
|
||||||
|
path for path in pages_dir.glob("*sdk/*.py") if not path.name.startswith(".")
|
||||||
|
]
|
||||||
app_path_list.extend(api_path_list)
|
app_path_list.extend(api_path_list)
|
||||||
return app_path_list
|
return app_path_list
|
||||||
|
|
||||||
|
|
||||||
def register_page(page_path):
|
def register_page(page_path):
|
||||||
path = f'{page_path}'
|
path = f"{page_path}"
|
||||||
|
|
||||||
page_name = page_path.stem.rstrip('_app')
|
page_name = page_path.stem.rstrip("_app")
|
||||||
module_name = '.'.join(page_path.parts[page_path.parts.index('api'):-1] + (page_name,))
|
module_name = ".".join(
|
||||||
|
page_path.parts[page_path.parts.index("api") : -1] + (page_name,)
|
||||||
|
)
|
||||||
|
|
||||||
spec = spec_from_file_location(module_name, page_path)
|
spec = spec_from_file_location(module_name, page_path)
|
||||||
page = module_from_spec(spec)
|
page = module_from_spec(spec)
|
||||||
@ -82,8 +124,10 @@ def register_page(page_path):
|
|||||||
page.manager = Blueprint(page_name, module_name)
|
page.manager = Blueprint(page_name, module_name)
|
||||||
sys.modules[module_name] = page
|
sys.modules[module_name] = page
|
||||||
spec.loader.exec_module(page)
|
spec.loader.exec_module(page)
|
||||||
page_name = getattr(page, 'page_name', page_name)
|
page_name = getattr(page, "page_name", page_name)
|
||||||
url_prefix = f'/api/{API_VERSION}' if "/sdk/" in path else f'/{API_VERSION}/{page_name}'
|
url_prefix = (
|
||||||
|
f"/api/{API_VERSION}" if "/sdk/" in path else f"/{API_VERSION}/{page_name}"
|
||||||
|
)
|
||||||
|
|
||||||
app.register_blueprint(page.manager, url_prefix=url_prefix)
|
app.register_blueprint(page.manager, url_prefix=url_prefix)
|
||||||
return url_prefix
|
return url_prefix
|
||||||
@ -91,14 +135,12 @@ def register_page(page_path):
|
|||||||
|
|
||||||
pages_dir = [
|
pages_dir = [
|
||||||
Path(__file__).parent,
|
Path(__file__).parent,
|
||||||
Path(__file__).parent.parent / 'api' / 'apps',
|
Path(__file__).parent.parent / "api" / "apps",
|
||||||
Path(__file__).parent.parent / 'api' / 'apps' / 'sdk',
|
Path(__file__).parent.parent / "api" / "apps" / "sdk",
|
||||||
]
|
]
|
||||||
|
|
||||||
client_urls_prefix = [
|
client_urls_prefix = [
|
||||||
register_page(path)
|
register_page(path) for dir in pages_dir for path in search_pages_path(dir)
|
||||||
for dir in pages_dir
|
|
||||||
for path in search_pages_path(dir)
|
|
||||||
]
|
]
|
||||||
|
|
||||||
|
|
||||||
@ -109,7 +151,9 @@ def load_user(web_request):
|
|||||||
if authorization:
|
if authorization:
|
||||||
try:
|
try:
|
||||||
access_token = str(jwt.loads(authorization))
|
access_token = str(jwt.loads(authorization))
|
||||||
user = UserService.query(access_token=access_token, status=StatusEnum.VALID.value)
|
user = UserService.query(
|
||||||
|
access_token=access_token, status=StatusEnum.VALID.value
|
||||||
|
)
|
||||||
if user:
|
if user:
|
||||||
return user[0]
|
return user[0]
|
||||||
else:
|
else:
|
||||||
@ -123,4 +167,4 @@ def load_user(web_request):
|
|||||||
|
|
||||||
@app.teardown_request
|
@app.teardown_request
|
||||||
def _db_close(exc):
|
def _db_close(exc):
|
||||||
close_connection()
|
close_connection()
|
||||||
|
@ -21,16 +21,72 @@ from api.db.services.document_service import DocumentService
|
|||||||
from api.db.services.file2document_service import File2DocumentService
|
from api.db.services.file2document_service import File2DocumentService
|
||||||
from api.db.services.file_service import FileService
|
from api.db.services.file_service import FileService
|
||||||
from api.db.services.knowledgebase_service import KnowledgebaseService
|
from api.db.services.knowledgebase_service import KnowledgebaseService
|
||||||
from api.db.services.llm_service import TenantLLMService,LLMService
|
from api.db.services.llm_service import TenantLLMService, LLMService
|
||||||
from api.db.services.user_service import TenantService
|
from api.db.services.user_service import TenantService
|
||||||
from api.settings import RetCode
|
from api.settings import RetCode
|
||||||
from api.utils import get_uuid
|
from api.utils import get_uuid
|
||||||
from api.utils.api_utils import get_result, token_required, get_error_data_result, valid,get_parser_config
|
from api.utils.api_utils import (
|
||||||
|
get_result,
|
||||||
|
token_required,
|
||||||
|
get_error_data_result,
|
||||||
|
valid,
|
||||||
|
get_parser_config,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
@manager.route('/datasets', methods=['POST'])
|
@manager.route("/datasets", methods=["POST"])
|
||||||
@token_required
|
@token_required
|
||||||
def create(tenant_id):
|
def create(tenant_id):
|
||||||
|
"""
|
||||||
|
Create a new dataset.
|
||||||
|
---
|
||||||
|
tags:
|
||||||
|
- Datasets
|
||||||
|
security:
|
||||||
|
- ApiKeyAuth: []
|
||||||
|
parameters:
|
||||||
|
- in: header
|
||||||
|
name: Authorization
|
||||||
|
type: string
|
||||||
|
required: true
|
||||||
|
description: Bearer token for authentication.
|
||||||
|
- in: body
|
||||||
|
name: body
|
||||||
|
description: Dataset creation parameters.
|
||||||
|
required: true
|
||||||
|
schema:
|
||||||
|
type: object
|
||||||
|
required:
|
||||||
|
- name
|
||||||
|
properties:
|
||||||
|
name:
|
||||||
|
type: string
|
||||||
|
description: Name of the dataset.
|
||||||
|
permission:
|
||||||
|
type: string
|
||||||
|
enum: ['me', 'team']
|
||||||
|
description: Dataset permission.
|
||||||
|
language:
|
||||||
|
type: string
|
||||||
|
enum: ['Chinese', 'English']
|
||||||
|
description: Language of the dataset.
|
||||||
|
chunk_method:
|
||||||
|
type: string
|
||||||
|
enum: ["naive", "manual", "qa", "table", "paper", "book", "laws",
|
||||||
|
"presentation", "picture", "one", "knowledge_graph", "email"]
|
||||||
|
description: Chunking method.
|
||||||
|
parser_config:
|
||||||
|
type: object
|
||||||
|
description: Parser configuration.
|
||||||
|
responses:
|
||||||
|
200:
|
||||||
|
description: Successful operation.
|
||||||
|
schema:
|
||||||
|
type: object
|
||||||
|
properties:
|
||||||
|
data:
|
||||||
|
type: object
|
||||||
|
"""
|
||||||
req = request.json
|
req = request.json
|
||||||
e, t = TenantService.get_by_id(tenant_id)
|
e, t = TenantService.get_by_id(tenant_id)
|
||||||
permission = req.get("permission")
|
permission = req.get("permission")
|
||||||
@ -38,49 +94,97 @@ def create(tenant_id):
|
|||||||
chunk_method = req.get("chunk_method")
|
chunk_method = req.get("chunk_method")
|
||||||
parser_config = req.get("parser_config")
|
parser_config = req.get("parser_config")
|
||||||
valid_permission = ["me", "team"]
|
valid_permission = ["me", "team"]
|
||||||
valid_language =["Chinese", "English"]
|
valid_language = ["Chinese", "English"]
|
||||||
valid_chunk_method = ["naive","manual","qa","table","paper","book","laws","presentation","picture","one","knowledge_graph","email"]
|
valid_chunk_method = [
|
||||||
check_validation=valid(permission,valid_permission,language,valid_language,chunk_method,valid_chunk_method)
|
"naive",
|
||||||
|
"manual",
|
||||||
|
"qa",
|
||||||
|
"table",
|
||||||
|
"paper",
|
||||||
|
"book",
|
||||||
|
"laws",
|
||||||
|
"presentation",
|
||||||
|
"picture",
|
||||||
|
"one",
|
||||||
|
"knowledge_graph",
|
||||||
|
"email",
|
||||||
|
]
|
||||||
|
check_validation = valid(
|
||||||
|
permission,
|
||||||
|
valid_permission,
|
||||||
|
language,
|
||||||
|
valid_language,
|
||||||
|
chunk_method,
|
||||||
|
valid_chunk_method,
|
||||||
|
)
|
||||||
if check_validation:
|
if check_validation:
|
||||||
return check_validation
|
return check_validation
|
||||||
req["parser_config"]=get_parser_config(chunk_method,parser_config)
|
req["parser_config"] = get_parser_config(chunk_method, parser_config)
|
||||||
if "tenant_id" in req:
|
if "tenant_id" in req:
|
||||||
return get_error_data_result(
|
return get_error_data_result(retmsg="`tenant_id` must not be provided")
|
||||||
retmsg="`tenant_id` must not be provided")
|
|
||||||
if "chunk_count" in req or "document_count" in req:
|
if "chunk_count" in req or "document_count" in req:
|
||||||
return get_error_data_result(retmsg="`chunk_count` or `document_count` must not be provided")
|
|
||||||
if "name" not in req:
|
|
||||||
return get_error_data_result(
|
return get_error_data_result(
|
||||||
retmsg="`name` is not empty!")
|
retmsg="`chunk_count` or `document_count` must not be provided"
|
||||||
req['id'] = get_uuid()
|
)
|
||||||
|
if "name" not in req:
|
||||||
|
return get_error_data_result(retmsg="`name` is not empty!")
|
||||||
|
req["id"] = get_uuid()
|
||||||
req["name"] = req["name"].strip()
|
req["name"] = req["name"].strip()
|
||||||
if req["name"] == "":
|
if req["name"] == "":
|
||||||
|
return get_error_data_result(retmsg="`name` is not empty string!")
|
||||||
|
if KnowledgebaseService.query(
|
||||||
|
name=req["name"], tenant_id=tenant_id, status=StatusEnum.VALID.value
|
||||||
|
):
|
||||||
return get_error_data_result(
|
return get_error_data_result(
|
||||||
retmsg="`name` is not empty string!")
|
retmsg="Duplicated dataset name in creating dataset."
|
||||||
if KnowledgebaseService.query(name=req["name"], tenant_id=tenant_id, status=StatusEnum.VALID.value):
|
)
|
||||||
return get_error_data_result(
|
req["tenant_id"] = req["created_by"] = tenant_id
|
||||||
retmsg="Duplicated dataset name in creating dataset.")
|
|
||||||
req["tenant_id"] = req['created_by'] = tenant_id
|
|
||||||
if not req.get("embedding_model"):
|
if not req.get("embedding_model"):
|
||||||
req['embedding_model'] = t.embd_id
|
req["embedding_model"] = t.embd_id
|
||||||
else:
|
else:
|
||||||
valid_embedding_models=["BAAI/bge-large-zh-v1.5","BAAI/bge-base-en-v1.5","BAAI/bge-large-en-v1.5","BAAI/bge-small-en-v1.5",
|
valid_embedding_models = [
|
||||||
"BAAI/bge-small-zh-v1.5","jinaai/jina-embeddings-v2-base-en","jinaai/jina-embeddings-v2-small-en",
|
"BAAI/bge-large-zh-v1.5",
|
||||||
"nomic-ai/nomic-embed-text-v1.5","sentence-transformers/all-MiniLM-L6-v2","text-embedding-v2",
|
"BAAI/bge-base-en-v1.5",
|
||||||
"text-embedding-v3","maidalun1020/bce-embedding-base_v1"]
|
"BAAI/bge-large-en-v1.5",
|
||||||
embd_model=LLMService.query(llm_name=req["embedding_model"],model_type="embedding")
|
"BAAI/bge-small-en-v1.5",
|
||||||
|
"BAAI/bge-small-zh-v1.5",
|
||||||
|
"jinaai/jina-embeddings-v2-base-en",
|
||||||
|
"jinaai/jina-embeddings-v2-small-en",
|
||||||
|
"nomic-ai/nomic-embed-text-v1.5",
|
||||||
|
"sentence-transformers/all-MiniLM-L6-v2",
|
||||||
|
"text-embedding-v2",
|
||||||
|
"text-embedding-v3",
|
||||||
|
"maidalun1020/bce-embedding-base_v1",
|
||||||
|
]
|
||||||
|
embd_model = LLMService.query(
|
||||||
|
llm_name=req["embedding_model"], model_type="embedding"
|
||||||
|
)
|
||||||
if not embd_model:
|
if not embd_model:
|
||||||
return get_error_data_result(f"`embedding_model` {req.get('embedding_model')} doesn't exist")
|
return get_error_data_result(
|
||||||
|
f"`embedding_model` {req.get('embedding_model')} doesn't exist"
|
||||||
|
)
|
||||||
if embd_model:
|
if embd_model:
|
||||||
if req["embedding_model"] not in valid_embedding_models and not TenantLLMService.query(tenant_id=tenant_id,model_type="embedding", llm_name=req.get("embedding_model")):
|
if req[
|
||||||
return get_error_data_result(f"`embedding_model` {req.get('embedding_model')} doesn't exist")
|
"embedding_model"
|
||||||
|
] not in valid_embedding_models and not TenantLLMService.query(
|
||||||
|
tenant_id=tenant_id,
|
||||||
|
model_type="embedding",
|
||||||
|
llm_name=req.get("embedding_model"),
|
||||||
|
):
|
||||||
|
return get_error_data_result(
|
||||||
|
f"`embedding_model` {req.get('embedding_model')} doesn't exist"
|
||||||
|
)
|
||||||
key_mapping = {
|
key_mapping = {
|
||||||
"chunk_num": "chunk_count",
|
"chunk_num": "chunk_count",
|
||||||
"doc_num": "document_count",
|
"doc_num": "document_count",
|
||||||
"parser_id": "chunk_method",
|
"parser_id": "chunk_method",
|
||||||
"embd_id": "embedding_model"
|
"embd_id": "embedding_model",
|
||||||
|
}
|
||||||
|
mapped_keys = {
|
||||||
|
new_key: req[old_key]
|
||||||
|
for new_key, old_key in key_mapping.items()
|
||||||
|
if old_key in req
|
||||||
}
|
}
|
||||||
mapped_keys = {new_key: req[old_key] for new_key, old_key in key_mapping.items() if old_key in req}
|
|
||||||
req.update(mapped_keys)
|
req.update(mapped_keys)
|
||||||
if not KnowledgebaseService.save(**req):
|
if not KnowledgebaseService.save(**req):
|
||||||
return get_error_data_result(retmsg="Create dataset error.(Database error)")
|
return get_error_data_result(retmsg="Create dataset error.(Database error)")
|
||||||
@ -91,21 +195,53 @@ def create(tenant_id):
|
|||||||
renamed_data[new_key] = value
|
renamed_data[new_key] = value
|
||||||
return get_result(data=renamed_data)
|
return get_result(data=renamed_data)
|
||||||
|
|
||||||
@manager.route('/datasets', methods=['DELETE'])
|
|
||||||
|
@manager.route("/datasets", methods=["DELETE"])
|
||||||
@token_required
|
@token_required
|
||||||
def delete(tenant_id):
|
def delete(tenant_id):
|
||||||
|
"""
|
||||||
|
Delete datasets.
|
||||||
|
---
|
||||||
|
tags:
|
||||||
|
- Datasets
|
||||||
|
security:
|
||||||
|
- ApiKeyAuth: []
|
||||||
|
parameters:
|
||||||
|
- in: header
|
||||||
|
name: Authorization
|
||||||
|
type: string
|
||||||
|
required: true
|
||||||
|
description: Bearer token for authentication.
|
||||||
|
- in: body
|
||||||
|
name: body
|
||||||
|
description: Dataset deletion parameters.
|
||||||
|
required: true
|
||||||
|
schema:
|
||||||
|
type: object
|
||||||
|
properties:
|
||||||
|
ids:
|
||||||
|
type: array
|
||||||
|
items:
|
||||||
|
type: string
|
||||||
|
description: List of dataset IDs to delete.
|
||||||
|
responses:
|
||||||
|
200:
|
||||||
|
description: Successful operation.
|
||||||
|
schema:
|
||||||
|
type: object
|
||||||
|
"""
|
||||||
req = request.json
|
req = request.json
|
||||||
if not req:
|
if not req:
|
||||||
ids=None
|
ids = None
|
||||||
else:
|
else:
|
||||||
ids=req.get("ids")
|
ids = req.get("ids")
|
||||||
if not ids:
|
if not ids:
|
||||||
id_list = []
|
id_list = []
|
||||||
kbs=KnowledgebaseService.query(tenant_id=tenant_id)
|
kbs = KnowledgebaseService.query(tenant_id=tenant_id)
|
||||||
for kb in kbs:
|
for kb in kbs:
|
||||||
id_list.append(kb.id)
|
id_list.append(kb.id)
|
||||||
else:
|
else:
|
||||||
id_list=ids
|
id_list = ids
|
||||||
for id in id_list:
|
for id in id_list:
|
||||||
kbs = KnowledgebaseService.query(id=id, tenant_id=tenant_id)
|
kbs = KnowledgebaseService.query(id=id, tenant_id=tenant_id)
|
||||||
if not kbs:
|
if not kbs:
|
||||||
@ -113,19 +249,75 @@ def delete(tenant_id):
|
|||||||
for doc in DocumentService.query(kb_id=id):
|
for doc in DocumentService.query(kb_id=id):
|
||||||
if not DocumentService.remove_document(doc, tenant_id):
|
if not DocumentService.remove_document(doc, tenant_id):
|
||||||
return get_error_data_result(
|
return get_error_data_result(
|
||||||
retmsg="Remove document error.(Database error)")
|
retmsg="Remove document error.(Database error)"
|
||||||
|
)
|
||||||
f2d = File2DocumentService.get_by_document_id(doc.id)
|
f2d = File2DocumentService.get_by_document_id(doc.id)
|
||||||
FileService.filter_delete([File.source_type == FileSource.KNOWLEDGEBASE, File.id == f2d[0].file_id])
|
FileService.filter_delete(
|
||||||
|
[
|
||||||
|
File.source_type == FileSource.KNOWLEDGEBASE,
|
||||||
|
File.id == f2d[0].file_id,
|
||||||
|
]
|
||||||
|
)
|
||||||
File2DocumentService.delete_by_document_id(doc.id)
|
File2DocumentService.delete_by_document_id(doc.id)
|
||||||
if not KnowledgebaseService.delete_by_id(id):
|
if not KnowledgebaseService.delete_by_id(id):
|
||||||
return get_error_data_result(
|
return get_error_data_result(retmsg="Delete dataset error.(Database error)")
|
||||||
retmsg="Delete dataset error.(Database error)")
|
|
||||||
return get_result(retcode=RetCode.SUCCESS)
|
return get_result(retcode=RetCode.SUCCESS)
|
||||||
|
|
||||||
@manager.route('/datasets/<dataset_id>', methods=['PUT'])
|
|
||||||
|
@manager.route("/datasets/<dataset_id>", methods=["PUT"])
|
||||||
@token_required
|
@token_required
|
||||||
def update(tenant_id,dataset_id):
|
def update(tenant_id, dataset_id):
|
||||||
if not KnowledgebaseService.query(id=dataset_id,tenant_id=tenant_id):
|
"""
|
||||||
|
Update a dataset.
|
||||||
|
---
|
||||||
|
tags:
|
||||||
|
- Datasets
|
||||||
|
security:
|
||||||
|
- ApiKeyAuth: []
|
||||||
|
parameters:
|
||||||
|
- in: path
|
||||||
|
name: dataset_id
|
||||||
|
type: string
|
||||||
|
required: true
|
||||||
|
description: ID of the dataset to update.
|
||||||
|
- in: header
|
||||||
|
name: Authorization
|
||||||
|
type: string
|
||||||
|
required: true
|
||||||
|
description: Bearer token for authentication.
|
||||||
|
- in: body
|
||||||
|
name: body
|
||||||
|
description: Dataset update parameters.
|
||||||
|
required: true
|
||||||
|
schema:
|
||||||
|
type: object
|
||||||
|
properties:
|
||||||
|
name:
|
||||||
|
type: string
|
||||||
|
description: New name of the dataset.
|
||||||
|
permission:
|
||||||
|
type: string
|
||||||
|
enum: ['me', 'team']
|
||||||
|
description: Updated permission.
|
||||||
|
language:
|
||||||
|
type: string
|
||||||
|
enum: ['Chinese', 'English']
|
||||||
|
description: Updated language.
|
||||||
|
chunk_method:
|
||||||
|
type: string
|
||||||
|
enum: ["naive", "manual", "qa", "table", "paper", "book", "laws",
|
||||||
|
"presentation", "picture", "one", "knowledge_graph", "email"]
|
||||||
|
description: Updated chunking method.
|
||||||
|
parser_config:
|
||||||
|
type: object
|
||||||
|
description: Updated parser configuration.
|
||||||
|
responses:
|
||||||
|
200:
|
||||||
|
description: Successful operation.
|
||||||
|
schema:
|
||||||
|
type: object
|
||||||
|
"""
|
||||||
|
if not KnowledgebaseService.query(id=dataset_id, tenant_id=tenant_id):
|
||||||
return get_error_data_result(retmsg="You don't own the dataset")
|
return get_error_data_result(retmsg="You don't own the dataset")
|
||||||
req = request.json
|
req = request.json
|
||||||
e, t = TenantService.get_by_id(tenant_id)
|
e, t = TenantService.get_by_id(tenant_id)
|
||||||
@ -138,91 +330,202 @@ def update(tenant_id,dataset_id):
|
|||||||
parser_config = req.get("parser_config")
|
parser_config = req.get("parser_config")
|
||||||
valid_permission = ["me", "team"]
|
valid_permission = ["me", "team"]
|
||||||
valid_language = ["Chinese", "English"]
|
valid_language = ["Chinese", "English"]
|
||||||
valid_chunk_method = ["naive", "manual", "qa", "table", "paper", "book", "laws", "presentation", "picture", "one",
|
valid_chunk_method = [
|
||||||
"knowledge_graph", "email"]
|
"naive",
|
||||||
check_validation = valid(permission, valid_permission, language, valid_language, chunk_method, valid_chunk_method)
|
"manual",
|
||||||
|
"qa",
|
||||||
|
"table",
|
||||||
|
"paper",
|
||||||
|
"book",
|
||||||
|
"laws",
|
||||||
|
"presentation",
|
||||||
|
"picture",
|
||||||
|
"one",
|
||||||
|
"knowledge_graph",
|
||||||
|
"email",
|
||||||
|
]
|
||||||
|
check_validation = valid(
|
||||||
|
permission,
|
||||||
|
valid_permission,
|
||||||
|
language,
|
||||||
|
valid_language,
|
||||||
|
chunk_method,
|
||||||
|
valid_chunk_method,
|
||||||
|
)
|
||||||
if check_validation:
|
if check_validation:
|
||||||
return check_validation
|
return check_validation
|
||||||
if "tenant_id" in req:
|
if "tenant_id" in req:
|
||||||
if req["tenant_id"] != tenant_id:
|
if req["tenant_id"] != tenant_id:
|
||||||
return get_error_data_result(
|
return get_error_data_result(retmsg="Can't change `tenant_id`.")
|
||||||
retmsg="Can't change `tenant_id`.")
|
|
||||||
e, kb = KnowledgebaseService.get_by_id(dataset_id)
|
e, kb = KnowledgebaseService.get_by_id(dataset_id)
|
||||||
if "parser_config" in req:
|
if "parser_config" in req:
|
||||||
temp_dict=kb.parser_config
|
temp_dict = kb.parser_config
|
||||||
temp_dict.update(req["parser_config"])
|
temp_dict.update(req["parser_config"])
|
||||||
req["parser_config"] = temp_dict
|
req["parser_config"] = temp_dict
|
||||||
if "chunk_count" in req:
|
if "chunk_count" in req:
|
||||||
if req["chunk_count"] != kb.chunk_num:
|
if req["chunk_count"] != kb.chunk_num:
|
||||||
return get_error_data_result(
|
return get_error_data_result(retmsg="Can't change `chunk_count`.")
|
||||||
retmsg="Can't change `chunk_count`.")
|
|
||||||
req.pop("chunk_count")
|
req.pop("chunk_count")
|
||||||
if "document_count" in req:
|
if "document_count" in req:
|
||||||
if req['document_count'] != kb.doc_num:
|
if req["document_count"] != kb.doc_num:
|
||||||
return get_error_data_result(
|
return get_error_data_result(retmsg="Can't change `document_count`.")
|
||||||
retmsg="Can't change `document_count`.")
|
|
||||||
req.pop("document_count")
|
req.pop("document_count")
|
||||||
if "chunk_method" in req:
|
if "chunk_method" in req:
|
||||||
if kb.chunk_num != 0 and req['chunk_method'] != kb.parser_id:
|
if kb.chunk_num != 0 and req["chunk_method"] != kb.parser_id:
|
||||||
return get_error_data_result(
|
return get_error_data_result(
|
||||||
retmsg="If `chunk_count` is not 0, `chunk_method` is not changeable.")
|
retmsg="If `chunk_count` is not 0, `chunk_method` is not changeable."
|
||||||
req['parser_id'] = req.pop('chunk_method')
|
)
|
||||||
if req['parser_id'] != kb.parser_id:
|
req["parser_id"] = req.pop("chunk_method")
|
||||||
|
if req["parser_id"] != kb.parser_id:
|
||||||
if not req.get("parser_config"):
|
if not req.get("parser_config"):
|
||||||
req["parser_config"] = get_parser_config(chunk_method, parser_config)
|
req["parser_config"] = get_parser_config(chunk_method, parser_config)
|
||||||
if "embedding_model" in req:
|
if "embedding_model" in req:
|
||||||
if kb.chunk_num != 0 and req['embedding_model'] != kb.embd_id:
|
if kb.chunk_num != 0 and req["embedding_model"] != kb.embd_id:
|
||||||
return get_error_data_result(
|
return get_error_data_result(
|
||||||
retmsg="If `chunk_count` is not 0, `embedding_model` is not changeable.")
|
retmsg="If `chunk_count` is not 0, `embedding_model` is not changeable."
|
||||||
|
)
|
||||||
if not req.get("embedding_model"):
|
if not req.get("embedding_model"):
|
||||||
return get_error_data_result("`embedding_model` can't be empty")
|
return get_error_data_result("`embedding_model` can't be empty")
|
||||||
valid_embedding_models=["BAAI/bge-large-zh-v1.5","BAAI/bge-base-en-v1.5","BAAI/bge-large-en-v1.5","BAAI/bge-small-en-v1.5",
|
valid_embedding_models = [
|
||||||
"BAAI/bge-small-zh-v1.5","jinaai/jina-embeddings-v2-base-en","jinaai/jina-embeddings-v2-small-en",
|
"BAAI/bge-large-zh-v1.5",
|
||||||
"nomic-ai/nomic-embed-text-v1.5","sentence-transformers/all-MiniLM-L6-v2","text-embedding-v2",
|
"BAAI/bge-base-en-v1.5",
|
||||||
"text-embedding-v3","maidalun1020/bce-embedding-base_v1"]
|
"BAAI/bge-large-en-v1.5",
|
||||||
embd_model=LLMService.query(llm_name=req["embedding_model"],model_type="embedding")
|
"BAAI/bge-small-en-v1.5",
|
||||||
|
"BAAI/bge-small-zh-v1.5",
|
||||||
|
"jinaai/jina-embeddings-v2-base-en",
|
||||||
|
"jinaai/jina-embeddings-v2-small-en",
|
||||||
|
"nomic-ai/nomic-embed-text-v1.5",
|
||||||
|
"sentence-transformers/all-MiniLM-L6-v2",
|
||||||
|
"text-embedding-v2",
|
||||||
|
"text-embedding-v3",
|
||||||
|
"maidalun1020/bce-embedding-base_v1",
|
||||||
|
]
|
||||||
|
embd_model = LLMService.query(
|
||||||
|
llm_name=req["embedding_model"], model_type="embedding"
|
||||||
|
)
|
||||||
if not embd_model:
|
if not embd_model:
|
||||||
return get_error_data_result(f"`embedding_model` {req.get('embedding_model')} doesn't exist")
|
return get_error_data_result(
|
||||||
|
f"`embedding_model` {req.get('embedding_model')} doesn't exist"
|
||||||
|
)
|
||||||
if embd_model:
|
if embd_model:
|
||||||
if req["embedding_model"] not in valid_embedding_models and not TenantLLMService.query(tenant_id=tenant_id,model_type="embedding", llm_name=req.get("embedding_model")):
|
if req[
|
||||||
return get_error_data_result(f"`embedding_model` {req.get('embedding_model')} doesn't exist")
|
"embedding_model"
|
||||||
req['embd_id'] = req.pop('embedding_model')
|
] not in valid_embedding_models and not TenantLLMService.query(
|
||||||
|
tenant_id=tenant_id,
|
||||||
|
model_type="embedding",
|
||||||
|
llm_name=req.get("embedding_model"),
|
||||||
|
):
|
||||||
|
return get_error_data_result(
|
||||||
|
f"`embedding_model` {req.get('embedding_model')} doesn't exist"
|
||||||
|
)
|
||||||
|
req["embd_id"] = req.pop("embedding_model")
|
||||||
if "name" in req:
|
if "name" in req:
|
||||||
req["name"] = req["name"].strip()
|
req["name"] = req["name"].strip()
|
||||||
if req["name"].lower() != kb.name.lower() \
|
if (
|
||||||
and len(KnowledgebaseService.query(name=req["name"], tenant_id=tenant_id,
|
req["name"].lower() != kb.name.lower()
|
||||||
status=StatusEnum.VALID.value)) > 0:
|
and len(
|
||||||
|
KnowledgebaseService.query(
|
||||||
|
name=req["name"], tenant_id=tenant_id, status=StatusEnum.VALID.value
|
||||||
|
)
|
||||||
|
)
|
||||||
|
> 0
|
||||||
|
):
|
||||||
return get_error_data_result(
|
return get_error_data_result(
|
||||||
retmsg="Duplicated dataset name in updating dataset.")
|
retmsg="Duplicated dataset name in updating dataset."
|
||||||
|
)
|
||||||
if not KnowledgebaseService.update_by_id(kb.id, req):
|
if not KnowledgebaseService.update_by_id(kb.id, req):
|
||||||
return get_error_data_result(retmsg="Update dataset error.(Database error)")
|
return get_error_data_result(retmsg="Update dataset error.(Database error)")
|
||||||
return get_result(retcode=RetCode.SUCCESS)
|
return get_result(retcode=RetCode.SUCCESS)
|
||||||
|
|
||||||
@manager.route('/datasets', methods=['GET'])
|
|
||||||
|
@manager.route("/datasets", methods=["GET"])
|
||||||
@token_required
|
@token_required
|
||||||
def list(tenant_id):
|
def list(tenant_id):
|
||||||
|
"""
|
||||||
|
List datasets.
|
||||||
|
---
|
||||||
|
tags:
|
||||||
|
- Datasets
|
||||||
|
security:
|
||||||
|
- ApiKeyAuth: []
|
||||||
|
parameters:
|
||||||
|
- in: query
|
||||||
|
name: id
|
||||||
|
type: string
|
||||||
|
required: false
|
||||||
|
description: Dataset ID to filter.
|
||||||
|
- in: query
|
||||||
|
name: name
|
||||||
|
type: string
|
||||||
|
required: false
|
||||||
|
description: Dataset name to filter.
|
||||||
|
- in: query
|
||||||
|
name: page
|
||||||
|
type: integer
|
||||||
|
required: false
|
||||||
|
default: 1
|
||||||
|
description: Page number.
|
||||||
|
- in: query
|
||||||
|
name: page_size
|
||||||
|
type: integer
|
||||||
|
required: false
|
||||||
|
default: 1024
|
||||||
|
description: Number of items per page.
|
||||||
|
- in: query
|
||||||
|
name: orderby
|
||||||
|
type: string
|
||||||
|
required: false
|
||||||
|
default: "create_time"
|
||||||
|
description: Field to order by.
|
||||||
|
- in: query
|
||||||
|
name: desc
|
||||||
|
type: boolean
|
||||||
|
required: false
|
||||||
|
default: true
|
||||||
|
description: Order in descending.
|
||||||
|
- in: header
|
||||||
|
name: Authorization
|
||||||
|
type: string
|
||||||
|
required: true
|
||||||
|
description: Bearer token for authentication.
|
||||||
|
responses:
|
||||||
|
200:
|
||||||
|
description: Successful operation.
|
||||||
|
schema:
|
||||||
|
type: array
|
||||||
|
items:
|
||||||
|
type: object
|
||||||
|
"""
|
||||||
id = request.args.get("id")
|
id = request.args.get("id")
|
||||||
name = request.args.get("name")
|
name = request.args.get("name")
|
||||||
kbs = KnowledgebaseService.query(id=id,name=name,status=1)
|
kbs = KnowledgebaseService.query(id=id, name=name, status=1)
|
||||||
if not kbs:
|
if not kbs:
|
||||||
return get_error_data_result(retmsg="The dataset doesn't exist")
|
return get_error_data_result(retmsg="The dataset doesn't exist")
|
||||||
page_number = int(request.args.get("page", 1))
|
page_number = int(request.args.get("page", 1))
|
||||||
items_per_page = int(request.args.get("page_size", 1024))
|
items_per_page = int(request.args.get("page_size", 1024))
|
||||||
orderby = request.args.get("orderby", "create_time")
|
orderby = request.args.get("orderby", "create_time")
|
||||||
if request.args.get("desc") == "False" or request.args.get("desc") == "false" :
|
if request.args.get("desc") == "False" or request.args.get("desc") == "false":
|
||||||
desc = False
|
desc = False
|
||||||
else:
|
else:
|
||||||
desc = True
|
desc = True
|
||||||
tenants = TenantService.get_joined_tenants_by_user_id(tenant_id)
|
tenants = TenantService.get_joined_tenants_by_user_id(tenant_id)
|
||||||
kbs = KnowledgebaseService.get_list(
|
kbs = KnowledgebaseService.get_list(
|
||||||
[m["tenant_id"] for m in tenants], tenant_id, page_number, items_per_page, orderby, desc, id, name)
|
[m["tenant_id"] for m in tenants],
|
||||||
|
tenant_id,
|
||||||
|
page_number,
|
||||||
|
items_per_page,
|
||||||
|
orderby,
|
||||||
|
desc,
|
||||||
|
id,
|
||||||
|
name,
|
||||||
|
)
|
||||||
renamed_list = []
|
renamed_list = []
|
||||||
for kb in kbs:
|
for kb in kbs:
|
||||||
key_mapping = {
|
key_mapping = {
|
||||||
"chunk_num": "chunk_count",
|
"chunk_num": "chunk_count",
|
||||||
"doc_num": "document_count",
|
"doc_num": "document_count",
|
||||||
"parser_id": "chunk_method",
|
"parser_id": "chunk_method",
|
||||||
"embd_id": "embedding_model"
|
"embd_id": "embedding_model",
|
||||||
}
|
}
|
||||||
renamed_data = {}
|
renamed_data = {}
|
||||||
for key, value in kb.items():
|
for key, value in kb.items():
|
||||||
|
1042
api/apps/sdk/doc.py
1042
api/apps/sdk/doc.py
File diff suppressed because it is too large
Load Diff
@ -24,8 +24,14 @@ from api.db.services.knowledgebase_service import KnowledgebaseService
|
|||||||
from api.db.services.user_service import UserTenantService
|
from api.db.services.user_service import UserTenantService
|
||||||
from api.settings import DATABASE_TYPE
|
from api.settings import DATABASE_TYPE
|
||||||
from api.utils import current_timestamp, datetime_format
|
from api.utils import current_timestamp, datetime_format
|
||||||
from api.utils.api_utils import get_json_result, get_data_error_result, server_error_response, \
|
from api.utils.api_utils import (
|
||||||
generate_confirmation_token, request, validate_request
|
get_json_result,
|
||||||
|
get_data_error_result,
|
||||||
|
server_error_response,
|
||||||
|
generate_confirmation_token,
|
||||||
|
request,
|
||||||
|
validate_request,
|
||||||
|
)
|
||||||
from api.versions import get_rag_version
|
from api.versions import get_rag_version
|
||||||
from rag.utils.es_conn import ELASTICSEARCH
|
from rag.utils.es_conn import ELASTICSEARCH
|
||||||
from rag.utils.storage_factory import STORAGE_IMPL, STORAGE_IMPL_TYPE
|
from rag.utils.storage_factory import STORAGE_IMPL, STORAGE_IMPL_TYPE
|
||||||
@ -34,44 +40,121 @@ from timeit import default_timer as timer
|
|||||||
from rag.utils.redis_conn import REDIS_CONN
|
from rag.utils.redis_conn import REDIS_CONN
|
||||||
|
|
||||||
|
|
||||||
@manager.route('/version', methods=['GET'])
|
@manager.route("/version", methods=["GET"])
|
||||||
@login_required
|
@login_required
|
||||||
def version():
|
def version():
|
||||||
|
"""
|
||||||
|
Get the current version of the application.
|
||||||
|
---
|
||||||
|
tags:
|
||||||
|
- System
|
||||||
|
security:
|
||||||
|
- ApiKeyAuth: []
|
||||||
|
responses:
|
||||||
|
200:
|
||||||
|
description: Version retrieved successfully.
|
||||||
|
schema:
|
||||||
|
type: object
|
||||||
|
properties:
|
||||||
|
version:
|
||||||
|
type: string
|
||||||
|
description: Version number.
|
||||||
|
"""
|
||||||
return get_json_result(data=get_rag_version())
|
return get_json_result(data=get_rag_version())
|
||||||
|
|
||||||
|
|
||||||
@manager.route('/status', methods=['GET'])
|
@manager.route("/status", methods=["GET"])
|
||||||
@login_required
|
@login_required
|
||||||
def status():
|
def status():
|
||||||
|
"""
|
||||||
|
Get the system status.
|
||||||
|
---
|
||||||
|
tags:
|
||||||
|
- System
|
||||||
|
security:
|
||||||
|
- ApiKeyAuth: []
|
||||||
|
responses:
|
||||||
|
200:
|
||||||
|
description: System is operational.
|
||||||
|
schema:
|
||||||
|
type: object
|
||||||
|
properties:
|
||||||
|
es:
|
||||||
|
type: object
|
||||||
|
description: Elasticsearch status.
|
||||||
|
storage:
|
||||||
|
type: object
|
||||||
|
description: Storage status.
|
||||||
|
database:
|
||||||
|
type: object
|
||||||
|
description: Database status.
|
||||||
|
503:
|
||||||
|
description: Service unavailable.
|
||||||
|
schema:
|
||||||
|
type: object
|
||||||
|
properties:
|
||||||
|
error:
|
||||||
|
type: string
|
||||||
|
description: Error message.
|
||||||
|
"""
|
||||||
res = {}
|
res = {}
|
||||||
st = timer()
|
st = timer()
|
||||||
try:
|
try:
|
||||||
res["es"] = ELASTICSEARCH.health()
|
res["es"] = ELASTICSEARCH.health()
|
||||||
res["es"]["elapsed"] = "{:.1f}".format((timer() - st)*1000.)
|
res["es"]["elapsed"] = "{:.1f}".format((timer() - st) * 1000.0)
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
res["es"] = {"status": "red", "elapsed": "{:.1f}".format((timer() - st)*1000.), "error": str(e)}
|
res["es"] = {
|
||||||
|
"status": "red",
|
||||||
|
"elapsed": "{:.1f}".format((timer() - st) * 1000.0),
|
||||||
|
"error": str(e),
|
||||||
|
}
|
||||||
|
|
||||||
st = timer()
|
st = timer()
|
||||||
try:
|
try:
|
||||||
STORAGE_IMPL.health()
|
STORAGE_IMPL.health()
|
||||||
res["storage"] = {"storage": STORAGE_IMPL_TYPE.lower(), "status": "green", "elapsed": "{:.1f}".format((timer() - st)*1000.)}
|
res["storage"] = {
|
||||||
|
"storage": STORAGE_IMPL_TYPE.lower(),
|
||||||
|
"status": "green",
|
||||||
|
"elapsed": "{:.1f}".format((timer() - st) * 1000.0),
|
||||||
|
}
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
res["storage"] = {"storage": STORAGE_IMPL_TYPE.lower(), "status": "red", "elapsed": "{:.1f}".format((timer() - st)*1000.), "error": str(e)}
|
res["storage"] = {
|
||||||
|
"storage": STORAGE_IMPL_TYPE.lower(),
|
||||||
|
"status": "red",
|
||||||
|
"elapsed": "{:.1f}".format((timer() - st) * 1000.0),
|
||||||
|
"error": str(e),
|
||||||
|
}
|
||||||
|
|
||||||
st = timer()
|
st = timer()
|
||||||
try:
|
try:
|
||||||
KnowledgebaseService.get_by_id("x")
|
KnowledgebaseService.get_by_id("x")
|
||||||
res["database"] = {"database": DATABASE_TYPE.lower(), "status": "green", "elapsed": "{:.1f}".format((timer() - st)*1000.)}
|
res["database"] = {
|
||||||
|
"database": DATABASE_TYPE.lower(),
|
||||||
|
"status": "green",
|
||||||
|
"elapsed": "{:.1f}".format((timer() - st) * 1000.0),
|
||||||
|
}
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
res["database"] = {"database": DATABASE_TYPE.lower(), "status": "red", "elapsed": "{:.1f}".format((timer() - st)*1000.), "error": str(e)}
|
res["database"] = {
|
||||||
|
"database": DATABASE_TYPE.lower(),
|
||||||
|
"status": "red",
|
||||||
|
"elapsed": "{:.1f}".format((timer() - st) * 1000.0),
|
||||||
|
"error": str(e),
|
||||||
|
}
|
||||||
|
|
||||||
st = timer()
|
st = timer()
|
||||||
try:
|
try:
|
||||||
if not REDIS_CONN.health():
|
if not REDIS_CONN.health():
|
||||||
raise Exception("Lost connection!")
|
raise Exception("Lost connection!")
|
||||||
res["redis"] = {"status": "green", "elapsed": "{:.1f}".format((timer() - st)*1000.)}
|
res["redis"] = {
|
||||||
|
"status": "green",
|
||||||
|
"elapsed": "{:.1f}".format((timer() - st) * 1000.0),
|
||||||
|
}
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
res["redis"] = {"status": "red", "elapsed": "{:.1f}".format((timer() - st)*1000.), "error": str(e)}
|
res["redis"] = {
|
||||||
|
"status": "red",
|
||||||
|
"elapsed": "{:.1f}".format((timer() - st) * 1000.0),
|
||||||
|
"error": str(e),
|
||||||
|
}
|
||||||
|
|
||||||
try:
|
try:
|
||||||
v = REDIS_CONN.get("TASKEXE")
|
v = REDIS_CONN.get("TASKEXE")
|
||||||
@ -84,10 +167,12 @@ def status():
|
|||||||
if len(arr) == 1:
|
if len(arr) == 1:
|
||||||
obj[id] = [0]
|
obj[id] = [0]
|
||||||
else:
|
else:
|
||||||
obj[id] = [arr[i+1]-arr[i] for i in range(len(arr)-1)]
|
obj[id] = [arr[i + 1] - arr[i] for i in range(len(arr) - 1)]
|
||||||
elapsed = max(obj[id])
|
elapsed = max(obj[id])
|
||||||
if elapsed > 50: color = "yellow"
|
if elapsed > 50:
|
||||||
if elapsed > 120: color = "red"
|
color = "yellow"
|
||||||
|
if elapsed > 120:
|
||||||
|
color = "red"
|
||||||
res["task_executor"] = {"status": color, "elapsed": obj}
|
res["task_executor"] = {"status": color, "elapsed": obj}
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
res["task_executor"] = {"status": "red", "error": str(e)}
|
res["task_executor"] = {"status": "red", "error": str(e)}
|
||||||
@ -95,21 +180,46 @@ def status():
|
|||||||
return get_json_result(data=res)
|
return get_json_result(data=res)
|
||||||
|
|
||||||
|
|
||||||
@manager.route('/new_token', methods=['POST'])
|
@manager.route("/new_token", methods=["POST"])
|
||||||
@login_required
|
@login_required
|
||||||
def new_token():
|
def new_token():
|
||||||
|
"""
|
||||||
|
Generate a new API token.
|
||||||
|
---
|
||||||
|
tags:
|
||||||
|
- API Tokens
|
||||||
|
security:
|
||||||
|
- ApiKeyAuth: []
|
||||||
|
parameters:
|
||||||
|
- in: query
|
||||||
|
name: name
|
||||||
|
type: string
|
||||||
|
required: false
|
||||||
|
description: Name of the token.
|
||||||
|
responses:
|
||||||
|
200:
|
||||||
|
description: Token generated successfully.
|
||||||
|
schema:
|
||||||
|
type: object
|
||||||
|
properties:
|
||||||
|
token:
|
||||||
|
type: string
|
||||||
|
description: The generated API token.
|
||||||
|
"""
|
||||||
try:
|
try:
|
||||||
tenants = UserTenantService.query(user_id=current_user.id)
|
tenants = UserTenantService.query(user_id=current_user.id)
|
||||||
if not tenants:
|
if not tenants:
|
||||||
return get_data_error_result(retmsg="Tenant not found!")
|
return get_data_error_result(retmsg="Tenant not found!")
|
||||||
|
|
||||||
tenant_id = tenants[0].tenant_id
|
tenant_id = tenants[0].tenant_id
|
||||||
obj = {"tenant_id": tenant_id, "token": generate_confirmation_token(tenant_id),
|
obj = {
|
||||||
"create_time": current_timestamp(),
|
"tenant_id": tenant_id,
|
||||||
"create_date": datetime_format(datetime.now()),
|
"token": generate_confirmation_token(tenant_id),
|
||||||
"update_time": None,
|
"create_time": current_timestamp(),
|
||||||
"update_date": None
|
"create_date": datetime_format(datetime.now()),
|
||||||
}
|
"update_time": None,
|
||||||
|
"update_date": None,
|
||||||
|
}
|
||||||
|
|
||||||
if not APITokenService.save(**obj):
|
if not APITokenService.save(**obj):
|
||||||
return get_data_error_result(retmsg="Fail to new a dialog!")
|
return get_data_error_result(retmsg="Fail to new a dialog!")
|
||||||
@ -119,9 +229,37 @@ def new_token():
|
|||||||
return server_error_response(e)
|
return server_error_response(e)
|
||||||
|
|
||||||
|
|
||||||
@manager.route('/token_list', methods=['GET'])
|
@manager.route("/token_list", methods=["GET"])
|
||||||
@login_required
|
@login_required
|
||||||
def token_list():
|
def token_list():
|
||||||
|
"""
|
||||||
|
List all API tokens for the current user.
|
||||||
|
---
|
||||||
|
tags:
|
||||||
|
- API Tokens
|
||||||
|
security:
|
||||||
|
- ApiKeyAuth: []
|
||||||
|
responses:
|
||||||
|
200:
|
||||||
|
description: List of API tokens.
|
||||||
|
schema:
|
||||||
|
type: object
|
||||||
|
properties:
|
||||||
|
tokens:
|
||||||
|
type: array
|
||||||
|
items:
|
||||||
|
type: object
|
||||||
|
properties:
|
||||||
|
token:
|
||||||
|
type: string
|
||||||
|
description: The API token.
|
||||||
|
name:
|
||||||
|
type: string
|
||||||
|
description: Name of the token.
|
||||||
|
create_time:
|
||||||
|
type: string
|
||||||
|
description: Token creation time.
|
||||||
|
"""
|
||||||
try:
|
try:
|
||||||
tenants = UserTenantService.query(user_id=current_user.id)
|
tenants = UserTenantService.query(user_id=current_user.id)
|
||||||
if not tenants:
|
if not tenants:
|
||||||
@ -133,9 +271,33 @@ def token_list():
|
|||||||
return server_error_response(e)
|
return server_error_response(e)
|
||||||
|
|
||||||
|
|
||||||
@manager.route('/token/<token>', methods=['DELETE'])
|
@manager.route("/token/<token>", methods=["DELETE"])
|
||||||
@login_required
|
@login_required
|
||||||
def rm(token):
|
def rm(token):
|
||||||
|
"""
|
||||||
|
Remove an API token.
|
||||||
|
---
|
||||||
|
tags:
|
||||||
|
- API Tokens
|
||||||
|
security:
|
||||||
|
- ApiKeyAuth: []
|
||||||
|
parameters:
|
||||||
|
- in: path
|
||||||
|
name: token
|
||||||
|
type: string
|
||||||
|
required: true
|
||||||
|
description: The API token to remove.
|
||||||
|
responses:
|
||||||
|
200:
|
||||||
|
description: Token removed successfully.
|
||||||
|
schema:
|
||||||
|
type: object
|
||||||
|
properties:
|
||||||
|
success:
|
||||||
|
type: boolean
|
||||||
|
description: Deletion status.
|
||||||
|
"""
|
||||||
APITokenService.filter_delete(
|
APITokenService.filter_delete(
|
||||||
[APIToken.tenant_id == current_user.id, APIToken.token == token])
|
[APIToken.tenant_id == current_user.id, APIToken.token == token]
|
||||||
return get_json_result(data=True)
|
)
|
||||||
|
return get_json_result(data=True)
|
||||||
|
@ -23,65 +23,141 @@ from flask_login import login_required, current_user, login_user, logout_user
|
|||||||
|
|
||||||
from api.db.db_models import TenantLLM
|
from api.db.db_models import TenantLLM
|
||||||
from api.db.services.llm_service import TenantLLMService, LLMService
|
from api.db.services.llm_service import TenantLLMService, LLMService
|
||||||
from api.utils.api_utils import server_error_response, validate_request, get_data_error_result
|
from api.utils.api_utils import (
|
||||||
from api.utils import get_uuid, get_format_time, decrypt, download_img, current_timestamp, datetime_format
|
server_error_response,
|
||||||
|
validate_request,
|
||||||
|
get_data_error_result,
|
||||||
|
)
|
||||||
|
from api.utils import (
|
||||||
|
get_uuid,
|
||||||
|
get_format_time,
|
||||||
|
decrypt,
|
||||||
|
download_img,
|
||||||
|
current_timestamp,
|
||||||
|
datetime_format,
|
||||||
|
)
|
||||||
from api.db import UserTenantRole, LLMType, FileType
|
from api.db import UserTenantRole, LLMType, FileType
|
||||||
from api.settings import RetCode, GITHUB_OAUTH, FEISHU_OAUTH, CHAT_MDL, EMBEDDING_MDL, ASR_MDL, IMAGE2TEXT_MDL, PARSERS, \
|
from api.settings import (
|
||||||
API_KEY, \
|
RetCode,
|
||||||
LLM_FACTORY, LLM_BASE_URL, RERANK_MDL
|
GITHUB_OAUTH,
|
||||||
|
FEISHU_OAUTH,
|
||||||
|
CHAT_MDL,
|
||||||
|
EMBEDDING_MDL,
|
||||||
|
ASR_MDL,
|
||||||
|
IMAGE2TEXT_MDL,
|
||||||
|
PARSERS,
|
||||||
|
API_KEY,
|
||||||
|
LLM_FACTORY,
|
||||||
|
LLM_BASE_URL,
|
||||||
|
RERANK_MDL,
|
||||||
|
)
|
||||||
from api.db.services.user_service import UserService, TenantService, UserTenantService
|
from api.db.services.user_service import UserService, TenantService, UserTenantService
|
||||||
from api.db.services.file_service import FileService
|
from api.db.services.file_service import FileService
|
||||||
from api.settings import stat_logger
|
from api.settings import stat_logger
|
||||||
from api.utils.api_utils import get_json_result, construct_response
|
from api.utils.api_utils import get_json_result, construct_response
|
||||||
|
|
||||||
|
|
||||||
@manager.route('/login', methods=['POST', 'GET'])
|
@manager.route("/login", methods=["POST", "GET"])
|
||||||
def login():
|
def login():
|
||||||
|
"""
|
||||||
|
User login endpoint.
|
||||||
|
---
|
||||||
|
tags:
|
||||||
|
- User
|
||||||
|
parameters:
|
||||||
|
- in: body
|
||||||
|
name: body
|
||||||
|
description: Login credentials.
|
||||||
|
required: true
|
||||||
|
schema:
|
||||||
|
type: object
|
||||||
|
properties:
|
||||||
|
email:
|
||||||
|
type: string
|
||||||
|
description: User email.
|
||||||
|
password:
|
||||||
|
type: string
|
||||||
|
description: User password.
|
||||||
|
responses:
|
||||||
|
200:
|
||||||
|
description: Login successful.
|
||||||
|
schema:
|
||||||
|
type: object
|
||||||
|
401:
|
||||||
|
description: Authentication failed.
|
||||||
|
schema:
|
||||||
|
type: object
|
||||||
|
"""
|
||||||
if not request.json:
|
if not request.json:
|
||||||
return get_json_result(data=False,
|
return get_json_result(
|
||||||
retcode=RetCode.AUTHENTICATION_ERROR,
|
data=False, retcode=RetCode.AUTHENTICATION_ERROR, retmsg="Unauthorized!"
|
||||||
retmsg='Unauthorized!')
|
)
|
||||||
|
|
||||||
email = request.json.get('email', "")
|
email = request.json.get("email", "")
|
||||||
users = UserService.query(email=email)
|
users = UserService.query(email=email)
|
||||||
if not users:
|
if not users:
|
||||||
return get_json_result(data=False,
|
return get_json_result(
|
||||||
retcode=RetCode.AUTHENTICATION_ERROR,
|
data=False,
|
||||||
retmsg=f'Email: {email} is not registered!')
|
retcode=RetCode.AUTHENTICATION_ERROR,
|
||||||
|
retmsg=f"Email: {email} is not registered!",
|
||||||
|
)
|
||||||
|
|
||||||
password = request.json.get('password')
|
password = request.json.get("password")
|
||||||
try:
|
try:
|
||||||
password = decrypt(password)
|
password = decrypt(password)
|
||||||
except BaseException:
|
except BaseException:
|
||||||
return get_json_result(data=False,
|
return get_json_result(
|
||||||
retcode=RetCode.SERVER_ERROR,
|
data=False, retcode=RetCode.SERVER_ERROR, retmsg="Fail to crypt password"
|
||||||
retmsg='Fail to crypt password')
|
)
|
||||||
|
|
||||||
user = UserService.query_user(email, password)
|
user = UserService.query_user(email, password)
|
||||||
if user:
|
if user:
|
||||||
response_data = user.to_json()
|
response_data = user.to_json()
|
||||||
user.access_token = get_uuid()
|
user.access_token = get_uuid()
|
||||||
login_user(user)
|
login_user(user)
|
||||||
user.update_time = current_timestamp(),
|
user.update_time = (current_timestamp(),)
|
||||||
user.update_date = datetime_format(datetime.now()),
|
user.update_date = (datetime_format(datetime.now()),)
|
||||||
user.save()
|
user.save()
|
||||||
msg = "Welcome back!"
|
msg = "Welcome back!"
|
||||||
return construct_response(data=response_data, auth=user.get_id(), retmsg=msg)
|
return construct_response(data=response_data, auth=user.get_id(), retmsg=msg)
|
||||||
else:
|
else:
|
||||||
return get_json_result(data=False,
|
return get_json_result(
|
||||||
retcode=RetCode.AUTHENTICATION_ERROR,
|
data=False,
|
||||||
retmsg='Email and password do not match!')
|
retcode=RetCode.AUTHENTICATION_ERROR,
|
||||||
|
retmsg="Email and password do not match!",
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
@manager.route('/github_callback', methods=['GET'])
|
@manager.route("/github_callback", methods=["GET"])
|
||||||
def github_callback():
|
def github_callback():
|
||||||
|
"""
|
||||||
|
GitHub OAuth callback endpoint.
|
||||||
|
---
|
||||||
|
tags:
|
||||||
|
- OAuth
|
||||||
|
parameters:
|
||||||
|
- in: query
|
||||||
|
name: code
|
||||||
|
type: string
|
||||||
|
required: true
|
||||||
|
description: Authorization code from GitHub.
|
||||||
|
responses:
|
||||||
|
200:
|
||||||
|
description: Authentication successful.
|
||||||
|
schema:
|
||||||
|
type: object
|
||||||
|
"""
|
||||||
import requests
|
import requests
|
||||||
res = requests.post(GITHUB_OAUTH.get("url"),
|
|
||||||
data={
|
res = requests.post(
|
||||||
"client_id": GITHUB_OAUTH.get("client_id"),
|
GITHUB_OAUTH.get("url"),
|
||||||
"client_secret": GITHUB_OAUTH.get("secret_key"),
|
data={
|
||||||
"code": request.args.get('code')},
|
"client_id": GITHUB_OAUTH.get("client_id"),
|
||||||
headers={"Accept": "application/json"})
|
"client_secret": GITHUB_OAUTH.get("secret_key"),
|
||||||
|
"code": request.args.get("code"),
|
||||||
|
},
|
||||||
|
headers={"Accept": "application/json"},
|
||||||
|
)
|
||||||
res = res.json()
|
res = res.json()
|
||||||
if "error" in res:
|
if "error" in res:
|
||||||
return redirect("/?error=%s" % res["error_description"])
|
return redirect("/?error=%s" % res["error_description"])
|
||||||
@ -103,19 +179,22 @@ def github_callback():
|
|||||||
except Exception as e:
|
except Exception as e:
|
||||||
stat_logger.exception(e)
|
stat_logger.exception(e)
|
||||||
avatar = ""
|
avatar = ""
|
||||||
users = user_register(user_id, {
|
users = user_register(
|
||||||
"access_token": session["access_token"],
|
user_id,
|
||||||
"email": email_address,
|
{
|
||||||
"avatar": avatar,
|
"access_token": session["access_token"],
|
||||||
"nickname": user_info["login"],
|
"email": email_address,
|
||||||
"login_channel": "github",
|
"avatar": avatar,
|
||||||
"last_login_time": get_format_time(),
|
"nickname": user_info["login"],
|
||||||
"is_superuser": False,
|
"login_channel": "github",
|
||||||
})
|
"last_login_time": get_format_time(),
|
||||||
|
"is_superuser": False,
|
||||||
|
},
|
||||||
|
)
|
||||||
if not users:
|
if not users:
|
||||||
raise Exception(f'Fail to register {email_address}.')
|
raise Exception(f"Fail to register {email_address}.")
|
||||||
if len(users) > 1:
|
if len(users) > 1:
|
||||||
raise Exception(f'Same email: {email_address} exists!')
|
raise Exception(f"Same email: {email_address} exists!")
|
||||||
|
|
||||||
# Try to log in
|
# Try to log in
|
||||||
user = users[0]
|
user = users[0]
|
||||||
@ -134,30 +213,56 @@ def github_callback():
|
|||||||
return redirect("/?auth=%s" % user.get_id())
|
return redirect("/?auth=%s" % user.get_id())
|
||||||
|
|
||||||
|
|
||||||
@manager.route('/feishu_callback', methods=['GET'])
|
@manager.route("/feishu_callback", methods=["GET"])
|
||||||
def feishu_callback():
|
def feishu_callback():
|
||||||
|
"""
|
||||||
|
Feishu OAuth callback endpoint.
|
||||||
|
---
|
||||||
|
tags:
|
||||||
|
- OAuth
|
||||||
|
parameters:
|
||||||
|
- in: query
|
||||||
|
name: code
|
||||||
|
type: string
|
||||||
|
required: true
|
||||||
|
description: Authorization code from Feishu.
|
||||||
|
responses:
|
||||||
|
200:
|
||||||
|
description: Authentication successful.
|
||||||
|
schema:
|
||||||
|
type: object
|
||||||
|
"""
|
||||||
import requests
|
import requests
|
||||||
app_access_token_res = requests.post(FEISHU_OAUTH.get("app_access_token_url"),
|
|
||||||
data=json.dumps({
|
app_access_token_res = requests.post(
|
||||||
"app_id": FEISHU_OAUTH.get("app_id"),
|
FEISHU_OAUTH.get("app_access_token_url"),
|
||||||
"app_secret": FEISHU_OAUTH.get("app_secret")
|
data=json.dumps(
|
||||||
}),
|
{
|
||||||
headers={"Content-Type": "application/json; charset=utf-8"})
|
"app_id": FEISHU_OAUTH.get("app_id"),
|
||||||
|
"app_secret": FEISHU_OAUTH.get("app_secret"),
|
||||||
|
}
|
||||||
|
),
|
||||||
|
headers={"Content-Type": "application/json; charset=utf-8"},
|
||||||
|
)
|
||||||
app_access_token_res = app_access_token_res.json()
|
app_access_token_res = app_access_token_res.json()
|
||||||
if app_access_token_res['code'] != 0:
|
if app_access_token_res["code"] != 0:
|
||||||
return redirect("/?error=%s" % app_access_token_res)
|
return redirect("/?error=%s" % app_access_token_res)
|
||||||
|
|
||||||
res = requests.post(FEISHU_OAUTH.get("user_access_token_url"),
|
res = requests.post(
|
||||||
data=json.dumps({
|
FEISHU_OAUTH.get("user_access_token_url"),
|
||||||
"grant_type": FEISHU_OAUTH.get("grant_type"),
|
data=json.dumps(
|
||||||
"code": request.args.get('code')
|
{
|
||||||
}),
|
"grant_type": FEISHU_OAUTH.get("grant_type"),
|
||||||
headers={
|
"code": request.args.get("code"),
|
||||||
"Content-Type": "application/json; charset=utf-8",
|
}
|
||||||
'Authorization': f"Bearer {app_access_token_res['app_access_token']}"
|
),
|
||||||
})
|
headers={
|
||||||
|
"Content-Type": "application/json; charset=utf-8",
|
||||||
|
"Authorization": f"Bearer {app_access_token_res['app_access_token']}",
|
||||||
|
},
|
||||||
|
)
|
||||||
res = res.json()
|
res = res.json()
|
||||||
if res['code'] != 0:
|
if res["code"] != 0:
|
||||||
return redirect("/?error=%s" % res["message"])
|
return redirect("/?error=%s" % res["message"])
|
||||||
|
|
||||||
if "contact:user.email:readonly" not in res["data"]["scope"].split(" "):
|
if "contact:user.email:readonly" not in res["data"]["scope"].split(" "):
|
||||||
@@ -176,19 +281,22 @@ def feishu_callback():
            except Exception as e:
                stat_logger.exception(e)
                avatar = ""
            users = user_register(
                user_id,
                {
                    "access_token": session["access_token"],
                    "email": email_address,
                    "avatar": avatar,
                    "nickname": user_info["en_name"],
                    "login_channel": "feishu",
                    "last_login_time": get_format_time(),
                    "is_superuser": False,
                },
            )
            if not users:
                raise Exception(f"Fail to register {email_address}.")
            if len(users) > 1:
                raise Exception(f"Same email: {email_address} exists!")

            # Try to log in
            user = users[0]
@@ -209,11 +317,14 @@ def feishu_callback():


def user_info_from_feishu(access_token):
    import requests
    headers = {
        "Content-Type": "application/json; charset=utf-8",
        "Authorization": f"Bearer {access_token}",
    }
    res = requests.get(
        f"https://open.feishu.cn/open-apis/authen/v1/user_info", headers=headers
    )
    user_info = res.json()["data"]
    user_info["email"] = None if user_info.get("email") == "" else user_info["email"]
    return user_info
@@ -221,24 +332,38 @@ def user_info_from_feishu(access_token):


def user_info_from_github(access_token):
    import requests
    headers = {"Accept": "application/json", "Authorization": f"token {access_token}"}
    res = requests.get(
        f"https://api.github.com/user?access_token={access_token}", headers=headers
    )
    user_info = res.json()
    email_info = requests.get(
        f"https://api.github.com/user/emails?access_token={access_token}",
        headers=headers,
    ).json()
    user_info["email"] = next(
        (email for email in email_info if email["primary"] == True), None
    )["email"]
    return user_info


@manager.route("/logout", methods=["GET"])
@login_required
def log_out():
    """
    User logout endpoint.
    ---
    tags:
      - User
    security:
      - ApiKeyAuth: []
    responses:
      200:
        description: Logout successful.
        schema:
          type: object
    """
    current_user.access_token = ""
    current_user.save()
    logout_user()
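For orientation, the ApiKeyAuth scheme referenced in the docstring above maps to an Authorization header. A hedged client-side sketch follows; the host, port, and /v1/user route prefix are assumptions about a typical deployment, not taken from this diff:

# Hypothetical logout call; base URL and route prefix are assumed.
import requests

BASE_URL = "http://127.0.0.1:9380/v1/user"  # assumed RAGFlow address
resp = requests.get(
    f"{BASE_URL}/logout",
    headers={"Authorization": "<access_token>"},  # token obtained at login
)
print(resp.json())  # expected envelope: {"retcode": ..., "retmsg": ..., "data": ...}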
@@ -248,20 +373,62 @@ def log_out():
@manager.route("/setting", methods=["POST"])
@login_required
def setting_user():
    """
    Update user settings.
    ---
    tags:
      - User
    security:
      - ApiKeyAuth: []
    parameters:
      - in: body
        name: body
        description: User settings to update.
        required: true
        schema:
          type: object
          properties:
            nickname:
              type: string
              description: New nickname.
            email:
              type: string
              description: New email.
    responses:
      200:
        description: Settings updated successfully.
        schema:
          type: object
    """
    update_dict = {}
    request_data = request.json
    if request_data.get("password"):
        new_password = request_data.get("new_password")
        if not check_password_hash(
            current_user.password, decrypt(request_data["password"])
        ):
            return get_json_result(
                data=False,
                retcode=RetCode.AUTHENTICATION_ERROR,
                retmsg="Password error!",
            )

        if new_password:
            update_dict["password"] = generate_password_hash(decrypt(new_password))

    for k in request_data.keys():
        if k in [
            "password",
            "new_password",
            "email",
            "status",
            "is_superuser",
            "login_channel",
            "is_anonymous",
            "is_active",
            "is_authenticated",
            "last_login_time",
        ]:
            continue
        update_dict[k] = request_data[k]

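A sketch of exercising /setting with Flask's test client, assuming the RAGFlow `app` object is importable as elsewhere in this diff and that the user blueprint is mounted under /v1/user (an assumption):

# Hedged test-client sketch; the /v1/user prefix is an assumption.
from api.apps import app

with app.test_client() as client:
    resp = client.post(
        "/v1/user/setting",
        json={"nickname": "new-nick"},  # keys in the protected list above are skipped
        headers={"Authorization": "<access_token>"},
    )
    print(resp.get_json())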
@@ -270,12 +437,37 @@ def setting_user():
        return get_json_result(data=True)
    except Exception as e:
        stat_logger.exception(e)
        return get_json_result(
            data=False, retmsg="Update failure!", retcode=RetCode.EXCEPTION_ERROR
        )


@manager.route("/info", methods=["GET"])
@login_required
def user_profile():
    """
    Get user profile information.
    ---
    tags:
      - User
    security:
      - ApiKeyAuth: []
    responses:
      200:
        description: User profile retrieved successfully.
        schema:
          type: object
          properties:
            id:
              type: string
              description: User ID.
            nickname:
              type: string
              description: User nickname.
            email:
              type: string
              description: User email.
    """
    return get_json_result(data=current_user.to_dict())
@@ -310,13 +502,13 @@ def user_register(user_id, user):
        "asr_id": ASR_MDL,
        "parser_ids": PARSERS,
        "img2txt_id": IMAGE2TEXT_MDL,
        "rerank_id": RERANK_MDL,
    }
    usr_tenant = {
        "tenant_id": user_id,
        "user_id": user_id,
        "invited_by": user_id,
        "role": UserTenantRole.OWNER,
    }
    file_id = get_uuid()
    file = {
@@ -331,13 +523,16 @@ def user_register(user_id, user):
    }
    tenant_llm = []
    for llm in LLMService.query(fid=LLM_FACTORY):
        tenant_llm.append(
            {
                "tenant_id": user_id,
                "llm_factory": LLM_FACTORY,
                "llm_name": llm.llm_name,
                "model_type": llm.model_type,
                "api_key": API_KEY,
                "api_base": LLM_BASE_URL,
            }
        )

    if not UserService.save(**user):
        return
|
|||||||
@manager.route("/register", methods=["POST"])
|
@manager.route("/register", methods=["POST"])
|
||||||
@validate_request("nickname", "email", "password")
|
@validate_request("nickname", "email", "password")
|
||||||
def user_add():
|
def user_add():
|
||||||
|
"""
|
||||||
|
Register a new user.
|
||||||
|
---
|
||||||
|
tags:
|
||||||
|
- User
|
||||||
|
parameters:
|
||||||
|
- in: body
|
||||||
|
name: body
|
||||||
|
description: Registration details.
|
||||||
|
required: true
|
||||||
|
schema:
|
||||||
|
type: object
|
||||||
|
properties:
|
||||||
|
nickname:
|
||||||
|
type: string
|
||||||
|
description: User nickname.
|
||||||
|
email:
|
||||||
|
type: string
|
||||||
|
description: User email.
|
||||||
|
password:
|
||||||
|
type: string
|
||||||
|
description: User password.
|
||||||
|
responses:
|
||||||
|
200:
|
||||||
|
description: Registration successful.
|
||||||
|
schema:
|
||||||
|
type: object
|
||||||
|
"""
|
||||||
req = request.json
|
req = request.json
|
||||||
email_address = req["email"]
|
email_address = req["email"]
|
||||||
|
|
||||||
# Validate the email address
|
# Validate the email address
|
||||||
if not re.match(r"^[\w\._-]+@([\w_-]+\.)+[\w-]{2,5}$", email_address):
|
if not re.match(r"^[\w\._-]+@([\w_-]+\.)+[\w-]{2,5}$", email_address):
|
||||||
return get_json_result(data=False,
|
return get_json_result(
|
||||||
retmsg=f'Invalid email address: {email_address}!',
|
data=False,
|
||||||
retcode=RetCode.OPERATING_ERROR)
|
retmsg=f"Invalid email address: {email_address}!",
|
||||||
|
retcode=RetCode.OPERATING_ERROR,
|
||||||
|
)
|
||||||
|
|
||||||
# Check if the email address is already used
|
# Check if the email address is already used
|
||||||
if UserService.query(email=email_address):
|
if UserService.query(email=email_address):
|
||||||
return get_json_result(
|
return get_json_result(
|
||||||
data=False,
|
data=False,
|
||||||
retmsg=f'Email: {email_address} has already registered!',
|
retmsg=f"Email: {email_address} has already registered!",
|
||||||
retcode=RetCode.OPERATING_ERROR)
|
retcode=RetCode.OPERATING_ERROR,
|
||||||
|
)
|
||||||
|
|
||||||
# Construct user info data
|
# Construct user info data
|
||||||
nickname = req["nickname"]
|
nickname = req["nickname"]
|
||||||
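Registration gates on the email regex above; a quick standalone check of what it accepts (sample addresses are hypothetical):

# Same pattern as user_add() above, exercised on hypothetical inputs.
import re

EMAIL_RE = r"^[\w\._-]+@([\w_-]+\.)+[\w-]{2,5}$"
for addr in ["user@example.com", "first.last@sub.domain.org", "not-an-email"]:
    print(addr, "->", bool(re.match(EMAIL_RE, addr)))
# user@example.com -> True
# first.last@sub.domain.org -> True
# not-an-email -> False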
@@ -383,25 +609,55 @@ def user_add():
    try:
        users = user_register(user_id, user_dict)
        if not users:
            raise Exception(f"Fail to register {email_address}.")
        if len(users) > 1:
            raise Exception(f"Same email: {email_address} exists!")
        user = users[0]
        login_user(user)
        return construct_response(
            data=user.to_json(),
            auth=user.get_id(),
            retmsg=f"{nickname}, welcome aboard!",
        )
    except Exception as e:
        rollback_user_registration(user_id)
        stat_logger.exception(e)
        return get_json_result(
            data=False,
            retmsg=f"User registration failure, error: {str(e)}",
            retcode=RetCode.EXCEPTION_ERROR,
        )
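A hedged end-to-end sketch of calling /register with the three validated fields; the base URL is assumed, and the password likely needs the same client-side encryption that decrypt() undoes server-side, so it is left as a placeholder:

# Hypothetical registration call; URL and password encoding are assumptions.
import requests

resp = requests.post(
    "http://127.0.0.1:9380/v1/user/register",  # assumed deployment address
    json={
        "nickname": "demo",
        "email": "demo@example.com",
        "password": "<encrypted-password>",  # see decrypt() usage in setting_user above
    },
)
print(resp.json())  # on success, retmsg reads "demo, welcome aboard!"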
@manager.route("/tenant_info", methods=["GET"])
|
@manager.route("/tenant_info", methods=["GET"])
|
||||||
@login_required
|
@login_required
|
||||||
def tenant_info():
|
def tenant_info():
|
||||||
|
"""
|
||||||
|
Get tenant information.
|
||||||
|
---
|
||||||
|
tags:
|
||||||
|
- Tenant
|
||||||
|
security:
|
||||||
|
- ApiKeyAuth: []
|
||||||
|
responses:
|
||||||
|
200:
|
||||||
|
description: Tenant information retrieved successfully.
|
||||||
|
schema:
|
||||||
|
type: object
|
||||||
|
properties:
|
||||||
|
tenant_id:
|
||||||
|
type: string
|
||||||
|
description: Tenant ID.
|
||||||
|
name:
|
||||||
|
type: string
|
||||||
|
description: Tenant name.
|
||||||
|
llm_id:
|
||||||
|
type: string
|
||||||
|
description: LLM ID.
|
||||||
|
embd_id:
|
||||||
|
type: string
|
||||||
|
description: Embedding model ID.
|
||||||
|
"""
|
||||||
try:
|
try:
|
||||||
tenants = TenantService.get_info_by(current_user.id)
|
tenants = TenantService.get_info_by(current_user.id)
|
||||||
if not tenants:
|
if not tenants:
|
||||||
@@ -415,6 +671,42 @@ def tenant_info():
@login_required
@validate_request("tenant_id", "asr_id", "embd_id", "img2txt_id", "llm_id")
def set_tenant_info():
    """
    Update tenant information.
    ---
    tags:
      - Tenant
    security:
      - ApiKeyAuth: []
    parameters:
      - in: body
        name: body
        description: Tenant information to update.
        required: true
        schema:
          type: object
          properties:
            tenant_id:
              type: string
              description: Tenant ID.
            llm_id:
              type: string
              description: LLM ID.
            embd_id:
              type: string
              description: Embedding model ID.
            asr_id:
              type: string
              description: ASR model ID.
            img2txt_id:
              type: string
              description: Image to Text model ID.
    responses:
      200:
        description: Tenant information updated successfully.
        schema:
          type: object
    """
    req = request.json
    try:
        tid = req["tenant_id"]
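Putting the validator and docstring above together, a request body for this endpoint would look like the following (all IDs are hypothetical placeholders), POSTed with the same Authorization header as the earlier examples:

# Body shape implied by @validate_request(...) above; values are placeholders.
payload = {
    "tenant_id": "<tenant-uuid>",
    "llm_id": "<chat-model-id>",
    "embd_id": "<embedding-model-id>",
    "asr_id": "<asr-model-id>",
    "img2txt_id": "<image2text-model-id>",
}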
api/ragflow_server.py
@@ -27,7 +27,11 @@ from api.apps import app
from api.db.runtime_config import RuntimeConfig
from api.db.services.document_service import DocumentService
from api.settings import (
    HOST,
    HTTP_PORT,
    access_logger,
    database_logger,
    stat_logger,
)
from api import utils

@@ -45,27 +49,33 @@ def update_progress():
        stat_logger.error("update_progress exception:" + str(e))


if __name__ == "__main__":
    print(
        r"""
    ____   ___    ______ ______ __
   / __ \ /   |  / ____// ____// /____  _      __
  / /_/ // /| | / / __ / /_   / // __ \| | /| / /
 / _, _// ___ |/ /_/ // __/  / // /_/ /| |/ |/ /
/_/ |_|/_/  |_|\____//_/    /_/ \____/ |__/|__/

""",
        flush=True,
    )
    stat_logger.info(f"project base: {utils.file_utils.get_project_base_directory()}")

    # init db
    init_web_db()
    init_web_data()
    # init runtime config
    import argparse

    parser = argparse.ArgumentParser()
    parser.add_argument(
        "--version", default=False, help="rag flow version", action="store_true"
    )
    parser.add_argument(
        "--debug", default=False, help="debug mode", action="store_true"
    )
    args = parser.parse_args()
    if args.version:
        print(get_versions())
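Both CLI flags above are plain store_true booleans; a quick standalone check of the parsing behaviour:

# Reproduces the argparse setup above outside the server, for illustration.
import argparse

parser = argparse.ArgumentParser()
parser.add_argument("--version", default=False, help="rag flow version", action="store_true")
parser.add_argument("--debug", default=False, help="debug mode", action="store_true")
print(parser.parse_args(["--debug"]))  # Namespace(debug=True, version=False)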
@@ -78,7 +88,7 @@ if __name__ == '__main__':
    RuntimeConfig.init_env()
    RuntimeConfig.init_config(JOB_SERVER_HOST=HOST, HTTP_PORT=HTTP_PORT)

    peewee_logger = logging.getLogger("peewee")
    peewee_logger.propagate = False
    # rag_arch.common.log.ROpenHandler
    peewee_logger.addHandler(database_logger.handlers[0])
@@ -93,7 +103,14 @@ if __name__ == '__main__':
        werkzeug_logger = logging.getLogger("werkzeug")
        for h in access_logger.handlers:
            werkzeug_logger.addHandler(h)
        run_simple(
            hostname=HOST,
            port=HTTP_PORT,
            application=app,
            threaded=True,
            use_reloader=RuntimeConfig.DEBUG,
            use_debugger=RuntimeConfig.DEBUG,
        )
    except Exception:
        traceback.print_exc()
        os.kill(os.getpid(), signal.SIGKILL)
106 poetry.lock generated
@@ -435,6 +435,17 @@ files = [
     {file = "Aspose.Slides-24.10.0-py3-none-win_amd64.whl", hash = "sha256:8980015fbc32c1e70e80444c70a642597511300ead6b352183bf74ba3da67f2d"},
 ]

+[[package]]
+name = "async-timeout"
+version = "4.0.3"
+description = "Timeout context manager for asyncio programs"
+optional = false
+python-versions = ">=3.7"
+files = [
+    {file = "async-timeout-4.0.3.tar.gz", hash = "sha256:4640d96be84d82d02ed59ea2b7105a0f7b33abe8703703cd0ab0bf87c427522f"},
+    {file = "async_timeout-4.0.3-py3-none-any.whl", hash = "sha256:7405140ff1230c310e51dc27b3145b9092d659ce68ff733fb0cefe3ee42be028"},
+]
+
 [[package]]
 name = "attrs"
 version = "24.2.0"
@@ -1912,7 +1923,10 @@ files = [
 huggingface-hub = ">=0.20,<1.0"
 loguru = ">=0.7.2,<0.8.0"
 mmh3 = ">=4.0,<5.0"
-numpy = {version = ">=1.26,<2", markers = "python_version >= \"3.12\""}
+numpy = [
+    {version = ">=1.21,<2", markers = "python_version < \"3.12\""},
+    {version = ">=1.26,<2", markers = "python_version >= \"3.12\""},
+]
 onnx = ">=1.15.0,<2.0.0"
 onnxruntime = ">=1.17.0,<2.0.0"
 pillow = ">=10.3.0,<11.0.0"
@@ -2037,6 +2051,24 @@ sentence_transformers = "*"
 torch = ">=1.6.0"
 transformers = ">=4.33.0"

+[[package]]
+name = "flasgger"
+version = "0.9.7.1"
+description = "Extract swagger specs from your flask project"
+optional = false
+python-versions = "*"
+files = [
+    {file = "flasgger-0.9.7.1.tar.gz", hash = "sha256:ca098e10bfbb12f047acc6299cc70a33851943a746e550d86e65e60d4df245fb"},
+]
+
+[package.dependencies]
+Flask = ">=0.10"
+jsonschema = ">=3.0.1"
+mistune = "*"
+packaging = "*"
+PyYAML = ">=3.0"
+six = ">=1.10.0"
+
 [[package]]
 name = "flask"
 version = "3.0.3"
@@ -4381,6 +4413,17 @@ httpx = ">=0.25,<1"
 orjson = ">=3.9.10,<3.11"
 pydantic = ">=2.5.2,<3"

+[[package]]
+name = "mistune"
+version = "3.0.2"
+description = "A sane and fast Markdown parser with useful plugins and renderers"
+optional = false
+python-versions = ">=3.7"
+files = [
+    {file = "mistune-3.0.2-py3-none-any.whl", hash = "sha256:71481854c30fdbc938963d3605b72501f5c10a9320ecd412c121c163a1c7d205"},
+    {file = "mistune-3.0.2.tar.gz", hash = "sha256:fc7f93ded930c92394ef2cb6f04a8aabab4117a91449e72dcc8dfa646a508be8"},
+]
+
 [[package]]
 name = "mkl"
 version = "2021.4.0"
@@ -5149,7 +5192,10 @@ files = [
 ]

 [package.dependencies]
-numpy = {version = ">=1.26.0", markers = "python_version >= \"3.12\""}
+numpy = [
+    {version = ">=1.23.5", markers = "python_version >= \"3.11\" and python_version < \"3.12\""},
+    {version = ">=1.26.0", markers = "python_version >= \"3.12\""},
+]

 [[package]]
 name = "opencv-python-headless"
@@ -5168,7 +5214,10 @@ files = [
 ]

 [package.dependencies]
-numpy = {version = ">=1.26.0", markers = "python_version >= \"3.12\""}
+numpy = [
+    {version = ">=1.23.5", markers = "python_version >= \"3.11\" and python_version < \"3.12\""},
+    {version = ">=1.26.0", markers = "python_version >= \"3.12\""},
+]

 [[package]]
 name = "openpyxl"
@@ -5350,7 +5399,10 @@ files = [
 ]

 [package.dependencies]
-numpy = {version = ">=1.26.0", markers = "python_version >= \"3.12\""}
+numpy = [
+    {version = ">=1.23.2", markers = "python_version == \"3.11\""},
+    {version = ">=1.26.0", markers = "python_version >= \"3.12\""},
+]
 python-dateutil = ">=2.8.2"
 pytz = ">=2020.1"
 tzdata = ">=2022.7"
@@ -7009,6 +7061,24 @@ lxml = "*"
 [package.extras]
 test = ["timeout-decorator"]

+[[package]]
+name = "redis"
+version = "5.0.3"
+description = "Python client for Redis database and key-value store"
+optional = false
+python-versions = ">=3.7"
+files = [
+    {file = "redis-5.0.3-py3-none-any.whl", hash = "sha256:5da9b8fe9e1254293756c16c008e8620b3d15fcc6dde6babde9541850e72a32d"},
+    {file = "redis-5.0.3.tar.gz", hash = "sha256:4973bae7444c0fbed64a06b87446f79361cb7e4ec1538c022d696ed7a5015580"},
+]
+
+[package.dependencies]
+async-timeout = {version = ">=4.0.3", markers = "python_full_version < \"3.11.3\""}
+
+[package.extras]
+hiredis = ["hiredis (>=1.0.0)"]
+ocsp = ["cryptography (>=36.0.1)", "pyopenssl (==20.0.1)", "requests (>=2.26.0)"]
+
 [[package]]
 name = "referencing"
 version = "0.35.1"
@@ -8468,6 +8538,7 @@ nvidia-cusparse-cu12 = {version = "12.1.0.106", markers = "platform_system == \"
 nvidia-nccl-cu12 = {version = "2.20.5", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""}
 nvidia-nvtx-cu12 = {version = "12.1.105", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""}
 sympy = "*"
+triton = {version = "2.3.0", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\" and python_version < \"3.12\""}
 typing-extensions = ">=4.8.0"

 [package.extras]
@@ -8611,6 +8682,29 @@ files = [
 trio = ">=0.11"
 wsproto = ">=0.14"

+[[package]]
+name = "triton"
+version = "2.3.0"
+description = "A language and compiler for custom Deep Learning operations"
+optional = false
+python-versions = "*"
+files = [
+    {file = "triton-2.3.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5ce4b8ff70c48e47274c66f269cce8861cf1dc347ceeb7a67414ca151b1822d8"},
+    {file = "triton-2.3.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3c3d9607f85103afdb279938fc1dd2a66e4f5999a58eb48a346bd42738f986dd"},
+    {file = "triton-2.3.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:218d742e67480d9581bafb73ed598416cc8a56f6316152e5562ee65e33de01c0"},
+    {file = "triton-2.3.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:381ec6b3dac06922d3e4099cfc943ef032893b25415de295e82b1a82b0359d2c"},
+    {file = "triton-2.3.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:038e06a09c06a164fef9c48de3af1e13a63dc1ba3c792871e61a8e79720ea440"},
+    {file = "triton-2.3.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d8f636e0341ac348899a47a057c3daea99ea7db31528a225a3ba4ded28ccc65"},
+]
+
+[package.dependencies]
+filelock = "*"
+
+[package.extras]
+build = ["cmake (>=3.20)", "lit"]
+tests = ["autopep8", "flake8", "isort", "numpy", "pytest", "scipy (>=1.7.1)", "torch"]
+tutorials = ["matplotlib", "pandas", "tabulate", "torch"]
+
 [[package]]
 name = "typer"
 version = "0.12.5"
@@ -9446,5 +9540,5 @@ files = [

 [metadata]
 lock-version = "2.0"
-python-versions = ">=3.12,<3.13"
-content-hash = "9c488418342dcd2a1ff625db0da677d086e309c9e4285b46c622f1099af4850f"
+python-versions = ">=3.11,<3.13"
+content-hash = "74a9b4afef47cc36d638b43fd918ece27d65259af1ca9e5b17f6b239774e8bf9"
pyproject.toml
@@ -8,7 +8,7 @@ readme = "README.md"
 package-mode = false

 [tool.poetry.dependencies]
-python = ">=3.12,<3.13"
+python = ">=3.11,<3.13"
 datrie = "0.8.2"
 akshare = "^1.14.81"
 azure-storage-blob = "12.22.0"
@@ -114,6 +114,7 @@ graspologic = "^3.4.1"
 pymysql = "^1.1.1"
 mini-racer = "^0.12.4"
 pyicu = "^2.13.1"
+flasgger = "^0.9.7.1"


 [tool.poetry.group.full]