mirror of https://git.mirrors.martin98.com/https://github.com/infiniflow/ragflow.git
synced 2025-04-21 05:29:57 +08:00

go through smoke test of all API (#12)

* add field progress msg into docinfo; add file processing procedure
* go through upload, create kb, add doc to kb
* smoke test for all API

parent 72b7b5fae5
commit 1eb186a25f
@@ -11,7 +11,7 @@ ES_PORT=9200
 KIBANA_PORT=6601

 # Increase or decrease based on the available host memory (in bytes)
-MEM_LIMIT=1073741824
+MEM_LIMIT=4073741824

 POSTGRES_USER=root
 POSTGRES_PASSWORD=infiniflow_docgpt
@@ -54,6 +54,22 @@ services:
       - docgpt
     restart: always

+  minio:
+    image: quay.io/minio/minio:RELEASE.2023-12-20T01-00-02Z
+    container_name: docgpt-minio
+    command: server --console-address ":9001" /data
+    ports:
+      - 9000:9000
+      - 9001:9001
+    environment:
+      - MINIO_ROOT_USER=${MINIO_USER}
+      - MINIO_ROOT_PASSWORD=${MINIO_PASSWORD}
+    volumes:
+      - minio_data:/data
+    networks:
+      - docgpt
+    restart: always
+

 volumes:
   esdata01:
@@ -62,6 +78,8 @@ volumes:
     driver: local
   pg_data:
     driver: local
+  minio_data:
+    driver: local

 networks:
   docgpt:
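
One quick way to verify the new stack is a couple of client-side pings. A minimal sketch, assuming the compose ports above and the elasticsearch and minio Python packages; the credentials are the MINIO_USER/MINIO_PASSWORD values from .env:

from elasticsearch import Elasticsearch
from minio import Minio

es = Elasticsearch("http://127.0.0.1:9200")   # ES_PORT from .env
print("ES reachable:", es.ping())

mn = Minio("127.0.0.1:9000",                  # MinIO S3 port mapped above
           access_key="infiniflow",           # assumed MINIO_USER value
           secret_key="infiniflow_docgpt",    # assumed MINIO_PASSWORD value
           secure=False)
print("MinIO buckets:", mn.list_buckets())
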
@@ -20,13 +20,14 @@ impl MigrationTrait for Migration {
             )
             .col(ColumnDef::new(UserInfo::Email).string().not_null())
             .col(ColumnDef::new(UserInfo::Nickname).string().not_null())
             .col(ColumnDef::new(UserInfo::AvatarUrl).string())
-            .col(ColumnDef::new(UserInfo::ColorSchema).string().default("dark"))
+            .col(ColumnDef::new(UserInfo::AvatarBase64).string())
+            .col(ColumnDef::new(UserInfo::ColorScheme).string().default("dark"))
             .col(ColumnDef::new(UserInfo::ListStyle).string().default("list"))
             .col(ColumnDef::new(UserInfo::Language).string().default("chinese"))
             .col(ColumnDef::new(UserInfo::Password).string().not_null())
-            .col(ColumnDef::new(UserInfo::CreatedAt).date().not_null())
-            .col(ColumnDef::new(UserInfo::UpdatedAt).date().not_null())
+            .col(ColumnDef::new(UserInfo::LastLoginAt).timestamp_with_time_zone())
+            .col(ColumnDef::new(UserInfo::CreatedAt).timestamp_with_time_zone().not_null())
+            .col(ColumnDef::new(UserInfo::UpdatedAt).timestamp_with_time_zone().not_null())
             .col(ColumnDef::new(UserInfo::IsDeleted).boolean().default(false))
             .to_owned(),
         )
@@ -49,9 +50,9 @@ impl MigrationTrait for Migration {
             .col(ColumnDef::new(TagInfo::Regx).string())
             .col(ColumnDef::new(TagInfo::Color).tiny_unsigned().default(1))
             .col(ColumnDef::new(TagInfo::Icon).tiny_unsigned().default(1))
-            .col(ColumnDef::new(TagInfo::Dir).string())
-            .col(ColumnDef::new(TagInfo::CreatedAt).date().not_null())
-            .col(ColumnDef::new(TagInfo::UpdatedAt).date().not_null())
+            .col(ColumnDef::new(TagInfo::FolderId).big_integer())
+            .col(ColumnDef::new(TagInfo::CreatedAt).timestamp_with_time_zone().not_null())
+            .col(ColumnDef::new(TagInfo::UpdatedAt).timestamp_with_time_zone().not_null())
             .col(ColumnDef::new(TagInfo::IsDeleted).boolean().default(false))
             .to_owned(),
         )
@@ -89,6 +90,10 @@ impl MigrationTrait for Migration {
             )
             .col(ColumnDef::new(Kb2Doc::KbId).big_integer())
             .col(ColumnDef::new(Kb2Doc::Did).big_integer())
+            .col(ColumnDef::new(Kb2Doc::KbProgress).float().default(0))
+            .col(ColumnDef::new(Kb2Doc::KbProgressMsg).string().default(""))
+            .col(ColumnDef::new(Kb2Doc::UpdatedAt).timestamp_with_time_zone().not_null())
+            .col(ColumnDef::new(Kb2Doc::IsDeleted).boolean().default(false))
             .to_owned(),
         )
         .await?;
@@ -141,8 +146,8 @@ impl MigrationTrait for Migration {
             .col(ColumnDef::new(KbInfo::Uid).big_integer().not_null())
             .col(ColumnDef::new(KbInfo::KbName).string().not_null())
             .col(ColumnDef::new(KbInfo::Icon).tiny_unsigned().default(1))
-            .col(ColumnDef::new(KbInfo::CreatedAt).date().not_null())
-            .col(ColumnDef::new(KbInfo::UpdatedAt).date().not_null())
+            .col(ColumnDef::new(KbInfo::CreatedAt).timestamp_with_time_zone().not_null())
+            .col(ColumnDef::new(KbInfo::UpdatedAt).timestamp_with_time_zone().not_null())
             .col(ColumnDef::new(KbInfo::IsDeleted).boolean().default(false))
             .to_owned(),
         )
@@ -162,10 +167,8 @@ impl MigrationTrait for Migration {
             .col(ColumnDef::new(DocInfo::Location).string().not_null())
             .col(ColumnDef::new(DocInfo::Size).big_integer().not_null())
             .col(ColumnDef::new(DocInfo::Type).string().not_null()).comment("doc|folder")
-            .col(ColumnDef::new(DocInfo::KbProgress).float().default(0))
-            .col(ColumnDef::new(DocInfo::KbProgressMsg).string().default(""))
-            .col(ColumnDef::new(DocInfo::CreatedAt).date().not_null())
-            .col(ColumnDef::new(DocInfo::UpdatedAt).date().not_null())
+            .col(ColumnDef::new(DocInfo::CreatedAt).timestamp_with_time_zone().not_null())
+            .col(ColumnDef::new(DocInfo::UpdatedAt).timestamp_with_time_zone().not_null())
             .col(ColumnDef::new(DocInfo::IsDeleted).boolean().default(false))
             .to_owned(),
         )
@@ -182,10 +185,11 @@ impl MigrationTrait for Migration {
                 .auto_increment()
                 .primary_key())
             .col(ColumnDef::new(DialogInfo::Uid).big_integer().not_null())
+            .col(ColumnDef::new(DialogInfo::KbId).big_integer().not_null())
             .col(ColumnDef::new(DialogInfo::DialogName).string().not_null())
             .col(ColumnDef::new(DialogInfo::History).string().comment("json"))
-            .col(ColumnDef::new(DialogInfo::CreatedAt).date().not_null())
-            .col(ColumnDef::new(DialogInfo::UpdatedAt).date().not_null())
+            .col(ColumnDef::new(DialogInfo::CreatedAt).timestamp_with_time_zone().not_null())
+            .col(ColumnDef::new(DialogInfo::UpdatedAt).timestamp_with_time_zone().not_null())
             .col(ColumnDef::new(DialogInfo::IsDeleted).boolean().default(false))
             .to_owned(),
         )
@@ -241,11 +245,12 @@ enum UserInfo {
     Uid,
     Email,
     Nickname,
     AvatarUrl,
-    ColorSchema,
+    AvatarBase64,
+    ColorScheme,
     ListStyle,
     Language,
     Password,
+    LastLoginAt,
     CreatedAt,
     UpdatedAt,
     IsDeleted,
@@ -260,7 +265,7 @@ enum TagInfo {
     Regx,
     Color,
     Icon,
-    Dir,
+    FolderId,
     CreatedAt,
     UpdatedAt,
     IsDeleted,
@@ -280,6 +285,10 @@ enum Kb2Doc {
     Id,
     KbId,
     Did,
+    KbProgress,
+    KbProgressMsg,
+    UpdatedAt,
+    IsDeleted,
 }

 #[derive(DeriveIden)]
@@ -319,8 +328,6 @@ enum DocInfo {
     Location,
     Size,
     Type,
-    KbProgress,
-    KbProgressMsg,
     CreatedAt,
     UpdatedAt,
     IsDeleted,
@@ -329,8 +336,9 @@ enum DocInfo {
 #[derive(DeriveIden)]
 enum DialogInfo {
     Table,
-    DialogId,
     Uid,
+    KbId,
+    DialogId,
     DialogName,
     History,
     CreatedAt,
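
The Kb2Doc progress columns added here are the contract the Python worker polls: kb_progress 0 means pending, -1 failed, 1 done. A sketch of that query, assuming psycopg2 and the pgdb_* settings from the config file below:

import psycopg2

conn = psycopg2.connect(host="127.0.0.1", port=5455, user="root",
                        password="infiniflow_docgpt", dbname="docgpt")
cur = conn.cursor()
# Pending rows are what the indexing worker picks up (see collect() below).
cur.execute("select id, kb_id, did, kb_progress_msg from kb2_doc where kb_progress = 0 limit 10")
print(cur.fetchall())
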
@@ -1,7 +1,10 @@
 [infiniflow]
-es=127.0.0.1:9200
+es=http://127.0.0.1:9200
 pgdb_usr=root
 pgdb_pwd=infiniflow_docgpt
 pgdb_host=127.0.0.1
 pgdb_port=5455
+minio_host=127.0.0.1:9000
+minio_usr=infiniflow
+minio_pwd=infiniflow_docgpt
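
This file is plain INI, so configparser reads it directly; a sketch, with the file name assumed:

import configparser

cf = configparser.ConfigParser()
cf.read("sys.cnf")  # assumed name of the [infiniflow] config file above
print(cf["infiniflow"].get("es"))                  # http://127.0.0.1:9200
print(cf["infiniflow"].get("missing", "default"))  # same fallback idea as Config.get below
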
@@ -2,6 +2,7 @@ import re
 import os
 import copy
 import base64
+import magic
 from dataclasses import dataclass
 from typing import List
 import numpy as np
@@ -373,6 +374,7 @@ class PptChunker(HuChunker):
         from pptx import Presentation
         ppt = Presentation(fnm)
         flds = self.Fields()
+        flds.text_chunks = []
         for slide in ppt.slides:
             for shape in slide.shapes:
                 if hasattr(shape, "text"):
@@ -391,11 +393,21 @@ class TextChunker(HuChunker):
     def __init__(self):
         super().__init__()

+    @staticmethod
+    def is_binary_file(file_path):
+        mime = magic.Magic(mime=True)
+        file_type = mime.from_file(file_path)
+        if 'text' in file_type:
+            return False
+        else:
+            return True
+
     def __call__(self, fnm):
         flds = self.Fields()
+        if self.is_binary_file(fnm):return flds
         with open(fnm, "r") as f:
             txt = f.read()
-        flds.text_chunks = self.naive_text_chunk(txt)
+        flds.text_chunks = [(c, None) for c in self.naive_text_chunk(txt)]
         flds.table_chunks = []
         return flds
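
A quick exercise of the new binary guard, assuming python-magic (and libmagic) is installed and the module is importable as nlp.huchunk:

from nlp.huchunk import TextChunker

print(TextChunker.is_binary_file("README.md"))  # False for text/* MIME types
flds = TextChunker()("README.md")
print(flds.text_chunks[:1])  # now a list of (chunk, image) pairs, image is None
print(flds.table_chunks)     # [] for plain text; binary files return empty Fields
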
@@ -1,10 +1,15 @@
-import json, re, sys, os, hashlib, copy, glob, util, time, random
-from util.es_conn import HuEs, Postgres
+import json, os, sys, hashlib, copy, time, random, re, logging, torch
+from os.path import dirname, realpath
+sys.path.append(dirname(realpath(__file__)) + "/../")
+from util.es_conn import HuEs
+from util.db_conn import Postgres
+from util.minio_conn import HuMinio
 from util import rmSpace, findMaxDt
 from FlagEmbedding import FlagModel
 from nlp import huchunk, huqie
 import base64, hashlib
 from io import BytesIO
 import pandas as pd
 from elasticsearch_dsl import Q
 from parser import (
     PdfParser,
@@ -22,73 +27,115 @@ from nlp.huchunk import (
 ES = HuEs("infiniflow")
 BATCH_SIZE = 64
 PG = Postgres("infiniflow", "docgpt")
+MINIO = HuMinio("infiniflow")

 PDF = PdfChunker(PdfParser())
 DOC = DocxChunker(DocxParser())
 EXC = ExcelChunker(ExcelParser())
 PPT = PptChunker()

+UPLOAD_LOCATION = os.environ.get("UPLOAD_LOCATION", "./")
+logging.warning(f"The files are stored in {UPLOAD_LOCATION}, please check it!")
+

 def chuck_doc(name):
-    name = os.path.split(name)[-1].lower().split(".")[-1]
-    if name.find("pdf") >= 0: return PDF(name)
-    if name.find("doc") >= 0: return DOC(name)
-    if name.find("xlsx") >= 0: return EXC(name)
-    if name.find("ppt") >= 0: return PDF(name)
-    if name.find("pdf") >= 0: return PPT(name)
+    suff = os.path.split(name)[-1].lower().split(".")[-1]
+    if suff.find("pdf") >= 0: return PDF(name)
+    if suff.find("doc") >= 0: return DOC(name)
+    if re.match(r"(xlsx|xlsm|xltx|xltm)", suff): return EXC(name)
+    if suff.find("ppt") >= 0: return PPT(name)

-    if re.match(r"(txt|csv)", name): return TextChunker(name)
+    return TextChunker()(name)


 def collect(comm, mod, tm):
+    sql = f"""
+    select
+    id as kb2doc_id,
+    kb_id,
+    did,
+    updated_at,
+    is_deleted
+    from kb2_doc
+    where
+    updated_at >= '{tm}'
+    and kb_progress = 0
+    and MOD(did, {comm}) = {mod}
+    order by updated_at asc
+    limit 1000
+    """
+    kb2doc = PG.select(sql)
+    if len(kb2doc) == 0:return pd.DataFrame()
+
     sql = """
     select
     did,
     uid,
     doc_name,
     location,
-    updated_at
-    from docinfo
+    size
+    from doc_info
     where
-    updated_at >= '{tm}'
-    and kb_progress = 0
-    and type = 'doc'
-    and MOD(uid, {comm}) = {mod}
-    order by updated_at asc
-    limit 1000
-    """
-    df = PG.select(sql)
-    df = df.fillna("")
-    mtm = str(df["updated_at"].max())[:19]
-    print("TOTAL:", len(df), "To: ", mtm)
-    return df, mtm
+    did in (%s)
+    """%",".join([str(i) for i in kb2doc["did"].unique()])
+    docs = PG.select(sql)
+    docs = docs.fillna("")
+    docs = docs.join(kb2doc.set_index("did"), on="did", how="left")
+
+    mtm = str(docs["updated_at"].max())[:19]
+    print("TOTAL:", len(docs), "To: ", mtm)
+    return docs


-def set_progress(did, prog, msg):
+def set_progress(kb2doc_id, prog, msg="Processing..."):
     sql = f"""
-    update docinfo set kb_progress={prog}, kb_progress_msg='{msg}' where did={did}
+    update kb2_doc set kb_progress={prog}, kb_progress_msg='{msg}'
+    where
+    id={kb2doc_id}
     """
     PG.update(sql)


 def build(row):
     if row["size"] > 256000000:
-        set_progress(row["did"], -1, "File size exceeds( <= 256Mb )")
+        set_progress(row["kb2doc_id"], -1, "File size exceeds( <= 256Mb )")
         return []
+    res = ES.search(Q("term", doc_id=row["did"]))
+    if ES.getTotal(res) > 0:
+        ES.updateScriptByQuery(Q("term", doc_id=row["did"]),
+                               scripts="""
+                               if(!ctx._source.kb_id.contains('%s'))
+                                 ctx._source.kb_id.add('%s');
+                               """%(str(row["kb_id"]), str(row["kb_id"])),
+                               idxnm = index_name(row["uid"])
+                               )
+        set_progress(row["kb2doc_id"], 1, "Done")
+        return []
+
+    random.seed(time.time())
+    set_progress(row["kb2doc_id"], random.randint(0, 20)/100., "Finished preparing! Start to slice file!")
+    try:
+        obj = chuck_doc(os.path.join(UPLOAD_LOCATION, row["location"]))
+    except Exception as e:
+        if re.search("(No such file|not found)", str(e)):
+            set_progress(row["kb2doc_id"], -1, "Can not find file <%s>"%row["doc_name"])
+        else:
+            set_progress(row["kb2doc_id"], -1, f"Internal system error: %s"%str(e).replace("'", ""))
+        return []
+
+    print(row["doc_name"], obj)
+    if not obj.text_chunks and not obj.table_chunks:
+        set_progress(row["kb2doc_id"], 1, "Nothing added! Mostly, file type unsupported yet.")
+        return []
+
+    set_progress(row["kb2doc_id"], random.randint(20, 60)/100., "Finished slicing files. Start to embedding the content.")

     doc = {
         "doc_id": row["did"],
+        "kb_id": [str(row["kb_id"])],
         "title_tks": huqie.qie(os.path.split(row["location"])[-1]),
-        "updated_at": row["updated_at"]
+        "updated_at": str(row["updated_at"]).replace("T", " ")[:19]
     }
-    random.seed(time.time())
-    set_progress(row["did"], random.randint(0, 20)/100., "Finished preparing! Start to slice file!")
-    obj = chuck_doc(row["location"])
-    if not obj:
-        set_progress(row["did"], -1, "Unsuported file type.")
-        return []
-
-    set_progress(row["did"], random.randint(20, 60)/100.)
-
     output_buffer = BytesIO()
     docs = []
     md5 = hashlib.md5()
@@ -97,12 +144,11 @@ def build(row):
         md5.update((txt + str(d["doc_id"])).encode("utf-8"))
         d["_id"] = md5.hexdigest()
         d["content_ltks"] = huqie.qie(txt)
-        d["docnm_kwd"] = rmSpace(d["docnm_tks"])
         if not img:
             docs.append(d)
             continue
         img.save(output_buffer, format='JPEG')
-        d["img_bin"] = base64.b64encode(output_buffer.getvalue())
+        d["img_bin"] = str(output_buffer.getvalue())
         docs.append(d)

     for arr, img in obj.table_chunks:
@@ -115,9 +161,11 @@ def build(row):
             docs.append(d)
             continue
         img.save(output_buffer, format='JPEG')
-        d["img_bin"] = base64.b64encode(output_buffer.getvalue())
+        MINIO.put("{}-{}".format(row["uid"], row["kb_id"]), d["_id"],
+                  output_buffer.getvalue())
+        d["img_id"] = "{}-{}".format(row["uid"], row["kb_id"])
         docs.append(d)
-    set_progress(row["did"], random.randint(60, 70)/100., "Finished slicing. Start to embedding the content.")
+    set_progress(row["kb2doc_id"], random.randint(60, 70)/100., "Continue embedding the content.")

     return docs
@@ -127,7 +175,7 @@ def index_name(uid):return f"docgpt_{uid}"
 def init_kb(row):
     idxnm = index_name(row["uid"])
     if ES.indexExist(idxnm): return
-    return ES.createIdx(idxnm, json.load(open("res/mapping.json", "r")))
+    return ES.createIdx(idxnm, json.load(open("conf/mapping.json", "r")))


 model = None
@@ -138,27 +186,59 @@ def embedding(docs):
     vects = 0.1 * tts + 0.9 * cnts
     assert len(vects) == len(docs)
     for i,d in enumerate(docs):d["q_vec"] = vects[i].tolist()
-    for d in docs:
-        set_progress(d["doc_id"], random.randint(70, 95)/100.,
-            "Finished embedding! Start to build index!")
+
+
+def rm_doc_from_kb(df):
+    if len(df) == 0:return
+    for _,r in df.iterrows():
+        ES.updateScriptByQuery(Q("term", doc_id=r["did"]),
+                               scripts="""
+                               if(ctx._source.kb_id.contains('%s'))
+                                 ctx._source.kb_id.remove(
+                                   ctx._source.kb_id.indexOf('%s')
+                                 );
+                               """%(str(r["kb_id"]),str(r["kb_id"])),
+                               idxnm = index_name(r["uid"])
+                               )
+    if len(df) == 0:return
+    sql = """
+    delete from kb2_doc where id in (%s)
+    """%",".join([str(i) for i in df["kb2doc_id"]])
+    PG.update(sql)


 def main(comm, mod):
     global model
     from FlagEmbedding import FlagModel
     model = FlagModel('/opt/home/kevinhu/data/bge-large-zh-v1.5/',
                       query_instruction_for_retrieval="为这个句子生成表示以用于检索相关文章:",
                       use_fp16=torch.cuda.is_available())
     tm_fnm = f"res/{comm}-{mod}.tm"
-    tmf = open(tm_fnm, "a+")
     tm = findMaxDt(tm_fnm)
-    rows, tm = collect(comm, mod, tm)
-    for r in rows:
-        if r["is_deleted"]:
-            ES.deleteByQuery(Q("term", dock_id=r["did"]), index_name(r["uid"]))
-            continue
+    rows = collect(comm, mod, tm)
+    if len(rows) == 0:return
+
+    rm_doc_from_kb(rows.loc[rows.is_deleted == True])
+    rows = rows.loc[rows.is_deleted == False].reset_index(drop=True)
+    if len(rows) == 0:return
+    tmf = open(tm_fnm, "a+")
+    for _, r in rows.iterrows():
         cks = build(r)
         if not cks:
             tmf.write(str(r["updated_at"]) + "\n")
             continue
         ## TODO: exception handler
         ## set_progress(r["did"], -1, "ERROR: ")
         embedding(cks)
-        if cks: init_kb(r)
-        ES.bulk(cks, index_name(r["uid"]))
+
+        set_progress(r["kb2doc_id"], random.randint(70, 95)/100.,
+                     "Finished embedding! Start to build index!")
+        init_kb(r)
+        es_r = ES.bulk(cks, index_name(r["uid"]))
+        if es_r:
+            set_progress(r["kb2doc_id"], -1, "Index failure!")
+            print(es_r)
+        else: set_progress(r["kb2doc_id"], 1., "Done!")
         tmf.write(str(r["updated_at"]) + "\n")
     tmf.close()
@@ -166,6 +246,5 @@ def main(comm, mod):

 if __name__ == "__main__":
     from mpi4py import MPI
     comm = MPI.COMM_WORLD
-    rank = comm.Get_rank()
-    main(comm, rank)
+    main(comm.Get_size(), comm.Get_rank())
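
main() now takes the MPI world size and rank, and collect() shards rows with MOD(did, comm) = mod, so multiple workers index disjoint slices of kb2_doc. A launch sketch, with the worker's file name assumed:

# e.g. mpirun -n 4 python index.py
from mpi4py import MPI

comm = MPI.COMM_WORLD
size, rank = comm.Get_size(), comm.Get_rank()
# Rank r only selects rows where MOD(did, size) == r, so the
# workers partition the pending documents without coordination.
print(f"worker {rank}/{size} ready")
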
@@ -14,9 +14,9 @@ class Config:
         self.env = env
         if env == "spark":CF.read("./cv.cnf")

-    def get(self, key):
+    def get(self, key, default=None):
         global CF
-        return CF.get(self.env, key)
+        return CF[self.env].get(key, default)

 def init(env):
     return Config(env)
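
With the new default argument, a missing key no longer raises; a usage sketch:

from util import config

cfg = config.init("infiniflow")
print(cfg.get("minio_host"))             # "127.0.0.1:9000" from the conf file
print(cfg.get("not_there", "fallback"))  # returns the default instead of raising
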
@@ -49,7 +49,8 @@ class Postgres(object):
             cur = self.conn.cursor()
             cur.execute(sql)
             updated_rows = cur.rowcount
+            self.conn.commit()
             cur.close()
             return updated_rows
         except Exception as e:
@@ -5,10 +5,10 @@ import time
 import copy
 import elasticsearch
 from elasticsearch import Elasticsearch
-from elasticsearch_dsl import UpdateByQuery, Search, Index
+from elasticsearch_dsl import UpdateByQuery, Search, Index, Q
 from util import config

-print("Elasticsearch version: ", elasticsearch.__version__)
+logging.info("Elasticsearch version: ", elasticsearch.__version__)


 def instance(env):
@@ -20,7 +20,7 @@ def instance(env):
         timeout=600
     )

-    print("ES: ", ES_DRESS, ES.info())
+    logging.info("ES: ", ES_DRESS, ES.info())

     return ES
@@ -31,7 +31,7 @@ class HuEs:
         self.info = {}
         self.config = config.init(env)
         self.conn()
-        self.idxnm = self.config.get("idx_nm","")
+        self.idxnm = self.config.get("idx_nm", "")
         if not self.es.ping():
             raise Exception("Can't connect to ES cluster")
@@ -46,6 +46,7 @@ class HuEs:
                 break
             except Exception as e:
                 logging.error("Fail to connect to es: " + str(e))
+                time.sleep(1)

     def version(self):
         v = self.info.get("version", {"number": "5.6"})
@@ -121,7 +122,6 @@ class HuEs:
             acts.append(
                 {"update": {"_id": id, "_index": ids[id]["_index"]}, "retry_on_conflict": 100})
             acts.append({"doc": d, "doc_as_upsert": "true"})
-            logging.info("bulk upsert: %s" % id)

         res = []
         for _ in range(100):
@@ -148,7 +148,6 @@ class HuEs:
                 return res
             except Exception as e:
                 logging.warn("Fail to bulk: " + str(e))
-                print(e)
                 if re.search(r"(Timeout|time out)", str(e), re.IGNORECASE):
                     time.sleep(3)
                     continue
@@ -229,7 +228,7 @@ class HuEs:
         return False

     def search(self, q, idxnm=None, src=False, timeout="2s"):
-        print(json.dumps(q, ensure_ascii=False))
+        if not isinstance(q, dict): q = Search().query(q).to_dict()
         for i in range(3):
             try:
                 res = self.es.search(index=(self.idxnm if not idxnm else idxnm),
@@ -271,9 +270,31 @@ class HuEs:
                     str(e) + "【Q】:" + str(q.to_dict()))
                 if str(e).find("Timeout") > 0 or str(e).find("Conflict") > 0:
                     continue
+                self.conn()

         return False

+    def updateScriptByQuery(self, q, scripts, idxnm=None):
+        ubq = UpdateByQuery(index=self.idxnm if not idxnm else idxnm).using(self.es).query(q)
+        ubq = ubq.script(source=scripts)
+        ubq = ubq.params(refresh=True)
+        ubq = ubq.params(slices=5)
+        ubq = ubq.params(conflicts="proceed")
+        for i in range(3):
+            try:
+                r = ubq.execute()
+                return True
+            except Exception as e:
+                logging.error("ES updateByQuery exception: " +
+                              str(e) + "【Q】:" + str(q.to_dict()))
+                if str(e).find("Timeout") > 0 or str(e).find("Conflict") > 0:
+                    continue
+                self.conn()
+
+        return False
+
     def deleteByQuery(self, query, idxnm=""):
         for i in range(3):
             try:
@@ -307,7 +328,6 @@ class HuEs:
                     routing=routing, refresh=False)  # , doc_type="_doc")
                 return True
             except Exception as e:
-                print(e)
                 logging.error("ES update exception: " + str(e) + " id:" + str(id) + ", version:" + str(self.version()) +
                     json.dumps(script, ensure_ascii=False))
                 if str(e).find("Timeout") > 0:
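
search() now converts elasticsearch_dsl Q objects to plain query dicts itself, which is what lets callers pass Q("term", ...) directly; a sketch of the equivalence:

from elasticsearch_dsl import Q, Search

q = Q("term", doc_id=123)
print(Search().query(q).to_dict())  # {'query': {'term': {'doc_id': 123}}}
# HuEs.search() accepts either the Q object or this dict form.
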
python/util/minio_conn.py (new file, 73 lines)
@@ -0,0 +1,73 @@
+import logging
+import time
+from util import config
+from minio import Minio
+from io import BytesIO
+
+class HuMinio(object):
+    def __init__(self, env):
+        self.config = config.init(env)
+        self.conn = None
+        self.__open__()
+
+    def __open__(self):
+        try:
+            if self.conn:self.__close__()
+        except Exception as e:
+            pass
+
+        try:
+            self.conn = Minio(self.config.get("minio_host"),
+                              access_key=self.config.get("minio_usr"),
+                              secret_key=self.config.get("minio_pwd"),
+                              secure=False
+                              )
+        except Exception as e:
+            logging.error("Fail to connect %s "%self.config.get("minio_host") + str(e))
+
+    def __close__(self):
+        del self.conn
+        self.conn = None
+
+    def put(self, bucket, fnm, binary):
+        for _ in range(10):
+            try:
+                if not self.conn.bucket_exists(bucket):
+                    self.conn.make_bucket(bucket)
+
+                r = self.conn.put_object(bucket, fnm,
+                                         BytesIO(binary),
+                                         len(binary)
+                                         )
+                return r
+            except Exception as e:
+                logging.error(f"Fail put {bucket}/{fnm}: "+str(e))
+                self.__open__()
+                time.sleep(1)
+
+    def get(self, bucket, fnm):
+        for _ in range(10):
+            try:
+                r = self.conn.get_object(bucket, fnm)
+                return r.read()
+            except Exception as e:
+                logging.error(f"Fail get {bucket}/{fnm}: "+str(e))
+                self.__open__()
+                time.sleep(1)
+        return
+
+if __name__ == "__main__":
+    conn = HuMinio("infiniflow")
+    fnm = "/opt/home/kevinhu/docgpt/upload/13/11-408.jpg"
+    from PIL import Image
+    img = Image.open(fnm)
+    buff = BytesIO()
+    img.save(buff, format='JPEG')
+    print(conn.put("test", "11-408.jpg", buff.getvalue()))
+    bts = conn.get("test", "11-408.jpg")
+    img = Image.open(BytesIO(bts))
+    img.save("test.jpg")
@@ -1,5 +1,8 @@
 use std::collections::HashMap;
-use actix_web::{get, HttpResponse, post, web};
+use actix_web::{HttpResponse, post, web};
+use serde::Deserialize;
+use serde_json::Value;
+use serde_json::json;
 use crate::api::JsonResponse;
 use crate::AppState;
 use crate::entity::dialog_info;
@@ -7,13 +10,46 @@ use crate::errors::AppError;
 use crate::service::dialog_info::Query;
 use crate::service::dialog_info::Mutation;

-#[get("/v1.0/dialogs")]
-async fn list(model: web::Json<dialog_info::Model>, data: web::Data<AppState>) -> Result<HttpResponse, AppError> {
-    let dialogs = Query::find_dialog_infos_by_uid(&data.conn, model.uid).await?;
-
+#[derive(Debug, Deserialize)]
+pub struct ListParams {
+    pub uid: i64,
+    pub dialog_id: Option<i64>
+}
+#[post("/v1.0/dialogs")]
+async fn list(params: web::Json<ListParams>, data: web::Data<AppState>) -> Result<HttpResponse, AppError> {
     let mut result = HashMap::new();
-    result.insert("dialogs", dialogs);
+    if let Some(dia_id) = params.dialog_id{
+        let dia = Query::find_dialog_info_by_id(&data.conn, dia_id).await?.unwrap();
+        let kb = crate::service::kb_info::Query::find_kb_info_by_id(&data.conn, dia.kb_id).await?.unwrap();
+        print!("{:?}", dia.history);
+        let hist:Value = serde_json::from_str(&dia.history)?;
+        let detail = json!({
+            "dialog_id": dia_id,
+            "dialog_name": dia.dialog_name.to_owned(),
+            "created_at": dia.created_at.to_string().to_owned(),
+            "updated_at": dia.updated_at.to_string().to_owned(),
+            "history": hist,
+            "kb_info": kb
+        });
+
+        result.insert("dialogs", vec![detail]);
+    }
+    else{
+        let mut dias = Vec::<Value>::new();
+        for dia in Query::find_dialog_infos_by_uid(&data.conn, params.uid).await?{
+            let kb = crate::service::kb_info::Query::find_kb_info_by_id(&data.conn, dia.kb_id).await?.unwrap();
+            let hist:Value = serde_json::from_str(&dia.history)?;
+            dias.push(json!({
+                "dialog_id": dia.dialog_id,
+                "dialog_name": dia.dialog_name.to_owned(),
+                "created_at": dia.created_at.to_string().to_owned(),
+                "updated_at": dia.updated_at.to_string().to_owned(),
+                "history": hist,
+                "kb_info": kb
+            }));
+        }
+        result.insert("dialogs", dias);
+    }
     let json_response = JsonResponse {
         code: 200,
         err: "".to_owned(),
@@ -25,27 +61,14 @@ async fn list(model: web::Json<dialog_info::Model>, data: web::Data<AppState>) -
         .body(serde_json::to_string(&json_response)?))
 }

-#[get("/v1.0/dialog")]
-async fn detail(model: web::Json<dialog_info::Model>, data: web::Data<AppState>) -> Result<HttpResponse, AppError> {
-    let dialogs = Query::find_dialog_info_by_id(&data.conn, model.dialog_id).await?;
-
-    let mut result = HashMap::new();
-    result.insert("dialogs", dialogs);
-
-    let json_response = JsonResponse {
-        code: 200,
-        err: "".to_owned(),
-        data: result,
-    };
-
-    Ok(HttpResponse::Ok()
-        .content_type("application/json")
-        .body(serde_json::to_string(&json_response)?))
+#[derive(Debug, Deserialize)]
+pub struct RmParams {
+    pub uid: i64,
+    pub dialog_id: i64
 }

 #[post("/v1.0/delete_dialog")]
-async fn delete(model: web::Json<dialog_info::Model>, data: web::Data<AppState>) -> Result<HttpResponse, AppError> {
-    let _ = Mutation::delete_dialog_info(&data.conn, model.dialog_id).await?;
+async fn delete(params: web::Json<RmParams>, data: web::Data<AppState>) -> Result<HttpResponse, AppError> {
+    let _ = Mutation::delete_dialog_info(&data.conn, params.dialog_id).await?;

     let json_response = JsonResponse {
         code: 200,
@@ -58,12 +81,25 @@ async fn delete(model: web::Json<dialog_info::Model>, data: web::Data<AppState>)
         .body(serde_json::to_string(&json_response)?))
 }

-#[post("/v1.0/create_kb")]
-async fn create(model: web::Json<dialog_info::Model>, data: web::Data<AppState>) -> Result<HttpResponse, AppError> {
-    let model = Mutation::create_dialog_info(&data.conn, model.into_inner()).await?;
-
+#[derive(Debug, Deserialize)]
+pub struct CreateParams {
+    pub uid: i64,
+    pub dialog_id: Option<i64>,
+    pub kb_id: i64,
+    pub name: String
+}
+#[post("/v1.0/create_dialog")]
+async fn create(param: web::Json<CreateParams>, data: web::Data<AppState>) -> Result<HttpResponse, AppError> {
     let mut result = HashMap::new();
-    result.insert("dialog_id", model.dialog_id.unwrap());
+    if let Some(dia_id) = param.dialog_id {
+        result.insert("dialog_id", dia_id);
+        let dia = Query::find_dialog_info_by_id(&data.conn, dia_id).await?;
+        let _ = Mutation::update_dialog_info_by_id(&data.conn, dia_id, &param.name, &dia.unwrap().history).await?;
+    }
+    else{
+        let dia = Mutation::create_dialog_info(&data.conn, param.uid, param.kb_id, &param.name).await?;
+        result.insert("dialog_id", dia.dialog_id.unwrap());
+    }

     let json_response = JsonResponse {
         code: 200,
@@ -75,3 +111,33 @@ async fn create(model: web::Json<dialog_info::Model>, data: web::Data<AppState>)
         .content_type("application/json")
         .body(serde_json::to_string(&json_response)?))
 }
+
+
+#[derive(Debug, Deserialize)]
+pub struct UpdateHistoryParams {
+    pub uid: i64,
+    pub dialog_id: i64,
+    pub history: Value
+}
+#[post("/v1.0/update_history")]
+async fn update_history(param: web::Json<UpdateHistoryParams>, data: web::Data<AppState>) -> Result<HttpResponse, AppError> {
+    let mut json_response = JsonResponse {
+        code: 200,
+        err: "".to_owned(),
+        data: (),
+    };
+
+    if let Some(dia) = Query::find_dialog_info_by_id(&data.conn, param.dialog_id).await?{
+        let _ = Mutation::update_dialog_info_by_id(&data.conn, param.dialog_id, &dia.dialog_name,
+            &param.history.to_string()).await?;
+    }
+    else{
+        json_response.code = 500;
+        json_response.err = "Can't find dialog data!".to_owned();
+    }
+
+    Ok(HttpResponse::Ok()
+        .content_type("application/json")
+        .body(serde_json::to_string(&json_response)?))
+}
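
The dialog handlers are now uniform JSON-over-POST, so a smoke test is a few requests calls; the server address and ids are placeholders:

import requests

BASE = "http://127.0.0.1:8000"  # assumed server address

r = requests.post(f"{BASE}/v1.0/create_dialog",
                  json={"uid": 1, "kb_id": 1, "name": "smoke"})
dia_id = r.json()["data"]["dialog_id"]
requests.post(f"{BASE}/v1.0/update_history",
              json={"uid": 1, "dialog_id": dia_id,
                    "history": [{"role": "user", "content": "hi"}]})
print(requests.post(f"{BASE}/v1.0/dialogs",
                    json={"uid": 1, "dialog_id": dia_id}).json())
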
@@ -1,12 +1,8 @@
 use std::collections::HashMap;
-use std::io::Write;
-use std::slice::Chunks;
-//use actix_multipart::{Multipart, MultipartError, Field};
 use actix_multipart_extract::{File, Multipart, MultipartForm};
 use actix_web::{get, HttpResponse, post, web};
-use actix_web::web::Bytes;
-use chrono::Local;
-use futures_util::StreamExt;
+use chrono::{Utc, FixedOffset};
+use sea_orm::DbConn;
 use crate::api::JsonResponse;
 use crate::AppState;
@@ -15,14 +11,17 @@ use crate::errors::AppError;
 use crate::service::doc_info::{Mutation, Query};
 use serde::Deserialize;

+fn now()->chrono::DateTime<FixedOffset>{
+    Utc::now().with_timezone(&FixedOffset::east_opt(3600*8).unwrap())
+}
+
 #[derive(Debug, Deserialize)]
-pub struct Params {
+pub struct ListParams {
     pub uid: i64,
     pub filter: FilterParams,
     pub sortby: String,
-    pub page: u64,
-    pub per_page: u64,
+    pub page: Option<u32>,
+    pub per_page: Option<u32>,
 }

 #[derive(Debug, Deserialize)]
@@ -33,14 +32,8 @@ pub struct FilterParams {
     pub kb_id: Option<i64>,
 }

-#[derive(Debug, Deserialize)]
-pub struct MvParams {
-    pub dids: Vec<i64>,
-    pub dest_did: i64,
-}
-
-#[get("/v1.0/docs")]
-async fn list(params: web::Json<Params>, data: web::Data<AppState>) -> Result<HttpResponse, AppError> {
+#[post("/v1.0/docs")]
+async fn list(params: web::Json<ListParams>, data: web::Data<AppState>) -> Result<HttpResponse, AppError> {
     let docs = Query::find_doc_infos_by_params(&data.conn, params.into_inner())
         .await?;
@@ -69,21 +62,21 @@ pub struct UploadForm {
 #[post("/v1.0/upload")]
 async fn upload(payload: Multipart<UploadForm>, data: web::Data<AppState>) -> Result<HttpResponse, AppError> {
     let uid = payload.uid;
-    async fn add_number_to_filename(file_name: String, conn:&DbConn, uid:i64) -> String {
+    async fn add_number_to_filename(file_name: String, conn:&DbConn, uid:i64, parent_id:i64) -> String {
         let mut i = 0;
         let mut new_file_name = file_name.to_string();
         let arr: Vec<&str> = file_name.split(".").collect();
         let suffix = String::from(arr[arr.len()-1]);
         let preffix = arr[..arr.len()-1].join(".");
-        let mut docs = Query::find_doc_infos_by_name(conn, uid, new_file_name.clone()).await.unwrap();
+        let mut docs = Query::find_doc_infos_by_name(conn, uid, &new_file_name, Some(parent_id)).await.unwrap();
         while docs.len()>0 {
             i += 1;
             new_file_name = format!("{}_{}.{}", preffix, i, suffix);
-            docs = Query::find_doc_infos_by_name(conn, uid, new_file_name.clone()).await.unwrap();
+            docs = Query::find_doc_infos_by_name(conn, uid, &new_file_name, Some(parent_id)).await.unwrap();
         }
         new_file_name
     }
-    let fnm = add_number_to_filename(payload.file_field.name.clone(), &data.conn, uid).await;
+    let fnm = add_number_to_filename(payload.file_field.name.clone(), &data.conn, uid, payload.did).await;

     std::fs::create_dir_all(format!("./upload/{}/", uid));
     let filepath = format!("./upload/{}/{}-{}", payload.uid, payload.did, fnm.clone());
@@ -95,13 +88,11 @@ async fn upload(payload: Multipart<UploadForm>, data: web::Data<AppState>) -> Re
         uid: uid,
         doc_name: fnm,
         size: payload.file_field.bytes.len() as i64,
         kb_infos: Vec::new(),
-        kb_progress: 0.0,
-        kb_progress_msg: "".to_string(),
         location: filepath,
         r#type: "doc".to_string(),
-        created_at: Local::now().date_naive(),
-        updated_at: Local::now().date_naive(),
+        created_at: now(),
+        updated_at: now(),
         is_deleted:Default::default(),
     }).await?;

     let _ = Mutation::place_doc(&data.conn, payload.did, doc.did.unwrap()).await?;
@@ -109,11 +100,14 @@ async fn upload(payload: Multipart<UploadForm>, data: web::Data<AppState>) -> Re
     Ok(HttpResponse::Ok().body("File uploaded successfully"))
 }

+#[derive(Deserialize, Debug)]
+pub struct RmDocsParam {
+    uid: i64,
+    dids: Vec<i64>
+}
 #[post("/v1.0/delete_docs")]
-async fn delete(doc_ids: web::Json<Vec<i64>>, data: web::Data<AppState>) -> Result<HttpResponse, AppError> {
-    for doc_id in doc_ids.iter() {
-        let _ = Mutation::delete_doc_info(&data.conn, *doc_id).await?;
-    }
+async fn delete(params: web::Json<RmDocsParam>, data: web::Data<AppState>) -> Result<HttpResponse, AppError> {
+    let _ = Mutation::delete_doc_info(&data.conn, &params.dids).await?;

     let json_response = JsonResponse {
         code: 200,
@@ -126,6 +120,13 @@ async fn delete(doc_ids: web::Json<Vec<i64>>, data: web::Data<AppState>) -> Resu
         .body(serde_json::to_string(&json_response)?))
 }

+#[derive(Debug, Deserialize)]
+pub struct MvParams {
+    pub uid:i64,
+    pub dids: Vec<i64>,
+    pub dest_did: i64,
+}
+
 #[post("/v1.0/mv_docs")]
 async fn mv(params: web::Json<MvParams>, data: web::Data<AppState>) -> Result<HttpResponse, AppError> {
     Mutation::mv_doc_info(&data.conn, params.dest_did, &params.dids).await?;
@@ -140,3 +141,61 @@ async fn mv(params: web::Json<MvParams>, data: web::Data<AppState>) -> Result<Ht
         .content_type("application/json")
         .body(serde_json::to_string(&json_response)?))
 }
+
+#[derive(Debug, Deserialize)]
+pub struct NewFoldParams {
+    pub uid: i64,
+    pub parent_id: i64,
+    pub name: String
+}
+
+#[post("/v1.0/new_folder")]
+async fn new_folder(params: web::Json<NewFoldParams>, data: web::Data<AppState>) -> Result<HttpResponse, AppError> {
+    let doc = Mutation::create_doc_info(&data.conn, Model {
+        did:Default::default(),
+        uid: params.uid,
+        doc_name: params.name.to_string(),
+        size:0,
+        r#type: "folder".to_string(),
+        location: "".to_owned(),
+        created_at: now(),
+        updated_at: now(),
+        is_deleted:Default::default(),
+    }).await?;
+    let _ = Mutation::place_doc(&data.conn, params.parent_id, doc.did.unwrap()).await?;
+
+    Ok(HttpResponse::Ok().body("Folder created successfully"))
+}
+
+#[derive(Debug, Deserialize)]
+pub struct RenameParams {
+    pub uid: i64,
+    pub did: i64,
+    pub name: String
+}
+
+#[post("/v1.0/rename")]
+async fn rename(params: web::Json<RenameParams>, data: web::Data<AppState>) -> Result<HttpResponse, AppError> {
+    let docs = Query::find_doc_infos_by_name(&data.conn, params.uid, &params.name, None).await?;
+    if docs.len()>0{
+        let json_response = JsonResponse {
+            code: 500,
+            err: "Name duplicated!".to_owned(),
+            data: (),
+        };
+        return Ok(HttpResponse::Ok()
+            .content_type("application/json")
+            .body(serde_json::to_string(&json_response)?));
+    }
+    let doc = Mutation::rename(&data.conn, params.did, &params.name).await?;
+
+    let json_response = JsonResponse {
+        code: 200,
+        err: "".to_owned(),
+        data: doc,
+    };
+
+    Ok(HttpResponse::Ok()
+        .content_type("application/json")
+        .body(serde_json::to_string(&json_response)?))
+}
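
The doc endpoints follow the same pattern; upload is multipart with uid and did (the destination folder) as form fields next to the file. Field names follow the structs above, the rest is placeholder:

import requests

BASE = "http://127.0.0.1:8000"  # assumed server address

requests.post(f"{BASE}/v1.0/new_folder",
              json={"uid": 1, "parent_id": 0, "name": "smoke-folder"})
requests.post(f"{BASE}/v1.0/upload",
              data={"uid": 1, "did": 0},
              files={"file_field": open("sample.pdf", "rb")})
print(requests.post(f"{BASE}/v1.0/docs",
                    json={"uid": 1, "filter": {}, "sortby": ""}).json())
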
@@ -60,6 +60,20 @@ async fn add_docs_to_kb(param: web::Json<AddDocs2KbParams>, data: web::Data<AppS
         .body(serde_json::to_string(&json_response)?))
 }

+#[post("/v1.0/anti_kb_docs")]
+async fn anti_kb_docs(param: web::Json<AddDocs2KbParams>, data: web::Data<AppState>) -> Result<HttpResponse, AppError> {
+    let _ = Mutation::remove_docs(&data.conn, param.dids.to_owned(), Some(param.kb_id)).await?;
+
+    let json_response = JsonResponse {
+        code: 200,
+        err: "".to_owned(),
+        data: (),
+    };
+
+    Ok(HttpResponse::Ok()
+        .content_type("application/json")
+        .body(serde_json::to_string(&json_response)?))
+}
 #[get("/v1.0/kbs")]
 async fn list(model: web::Json<kb_info::Model>, data: web::Data<AppState>) -> Result<HttpResponse, AppError> {
     let kbs = Query::find_kb_infos_by_uid(&data.conn, model.uid).await?;
@@ -92,3 +106,27 @@ async fn delete(model: web::Json<kb_info::Model>, data: web::Data<AppState>) ->
         .content_type("application/json")
         .body(serde_json::to_string(&json_response)?))
 }
+
+#[derive(Clone, Debug, Serialize, Deserialize)]
+pub struct DocIdsParams {
+    pub uid: i64,
+    pub dids: Vec<i64>
+}
+
+#[post("/v1.0/all_relevents")]
+async fn all_relevents(params: web::Json<DocIdsParams>, data: web::Data<AppState>) -> Result<HttpResponse, AppError> {
+    let dids = crate::service::doc_info::Query::all_descendent_ids(&data.conn, &params.dids).await?;
+    let mut result = HashMap::new();
+    let kbs = Query::find_kb_by_docs(&data.conn, dids).await?;
+    result.insert("kbs", kbs);
+    let json_response = JsonResponse {
+        code: 200,
+        err: "".to_owned(),
+        data: result,
+    };
+
+    Ok(HttpResponse::Ok()
+        .content_type("application/json")
+        .body(serde_json::to_string(&json_response)?))
+
+}
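
A matching check for the two new kb routes; AddDocs2KbParams is assumed to carry kb_id plus dids, mirroring its use in anti_kb_docs:

import requests

BASE = "http://127.0.0.1:8000"  # assumed server address

# Detach docs 2 and 3 from kb 1, then ask which kbs still reference them.
requests.post(f"{BASE}/v1.0/anti_kb_docs", json={"uid": 1, "kb_id": 1, "dids": [2, 3]})
print(requests.post(f"{BASE}/v1.0/all_relevents",
                    json={"uid": 1, "dids": [2, 3]}).json())
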
@@ -1,6 +1,7 @@
 use std::collections::HashMap;
 use actix_web::{get, HttpResponse, post, web};
 use actix_web_httpauth::middleware::HttpAuthentication;
+use serde::Deserialize;
 use crate::validator;
 use crate::api::JsonResponse;
 use crate::AppState;
@@ -8,6 +9,11 @@ use crate::entity::tag_info;
 use crate::errors::AppError;
 use crate::service::tag_info::{Mutation, Query};

+#[derive(Debug, Deserialize)]
+pub struct TagListParams {
+    pub uid: i64
+}
+
 #[post("/v1.0/create_tag")]
 async fn create(model: web::Json<tag_info::Model>, data: web::Data<AppState>) -> Result<HttpResponse, AppError> {
     let model = Mutation::create_tag(&data.conn, model.into_inner()).await?;
@@ -41,9 +47,12 @@ async fn delete(model: web::Json<tag_info::Model>, data: web::Data<AppState>) ->
         .body(serde_json::to_string(&json_response)?))
 }

-#[get("/v1.0/tags", wrap = "HttpAuthentication::bearer(validator)")]
-async fn list(data: web::Data<AppState>) -> Result<HttpResponse, AppError> {
-    let tags = Query::find_tag_infos(&data.conn).await?;
+//#[get("/v1.0/tags", wrap = "HttpAuthentication::bearer(validator)")]
+
+#[post("/v1.0/tags")]
+async fn list(param: web::Json<TagListParams>, data: web::Data<AppState>) -> Result<HttpResponse, AppError> {
+    let tags = Query::find_tags_by_uid(param.uid, &data.conn).await?;

     let mut result = HashMap::new();
     result.insert("tags", tags);
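
And the tag listing, which is now a plain POST keyed by uid (the bearer-auth wrapper is commented out):

import requests

BASE = "http://127.0.0.1:8000"  # assumed server address
print(requests.post(f"{BASE}/v1.0/tags", json={"uid": 1}).json())
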
@@ -1,10 +1,13 @@
 use std::collections::HashMap;

 use actix_identity::Identity;
-use actix_web::{get, HttpResponse, post, web};
+use actix_web::{HttpResponse, post, web};
 use serde::{Deserialize, Serialize};
 use crate::api::JsonResponse;
 use crate::AppState;
 use crate::entity::user_info::Model;
 use crate::errors::{AppError, UserError};
 use crate::service::user_info::Mutation;
+use crate::service::user_info::Query;

 pub(crate) fn create_auth_token(user: &Model) -> u64 {
@@ -32,6 +35,7 @@ async fn login(
 ) -> Result<HttpResponse, AppError> {
     match Query::login(&data.conn, &input.email, &input.password).await? {
         Some(user) => {
+            let _ = Mutation::update_login_status(user.uid,&data.conn).await?;
             let token = create_auth_token(&user).to_string();

             identity.remember(token.clone());
@@ -50,3 +54,33 @@ async fn login(
         None => Err(UserError::LoginFailed.into())
     }
 }
+
+#[post("/v1.0/register")]
+async fn register(model: web::Json<Model>, data: web::Data<AppState>) -> Result<HttpResponse, AppError> {
+    let mut result = HashMap::new();
+    let usr = Mutation::create_user(&data.conn, &model).await?;
+    result.insert("uid", usr.uid.unwrap());
+    let json_response = JsonResponse {
+        code: 200,
+        err: "".to_owned(),
+        data: result,
+    };
+
+    Ok(HttpResponse::Ok()
+        .content_type("application/json")
+        .body(serde_json::to_string(&json_response)?))
+}
+
+#[post("/v1.0/setting")]
+async fn setting(model: web::Json<Model>, data: web::Data<AppState>) -> Result<HttpResponse, AppError> {
+    let _ = Mutation::update_user_by_id(&data.conn, &model).await?;
+    let json_response = JsonResponse {
+        code: 200,
+        err: "".to_owned(),
+        data: (),
+    };
+
+    Ok(HttpResponse::Ok()
+        .content_type("application/json")
+        .body(serde_json::to_string(&json_response)?))
+}
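
Register, then log in; field names follow the user_info entity below, the login path is assumed from the handler name, and server-generated fields are left out:

import requests

BASE = "http://127.0.0.1:8000"  # assumed server address

requests.post(f"{BASE}/v1.0/register",
              json={"email": "smoke@test.io", "nickname": "smoke",
                    "password": "pwd", "avatar_base64": "",
                    "color_scheme": "dark", "list_style": "list",
                    "language": "chinese"})
r = requests.post(f"{BASE}/v1.0/login",  # assumed route for the login handler
                  json={"email": "smoke@test.io", "password": "pwd"})
print(r.status_code, r.json())
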
@@ -1,3 +1,4 @@
+use chrono::{DateTime, FixedOffset};
 use sea_orm::entity::prelude::*;
 use serde::{Deserialize, Serialize};

@@ -8,13 +9,17 @@ pub struct Model {
     pub dialog_id: i64,
     #[sea_orm(index)]
     pub uid: i64,
+    #[serde(skip_deserializing)]
+    pub kb_id: i64,
     pub dialog_name: String,
     pub history: String,

     #[serde(skip_deserializing)]
-    pub created_at: Date,
+    pub created_at: DateTime<FixedOffset>,
     #[serde(skip_deserializing)]
-    pub updated_at: Date,
+    pub updated_at: DateTime<FixedOffset>,
     #[serde(skip_deserializing)]
     pub is_deleted: bool
 }

 #[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
@@ -1,6 +1,7 @@
 use sea_orm::entity::prelude::*;
 use serde::{Deserialize, Serialize};
 use crate::entity::kb_info;
+use chrono::{DateTime, FixedOffset};

 #[derive(Clone, Debug, PartialEq, DeriveEntityModel, Deserialize, Serialize)]
 #[sea_orm(table_name = "doc_info")]
@@ -13,16 +14,14 @@ pub struct Model {
     pub size: i64,
     #[sea_orm(column_name = "type")]
     pub r#type: String,
-    pub kb_progress: f32,
-    pub kb_progress_msg: String,
+    #[serde(skip_deserializing)]
     pub location: String,
     #[sea_orm(ignore)]
     pub kb_infos: Vec<kb_info::Model>,

     #[serde(skip_deserializing)]
-    pub created_at: Date,
+    pub created_at: DateTime<FixedOffset>,
     #[serde(skip_deserializing)]
-    pub updated_at: Date,
+    pub updated_at: DateTime<FixedOffset>,
     #[serde(skip_deserializing)]
     pub is_deleted: bool
 }

 #[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
@@ -48,7 +47,7 @@ impl Related<super::kb_info::Entity> for Entity {
     }
 }

-impl Related<Entity> for Entity {
+impl Related<super::doc2_doc::Entity> for Entity {
     fn to() -> RelationDef {
         super::doc2_doc::Relation::Parent.def()
     }
@@ -1,7 +1,8 @@
 use sea_orm::entity::prelude::*;
 use serde::{Deserialize, Serialize};
+use chrono::{DateTime, FixedOffset};

-#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel, Deserialize, Serialize)]
+#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Deserialize, Serialize)]
 #[sea_orm(table_name = "kb2_doc")]
 pub struct Model {
     #[sea_orm(primary_key, auto_increment = true)]
@@ -10,6 +11,14 @@ pub struct Model {
     pub kb_id: i64,
     #[sea_orm(index)]
     pub did: i64,
+    #[serde(skip_deserializing)]
+    pub kb_progress: f32,
+    #[serde(skip_deserializing)]
+    pub kb_progress_msg: String,
+    #[serde(skip_deserializing)]
+    pub updated_at: DateTime<FixedOffset>,
+    #[serde(skip_deserializing)]
+    pub is_deleted: bool,
 }

 #[derive(Debug, Clone, Copy, EnumIter)]
@@ -1,10 +1,12 @@
 use sea_orm::entity::prelude::*;
 use serde::{Deserialize, Serialize};
+use chrono::{DateTime, FixedOffset};

 #[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel, Deserialize, Serialize)]
 #[sea_orm(table_name = "kb_info")]
 pub struct Model {
     #[sea_orm(primary_key, auto_increment = false)]
+    #[serde(skip_deserializing)]
     pub kb_id: i64,
     #[sea_orm(index)]
     pub uid: i64,
@@ -12,9 +14,11 @@ pub struct Model {
     pub icon: i16,

     #[serde(skip_deserializing)]
-    pub created_at: Date,
+    pub created_at: DateTime<FixedOffset>,
     #[serde(skip_deserializing)]
-    pub updated_at: Date,
+    pub updated_at: DateTime<FixedOffset>,
     #[serde(skip_deserializing)]
     pub is_deleted: bool,
 }

 #[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
@@ -1,23 +1,26 @@
 use sea_orm::entity::prelude::*;
 use serde::{Deserialize, Serialize};
+use chrono::{DateTime, FixedOffset};

-#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel, Deserialize, Serialize)]
+#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Deserialize, Serialize)]
 #[sea_orm(table_name = "tag_info")]
 pub struct Model {
     #[sea_orm(primary_key)]
     #[serde(skip_deserializing)]
     pub tid: i64,
     #[sea_orm(index)]
     pub uid: i64,
     pub tag_name: String,
-    pub regx: Option<String>,
-    pub color: u16,
-    pub icon: u16,
-    pub dir: Option<String>,
+    #[serde(skip_deserializing)]
+    pub regx: String,
+    pub color: i16,
+    pub icon: i16,
+    #[serde(skip_deserializing)]
+    pub folder_id: i64,

     #[serde(skip_deserializing)]
-    pub created_at: Date,
+    pub created_at: DateTime<FixedOffset>,
     #[serde(skip_deserializing)]
-    pub updated_at: Date,
+    pub updated_at: DateTime<FixedOffset>,
 }

 #[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
@@ -1,5 +1,6 @@
 use sea_orm::entity::prelude::*;
 use serde::{Deserialize, Serialize};
+use chrono::{DateTime, FixedOffset};

 #[derive(Clone, Debug, PartialEq, Eq, Hash, DeriveEntityModel, Deserialize, Serialize)]
 #[sea_orm(table_name = "user_info")]
@@ -9,19 +10,22 @@ pub struct Model {
     pub uid: i64,
     pub email: String,
     pub nickname: String,
     pub avatar_url: Option<String>,
-    pub color_schema: String,
+    pub avatar_base64: String,
+    pub color_scheme: String,
     pub list_style: String,
     pub language: String,
     pub password: String,

     #[serde(skip_deserializing)]
-    pub created_at: Date,
+    pub last_login_at: DateTime<FixedOffset>,
     #[serde(skip_deserializing)]
-    pub updated_at: Date,
+    pub created_at: DateTime<FixedOffset>,
+    #[serde(skip_deserializing)]
+    pub updated_at: DateTime<FixedOffset>,
 }

 #[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
 pub enum Relation {}


 impl ActiveModelBehavior for ActiveModel {}
src/main.rs
@@ -98,16 +98,22 @@ fn init(cfg: &mut web::ServiceConfig) {
     cfg.service(api::kb_info::delete);
     cfg.service(api::kb_info::list);
     cfg.service(api::kb_info::add_docs_to_kb);
+    cfg.service(api::kb_info::anti_kb_docs);
+    cfg.service(api::kb_info::all_relevents);

     cfg.service(api::doc_info::list);
     cfg.service(api::doc_info::delete);
     cfg.service(api::doc_info::mv);
     cfg.service(api::doc_info::upload);
+    cfg.service(api::doc_info::new_folder);
+    cfg.service(api::doc_info::rename);

     cfg.service(api::dialog_info::list);
     cfg.service(api::dialog_info::delete);
-    cfg.service(api::dialog_info::detail);
     cfg.service(api::dialog_info::create);
+    cfg.service(api::dialog_info::update_history);

     cfg.service(api::user_info::login);
+    cfg.service(api::user_info::register);
+    cfg.service(api::user_info::setting);
 }
@ -1,11 +1,15 @@
|
||||
use chrono::Local;
|
||||
use sea_orm::{ActiveModelTrait, DbConn, DbErr, DeleteResult, EntityTrait, PaginatorTrait, QueryOrder};
|
||||
use chrono::{Local, FixedOffset, Utc};
|
||||
use migration::Expr;
|
||||
use sea_orm::{ActiveModelTrait, DbConn, DbErr, DeleteResult, EntityTrait, PaginatorTrait, QueryOrder, UpdateResult};
|
||||
use sea_orm::ActiveValue::Set;
|
||||
use sea_orm::QueryFilter;
|
||||
use sea_orm::ColumnTrait;
|
||||
use crate::entity::dialog_info;
|
||||
use crate::entity::dialog_info::Entity;
|
||||
|
||||
fn now()->chrono::DateTime<FixedOffset>{
|
||||
Utc::now().with_timezone(&FixedOffset::east_opt(3600*8).unwrap())
|
||||
}
|
||||
pub struct Query;
|
||||
|
||||
impl Query {
|
||||
@ -20,6 +24,7 @@ impl Query {
|
||||
pub async fn find_dialog_infos_by_uid(db: &DbConn, uid: i64) -> Result<Vec<dialog_info::Model>, DbErr> {
|
||||
Entity::find()
|
||||
.filter(dialog_info::Column::Uid.eq(uid))
|
||||
.filter(dialog_info::Column::IsDeleted.eq(false))
|
||||
.all(db)
|
||||
.await
|
||||
}
|
||||
@ -45,15 +50,19 @@ pub struct Mutation;
|
||||
impl Mutation {
|
||||
pub async fn create_dialog_info(
|
||||
db: &DbConn,
|
||||
form_data: dialog_info::Model,
|
||||
uid: i64,
|
||||
kb_id: i64,
|
||||
name: &String
|
||||
) -> Result<dialog_info::ActiveModel, DbErr> {
|
||||
dialog_info::ActiveModel {
|
||||
dialog_id: Default::default(),
|
||||
uid: Set(form_data.uid.to_owned()),
|
||||
dialog_name: Set(form_data.dialog_name.to_owned()),
|
||||
history: Set(form_data.history.to_owned()),
|
||||
created_at: Set(Local::now().date_naive()),
|
||||
updated_at: Set(Local::now().date_naive()),
|
||||
uid: Set(uid),
|
||||
kb_id: Set(kb_id),
|
||||
dialog_name: Set(name.to_owned()),
|
||||
history: Set("".to_owned()),
|
||||
created_at: Set(now()),
|
||||
updated_at: Set(now()),
|
||||
is_deleted: Default::default()
|
||||
}
|
||||
.save(db)
|
||||
.await
|
||||
@ -61,35 +70,25 @@ impl Mutation {
|
||||
|
||||
pub async fn update_dialog_info_by_id(
|
||||
db: &DbConn,
|
||||
id: i64,
|
||||
form_data: dialog_info::Model,
|
||||
) -> Result<dialog_info::Model, DbErr> {
|
||||
let dialog_info: dialog_info::ActiveModel = Entity::find_by_id(id)
|
||||
.one(db)
|
||||
.await?
|
||||
.ok_or(DbErr::Custom("Cannot find.".to_owned()))
|
||||
.map(Into::into)?;
|
||||
|
||||
dialog_info::ActiveModel {
|
||||
dialog_id: dialog_info.dialog_id,
|
||||
uid: dialog_info.uid,
|
||||
dialog_name: Set(form_data.dialog_name.to_owned()),
|
||||
history: Set(form_data.history.to_owned()),
|
||||
created_at: Default::default(),
|
||||
updated_at: Set(Local::now().date_naive()),
|
||||
}
|
||||
.update(db)
|
||||
dialog_id: i64,
|
||||
dialog_name:&String,
|
||||
history: &String
|
||||
) -> Result<UpdateResult, DbErr> {
|
||||
Entity::update_many()
|
||||
.col_expr(dialog_info::Column::DialogName, Expr::value(dialog_name))
|
||||
.col_expr(dialog_info::Column::History, Expr::value(history))
|
||||
.col_expr(dialog_info::Column::UpdatedAt, Expr::value(now()))
|
||||
.filter(dialog_info::Column::DialogId.eq(dialog_id))
|
||||
.exec(db)
|
||||
.await
|
||||
}
|
||||
|
||||
pub async fn delete_dialog_info(db: &DbConn, kb_id: i64) -> Result<DeleteResult, DbErr> {
|
||||
let tag: dialog_info::ActiveModel = Entity::find_by_id(kb_id)
|
||||
.one(db)
|
||||
.await?
|
||||
.ok_or(DbErr::Custom("Cannot find.".to_owned()))
|
||||
.map(Into::into)?;
|
||||
|
||||
tag.delete(db).await
|
||||
pub async fn delete_dialog_info(db: &DbConn, dialog_id: i64) -> Result<UpdateResult, DbErr> {
|
||||
Entity::update_many()
|
||||
.col_expr(dialog_info::Column::IsDeleted, Expr::value(true))
|
||||
.filter(dialog_info::Column::DialogId.eq(dialog_id))
|
||||
.exec(db)
|
||||
.await
|
||||
}
|
||||
|
||||
pub async fn delete_all_dialog_infos(db: &DbConn) -> Result<DeleteResult, DbErr> {
|
||||
|
@ -1,10 +1,15 @@
|
||||
use chrono::Local;
|
||||
use sea_orm::{ActiveModelTrait, ColumnTrait, DbConn, DbErr, DeleteResult, EntityTrait, PaginatorTrait, QueryOrder, Unset, Unchanged, ConditionalStatement};
|
||||
use chrono::{Utc, FixedOffset};
|
||||
use sea_orm::{ActiveModelTrait, ColumnTrait, DbConn, DbErr, DeleteResult, EntityTrait, PaginatorTrait, QueryOrder, Unset, Unchanged, ConditionalStatement, QuerySelect, JoinType, RelationTrait, DbBackend, Statement, UpdateResult};
|
||||
use sea_orm::ActiveValue::Set;
|
||||
use sea_orm::QueryFilter;
|
||||
use crate::api::doc_info::Params;
|
||||
use crate::entity::{doc2_doc, doc_info, kb_info, tag_info};
|
||||
use crate::api::doc_info::ListParams;
|
||||
use crate::entity::{doc2_doc, doc_info};
|
||||
use crate::entity::doc_info::Entity;
|
||||
use crate::service;
|
||||
|
||||
fn now()->chrono::DateTime<FixedOffset>{
|
||||
Utc::now().with_timezone(&FixedOffset::east_opt(3600*8).unwrap())
|
||||
}
|
||||
|
||||
pub struct Query;
|
||||
|
||||
@ -24,42 +29,121 @@ impl Query {
        .await
    }

    pub async fn find_doc_infos_by_name(db: &DbConn, uid: i64, name: String) -> Result<Vec<doc_info::Model>, DbErr> {
    pub async fn find_doc_infos_by_name(db: &DbConn, uid: i64, name: &String, parent_id: Option<i64>) -> Result<Vec<doc_info::Model>, DbErr> {
        let mut dids = Vec::<i64>::new();
        if let Some(pid) = parent_id {
            for d2d in doc2_doc::Entity::find().filter(doc2_doc::Column::ParentId.eq(pid)).all(db).await? {
                dids.push(d2d.did);
            }
        } else {
            let doc = Entity::find()
                .filter(doc_info::Column::DocName.eq(name.clone()))
                .filter(doc_info::Column::Uid.eq(uid))
                .all(db)
                .await?;
            if doc.len() == 0 {
                return Ok(vec![]);
            }
            assert!(doc.len() > 0);
            let d2d = doc2_doc::Entity::find().filter(doc2_doc::Column::Did.eq(doc[0].did)).all(db).await?;
            assert!(d2d.len() <= 1, "Did: {}->{}", doc[0].did, d2d.len());
            if d2d.len() > 0 {
                for d2d_ in doc2_doc::Entity::find().filter(doc2_doc::Column::ParentId.eq(d2d[0].parent_id)).all(db).await? {
                    dids.push(d2d_.did);
                }
            }
        }

        Entity::find()
            .filter(doc_info::Column::DocName.eq(name))
            .filter(doc_info::Column::DocName.eq(name.clone()))
            .filter(doc_info::Column::Uid.eq(uid))
            .filter(doc_info::Column::Did.is_in(dids))
            .filter(doc_info::Column::IsDeleted.eq(false))
            .all(db)
            .await
    }
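`find_doc_infos_by_name` now takes an optional parent folder and excludes soft-deleted rows: with `parent_id` the lookup is scoped to that folder's children, otherwise it resolves the document's own folder through `doc2_doc`. A hypothetical caller (the helper name and service path are illustrative) checking for a name clash before a rename:

use sea_orm::{DbConn, DbErr};
use crate::service::doc_info::Query; // module path assumed from this repo's layout

// Returns true if `name` already exists under `folder_id` for this user.
async fn name_taken(db: &DbConn, uid: i64, name: &String, folder_id: i64) -> Result<bool, DbErr> {
    Ok(!Query::find_doc_infos_by_name(db, uid, name, Some(folder_id))
        .await?
        .is_empty())
}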
    pub async fn find_doc_infos_by_params(db: &DbConn, params: Params) -> Result<Vec<doc_info::Model>, DbErr> {
        // Setup paginator
        let paginator = Entity::find();
    pub async fn all_descendent_ids(db: &DbConn, doc_ids: &Vec<i64>) -> Result<Vec<i64>, DbErr> {
        let mut dids = doc_ids.clone();
        let mut i: usize = 0;
        loop {
            if dids.len() == i {
                break;
            }

            for d in doc2_doc::Entity::find().filter(doc2_doc::Column::ParentId.eq(dids[i])).all(db).await? {
                dids.push(d.did);
            }
            i += 1;
        }
        Ok(dids)
    }
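`all_descendent_ids` walks the folder tree without recursion: `dids` doubles as output and work queue, and the cursor `i` chases its growing tail. The same pattern on an in-memory map, as a sanity check; this is an illustration only, and like the service code it assumes the parent/child graph is acyclic:

use std::collections::HashMap;

// parent -> children adjacency; ids serves as both BFS queue and result.
fn all_descendants(roots: &[i64], children: &HashMap<i64, Vec<i64>>) -> Vec<i64> {
    let mut ids: Vec<i64> = roots.to_vec();
    let mut i = 0;
    while i < ids.len() {
        if let Some(kids) = children.get(&ids[i]) {
            ids.extend_from_slice(kids); // newly found ids are visited later
        }
        i += 1;
    }
    ids
}

fn main() {
    let mut tree = HashMap::new();
    tree.insert(1, vec![2, 3]);
    tree.insert(3, vec![4]);
    assert_eq!(all_descendants(&[1], &tree), vec![1, 2, 3, 4]);
}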
    pub async fn find_doc_infos_by_params(db: &DbConn, params: ListParams) -> Result<Vec<doc_info::Model>, DbErr> {
        // Setup paginator
        let mut sql: String = "
        select
            a.did,
            a.uid,
            a.doc_name,
            a.location,
            a.size,
            a.type,
            a.created_at,
            a.updated_at,
            a.is_deleted
        from
            doc_info as a
        ".to_owned();

        let mut cond: String = format!(" a.uid={} and a.is_deleted=False ", params.uid);

        // Fetch paginated posts
        let mut query = paginator
            .find_with_related(kb_info::Entity);
        if let Some(kb_id) = params.filter.kb_id {
            query = query.filter(kb_info::Column::KbId.eq(kb_id));
            sql.push_str(&format!(" inner join kb2_doc on kb2_doc.did = a.did and kb2_doc.kb_id={}", kb_id));
        }
        if let Some(folder_id) = params.filter.folder_id {
            sql.push_str(&format!(" inner join doc2_doc on a.did = doc2_doc.did and doc2_doc.parent_id={}", folder_id));
        }
        // Fetch paginated posts
        if let Some(tag_id) = params.filter.tag_id {
            query = query.filter(tag_info::Column::Tid.eq(tag_id));
            let tag = service::tag_info::Query::find_tag_info_by_id(tag_id, &db).await.unwrap().unwrap();
            if tag.folder_id > 0 {
                sql.push_str(&format!(" inner join doc2_doc on a.did = doc2_doc.did and doc2_doc.parent_id={}", tag.folder_id));
            }
            if tag.regx.len() > 0 {
                cond.push_str(&format!(" and doc_name ~ '{}'", tag.regx));
            }
        }
        if let Some(keywords) = params.filter.keywords {
            cond.push_str(&format!(" and doc_name like '%{}%'", keywords));
        }
        Ok(query.order_by_asc(doc_info::Column::Did)
            .all(db)
            .await?
            .into_iter()
            .map(|(mut doc_info, kb_infos)| {
                doc_info.kb_infos = kb_infos;
                doc_info
            })
            .collect())
        if cond.len() > 0 {
            sql.push_str(" where ");
            sql.push_str(&cond);
        }
        let mut orderby = params.sortby.clone();
        if orderby.len() == 0 {
            orderby = "updated_at desc".to_owned();
        }
        sql.push_str(&format!(" order by {}", orderby));
        let mut page_size: u32 = 30;
        if let Some(pg_sz) = params.per_page {
            page_size = pg_sz;
        }
        let mut page: u32 = 0;
        if let Some(pg) = params.page {
            page = pg;
        }
        sql.push_str(&format!(" limit {} offset {} ;", page_size, page * page_size));

        print!("{}", sql);
        Entity::find()
            .from_raw_sql(
                Statement::from_sql_and_values(DbBackend::Postgres, sql, vec![])
            ).all(db).await

    }
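`find_doc_infos_by_params` now assembles raw Postgres and runs it through `from_raw_sql`, but every filter value is spliced in with `format!` while the values vec stays empty. A hedged alternative sketch (`docs_by_keyword` is hypothetical) showing the same call with bound `$n` parameters, which avoids quoting and injection pitfalls:

use sea_orm::{DbBackend, DbConn, DbErr, EntityTrait, Statement};
use crate::entity::doc_info::{self, Entity};

async fn docs_by_keyword(db: &DbConn, uid: i64, kw: &str) -> Result<Vec<doc_info::Model>, DbErr> {
    Entity::find()
        .from_raw_sql(Statement::from_sql_and_values(
            DbBackend::Postgres,
            // $1/$2 are filled from the values below, never spliced into the string
            "select * from doc_info where uid = $1 and doc_name like $2 and is_deleted = false",
            vec![uid.into(), format!("%{}%", kw).into()],
        ))
        .all(db)
        .await
}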
    pub async fn find_doc_infos_in_page(
@ -126,11 +210,10 @@ impl Mutation {
            doc_name: Set(form_data.doc_name.to_owned()),
            size: Set(form_data.size.to_owned()),
            r#type: Set(form_data.r#type.to_owned()),
            kb_progress: Set(form_data.kb_progress.to_owned()),
            kb_progress_msg: Set(form_data.kb_progress_msg.to_owned()),
            location: Set(form_data.location.to_owned()),
            created_at: Set(Local::now().date_naive()),
            updated_at: Set(Local::now().date_naive()),
            created_at: Set(form_data.created_at.to_owned()),
            updated_at: Set(form_data.updated_at.to_owned()),
            is_deleted: Default::default()
        }
        .save(db)
        .await
@ -153,24 +236,50 @@ impl Mutation {
            doc_name: Set(form_data.doc_name.to_owned()),
            size: Set(form_data.size.to_owned()),
            r#type: Set(form_data.r#type.to_owned()),
            kb_progress: Set(form_data.kb_progress.to_owned()),
            kb_progress_msg: Set(form_data.kb_progress_msg.to_owned()),
            location: Set(form_data.location.to_owned()),
            created_at: Default::default(),
            updated_at: Set(Local::now().date_naive()),
            created_at: doc_info.created_at,
            updated_at: Set(now()),
            is_deleted: Default::default(),
        }
        .update(db)
        .await
    }

    pub async fn delete_doc_info(db: &DbConn, doc_id: i64) -> Result<DeleteResult, DbErr> {
        let tag: doc_info::ActiveModel = Entity::find_by_id(doc_id)
    pub async fn delete_doc_info(db: &DbConn, doc_ids: &Vec<i64>) -> Result<UpdateResult, DbErr> {
        let mut dids = doc_ids.clone();
        let mut i: usize = 0;
        loop {
            if dids.len() == i {
                break;
            }
            let mut doc: doc_info::ActiveModel = Entity::find_by_id(dids[i])
                .one(db)
                .await?
                .ok_or(DbErr::Custom("Cannot find.".to_owned()))
                .ok_or(DbErr::Custom(format!("Can't find doc:{}", dids[i])))
                .map(Into::into)?;
            doc.updated_at = Set(now());
            doc.is_deleted = Set(true);
            let _ = doc.update(db).await?;

        tag.delete(db).await
            for d in doc2_doc::Entity::find().filter(doc2_doc::Column::ParentId.eq(dids[i])).all(db).await? {
                dids.push(d.did);
            }
            let _ = doc2_doc::Entity::delete_many().filter(doc2_doc::Column::ParentId.eq(dids[i])).exec(db).await?;
            let _ = doc2_doc::Entity::delete_many().filter(doc2_doc::Column::Did.eq(dids[i])).exec(db).await?;
            i += 1;
        }
        crate::service::kb_info::Mutation::remove_docs(&db, dids, None).await
    }

    pub async fn rename(db: &DbConn, doc_id: i64, name: &String) -> Result<doc_info::Model, DbErr> {
        let mut doc: doc_info::ActiveModel = Entity::find_by_id(doc_id)
            .one(db)
            .await?
            .ok_or(DbErr::Custom(format!("Can't find doc:{}", doc_id)))
            .map(Into::into)?;
        doc.updated_at = Set(now());
        doc.doc_name = Set(name.clone());
        doc.update(db).await
    }

    pub async fn delete_all_doc_infos(db: &DbConn) -> Result<DeleteResult, DbErr> {
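`delete_doc_info` is now a cascading soft delete: each processed id appends its `doc2_doc` children to the work list before its own edges are hard-deleted, and the final `remove_docs` call detaches everything from knowledge bases. A hypothetical caller (helper name and module path assumed):

use sea_orm::{DbConn, DbErr};

// Soft-deletes a folder and everything beneath it; returns the number of
// kb2_doc links touched by the final remove_docs pass.
async fn remove_folder(db: &DbConn, folder_id: i64) -> Result<u64, DbErr> {
    let res = crate::service::doc_info::Mutation::delete_doc_info(db, &vec![folder_id]).await?;
    Ok(res.rows_affected)
}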
|
@ -1,10 +1,14 @@
use chrono::Local;
use sea_orm::{ActiveModelTrait, ColumnTrait, DbConn, DbErr, DeleteResult, EntityTrait, PaginatorTrait, QueryFilter, QueryOrder};
use chrono::{Local, FixedOffset, Utc};
use migration::Expr;
use sea_orm::{ActiveModelTrait, ColumnTrait, DbConn, DbErr, DeleteResult, EntityTrait, PaginatorTrait, QueryFilter, QueryOrder, UpdateResult};
use sea_orm::ActiveValue::Set;
use crate::entity::kb_info;
use crate::entity::kb2_doc;
use crate::entity::kb_info::Entity;

fn now() -> chrono::DateTime<FixedOffset> {
    Utc::now().with_timezone(&FixedOffset::east_opt(3600 * 8).unwrap())
}
pub struct Query;

impl Query {
@ -30,6 +34,14 @@ impl Query {
        .await
    }

    pub async fn find_kb_by_docs(db: &DbConn, doc_ids: Vec<i64>) -> Result<Vec<kb_info::Model>, DbErr> {
        let mut kbids = Vec::<i64>::new();
        for k in kb2_doc::Entity::find().filter(kb2_doc::Column::Did.is_in(doc_ids)).all(db).await? {
            kbids.push(k.kb_id);
        }
        Entity::find().filter(kb_info::Column::KbId.is_in(kbids)).all(db).await
    }

    pub async fn find_kb_infos_in_page(
        db: &DbConn,
        page: u64,
@ -58,8 +70,9 @@ impl Mutation {
            uid: Set(form_data.uid.to_owned()),
            kb_name: Set(form_data.kb_name.to_owned()),
            icon: Set(form_data.icon.to_owned()),
            created_at: Set(Local::now().date_naive()),
            updated_at: Set(Local::now().date_naive()),
            created_at: Set(now()),
            updated_at: Set(now()),
            is_deleted: Default::default()
        }
        .save(db)
        .await
@ -71,10 +84,20 @@ impl Mutation {
        doc_ids: Vec<i64>
    ) -> Result<(), DbErr> {
        for did in doc_ids {
            let res = kb2_doc::Entity::find()
                .filter(kb2_doc::Column::KbId.eq(kb_id))
                .filter(kb2_doc::Column::Did.eq(did))
                .all(db)
                .await?;
            if res.len() > 0 { continue; }
            let _ = kb2_doc::ActiveModel {
                id: Default::default(),
                kb_id: Set(kb_id),
                did: Set(did),
                kb_progress: Set(0.0),
                kb_progress_msg: Set("".to_owned()),
                updated_at: Set(now()),
                is_deleted: Default::default()
            }
            .save(db)
            .await?;
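The linking mutation above first queries `kb2_doc` for an existing `(kb_id, did)` pair and skips insertion when one is found, so repeated calls are idempotent. A sketch under the assumption that the containing function is named `add_docs` (its name falls outside this hunk):

use sea_orm::{DbConn, DbErr};
use crate::service::kb_info::Mutation;

// Linking the same document twice leaves exactly one kb2_doc row.
async fn link_twice(db: &DbConn, kb_id: i64, did: i64) -> Result<(), DbErr> {
    Mutation::add_docs(db, kb_id, vec![did]).await?;
    Mutation::add_docs(db, kb_id, vec![did]).await?; // second call is a no-op
    Ok(())
}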
@ -83,6 +106,25 @@ impl Mutation {
        Ok(())
    }

    pub async fn remove_docs(
        db: &DbConn,
        doc_ids: Vec<i64>,
        kb_id: Option<i64>
    ) -> Result<UpdateResult, DbErr> {
        let update = kb2_doc::Entity::update_many()
            .col_expr(kb2_doc::Column::IsDeleted, Expr::value(true))
            .col_expr(kb2_doc::Column::KbProgress, Expr::value(0))
            .col_expr(kb2_doc::Column::KbProgressMsg, Expr::value(""))
            .filter(kb2_doc::Column::Did.is_in(doc_ids));
        if let Some(kbid) = kb_id {
            update.filter(kb2_doc::Column::KbId.eq(kbid))
                .exec(db)
                .await
        } else {
            update.exec(db).await
        }
    }
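`remove_docs` branches on the optional `kb_id`, executing the builder with or without the extra scope filter. The same logic can extend the builder in place, keeping a single `exec`; a sketch reusing this file's imports (`unlink` is an illustrative name):

use migration::Expr;
use sea_orm::{ColumnTrait, DbConn, DbErr, EntityTrait, QueryFilter, UpdateResult};
use crate::entity::kb2_doc;

async fn unlink(db: &DbConn, doc_ids: Vec<i64>, kb_id: Option<i64>) -> Result<UpdateResult, DbErr> {
    let mut update = kb2_doc::Entity::update_many()
        .col_expr(kb2_doc::Column::IsDeleted, Expr::value(true))
        .filter(kb2_doc::Column::Did.is_in(doc_ids));
    if let Some(kbid) = kb_id {
        update = update.filter(kb2_doc::Column::KbId.eq(kbid)); // optional scope
    }
    update.exec(db).await
}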
    pub async fn update_kb_info_by_id(
        db: &DbConn,
        id: i64,
@ -99,21 +141,22 @@ impl Mutation {
            uid: kb_info.uid,
            kb_name: Set(form_data.kb_name.to_owned()),
            icon: Set(form_data.icon.to_owned()),
            created_at: Default::default(),
            updated_at: Set(Local::now().date_naive()),
            created_at: kb_info.created_at,
            updated_at: Set(now()),
            is_deleted: Default::default()
        }
        .update(db)
        .await
    }

    pub async fn delete_kb_info(db: &DbConn, kb_id: i64) -> Result<DeleteResult, DbErr> {
        let tag: kb_info::ActiveModel = Entity::find_by_id(kb_id)
        let kb: kb_info::ActiveModel = Entity::find_by_id(kb_id)
            .one(db)
            .await?
            .ok_or(DbErr::Custom("Cannot find.".to_owned()))
            .map(Into::into)?;

        tag.delete(db).await
        kb.delete(db).await
    }

    pub async fn delete_all_kb_infos(db: &DbConn) -> Result<DeleteResult, DbErr> {
|
@ -1,18 +1,24 @@
use chrono::Local;
use sea_orm::{ActiveModelTrait, DbConn, DbErr, DeleteResult, EntityTrait, PaginatorTrait, QueryOrder};
use chrono::{FixedOffset, Utc};
use sea_orm::{ActiveModelTrait, DbConn, DbErr, DeleteResult, EntityTrait, PaginatorTrait, QueryOrder, ColumnTrait, QueryFilter};
use sea_orm::ActiveValue::Set;
use crate::entity::tag_info;
use crate::entity::tag_info::Entity;

fn now() -> chrono::DateTime<FixedOffset> {
    Utc::now().with_timezone(&FixedOffset::east_opt(3600 * 8).unwrap())
}
pub struct Query;

impl Query {
    pub async fn find_tag_info_by_id(db: &DbConn, id: i64) -> Result<Option<tag_info::Model>, DbErr> {
    pub async fn find_tag_info_by_id(id: i64, db: &DbConn) -> Result<Option<tag_info::Model>, DbErr> {
        Entity::find_by_id(id).one(db).await
    }

    pub async fn find_tag_infos(db: &DbConn) -> Result<Vec<tag_info::Model>, DbErr> {
        Entity::find().all(db).await
    pub async fn find_tags_by_uid(uid: i64, db: &DbConn) -> Result<Vec<tag_info::Model>, DbErr> {
        Entity::find()
            .filter(tag_info::Column::Uid.eq(uid))
            .all(db)
            .await
    }

    pub async fn find_tag_infos_in_page(
@ -45,9 +51,9 @@ impl Mutation {
            regx: Set(form_data.regx.to_owned()),
            color: Set(form_data.color.to_owned()),
            icon: Set(form_data.icon.to_owned()),
            dir: Set(form_data.dir.to_owned()),
            created_at: Set(Local::now().date_naive()),
            updated_at: Set(Local::now().date_naive()),
            folder_id: Set(form_data.folder_id.to_owned()),
            created_at: Set(now()),
            updated_at: Set(now()),
        }
        .save(db)
        .await
@ -71,9 +77,9 @@ impl Mutation {
            regx: Set(form_data.regx.to_owned()),
            color: Set(form_data.color.to_owned()),
            icon: Set(form_data.icon.to_owned()),
            dir: Set(form_data.dir.to_owned()),
            folder_id: Set(form_data.folder_id.to_owned()),
            created_at: Default::default(),
            updated_at: Set(Local::now().date_naive()),
            updated_at: Set(now()),
        }
        .update(db)
        .await
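`TagInfo` swaps the free-form `Dir` for a `FolderId` reference, and its `regx` feeds the Postgres `~` regex match on `doc_name` in the document listing above. A sketch of a tag row under the new fields; `tag_name` and the default-filled columns are assumptions about the entity, and the values are illustrative:

use sea_orm::ActiveValue::Set;
use crate::entity::tag_info;

// A tag that auto-collects PDFs under folder 42.
fn pdf_tag(uid: i64) -> tag_info::ActiveModel {
    tag_info::ActiveModel {
        uid: Set(uid),
        tag_name: Set("PDFs".to_owned()),     // field name assumed from context
        regx: Set(".*\\.pdf$".to_owned()),    // matched against doc_name via Postgres `~`
        folder_id: Set(42),                   // scopes the tag to a folder, replacing Dir
        ..Default::default()                  // remaining columns stay NotSet
    }
}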
|
@ -1,9 +1,13 @@
use chrono::Local;
use sea_orm::{ActiveModelTrait, ColumnTrait, DbConn, DbErr, DeleteResult, EntityTrait, PaginatorTrait, QueryFilter, QueryOrder};
use chrono::{FixedOffset, Utc};
use migration::Expr;
use sea_orm::{ActiveModelTrait, ColumnTrait, DbConn, DbErr, DeleteResult, EntityTrait, PaginatorTrait, QueryFilter, QueryOrder, UpdateResult};
use sea_orm::ActiveValue::Set;
use crate::entity::user_info;
use crate::entity::user_info::Entity;

fn now() -> chrono::DateTime<FixedOffset> {
    Utc::now().with_timezone(&FixedOffset::east_opt(3600 * 8).unwrap())
}
pub struct Query;

impl Query {
@ -44,52 +48,64 @@ pub struct Mutation;
impl Mutation {
    pub async fn create_user(
        db: &DbConn,
        form_data: user_info::Model,
        form_data: &user_info::Model,
    ) -> Result<user_info::ActiveModel, DbErr> {
        user_info::ActiveModel {
            uid: Default::default(),
            email: Set(form_data.email.to_owned()),
            nickname: Set(form_data.nickname.to_owned()),
            avatar_url: Set(form_data.avatar_url.to_owned()),
            color_schema: Set(form_data.color_schema.to_owned()),
            avatar_base64: Set(form_data.avatar_base64.to_owned()),
            color_scheme: Set(form_data.color_scheme.to_owned()),
            list_style: Set(form_data.list_style.to_owned()),
            language: Set(form_data.language.to_owned()),
            password: Set(form_data.password.to_owned()),
            created_at: Set(Local::now().date_naive()),
            updated_at: Set(Local::now().date_naive()),
            last_login_at: Set(now()),
            created_at: Set(now()),
            updated_at: Set(now()),
        }
        .save(db)
        .await
    }

    pub async fn update_tag_by_id(
    pub async fn update_user_by_id(
        db: &DbConn,
        id: i64,
        form_data: user_info::Model,
        form_data: &user_info::Model,
    ) -> Result<user_info::Model, DbErr> {
        let user: user_info::ActiveModel = Entity::find_by_id(id)
        let usr: user_info::ActiveModel = Entity::find_by_id(form_data.uid)
            .one(db)
            .await?
            .ok_or(DbErr::Custom("Cannot find tag.".to_owned()))
            .ok_or(DbErr::Custom("Cannot find user.".to_owned()))
            .map(Into::into)?;

        user_info::ActiveModel {
            uid: user.uid,
            uid: Set(form_data.uid),
            email: Set(form_data.email.to_owned()),
            nickname: Set(form_data.nickname.to_owned()),
            avatar_url: Set(form_data.avatar_url.to_owned()),
            color_schema: Set(form_data.color_schema.to_owned()),
            avatar_base64: Set(form_data.avatar_base64.to_owned()),
            color_scheme: Set(form_data.color_scheme.to_owned()),
            list_style: Set(form_data.list_style.to_owned()),
            language: Set(form_data.language.to_owned()),
            password: Set(form_data.password.to_owned()),
            created_at: Default::default(),
            updated_at: Set(Local::now().date_naive()),
            updated_at: Set(now()),
            last_login_at: usr.last_login_at,
            created_at: usr.created_at,
        }
        .update(db)
        .await
    }

    pub async fn delete_tag(db: &DbConn, tid: i64) -> Result<DeleteResult, DbErr> {
    pub async fn update_login_status(
        uid: i64,
        db: &DbConn
    ) -> Result<UpdateResult, DbErr> {
        Entity::update_many()
            .col_expr(user_info::Column::LastLoginAt, Expr::value(now()))
            .filter(user_info::Column::Uid.eq(uid))
            .exec(db)
            .await
    }

    pub async fn delete_user(db: &DbConn, tid: i64) -> Result<DeleteResult, DbErr> {
        let tag: user_info::ActiveModel = Entity::find_by_id(tid)
            .one(db)
            .await?
@ -99,7 +115,7 @@ impl Mutation {
        tag.delete(db).await
    }

    pub async fn delete_all_tags(db: &DbConn) -> Result<DeleteResult, DbErr> {
    pub async fn delete_all(db: &DbConn) -> Result<DeleteResult, DbErr> {
        Entity::delete_many().exec(db).await
    }
}
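`create_user` now stamps `last_login_at` alongside the tz-aware timestamps, and the new `update_login_status` refreshes it via `update_many` on each sign-in. A hedged sketch of the intended wiring (`register_and_login` is hypothetical; password hashing and validation are omitted):

use sea_orm::{DbConn, DbErr};
use crate::entity::user_info;
use crate::service::user_info::Mutation; // module path assumed from this repo's layout

async fn register_and_login(db: &DbConn, form: &user_info::Model) -> Result<(), DbErr> {
    let created = Mutation::create_user(db, form).await?; // stamps created/updated/last_login_at
    let uid = created.uid.unwrap();                       // ActiveValue holds the id after save
    Mutation::update_login_status(uid, db).await?;        // refresh last_login_at on each login
    Ok(())
}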