mirror of https://git.mirrors.martin98.com/https://github.com/infiniflow/ragflow.git
synced 2025-04-22 14:10:01 +08:00

go through smoke test of all API (#12)

* add field progress msg into docinfo; add file processing procedure
* go through upload, create kb, add doc to kb
* smoke test for all API

This commit is contained in:
parent 72b7b5fae5
commit 1eb186a25f
@@ -11,7 +11,7 @@ ES_PORT=9200
 KIBANA_PORT=6601
 
 # Increase or decrease based on the available host memory (in bytes)
-MEM_LIMIT=1073741824
+MEM_LIMIT=4073741824
 
 POSTGRES_USER=root
 POSTGRES_PASSWORD=infiniflow_docgpt
@@ -54,6 +54,22 @@ services:
       - docgpt
     restart: always
 
+  minio:
+    image: quay.io/minio/minio:RELEASE.2023-12-20T01-00-02Z
+    container_name: docgpt-minio
+    command: server --console-address ":9001" /data
+    ports:
+      - 9000:9000
+      - 9001:9001
+    environment:
+      - MINIO_ROOT_USER=${MINIO_USER}
+      - MINIO_ROOT_PASSWORD=${MINIO_PASSWORD}
+    volumes:
+      - minio_data:/data
+    networks:
+      - docgpt
+    restart: always
+
 volumes:
   esdata01:
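A quick reachability check for the new MinIO service once the stack is up; a minimal sketch, assuming the default endpoint above and the `minio_usr`/`minio_pwd` credentials from conf/sys.cnf in this commit (the bucket list is empty on a fresh volume):

    # Minimal sketch: verify the MinIO container answers on the mapped port.
    # Endpoint and credentials are assumptions taken from the compose file
    # and conf/sys.cnf below.
    from minio import Minio

    client = Minio("127.0.0.1:9000",
                   access_key="infiniflow",
                   secret_key="infiniflow_docgpt",
                   secure=False)
    print(client.list_buckets())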
@@ -62,6 +78,8 @@ volumes:
     driver: local
   pg_data:
     driver: local
+  minio_data:
+    driver: local
 
 networks:
   docgpt:
@@ -20,13 +20,14 @@ impl MigrationTrait for Migration {
                     )
                     .col(ColumnDef::new(UserInfo::Email).string().not_null())
                     .col(ColumnDef::new(UserInfo::Nickname).string().not_null())
-                    .col(ColumnDef::new(UserInfo::AvatarUrl).string())
-                    .col(ColumnDef::new(UserInfo::ColorSchema).string().default("dark"))
+                    .col(ColumnDef::new(UserInfo::AvatarBase64).string())
+                    .col(ColumnDef::new(UserInfo::ColorScheme).string().default("dark"))
                     .col(ColumnDef::new(UserInfo::ListStyle).string().default("list"))
                     .col(ColumnDef::new(UserInfo::Language).string().default("chinese"))
                     .col(ColumnDef::new(UserInfo::Password).string().not_null())
-                    .col(ColumnDef::new(UserInfo::CreatedAt).date().not_null())
-                    .col(ColumnDef::new(UserInfo::UpdatedAt).date().not_null())
+                    .col(ColumnDef::new(UserInfo::LastLoginAt).timestamp_with_time_zone())
+                    .col(ColumnDef::new(UserInfo::CreatedAt).timestamp_with_time_zone().not_null())
+                    .col(ColumnDef::new(UserInfo::UpdatedAt).timestamp_with_time_zone().not_null())
                     .col(ColumnDef::new(UserInfo::IsDeleted).boolean().default(false))
                     .to_owned(),
             )
@@ -49,9 +50,9 @@ impl MigrationTrait for Migration {
                     .col(ColumnDef::new(TagInfo::Regx).string())
                     .col(ColumnDef::new(TagInfo::Color).tiny_unsigned().default(1))
                     .col(ColumnDef::new(TagInfo::Icon).tiny_unsigned().default(1))
-                    .col(ColumnDef::new(TagInfo::Dir).string())
-                    .col(ColumnDef::new(TagInfo::CreatedAt).date().not_null())
-                    .col(ColumnDef::new(TagInfo::UpdatedAt).date().not_null())
+                    .col(ColumnDef::new(TagInfo::FolderId).big_integer())
+                    .col(ColumnDef::new(TagInfo::CreatedAt).timestamp_with_time_zone().not_null())
+                    .col(ColumnDef::new(TagInfo::UpdatedAt).timestamp_with_time_zone().not_null())
                     .col(ColumnDef::new(TagInfo::IsDeleted).boolean().default(false))
                     .to_owned(),
             )
@@ -89,6 +90,10 @@ impl MigrationTrait for Migration {
                     )
                     .col(ColumnDef::new(Kb2Doc::KbId).big_integer())
                     .col(ColumnDef::new(Kb2Doc::Did).big_integer())
+                    .col(ColumnDef::new(Kb2Doc::KbProgress).float().default(0))
+                    .col(ColumnDef::new(Kb2Doc::KbProgressMsg).string().default(""))
+                    .col(ColumnDef::new(Kb2Doc::UpdatedAt).timestamp_with_time_zone().not_null())
+                    .col(ColumnDef::new(Kb2Doc::IsDeleted).boolean().default(false))
                     .to_owned(),
             )
             .await?;
@@ -141,8 +146,8 @@ impl MigrationTrait for Migration {
                     .col(ColumnDef::new(KbInfo::Uid).big_integer().not_null())
                     .col(ColumnDef::new(KbInfo::KbName).string().not_null())
                     .col(ColumnDef::new(KbInfo::Icon).tiny_unsigned().default(1))
-                    .col(ColumnDef::new(KbInfo::CreatedAt).date().not_null())
-                    .col(ColumnDef::new(KbInfo::UpdatedAt).date().not_null())
+                    .col(ColumnDef::new(KbInfo::CreatedAt).timestamp_with_time_zone().not_null())
+                    .col(ColumnDef::new(KbInfo::UpdatedAt).timestamp_with_time_zone().not_null())
                     .col(ColumnDef::new(KbInfo::IsDeleted).boolean().default(false))
                     .to_owned(),
             )
@@ -162,10 +167,8 @@ impl MigrationTrait for Migration {
                     .col(ColumnDef::new(DocInfo::Location).string().not_null())
                     .col(ColumnDef::new(DocInfo::Size).big_integer().not_null())
                     .col(ColumnDef::new(DocInfo::Type).string().not_null()).comment("doc|folder")
-                    .col(ColumnDef::new(DocInfo::KbProgress).float().default(0))
-                    .col(ColumnDef::new(DocInfo::KbProgressMsg).string().default(""))
-                    .col(ColumnDef::new(DocInfo::CreatedAt).date().not_null())
-                    .col(ColumnDef::new(DocInfo::UpdatedAt).date().not_null())
+                    .col(ColumnDef::new(DocInfo::CreatedAt).timestamp_with_time_zone().not_null())
+                    .col(ColumnDef::new(DocInfo::UpdatedAt).timestamp_with_time_zone().not_null())
                     .col(ColumnDef::new(DocInfo::IsDeleted).boolean().default(false))
                     .to_owned(),
             )
@@ -182,10 +185,11 @@ impl MigrationTrait for Migration {
                     .auto_increment()
                     .primary_key())
                     .col(ColumnDef::new(DialogInfo::Uid).big_integer().not_null())
+                    .col(ColumnDef::new(DialogInfo::KbId).big_integer().not_null())
                     .col(ColumnDef::new(DialogInfo::DialogName).string().not_null())
                     .col(ColumnDef::new(DialogInfo::History).string().comment("json"))
-                    .col(ColumnDef::new(DialogInfo::CreatedAt).date().not_null())
-                    .col(ColumnDef::new(DialogInfo::UpdatedAt).date().not_null())
+                    .col(ColumnDef::new(DialogInfo::CreatedAt).timestamp_with_time_zone().not_null())
+                    .col(ColumnDef::new(DialogInfo::UpdatedAt).timestamp_with_time_zone().not_null())
                     .col(ColumnDef::new(DialogInfo::IsDeleted).boolean().default(false))
                     .to_owned(),
             )
@@ -241,11 +245,12 @@ enum UserInfo {
     Uid,
     Email,
     Nickname,
-    AvatarUrl,
-    ColorSchema,
+    AvatarBase64,
+    ColorScheme,
     ListStyle,
     Language,
     Password,
+    LastLoginAt,
     CreatedAt,
     UpdatedAt,
     IsDeleted,
@@ -260,7 +265,7 @@ enum TagInfo {
     Regx,
     Color,
     Icon,
-    Dir,
+    FolderId,
     CreatedAt,
     UpdatedAt,
     IsDeleted,
@@ -280,6 +285,10 @@ enum Kb2Doc {
     Id,
     KbId,
     Did,
+    KbProgress,
+    KbProgressMsg,
+    UpdatedAt,
+    IsDeleted,
 }
 
 #[derive(DeriveIden)]
@@ -319,8 +328,6 @@ enum DocInfo {
     Location,
     Size,
     Type,
-    KbProgress,
-    KbProgressMsg,
     CreatedAt,
     UpdatedAt,
     IsDeleted,
@@ -329,8 +336,9 @@ enum DocInfo {
 #[derive(DeriveIden)]
 enum DialogInfo {
     Table,
-    DialogId,
     Uid,
+    KbId,
+    DialogId,
     DialogName,
     History,
     CreatedAt,
@@ -1,7 +1,10 @@
 [infiniflow]
-es=127.0.0.1:9200
+es=http://127.0.0.1:9200
 pgdb_usr=root
 pgdb_pwd=infiniflow_docgpt
 pgdb_host=127.0.0.1
 pgdb_port=5455
+minio_host=127.0.0.1:9000
+minio_usr=infiniflow
+minio_pwd=infiniflow_docgpt
@@ -2,6 +2,7 @@ import re
 import os
 import copy
 import base64
+import magic
 from dataclasses import dataclass
 from typing import List
 import numpy as np
@@ -373,6 +374,7 @@ class PptChunker(HuChunker):
         from pptx import Presentation
         ppt = Presentation(fnm)
         flds = self.Fields()
+        flds.text_chunks = []
         for slide in ppt.slides:
             for shape in slide.shapes:
                 if hasattr(shape, "text"):
@@ -391,11 +393,21 @@ class TextChunker(HuChunker):
     def __init__(self):
         super().__init__()
 
+    @staticmethod
+    def is_binary_file(file_path):
+        mime = magic.Magic(mime=True)
+        file_type = mime.from_file(file_path)
+        if 'text' in file_type:
+            return False
+        else:
+            return True
+
     def __call__(self, fnm):
         flds = self.Fields()
+        if self.is_binary_file(fnm):return flds
         with open(fnm, "r") as f:
             txt = f.read()
-        flds.text_chunks = self.naive_text_chunk(txt)
+        flds.text_chunks = [(c, None) for c in self.naive_text_chunk(txt)]
         flds.table_chunks = []
        return flds
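The new guard depends on the python-magic package (the `import magic` added above): text MIME types proceed to chunking, anything else returns an empty `Fields` before the file is ever opened. A rough sketch with hypothetical file names:

    # Hypothetical files, illustrating the new binary guard in TextChunker.
    ck = TextChunker()
    flds = ck("notes.txt")   # text/plain -> naive_text_chunk runs
    flds = ck("photo.jpg")   # image/jpeg -> empty Fields, no decode crash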
@@ -1,10 +1,15 @@
-import json, re, sys, os, hashlib, copy, glob, util, time, random
-from util.es_conn import HuEs, Postgres
+import json, os, sys, hashlib, copy, time, random, re, logging, torch
+from os.path import dirname, realpath
+sys.path.append(dirname(realpath(__file__)) + "/../")
+from util.es_conn import HuEs
+from util.db_conn import Postgres
+from util.minio_conn import HuMinio
 from util import rmSpace, findMaxDt
 from FlagEmbedding import FlagModel
 from nlp import huchunk, huqie
 import base64, hashlib
 from io import BytesIO
+import pandas as pd
 from elasticsearch_dsl import Q
 from parser import (
     PdfParser,
@@ -22,73 +27,115 @@ from nlp.huchunk import (
 ES = HuEs("infiniflow")
 BATCH_SIZE = 64
 PG = Postgres("infiniflow", "docgpt")
+MINIO = HuMinio("infiniflow")
+
 PDF = PdfChunker(PdfParser())
 DOC = DocxChunker(DocxParser())
 EXC = ExcelChunker(ExcelParser())
 PPT = PptChunker()
 
+UPLOAD_LOCATION = os.environ.get("UPLOAD_LOCATION", "./")
+logging.warning(f"The files are stored in {UPLOAD_LOCATION}, please check it!")
+
+
 def chuck_doc(name):
-    name = os.path.split(name)[-1].lower().split(".")[-1]
-    if name.find("pdf") >= 0: return PDF(name)
-    if name.find("doc") >= 0: return DOC(name)
-    if name.find("xlsx") >= 0: return EXC(name)
-    if name.find("ppt") >= 0: return PDF(name)
-    if name.find("pdf") >= 0: return PPT(name)
-    if re.match(r"(txt|csv)", name): return TextChunker(name)
+    suff = os.path.split(name)[-1].lower().split(".")[-1]
+    if suff.find("pdf") >= 0: return PDF(name)
+    if suff.find("doc") >= 0: return DOC(name)
+    if re.match(r"(xlsx|xlsm|xltx|xltm)", suff): return EXC(name)
+    if suff.find("ppt") >= 0: return PPT(name)
+    return TextChunker()(name)
 
 
 def collect(comm, mod, tm):
     sql = f"""
     select
+    id as kb2doc_id,
+    kb_id,
+    did,
+    updated_at,
+    is_deleted
+    from kb2_doc
+    where
+    updated_at >= '{tm}'
+    and kb_progress = 0
+    and MOD(did, {comm}) = {mod}
+    order by updated_at asc
+    limit 1000
+    """
+    kb2doc = PG.select(sql)
+    if len(kb2doc) == 0:return pd.DataFrame()
+
+    sql = """
+    select
     did,
     uid,
     doc_name,
     location,
-    updated_at
-    from docinfo
+    size
+    from doc_info
     where
-    updated_at >= '{tm}'
-    and kb_progress = 0
-    and type = 'doc'
-    and MOD(uid, {comm}) = {mod}
-    order by updated_at asc
-    limit 1000
-    """
-    df = PG.select(sql)
-    df = df.fillna("")
-    mtm = str(df["updated_at"].max())[:19]
-    print("TOTAL:", len(df), "To: ", mtm)
-    return df, mtm
+    did in (%s)
+    """%",".join([str(i) for i in kb2doc["did"].unique()])
+    docs = PG.select(sql)
+    docs = docs.fillna("")
+    docs = docs.join(kb2doc.set_index("did"), on="did", how="left")
+
+    mtm = str(docs["updated_at"].max())[:19]
+    print("TOTAL:", len(docs), "To: ", mtm)
+    return docs
 
 
-def set_progress(did, prog, msg):
+def set_progress(kb2doc_id, prog, msg="Processing..."):
     sql = f"""
-    update docinfo set kb_progress={prog}, kb_progress_msg='{msg}' where did={did}
+    update kb2_doc set kb_progress={prog}, kb_progress_msg='{msg}'
+    where
+    id={kb2doc_id}
     """
     PG.update(sql)
 
 
 def build(row):
     if row["size"] > 256000000:
-        set_progress(row["did"], -1, "File size exceeds( <= 256Mb )")
+        set_progress(row["kb2doc_id"], -1, "File size exceeds( <= 256Mb )")
         return []
+    res = ES.search(Q("term", doc_id=row["did"]))
+    if ES.getTotal(res) > 0:
+        ES.updateScriptByQuery(Q("term", doc_id=row["did"]),
+                               scripts="""
+                               if(!ctx._source.kb_id.contains('%s'))
+                                 ctx._source.kb_id.add('%s');
+                               """%(str(row["kb_id"]), str(row["kb_id"])),
+                               idxnm = index_name(row["uid"])
+                               )
+        set_progress(row["kb2doc_id"], 1, "Done")
+        return []
+
+    random.seed(time.time())
+    set_progress(row["kb2doc_id"], random.randint(0, 20)/100., "Finished preparing! Start to slice file!")
+    try:
+        obj = chuck_doc(os.path.join(UPLOAD_LOCATION, row["location"]))
+    except Exception as e:
+        if re.search("(No such file|not found)", str(e)):
+            set_progress(row["kb2doc_id"], -1, "Can not find file <%s>"%row["doc_name"])
+        else:
+            set_progress(row["kb2doc_id"], -1, f"Internal system error: %s"%str(e).replace("'", ""))
+        return []
+
+    print(row["doc_name"], obj)
+    if not obj.text_chunks and not obj.table_chunks:
+        set_progress(row["kb2doc_id"], 1, "Nothing added! Mostly, file type unsupported yet.")
+        return []
+
+    set_progress(row["kb2doc_id"], random.randint(20, 60)/100., "Finished slicing files. Start to embedding the content.")
+
     doc = {
         "doc_id": row["did"],
+        "kb_id": [str(row["kb_id"])],
         "title_tks": huqie.qie(os.path.split(row["location"])[-1]),
-        "updated_at": row["updated_at"]
+        "updated_at": str(row["updated_at"]).replace("T", " ")[:19]
     }
-    random.seed(time.time())
-    set_progress(row["did"], random.randint(0, 20)/100., "Finished preparing! Start to slice file!")
-    obj = chuck_doc(row["location"])
-    if not obj:
-        set_progress(row["did"], -1, "Unsuported file type.")
-        return []
-
-    set_progress(row["did"], random.randint(20, 60)/100.)
-
     output_buffer = BytesIO()
     docs = []
     md5 = hashlib.md5()
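The reworked `collect()` above now pulls pending rows from `kb2_doc` first and only then fetches the matching `doc_info` rows, stitching the two together with a pandas left join keyed on `did`. A toy sketch of that join (the frames and values are made up):

    # Toy illustration of the collect() join; column values are hypothetical.
    import pandas as pd

    kb2doc = pd.DataFrame({"kb2doc_id": [7], "kb_id": [3], "did": [42],
                           "updated_at": ["2024-01-01 00:00:00"],
                           "is_deleted": [False]})
    docs = pd.DataFrame({"did": [42], "uid": [13], "doc_name": ["a.pdf"],
                         "location": ["13/1-a.pdf"], "size": [1024]})
    rows = docs.join(kb2doc.set_index("did"), on="did", how="left")
    # each row now carries the doc fields plus its kb2_doc linkage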
@@ -97,12 +144,11 @@ def build(row):
         md5.update((txt + str(d["doc_id"])).encode("utf-8"))
         d["_id"] = md5.hexdigest()
         d["content_ltks"] = huqie.qie(txt)
-        d["docnm_kwd"] = rmSpace(d["docnm_tks"])
         if not img:
             docs.append(d)
             continue
         img.save(output_buffer, format='JPEG')
-        d["img_bin"] = base64.b64encode(output_buffer.getvalue())
+        d["img_bin"] = str(output_buffer.getvalue())
         docs.append(d)
 
     for arr, img in obj.table_chunks:
@@ -115,9 +161,11 @@ def build(row):
             docs.append(d)
             continue
         img.save(output_buffer, format='JPEG')
-        d["img_bin"] = base64.b64encode(output_buffer.getvalue())
+        MINIO.put("{}-{}".format(row["uid"], row["kb_id"]), d["_id"],
+                  output_buffer.getvalue())
+        d["img_id"] = "{}-{}".format(row["uid"], row["kb_id"])
         docs.append(d)
-    set_progress(row["did"], random.randint(60, 70)/100., "Finished slicing. Start to embedding the content.")
+    set_progress(row["kb2doc_id"], random.randint(60, 70)/100., "Continue embedding the content.")
 
     return docs
@@ -127,7 +175,7 @@ def index_name(uid):return f"docgpt_{uid}"
 def init_kb(row):
     idxnm = index_name(row["uid"])
     if ES.indexExist(idxnm): return
-    return ES.createIdx(idxnm, json.load(open("res/mapping.json", "r")))
+    return ES.createIdx(idxnm, json.load(open("conf/mapping.json", "r")))
 
 
 model = None
@@ -138,27 +186,59 @@ def embedding(docs):
     vects = 0.1 * tts + 0.9 * cnts
     assert len(vects) == len(docs)
     for i,d in enumerate(docs):d["q_vec"] = vects[i].tolist()
-    for d in docs:
-        set_progress(d["doc_id"], random.randint(70, 95)/100.,
-                     "Finished embedding! Start to build index!")
+
+
+def rm_doc_from_kb(df):
+    if len(df) == 0:return
+    for _,r in df.iterrows():
+        ES.updateScriptByQuery(Q("term", doc_id=r["did"]),
+                               scripts="""
+                               if(ctx._source.kb_id.contains('%s'))
+                                 ctx._source.kb_id.remove(
+                                     ctx._source.kb_id.indexOf('%s')
+                                 );
+                               """%(str(r["kb_id"]),str(r["kb_id"])),
+                               idxnm = index_name(r["uid"])
+                               )
+    if len(df) == 0:return
+    sql = """
+    delete from kb2_doc where id in (%s)
+    """%",".join([str(i) for i in df["kb2doc_id"]])
+    PG.update(sql)
 
 
 def main(comm, mod):
+    global model
+    from FlagEmbedding import FlagModel
+    model = FlagModel('/opt/home/kevinhu/data/bge-large-zh-v1.5/',
+                      query_instruction_for_retrieval="为这个句子生成表示以用于检索相关文章:",
+                      use_fp16=torch.cuda.is_available())
     tm_fnm = f"res/{comm}-{mod}.tm"
-    tmf = open(tm_fnm, "a+")
     tm = findMaxDt(tm_fnm)
-    rows, tm = collect(comm, mod, tm)
-    for r in rows:
-        if r["is_deleted"]:
-            ES.deleteByQuery(Q("term", dock_id=r["did"]), index_name(r["uid"]))
-            continue
+    rows = collect(comm, mod, tm)
+    if len(rows) == 0:return
+
+    rm_doc_from_kb(rows.loc[rows.is_deleted == True])
+    rows = rows.loc[rows.is_deleted == False].reset_index(drop=True)
+    if len(rows) == 0:return
+    tmf = open(tm_fnm, "a+")
+    for _, r in rows.iterrows():
         cks = build(r)
+        if not cks:
+            tmf.write(str(r["updated_at"]) + "\n")
+            continue
         ## TODO: exception handler
         ## set_progress(r["did"], -1, "ERROR: ")
         embedding(cks)
-        if cks: init_kb(r)
-        ES.bulk(cks, index_name(r["uid"]))
+        set_progress(r["kb2doc_id"], random.randint(70, 95)/100.,
+                     "Finished embedding! Start to build index!")
+        init_kb(r)
+        es_r = ES.bulk(cks, index_name(r["uid"]))
+        if es_r:
+            set_progress(r["kb2doc_id"], -1, "Index failure!")
+            print(es_r)
+        else: set_progress(r["kb2doc_id"], 1., "Done!")
         tmf.write(str(r["updated_at"]) + "\n")
     tmf.close()
@@ -166,6 +246,5 @@ def main(comm, mod):
 if __name__ == "__main__":
     from mpi4py import MPI
     comm = MPI.COMM_WORLD
-    rank = comm.Get_rank()
-    main(comm, rank)
+    main(comm.Get_size(), comm.Get_rank())
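`main()` now receives the world size and rank directly, so each MPI worker claims a disjoint shard of `kb2_doc` through the `MOD(did, {comm}) = {mod}` filter in `collect()`. A sketch of the contract (the launch command is an assumption):

    # Assumed launch: mpirun -n 4 python index.py
    from mpi4py import MPI

    comm = MPI.COMM_WORLD
    size, rank = comm.Get_size(), comm.Get_rank()
    # collect() keeps only rows where did % size == rank,
    # so the four workers never process the same document.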
@@ -14,9 +14,9 @@ class Config:
         self.env = env
         if env == "spark":CF.read("./cv.cnf")
 
-    def get(self, key):
+    def get(self, key, default=None):
         global CF
-        return CF.get(self.env, key)
+        return CF[self.env].get(key, default)
 
 def init(env):
     return Config(env)
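The point of the change: configparser's `CF.get(section, key)` raises `NoOptionError` when a key such as the new `minio_host` is absent, while the section-mapping `get` falls back silently. A small sketch:

    # Old style raises on a missing option; the new style returns a default.
    import configparser

    CF = configparser.ConfigParser()
    CF.read_string("[infiniflow]\nes=http://127.0.0.1:9200\n")
    CF["infiniflow"].get("minio_host", "127.0.0.1:9000")  # -> the default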
@@ -49,7 +49,11 @@ class Postgres(object):
             cur = self.conn.cursor()
             cur.execute(sql)
             updated_rows = cur.rowcount
-            conn.commit()
+            self.conn.commit()
             cur.close()
             return updated_rows
         except Exception as e:
@@ -5,10 +5,10 @@ import time
 import copy
 import elasticsearch
 from elasticsearch import Elasticsearch
-from elasticsearch_dsl import UpdateByQuery, Search, Index
+from elasticsearch_dsl import UpdateByQuery, Search, Index, Q
 from util import config
 
-print("Elasticsearch version: ", elasticsearch.__version__)
+logging.info("Elasticsearch version: ", elasticsearch.__version__)
 
 
 def instance(env):
@@ -20,7 +20,7 @@ def instance(env):
         timeout=600
     )
 
-    print("ES: ", ES_DRESS, ES.info())
+    logging.info("ES: ", ES_DRESS, ES.info())
 
     return ES
 
@@ -31,7 +31,7 @@ class HuEs:
         self.info = {}
         self.config = config.init(env)
         self.conn()
-        self.idxnm = self.config.get("idx_nm","")
+        self.idxnm = self.config.get("idx_nm", "")
         if not self.es.ping():
             raise Exception("Can't connect to ES cluster")
@@ -46,6 +46,7 @@ class HuEs:
                 break
             except Exception as e:
                 logging.error("Fail to connect to es: " + str(e))
+                time.sleep(1)
 
     def version(self):
         v = self.info.get("version", {"number": "5.6"})
@@ -121,7 +122,6 @@ class HuEs:
                 acts.append(
                     {"update": {"_id": id, "_index": ids[id]["_index"]}, "retry_on_conflict": 100})
                 acts.append({"doc": d, "doc_as_upsert": "true"})
-                logging.info("bulk upsert: %s" % id)
 
         res = []
         for _ in range(100):
@@ -148,7 +148,6 @@ class HuEs:
                 return res
             except Exception as e:
                 logging.warn("Fail to bulk: " + str(e))
-                print(e)
                 if re.search(r"(Timeout|time out)", str(e), re.IGNORECASE):
                     time.sleep(3)
                     continue
@@ -229,7 +228,7 @@ class HuEs:
         return False
 
     def search(self, q, idxnm=None, src=False, timeout="2s"):
-        print(json.dumps(q, ensure_ascii=False))
+        if not isinstance(q, dict): q = Search().query(q).to_dict()
         for i in range(3):
             try:
                 res = self.es.search(index=(self.idxnm if not idxnm else idxnm),
@@ -271,9 +270,31 @@ class HuEs:
                               str(e) + "【Q】:" + str(q.to_dict()))
                 if str(e).find("Timeout") > 0 or str(e).find("Conflict") > 0:
                     continue
+            self.conn()
 
         return False
 
+    def updateScriptByQuery(self, q, scripts, idxnm=None):
+        ubq = UpdateByQuery(index=self.idxnm if not idxnm else idxnm).using(self.es).query(q)
+        ubq = ubq.script(source=scripts)
+        ubq = ubq.params(refresh=True)
+        ubq = ubq.params(slices=5)
+        ubq = ubq.params(conflicts="proceed")
+        for i in range(3):
+            try:
+                r = ubq.execute()
+                return True
+            except Exception as e:
+                logging.error("ES updateByQuery exception: " +
+                              str(e) + "【Q】:" + str(q.to_dict()))
+                if str(e).find("Timeout") > 0 or str(e).find("Conflict") > 0:
+                    continue
+                self.conn()
+
+        return False
+
     def deleteByQuery(self, query, idxnm=""):
         for i in range(3):
             try:
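Usage as index.py drives the new helper: attach a kb_id to every indexed chunk of a document through a painless script, with the three-attempt retry loop hidden inside the method. The ids below are hypothetical:

    # Hypothetical ids; mirrors the call pattern in the indexing service above.
    ES.updateScriptByQuery(
        Q("term", doc_id=42),
        scripts="if(!ctx._source.kb_id.contains('3')) ctx._source.kb_id.add('3');",
        idxnm="docgpt_13")   # index_name(uid) for uid 13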
@@ -307,7 +328,6 @@ class HuEs:
                               routing=routing, refresh=False)  # , doc_type="_doc")
                 return True
             except Exception as e:
-                print(e)
                 logging.error("ES update exception: " + str(e) + " id:" + str(id) + ", version:" + str(self.version()) +
                               json.dumps(script, ensure_ascii=False))
                 if str(e).find("Timeout") > 0:
python/util/minio_conn.py (new file, 73 lines)
@@ -0,0 +1,73 @@
+import logging
+import time
+from util import config
+from minio import Minio
+from io import BytesIO
+
+class HuMinio(object):
+    def __init__(self, env):
+        self.config = config.init(env)
+        self.conn = None
+        self.__open__()
+
+    def __open__(self):
+        try:
+            if self.conn:self.__close__()
+        except Exception as e:
+            pass
+
+        try:
+            self.conn = Minio(self.config.get("minio_host"),
+                              access_key=self.config.get("minio_usr"),
+                              secret_key=self.config.get("minio_pwd"),
+                              secure=False
+                              )
+        except Exception as e:
+            logging.error("Fail to connect %s "%self.config.get("minio_host") + str(e))
+
+    def __close__(self):
+        del self.conn
+        self.conn = None
+
+    def put(self, bucket, fnm, binary):
+        for _ in range(10):
+            try:
+                if not self.conn.bucket_exists(bucket):
+                    self.conn.make_bucket(bucket)
+
+                r = self.conn.put_object(bucket, fnm,
+                                         BytesIO(binary),
+                                         len(binary)
+                                         )
+                return r
+            except Exception as e:
+                logging.error(f"Fail put {bucket}/{fnm}: "+str(e))
+                self.__open__()
+                time.sleep(1)
+
+    def get(self, bucket, fnm):
+        for _ in range(10):
+            try:
+                r = self.conn.get_object(bucket, fnm)
+                return r.read()
+            except Exception as e:
+                logging.error(f"Fail get {bucket}/{fnm}: "+str(e))
+                self.__open__()
+                time.sleep(1)
+        return
+
+if __name__ == "__main__":
+    conn = HuMinio("infiniflow")
+    fnm = "/opt/home/kevinhu/docgpt/upload/13/11-408.jpg"
+    from PIL import Image
+    img = Image.open(fnm)
+    buff = BytesIO()
+    img.save(buff, format='JPEG')
+    print(conn.put("test", "11-408.jpg", buff.getvalue()))
+    bts = conn.get("test", "11-408.jpg")
+    img = Image.open(BytesIO(bts))
+    img.save("test.jpg")
@@ -1,5 +1,8 @@
 use std::collections::HashMap;
-use actix_web::{get, HttpResponse, post, web};
+use actix_web::{HttpResponse, post, web};
+use serde::Deserialize;
+use serde_json::Value;
+use serde_json::json;
 use crate::api::JsonResponse;
 use crate::AppState;
 use crate::entity::dialog_info;
@@ -7,13 +10,46 @@ use crate::errors::AppError;
 use crate::service::dialog_info::Query;
 use crate::service::dialog_info::Mutation;
 
-#[get("/v1.0/dialogs")]
-async fn list(model: web::Json<dialog_info::Model>, data: web::Data<AppState>) -> Result<HttpResponse, AppError> {
-    let dialogs = Query::find_dialog_infos_by_uid(&data.conn, model.uid).await?;
+#[derive(Debug, Deserialize)]
+pub struct ListParams {
+    pub uid: i64,
+    pub dialog_id: Option<i64>
+}
+#[post("/v1.0/dialogs")]
+async fn list(params: web::Json<ListParams>, data: web::Data<AppState>) -> Result<HttpResponse, AppError> {
     let mut result = HashMap::new();
-    result.insert("dialogs", dialogs);
+    if let Some(dia_id) = params.dialog_id{
+        let dia = Query::find_dialog_info_by_id(&data.conn, dia_id).await?.unwrap();
+        let kb = crate::service::kb_info::Query::find_kb_info_by_id(&data.conn, dia.kb_id).await?.unwrap();
+        print!("{:?}", dia.history);
+        let hist:Value = serde_json::from_str(&dia.history)?;
+        let detail = json!({
+            "dialog_id": dia_id,
+            "dialog_name": dia.dialog_name.to_owned(),
+            "created_at": dia.created_at.to_string().to_owned(),
+            "updated_at": dia.updated_at.to_string().to_owned(),
+            "history": hist,
+            "kb_info": kb
+        });
+
+        result.insert("dialogs", vec![detail]);
+    }
+    else{
+        let mut dias = Vec::<Value>::new();
+        for dia in Query::find_dialog_infos_by_uid(&data.conn, params.uid).await?{
+            let kb = crate::service::kb_info::Query::find_kb_info_by_id(&data.conn, dia.kb_id).await?.unwrap();
+            let hist:Value = serde_json::from_str(&dia.history)?;
+            dias.push(json!({
+                "dialog_id": dia.dialog_id,
+                "dialog_name": dia.dialog_name.to_owned(),
+                "created_at": dia.created_at.to_string().to_owned(),
+                "updated_at": dia.updated_at.to_string().to_owned(),
+                "history": hist,
+                "kb_info": kb
+            }));
+        }
+        result.insert("dialogs", dias);
+    }
     let json_response = JsonResponse {
         code: 200,
         err: "".to_owned(),
@@ -25,27 +61,14 @@ async fn list
         .body(serde_json::to_string(&json_response)?))
 }
 
-#[get("/v1.0/dialog")]
-async fn detail(model: web::Json<dialog_info::Model>, data: web::Data<AppState>) -> Result<HttpResponse, AppError> {
-    let dialogs = Query::find_dialog_info_by_id(&data.conn, model.dialog_id).await?;
-
-    let mut result = HashMap::new();
-    result.insert("dialogs", dialogs);
-
-    let json_response = JsonResponse {
-        code: 200,
-        err: "".to_owned(),
-        data: result,
-    };
-
-    Ok(HttpResponse::Ok()
-        .content_type("application/json")
-        .body(serde_json::to_string(&json_response)?))
+#[derive(Debug, Deserialize)]
+pub struct RmParams {
+    pub uid: i64,
+    pub dialog_id: i64
 }
 
 #[post("/v1.0/delete_dialog")]
-async fn delete(model: web::Json<dialog_info::Model>, data: web::Data<AppState>) -> Result<HttpResponse, AppError> {
-    let _ = Mutation::delete_dialog_info(&data.conn, model.dialog_id).await?;
+async fn delete(params: web::Json<RmParams>, data: web::Data<AppState>) -> Result<HttpResponse, AppError> {
+    let _ = Mutation::delete_dialog_info(&data.conn, params.dialog_id).await?;
 
     let json_response = JsonResponse {
         code: 200,
@@ -58,12 +81,25 @@ async fn delete
         .body(serde_json::to_string(&json_response)?))
 }
 
-#[post("/v1.0/create_kb")]
-async fn create(model: web::Json<dialog_info::Model>, data: web::Data<AppState>) -> Result<HttpResponse, AppError> {
-    let model = Mutation::create_dialog_info(&data.conn, model.into_inner()).await?;
+#[derive(Debug, Deserialize)]
+pub struct CreateParams {
+    pub uid: i64,
+    pub dialog_id: Option<i64>,
+    pub kb_id: i64,
+    pub name: String
+}
+#[post("/v1.0/create_dialog")]
+async fn create(param: web::Json<CreateParams>, data: web::Data<AppState>) -> Result<HttpResponse, AppError> {
     let mut result = HashMap::new();
-    result.insert("dialog_id", model.dialog_id.unwrap());
+    if let Some(dia_id) = param.dialog_id {
+        result.insert("dialog_id", dia_id);
+        let dia = Query::find_dialog_info_by_id(&data.conn, dia_id).await?;
+        let _ = Mutation::update_dialog_info_by_id(&data.conn, dia_id, &param.name, &dia.unwrap().history).await?;
+    }
+    else{
+        let dia = Mutation::create_dialog_info(&data.conn, param.uid, param.kb_id, &param.name).await?;
+        result.insert("dialog_id", dia.dialog_id.unwrap());
+    }
 
     let json_response = JsonResponse {
         code: 200,
@@ -75,3 +111,33 @@ async fn create
         .content_type("application/json")
         .body(serde_json::to_string(&json_response)?))
 }
+
+#[derive(Debug, Deserialize)]
+pub struct UpdateHistoryParams {
+    pub uid: i64,
+    pub dialog_id: i64,
+    pub history: Value
+}
+#[post("/v1.0/update_history")]
+async fn update_history(param: web::Json<UpdateHistoryParams>, data: web::Data<AppState>) -> Result<HttpResponse, AppError> {
+    let mut json_response = JsonResponse {
+        code: 200,
+        err: "".to_owned(),
+        data: (),
+    };
+
+    if let Some(dia) = Query::find_dialog_info_by_id(&data.conn, param.dialog_id).await?{
+        let _ = Mutation::update_dialog_info_by_id(&data.conn, param.dialog_id, &dia.dialog_name,
+                                                   &param.history.to_string()).await?;
+    }
+    else{
+        json_response.code = 500;
+        json_response.err = "Can't find dialog data!".to_owned();
+    }
+
+    Ok(HttpResponse::Ok()
+        .content_type("application/json")
+        .body(serde_json::to_string(&json_response)?))
+}
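A smoke-test sketch for the reworked dialog endpoints, in the spirit of this commit; the base URL is an assumption, and the JSON fields follow `CreateParams`/`UpdateHistoryParams` above:

    import requests

    BASE = "http://127.0.0.1:8000"   # assumed dev server address
    r = requests.post(f"{BASE}/v1.0/create_dialog",
                      json={"uid": 1, "kb_id": 3, "name": "demo"})
    dia_id = r.json()["data"]["dialog_id"]
    requests.post(f"{BASE}/v1.0/update_history",
                  json={"uid": 1, "dialog_id": dia_id, "history": []})
    print(requests.post(f"{BASE}/v1.0/dialogs",
                        json={"uid": 1, "dialog_id": dia_id}).json())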
@@ -1,12 +1,8 @@
 use std::collections::HashMap;
 use std::io::Write;
-use std::slice::Chunks;
-//use actix_multipart::{Multipart, MultipartError, Field};
 use actix_multipart_extract::{File, Multipart, MultipartForm};
 use actix_web::{get, HttpResponse, post, web};
-use actix_web::web::Bytes;
-use chrono::Local;
-use futures_util::StreamExt;
+use chrono::{Utc, FixedOffset};
 use sea_orm::DbConn;
 use crate::api::JsonResponse;
 use crate::AppState;
@@ -15,14 +11,17 @@ use crate::errors::AppError;
 use crate::service::doc_info::{Mutation, Query};
 use serde::Deserialize;
 
+fn now()->chrono::DateTime<FixedOffset>{
+    Utc::now().with_timezone(&FixedOffset::east_opt(3600*8).unwrap())
+}
+
 #[derive(Debug, Deserialize)]
-pub struct Params {
+pub struct ListParams {
     pub uid: i64,
     pub filter: FilterParams,
     pub sortby: String,
-    pub page: u64,
-    pub per_page: u64,
+    pub page: Option<u32>,
+    pub per_page: Option<u32>,
 }
 
 #[derive(Debug, Deserialize)]
@@ -33,14 +32,8 @@ pub struct FilterParams {
     pub kb_id: Option<i64>,
 }
 
-#[derive(Debug, Deserialize)]
-pub struct MvParams {
-    pub dids: Vec<i64>,
-    pub dest_did: i64,
-}
-
-#[get("/v1.0/docs")]
-async fn list(params: web::Json<Params>, data: web::Data<AppState>) -> Result<HttpResponse, AppError> {
+#[post("/v1.0/docs")]
+async fn list(params: web::Json<ListParams>, data: web::Data<AppState>) -> Result<HttpResponse, AppError> {
     let docs = Query::find_doc_infos_by_params(&data.conn, params.into_inner())
         .await?;
 
@@ -69,21 +62,21 @@ pub struct UploadForm {
 #[post("/v1.0/upload")]
 async fn upload(payload: Multipart<UploadForm>, data: web::Data<AppState>) -> Result<HttpResponse, AppError> {
     let uid = payload.uid;
-    async fn add_number_to_filename(file_name: String, conn:&DbConn, uid:i64) -> String {
+    async fn add_number_to_filename(file_name: String, conn:&DbConn, uid:i64, parent_id:i64) -> String {
         let mut i = 0;
         let mut new_file_name = file_name.to_string();
         let arr: Vec<&str> = file_name.split(".").collect();
         let suffix = String::from(arr[arr.len()-1]);
         let preffix = arr[..arr.len()-1].join(".");
-        let mut docs = Query::find_doc_infos_by_name(conn, uid, new_file_name.clone()).await.unwrap();
+        let mut docs = Query::find_doc_infos_by_name(conn, uid, &new_file_name, Some(parent_id)).await.unwrap();
         while docs.len()>0 {
             i += 1;
             new_file_name = format!("{}_{}.{}", preffix, i, suffix);
-            docs = Query::find_doc_infos_by_name(conn, uid, new_file_name.clone()).await.unwrap();
+            docs = Query::find_doc_infos_by_name(conn, uid, &new_file_name, Some(parent_id)).await.unwrap();
         }
         new_file_name
     }
-    let fnm = add_number_to_filename(payload.file_field.name.clone(), &data.conn, uid).await;
+    let fnm = add_number_to_filename(payload.file_field.name.clone(), &data.conn, uid, payload.did).await;
 
     std::fs::create_dir_all(format!("./upload/{}/", uid));
     let filepath = format!("./upload/{}/{}-{}", payload.uid, payload.did, fnm.clone());
@@ -95,13 +88,11 @@ async fn upload
         uid: uid,
         doc_name: fnm,
         size: payload.file_field.bytes.len() as i64,
-        kb_infos: Vec::new(),
-        kb_progress: 0.0,
-        kb_progress_msg: "".to_string(),
         location: filepath,
         r#type: "doc".to_string(),
-        created_at: Local::now().date_naive(),
-        updated_at: Local::now().date_naive(),
+        created_at: now(),
+        updated_at: now(),
+        is_deleted:Default::default(),
     }).await?;
 
     let _ = Mutation::place_doc(&data.conn, payload.did, doc.did.unwrap()).await?;
@@ -109,11 +100,14 @@ async fn upload
     Ok(HttpResponse::Ok().body("File uploaded successfully"))
 }
 
+#[derive(Deserialize, Debug)]
+pub struct RmDocsParam {
+    uid: i64,
+    dids: Vec<i64>
+}
 #[post("/v1.0/delete_docs")]
-async fn delete(doc_ids: web::Json<Vec<i64>>, data: web::Data<AppState>) -> Result<HttpResponse, AppError> {
-    for doc_id in doc_ids.iter() {
-        let _ = Mutation::delete_doc_info(&data.conn, *doc_id).await?;
-    }
+async fn delete(params: web::Json<RmDocsParam>, data: web::Data<AppState>) -> Result<HttpResponse, AppError> {
+    let _ = Mutation::delete_doc_info(&data.conn, &params.dids).await?;
 
     let json_response = JsonResponse {
         code: 200,
@@ -126,6 +120,13 @@ async fn delete
         .body(serde_json::to_string(&json_response)?))
 }
 
+#[derive(Debug, Deserialize)]
+pub struct MvParams {
+    pub uid:i64,
+    pub dids: Vec<i64>,
+    pub dest_did: i64,
+}
+
 #[post("/v1.0/mv_docs")]
 async fn mv(params: web::Json<MvParams>, data: web::Data<AppState>) -> Result<HttpResponse, AppError> {
     Mutation::mv_doc_info(&data.conn, params.dest_did, &params.dids).await?;
@@ -140,3 +141,61 @@ async fn mv
         .content_type("application/json")
         .body(serde_json::to_string(&json_response)?))
 }
+
+#[derive(Debug, Deserialize)]
+pub struct NewFoldParams {
+    pub uid: i64,
+    pub parent_id: i64,
+    pub name: String
+}
+
+#[post("/v1.0/new_folder")]
+async fn new_folder(params: web::Json<NewFoldParams>, data: web::Data<AppState>) -> Result<HttpResponse, AppError> {
+    let doc = Mutation::create_doc_info(&data.conn, Model {
+        did:Default::default(),
+        uid: params.uid,
+        doc_name: params.name.to_string(),
+        size:0,
+        r#type: "folder".to_string(),
+        location: "".to_owned(),
+        created_at: now(),
+        updated_at: now(),
+        is_deleted:Default::default(),
+    }).await?;
+    let _ = Mutation::place_doc(&data.conn, params.parent_id, doc.did.unwrap()).await?;
+
+    Ok(HttpResponse::Ok().body("Folder created successfully"))
+}
+
+#[derive(Debug, Deserialize)]
+pub struct RenameParams {
+    pub uid: i64,
+    pub did: i64,
+    pub name: String
+}
+
+#[post("/v1.0/rename")]
+async fn rename(params: web::Json<RenameParams>, data: web::Data<AppState>) -> Result<HttpResponse, AppError> {
+    let docs = Query::find_doc_infos_by_name(&data.conn, params.uid, &params.name, None).await?;
+    if docs.len()>0{
+        let json_response = JsonResponse {
+            code: 500,
+            err: "Name duplicated!".to_owned(),
+            data: (),
+        };
+        return Ok(HttpResponse::Ok()
+            .content_type("application/json")
+            .body(serde_json::to_string(&json_response)?));
+    }
+    let doc = Mutation::rename(&data.conn, params.did, &params.name).await?;
+
+    let json_response = JsonResponse {
+        code: 200,
+        err: "".to_owned(),
+        data: doc,
+    };
+
+    Ok(HttpResponse::Ok()
+        .content_type("application/json")
+        .body(serde_json::to_string(&json_response)?))
+}
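Smoke-test sketch for the new folder endpoints (base URL assumed, ids hypothetical); note that renaming to an existing name is answered with code 500 and "Name duplicated!":

    import requests

    BASE = "http://127.0.0.1:8000"   # assumed
    requests.post(f"{BASE}/v1.0/new_folder",
                  json={"uid": 1, "parent_id": 0, "name": "papers"})
    r = requests.post(f"{BASE}/v1.0/rename",
                      json={"uid": 1, "did": 42, "name": "papers"})
    print(r.json())                  # expect the duplicate-name error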
@@ -60,6 +60,20 @@ async fn add_docs_to_kb
         .body(serde_json::to_string(&json_response)?))
 }
 
+#[post("/v1.0/anti_kb_docs")]
+async fn anti_kb_docs(param: web::Json<AddDocs2KbParams>, data: web::Data<AppState>) -> Result<HttpResponse, AppError> {
+    let _ = Mutation::remove_docs(&data.conn, param.dids.to_owned(), Some(param.kb_id)).await?;
+
+    let json_response = JsonResponse {
+        code: 200,
+        err: "".to_owned(),
+        data: (),
+    };
+
+    Ok(HttpResponse::Ok()
+        .content_type("application/json")
+        .body(serde_json::to_string(&json_response)?))
+}
 #[get("/v1.0/kbs")]
 async fn list(model: web::Json<kb_info::Model>, data: web::Data<AppState>) -> Result<HttpResponse, AppError> {
     let kbs = Query::find_kb_infos_by_uid(&data.conn, model.uid).await?;
@@ -92,3 +106,27 @@ async fn delete
         .content_type("application/json")
         .body(serde_json::to_string(&json_response)?))
 }
+
+#[derive(Clone, Debug, Serialize, Deserialize)]
+pub struct DocIdsParams {
+    pub uid: i64,
+    pub dids: Vec<i64>
+}
+
+#[post("/v1.0/all_relevents")]
+async fn all_relevents(params: web::Json<DocIdsParams>, data: web::Data<AppState>) -> Result<HttpResponse, AppError> {
+    let dids = crate::service::doc_info::Query::all_descendent_ids(&data.conn, &params.dids).await?;
+    let mut result = HashMap::new();
+    let kbs = Query::find_kb_by_docs(&data.conn, dids).await?;
+    result.insert("kbs", kbs);
+    let json_response = JsonResponse {
+        code: 200,
+        err: "".to_owned(),
+        data: result,
+    };
+
+    Ok(HttpResponse::Ok()
+        .content_type("application/json")
+        .body(serde_json::to_string(&json_response)?))
+}
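Smoke-test sketch for /v1.0/all_relevents (base URL assumed, ids hypothetical): given doc or folder ids, it expands all descendants and reports the knowledge bases they are linked to:

    import requests

    BASE = "http://127.0.0.1:8000"   # assumed
    r = requests.post(f"{BASE}/v1.0/all_relevents",
                      json={"uid": 1, "dids": [42]})
    print(r.json()["data"]["kbs"])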
@@ -1,6 +1,7 @@
 use std::collections::HashMap;
 use actix_web::{get, HttpResponse, post, web};
 use actix_web_httpauth::middleware::HttpAuthentication;
+use serde::Deserialize;
 use crate::validator;
 use crate::api::JsonResponse;
 use crate::AppState;
@@ -8,6 +9,11 @@ use crate::entity::tag_info;
 use crate::errors::AppError;
 use crate::service::tag_info::{Mutation, Query};
 
+#[derive(Debug, Deserialize)]
+pub struct TagListParams {
+    pub uid: i64
+}
+
 #[post("/v1.0/create_tag")]
 async fn create(model: web::Json<tag_info::Model>, data: web::Data<AppState>) -> Result<HttpResponse, AppError> {
     let model = Mutation::create_tag(&data.conn, model.into_inner()).await?;
@@ -41,9 +47,12 @@ async fn delete
         .body(serde_json::to_string(&json_response)?))
 }
 
-#[get("/v1.0/tags", wrap = "HttpAuthentication::bearer(validator)")]
-async fn list(data: web::Data<AppState>) -> Result<HttpResponse, AppError> {
-    let tags = Query::find_tag_infos(&data.conn).await?;
+//#[get("/v1.0/tags", wrap = "HttpAuthentication::bearer(validator)")]
+#[post("/v1.0/tags")]
+async fn list(param: web::Json<TagListParams>, data: web::Data<AppState>) -> Result<HttpResponse, AppError> {
+    let tags = Query::find_tags_by_uid(param.uid, &data.conn).await?;
 
     let mut result = HashMap::new();
     result.insert("tags", tags);
@@ -1,10 +1,13 @@
+use std::collections::HashMap;
+
 use actix_identity::Identity;
-use actix_web::{get, HttpResponse, post, web};
+use actix_web::{HttpResponse, post, web};
 use serde::{Deserialize, Serialize};
 use crate::api::JsonResponse;
 use crate::AppState;
 use crate::entity::user_info::Model;
 use crate::errors::{AppError, UserError};
+use crate::service::user_info::Mutation;
 use crate::service::user_info::Query;
 
 pub(crate) fn create_auth_token(user: &Model) -> u64 {
|
|||||||
) -> Result<HttpResponse, AppError> {
|
) -> Result<HttpResponse, AppError> {
|
||||||
match Query::login(&data.conn, &input.email, &input.password).await? {
|
match Query::login(&data.conn, &input.email, &input.password).await? {
|
||||||
Some(user) => {
|
Some(user) => {
|
||||||
|
let _ = Mutation::update_login_status(user.uid,&data.conn).await?;
|
||||||
let token = create_auth_token(&user).to_string();
|
let token = create_auth_token(&user).to_string();
|
||||||
|
|
||||||
identity.remember(token.clone());
|
identity.remember(token.clone());
|
||||||
@ -50,3 +54,33 @@ async fn login(
|
|||||||
None => Err(UserError::LoginFailed.into())
|
None => Err(UserError::LoginFailed.into())
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[post("/v1.0/register")]
|
||||||
|
async fn register(model: web::Json<Model>, data: web::Data<AppState>) -> Result<HttpResponse, AppError> {
|
||||||
|
let mut result = HashMap::new();
|
||||||
|
let usr = Mutation::create_user(&data.conn, &model).await?;
|
||||||
|
result.insert("uid", usr.uid.unwrap());
|
||||||
|
let json_response = JsonResponse {
|
||||||
|
code: 200,
|
||||||
|
err: "".to_owned(),
|
||||||
|
data: result,
|
||||||
|
};
|
||||||
|
|
||||||
|
Ok(HttpResponse::Ok()
|
||||||
|
.content_type("application/json")
|
||||||
|
.body(serde_json::to_string(&json_response)?))
|
||||||
|
}
|
||||||
|
|
||||||
|
#[post("/v1.0/setting")]
|
||||||
|
async fn setting(model: web::Json<Model>, data: web::Data<AppState>) -> Result<HttpResponse, AppError> {
|
||||||
|
let _ = Mutation::update_user_by_id(&data.conn, &model).await?;
|
||||||
|
let json_response = JsonResponse {
|
||||||
|
code: 200,
|
||||||
|
err: "".to_owned(),
|
||||||
|
data: (),
|
||||||
|
};
|
||||||
|
|
||||||
|
Ok(HttpResponse::Ok()
|
||||||
|
.content_type("application/json")
|
||||||
|
.body(serde_json::to_string(&json_response)?))
|
||||||
|
}
|
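Note: both new handlers reply with the same JsonResponse envelope. A standalone sketch of the shape register produces; the generic JsonResponse<T> stand-in and the uid value are illustrative, not this crate's exact definition:

    use serde::Serialize;
    use std::collections::HashMap;

    #[derive(Serialize)]
    struct JsonResponse<T> {
        code: u32,
        err: String,
        data: T,
    }

    fn main() {
        let mut data = HashMap::new();
        data.insert("uid", 42i64); // illustrative uid
        let body = serde_json::to_string(&JsonResponse { code: 200, err: "".to_owned(), data }).unwrap();
        assert_eq!(body, r#"{"code":200,"err":"","data":{"uid":42}}"#);
    }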
src/entity/dialog_info.rs
@@ -1,3 +1,4 @@
+use chrono::{DateTime, FixedOffset};
 use sea_orm::entity::prelude::*;
 use serde::{Deserialize, Serialize};
 
@@ -8,13 +9,17 @@ pub struct Model {
     pub dialog_id: i64,
     #[sea_orm(index)]
     pub uid: i64,
+    #[serde(skip_deserializing)]
+    pub kb_id: i64,
     pub dialog_name: String,
     pub history: String,
 
     #[serde(skip_deserializing)]
-    pub created_at: Date,
+    pub created_at: DateTime<FixedOffset>,
     #[serde(skip_deserializing)]
-    pub updated_at: Date,
+    pub updated_at: DateTime<FixedOffset>,
+    #[serde(skip_deserializing)]
+    pub is_deleted: bool
 }
 
 #[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
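Note: this and the following entity files swap chrono's naive Date for DateTime<FixedOffset>, so every row now stores a time of day plus an explicit UTC offset. A self-contained sketch of the conversion the service-layer now() helpers below perform (assumes only the chrono crate):

    use chrono::{DateTime, FixedOffset, Utc};

    fn main() {
        // UTC+8, the offset hard-coded by the services' now() helpers
        let tz = FixedOffset::east_opt(8 * 3600).unwrap();
        let ts: DateTime<FixedOffset> = Utc::now().with_timezone(&tz);
        println!("{}", ts.to_rfc3339()); // e.g. 2024-01-01T20:00:00+08:00
    }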
src/entity/doc_info.rs
@@ -1,6 +1,7 @@
 use sea_orm::entity::prelude::*;
 use serde::{Deserialize, Serialize};
 use crate::entity::kb_info;
+use chrono::{DateTime, FixedOffset};
 
 #[derive(Clone, Debug, PartialEq, DeriveEntityModel, Deserialize, Serialize)]
 #[sea_orm(table_name = "doc_info")]
@@ -13,16 +14,14 @@ pub struct Model {
     pub size: i64,
     #[sea_orm(column_name = "type")]
     pub r#type: String,
-    pub kb_progress: f32,
-    pub kb_progress_msg: String,
+    #[serde(skip_deserializing)]
     pub location: String,
-    #[sea_orm(ignore)]
-    pub kb_infos: Vec<kb_info::Model>,
 
     #[serde(skip_deserializing)]
-    pub created_at: Date,
+    pub created_at: DateTime<FixedOffset>,
     #[serde(skip_deserializing)]
-    pub updated_at: Date,
+    pub updated_at: DateTime<FixedOffset>,
+    #[serde(skip_deserializing)]
+    pub is_deleted: bool
 }
 
 #[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
@@ -48,7 +47,7 @@ impl Related<super::kb_info::Entity> for Entity {
     }
 }
 
-impl Related<Entity> for Entity {
+impl Related<super::doc2_doc::Entity> for Entity {
     fn to() -> RelationDef {
         super::doc2_doc::Relation::Parent.def()
     }
src/entity/kb2_doc.rs
@@ -1,7 +1,8 @@
 use sea_orm::entity::prelude::*;
 use serde::{Deserialize, Serialize};
+use chrono::{DateTime, FixedOffset};
 
-#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel, Deserialize, Serialize)]
+#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Deserialize, Serialize)]
 #[sea_orm(table_name = "kb2_doc")]
 pub struct Model {
     #[sea_orm(primary_key, auto_increment = true)]
@@ -10,6 +11,14 @@ pub struct Model {
     pub kb_id: i64,
     #[sea_orm(index)]
     pub did: i64,
+    #[serde(skip_deserializing)]
+    pub kb_progress: f32,
+    #[serde(skip_deserializing)]
+    pub kb_progress_msg: String,
+    #[serde(skip_deserializing)]
+    pub updated_at: DateTime<FixedOffset>,
+    #[serde(skip_deserializing)]
+    pub is_deleted: bool,
 }
 
 #[derive(Debug, Clone, Copy, EnumIter)]
src/entity/kb_info.rs
@@ -1,10 +1,12 @@
 use sea_orm::entity::prelude::*;
 use serde::{Deserialize, Serialize};
+use chrono::{DateTime, FixedOffset};
 
 #[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel, Deserialize, Serialize)]
 #[sea_orm(table_name = "kb_info")]
 pub struct Model {
     #[sea_orm(primary_key, auto_increment = false)]
+    #[serde(skip_deserializing)]
     pub kb_id: i64,
     #[sea_orm(index)]
     pub uid: i64,
@@ -12,9 +14,11 @@ pub struct Model {
     pub icon: i16,
 
     #[serde(skip_deserializing)]
-    pub created_at: Date,
+    pub created_at: DateTime<FixedOffset>,
     #[serde(skip_deserializing)]
-    pub updated_at: Date,
+    pub updated_at: DateTime<FixedOffset>,
+    #[serde(skip_deserializing)]
+    pub is_deleted: bool,
 }
 
 #[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
src/entity/tag_info.rs
@@ -1,23 +1,27 @@
 use sea_orm::entity::prelude::*;
 use serde::{Deserialize, Serialize};
+use chrono::{DateTime, FixedOffset};
 
-#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel, Deserialize, Serialize)]
+#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Deserialize, Serialize)]
 #[sea_orm(table_name = "tag_info")]
 pub struct Model {
     #[sea_orm(primary_key)]
     #[serde(skip_deserializing)]
     pub tid: i64,
+    #[sea_orm(index)]
     pub uid: i64,
     pub tag_name: String,
-    pub regx: Option<String>,
-    pub color: u16,
-    pub icon: u16,
-    pub dir: Option<String>,
+    #[serde(skip_deserializing)]
+    pub regx: String,
+    pub color: i16,
+    pub icon: i16,
+    #[serde(skip_deserializing)]
+    pub folder_id: i64,
 
     #[serde(skip_deserializing)]
-    pub created_at: Date,
+    pub created_at: DateTime<FixedOffset>,
     #[serde(skip_deserializing)]
-    pub updated_at: Date,
+    pub updated_at: DateTime<FixedOffset>,
 }
 
 #[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
src/entity/user_info.rs
@@ -1,5 +1,6 @@
 use sea_orm::entity::prelude::*;
 use serde::{Deserialize, Serialize};
+use chrono::{DateTime, FixedOffset};
 
 #[derive(Clone, Debug, PartialEq, Eq, Hash, DeriveEntityModel, Deserialize, Serialize)]
 #[sea_orm(table_name = "user_info")]
@@ -9,19 +10,22 @@ pub struct Model {
     pub uid: i64,
     pub email: String,
     pub nickname: String,
-    pub avatar_url: Option<String>,
-    pub color_schema: String,
+    pub avatar_base64: String,
+    pub color_scheme: String,
     pub list_style: String,
     pub language: String,
     pub password: String,
 
     #[serde(skip_deserializing)]
-    pub created_at: Date,
+    pub last_login_at: DateTime<FixedOffset>,
     #[serde(skip_deserializing)]
-    pub updated_at: Date,
+    pub created_at: DateTime<FixedOffset>,
+    #[serde(skip_deserializing)]
+    pub updated_at: DateTime<FixedOffset>,
 }
 
 #[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
 pub enum Relation {}
 
 
 impl ActiveModelBehavior for ActiveModel {}
src/main.rs
@@ -98,16 +98,22 @@ fn init(cfg: &mut web::ServiceConfig) {
     cfg.service(api::kb_info::delete);
     cfg.service(api::kb_info::list);
     cfg.service(api::kb_info::add_docs_to_kb);
+    cfg.service(api::kb_info::anti_kb_docs);
+    cfg.service(api::kb_info::all_relevents);
 
     cfg.service(api::doc_info::list);
     cfg.service(api::doc_info::delete);
     cfg.service(api::doc_info::mv);
     cfg.service(api::doc_info::upload);
+    cfg.service(api::doc_info::new_folder);
+    cfg.service(api::doc_info::rename);
 
     cfg.service(api::dialog_info::list);
     cfg.service(api::dialog_info::delete);
-    cfg.service(api::dialog_info::detail);
     cfg.service(api::dialog_info::create);
+    cfg.service(api::dialog_info::update_history);
 
     cfg.service(api::user_info::login);
+    cfg.service(api::user_info::register);
+    cfg.service(api::user_info::setting);
 }
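Note: init is the actix-web configuration hook; each cfg.service call mounts one macro-annotated handler, and a new endpoint only becomes reachable once it is registered here. A minimal self-contained sketch of the same wiring pattern (the /ping route is hypothetical; assumes only actix-web):

    use actix_web::{get, web, App, HttpResponse, HttpServer, Responder};

    #[get("/ping")]
    async fn ping() -> impl Responder {
        HttpResponse::Ok().body("pong")
    }

    // Same shape as init above: a plain fn registering services on the config.
    fn init(cfg: &mut web::ServiceConfig) {
        cfg.service(ping);
    }

    #[actix_web::main]
    async fn main() -> std::io::Result<()> {
        HttpServer::new(|| App::new().configure(init))
            .bind(("127.0.0.1", 8080))?
            .run()
            .await
    }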
src/service/dialog_info.rs
@@ -1,11 +1,15 @@
-use chrono::Local;
-use sea_orm::{ActiveModelTrait, DbConn, DbErr, DeleteResult, EntityTrait, PaginatorTrait, QueryOrder};
+use chrono::{Local, FixedOffset, Utc};
+use migration::Expr;
+use sea_orm::{ActiveModelTrait, DbConn, DbErr, DeleteResult, EntityTrait, PaginatorTrait, QueryOrder, UpdateResult};
 use sea_orm::ActiveValue::Set;
 use sea_orm::QueryFilter;
 use sea_orm::ColumnTrait;
 use crate::entity::dialog_info;
 use crate::entity::dialog_info::Entity;
 
+fn now() -> chrono::DateTime<FixedOffset> {
+    Utc::now().with_timezone(&FixedOffset::east_opt(3600 * 8).unwrap())
+}
+
 pub struct Query;
 
 impl Query {
@@ -20,6 +24,7 @@ impl Query {
     pub async fn find_dialog_infos_by_uid(db: &DbConn, uid: i64) -> Result<Vec<dialog_info::Model>, DbErr> {
         Entity::find()
             .filter(dialog_info::Column::Uid.eq(uid))
+            .filter(dialog_info::Column::IsDeleted.eq(false))
             .all(db)
             .await
     }
@@ -45,15 +50,19 @@ pub struct Mutation;
 impl Mutation {
     pub async fn create_dialog_info(
         db: &DbConn,
-        form_data: dialog_info::Model,
+        uid: i64,
+        kb_id: i64,
+        name: &String
     ) -> Result<dialog_info::ActiveModel, DbErr> {
         dialog_info::ActiveModel {
             dialog_id: Default::default(),
-            uid: Set(form_data.uid.to_owned()),
-            dialog_name: Set(form_data.dialog_name.to_owned()),
-            history: Set(form_data.history.to_owned()),
-            created_at: Set(Local::now().date_naive()),
-            updated_at: Set(Local::now().date_naive()),
+            uid: Set(uid),
+            kb_id: Set(kb_id),
+            dialog_name: Set(name.to_owned()),
+            history: Set("".to_owned()),
+            created_at: Set(now()),
+            updated_at: Set(now()),
+            is_deleted: Default::default()
         }
         .save(db)
         .await
@@ -61,35 +70,25 @@ impl Mutation {
 
     pub async fn update_dialog_info_by_id(
         db: &DbConn,
-        id: i64,
-        form_data: dialog_info::Model,
-    ) -> Result<dialog_info::Model, DbErr> {
-        let dialog_info: dialog_info::ActiveModel = Entity::find_by_id(id)
-            .one(db)
-            .await?
-            .ok_or(DbErr::Custom("Cannot find.".to_owned()))
-            .map(Into::into)?;
-
-        dialog_info::ActiveModel {
-            dialog_id: dialog_info.dialog_id,
-            uid: dialog_info.uid,
-            dialog_name: Set(form_data.dialog_name.to_owned()),
-            history: Set(form_data.history.to_owned()),
-            created_at: Default::default(),
-            updated_at: Set(Local::now().date_naive()),
-        }
-        .update(db)
-        .await
+        dialog_id: i64,
+        dialog_name: &String,
+        history: &String
+    ) -> Result<UpdateResult, DbErr> {
+        Entity::update_many()
+            .col_expr(dialog_info::Column::DialogName, Expr::value(dialog_name))
+            .col_expr(dialog_info::Column::History, Expr::value(history))
+            .col_expr(dialog_info::Column::UpdatedAt, Expr::value(now()))
+            .filter(dialog_info::Column::DialogId.eq(dialog_id))
+            .exec(db)
+            .await
     }
 
-    pub async fn delete_dialog_info(db: &DbConn, kb_id: i64) -> Result<DeleteResult, DbErr> {
-        let tag: dialog_info::ActiveModel = Entity::find_by_id(kb_id)
-            .one(db)
-            .await?
-            .ok_or(DbErr::Custom("Cannot find.".to_owned()))
-            .map(Into::into)?;
-
-        tag.delete(db).await
+    pub async fn delete_dialog_info(db: &DbConn, dialog_id: i64) -> Result<UpdateResult, DbErr> {
+        Entity::update_many()
+            .col_expr(dialog_info::Column::IsDeleted, Expr::value(true))
+            .filter(dialog_info::Column::DialogId.eq(dialog_id))
+            .exec(db)
+            .await
     }
 
     pub async fn delete_all_dialog_infos(db: &DbConn) -> Result<DeleteResult, DbErr> {
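Note: update and delete now go through Entity::update_many and return sea-orm's UpdateResult, so callers get a rows_affected count instead of the updated model, and "delete" is a soft delete that flips is_deleted. A hedged caller sketch (the connection string is a placeholder, and Mutation refers to the module above):

    use sea_orm::{Database, DbErr};

    async fn soft_delete_example() -> Result<(), DbErr> {
        let conn = Database::connect("postgres://user:pass@localhost/docgpt").await?; // placeholder DSN
        let res = Mutation::delete_dialog_info(&conn, 1).await?;
        if res.rows_affected == 0 {
            // No live dialog matched: unknown id, or already soft-deleted.
        }
        Ok(())
    }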
src/service/doc_info.rs
@@ -1,10 +1,15 @@
-use chrono::Local;
-use sea_orm::{ActiveModelTrait, ColumnTrait, DbConn, DbErr, DeleteResult, EntityTrait, PaginatorTrait, QueryOrder, Unset, Unchanged, ConditionalStatement};
+use chrono::{Utc, FixedOffset};
+use sea_orm::{ActiveModelTrait, ColumnTrait, DbConn, DbErr, DeleteResult, EntityTrait, PaginatorTrait, QueryOrder, Unset, Unchanged, ConditionalStatement, QuerySelect, JoinType, RelationTrait, DbBackend, Statement, UpdateResult};
 use sea_orm::ActiveValue::Set;
 use sea_orm::QueryFilter;
-use crate::api::doc_info::Params;
-use crate::entity::{doc2_doc, doc_info, kb_info, tag_info};
+use crate::api::doc_info::ListParams;
+use crate::entity::{doc2_doc, doc_info};
 use crate::entity::doc_info::Entity;
+use crate::service;
+
+fn now() -> chrono::DateTime<FixedOffset> {
+    Utc::now().with_timezone(&FixedOffset::east_opt(3600 * 8).unwrap())
+}
 
 pub struct Query;
 
@@ -24,42 +29,121 @@ impl Query {
         .await
     }
 
-    pub async fn find_doc_infos_by_name(db: &DbConn, uid: i64, name: String) -> Result<Vec<doc_info::Model>, DbErr> {
+    pub async fn find_doc_infos_by_name(db: &DbConn, uid: i64, name: &String, parent_id: Option<i64>) -> Result<Vec<doc_info::Model>, DbErr> {
+        let mut dids = Vec::<i64>::new();
+        if let Some(pid) = parent_id {
+            for d2d in doc2_doc::Entity::find().filter(doc2_doc::Column::ParentId.eq(pid)).all(db).await? {
+                dids.push(d2d.did);
+            }
+        } else {
+            let doc = Entity::find()
+                .filter(doc_info::Column::DocName.eq(name.clone()))
+                .filter(doc_info::Column::Uid.eq(uid))
+                .all(db)
+                .await?;
+            if doc.len() == 0 {
+                return Ok(vec![]);
+            }
+            assert!(doc.len() > 0);
+            let d2d = doc2_doc::Entity::find().filter(doc2_doc::Column::Did.eq(doc[0].did)).all(db).await?;
+            assert!(d2d.len() <= 1, "Did: {}->{}", doc[0].did, d2d.len());
+            if d2d.len() > 0 {
+                for d2d_ in doc2_doc::Entity::find().filter(doc2_doc::Column::ParentId.eq(d2d[0].parent_id)).all(db).await? {
+                    dids.push(d2d_.did);
+                }
+            }
+        }
+
         Entity::find()
-            .filter(doc_info::Column::DocName.eq(name))
+            .filter(doc_info::Column::DocName.eq(name.clone()))
             .filter(doc_info::Column::Uid.eq(uid))
+            .filter(doc_info::Column::Did.is_in(dids))
+            .filter(doc_info::Column::IsDeleted.eq(false))
             .all(db)
             .await
     }
 
-    pub async fn find_doc_infos_by_params(db: &DbConn, params: Params) -> Result<Vec<doc_info::Model>, DbErr> {
-        // Setup paginator
-        let paginator = Entity::find();
-
-        // Fetch paginated posts
-        let mut query = paginator
-            .find_with_related(kb_info::Entity);
-        if let Some(kb_id) = params.filter.kb_id {
-            query = query.filter(kb_info::Column::KbId.eq(kb_id));
-        }
-        if let Some(folder_id) = params.filter.folder_id {
-        }
-        if let Some(tag_id) = params.filter.tag_id {
-            query = query.filter(tag_info::Column::Tid.eq(tag_id));
-        }
-        if let Some(keywords) = params.filter.keywords {
-        }
-        Ok(query.order_by_asc(doc_info::Column::Did)
-            .all(db)
-            .await?
-            .into_iter()
-            .map(|(mut doc_info, kb_infos)| {
-                doc_info.kb_infos = kb_infos;
-                doc_info
-            })
-            .collect())
+    pub async fn all_descendent_ids(db: &DbConn, doc_ids: &Vec<i64>) -> Result<Vec<i64>, DbErr> {
+        let mut dids = doc_ids.clone();
+        let mut i: usize = 0;
+        loop {
+            if dids.len() == i {
+                break;
+            }
+
+            for d in doc2_doc::Entity::find().filter(doc2_doc::Column::ParentId.eq(dids[i])).all(db).await? {
+                dids.push(d.did);
+            }
+            i += 1;
+        }
+        Ok(dids)
+    }
+
+    pub async fn find_doc_infos_by_params(db: &DbConn, params: ListParams) -> Result<Vec<doc_info::Model>, DbErr> {
+        // Setup paginator
+        let mut sql: String = "
+        select
+        a.did,
+        a.uid,
+        a.doc_name,
+        a.location,
+        a.size,
+        a.type,
+        a.created_at,
+        a.updated_at,
+        a.is_deleted
+        from
+        doc_info as a
+        ".to_owned();
+
+        let mut cond: String = format!(" a.uid={} and a.is_deleted=False ", params.uid);
+
+        if let Some(kb_id) = params.filter.kb_id {
+            sql.push_str(&format!(" inner join kb2_doc on kb2_doc.did = a.did and kb2_doc.kb_id={}", kb_id));
+        }
+        if let Some(folder_id) = params.filter.folder_id {
+            sql.push_str(&format!(" inner join doc2_doc on a.did = doc2_doc.did and doc2_doc.parent_id={}", folder_id));
+        }
+        // Fetch paginated posts
+        if let Some(tag_id) = params.filter.tag_id {
+            let tag = service::tag_info::Query::find_tag_info_by_id(tag_id, &db).await.unwrap().unwrap();
+            if tag.folder_id > 0 {
+                sql.push_str(&format!(" inner join doc2_doc on a.did = doc2_doc.did and doc2_doc.parent_id={}", tag.folder_id));
+            }
+            if tag.regx.len() > 0 {
+                cond.push_str(&format!(" and doc_name ~ '{}'", tag.regx));
+            }
+        }
+
+        if let Some(keywords) = params.filter.keywords {
+            cond.push_str(&format!(" and doc_name like '%{}%'", keywords));
+        }
+        if cond.len() > 0 {
+            sql.push_str(&" where ");
+            sql.push_str(&cond);
+        }
+        let mut orderby = params.sortby.clone();
+        if orderby.len() == 0 {
+            orderby = "updated_at desc".to_owned();
+        }
+        sql.push_str(&format!(" order by {}", orderby));
+        let mut page_size: u32 = 30;
+        if let Some(pg_sz) = params.per_page {
+            page_size = pg_sz;
+        }
+        let mut page: u32 = 0;
+        if let Some(pg) = params.page {
+            page = pg;
+        }
+        sql.push_str(&format!(" limit {} offset {} ;", page_size, page * page_size));
+
+        print!("{}", sql);
+        Entity::find()
+            .from_raw_sql(
+                Statement::from_sql_and_values(DbBackend::Postgres, sql, vec![])
+            ).all(db).await
     }
 
     pub async fn find_doc_infos_in_page(
@@ -126,11 +210,10 @@ impl Mutation {
             doc_name: Set(form_data.doc_name.to_owned()),
             size: Set(form_data.size.to_owned()),
             r#type: Set(form_data.r#type.to_owned()),
-            kb_progress: Set(form_data.kb_progress.to_owned()),
-            kb_progress_msg: Set(form_data.kb_progress_msg.to_owned()),
             location: Set(form_data.location.to_owned()),
-            created_at: Set(Local::now().date_naive()),
-            updated_at: Set(Local::now().date_naive()),
+            created_at: Set(form_data.created_at.to_owned()),
+            updated_at: Set(form_data.updated_at.to_owned()),
+            is_deleted: Default::default()
         }
         .save(db)
         .await
@@ -153,24 +236,50 @@ impl Mutation {
             doc_name: Set(form_data.doc_name.to_owned()),
             size: Set(form_data.size.to_owned()),
             r#type: Set(form_data.r#type.to_owned()),
-            kb_progress: Set(form_data.kb_progress.to_owned()),
-            kb_progress_msg: Set(form_data.kb_progress_msg.to_owned()),
             location: Set(form_data.location.to_owned()),
-            created_at: Default::default(),
-            updated_at: Set(Local::now().date_naive()),
+            created_at: doc_info.created_at,
+            updated_at: Set(now()),
+            is_deleted: Default::default(),
         }
         .update(db)
         .await
     }
 
-    pub async fn delete_doc_info(db: &DbConn, doc_id: i64) -> Result<DeleteResult, DbErr> {
-        let tag: doc_info::ActiveModel = Entity::find_by_id(doc_id)
-            .one(db)
-            .await?
-            .ok_or(DbErr::Custom("Cannot find.".to_owned()))
-            .map(Into::into)?;
-
-        tag.delete(db).await
+    pub async fn delete_doc_info(db: &DbConn, doc_ids: &Vec<i64>) -> Result<UpdateResult, DbErr> {
+        let mut dids = doc_ids.clone();
+        let mut i: usize = 0;
+        loop {
+            if dids.len() == i {
+                break;
+            }
+            let mut doc: doc_info::ActiveModel = Entity::find_by_id(dids[i])
+                .one(db)
+                .await?
+                .ok_or(DbErr::Custom(format!("Can't find doc:{}", dids[i])))
+                .map(Into::into)?;
+            doc.updated_at = Set(now());
+            doc.is_deleted = Set(true);
+            let _ = doc.update(db).await?;
+
+            for d in doc2_doc::Entity::find().filter(doc2_doc::Column::ParentId.eq(dids[i])).all(db).await? {
+                dids.push(d.did);
+            }
+            let _ = doc2_doc::Entity::delete_many().filter(doc2_doc::Column::ParentId.eq(dids[i])).exec(db).await?;
+            let _ = doc2_doc::Entity::delete_many().filter(doc2_doc::Column::Did.eq(dids[i])).exec(db).await?;
+            i += 1;
+        }
+        crate::service::kb_info::Mutation::remove_docs(&db, dids, None).await
+    }
+
+    pub async fn rename(db: &DbConn, doc_id: i64, name: &String) -> Result<doc_info::Model, DbErr> {
+        let mut doc: doc_info::ActiveModel = Entity::find_by_id(doc_id)
+            .one(db)
+            .await?
+            .ok_or(DbErr::Custom(format!("Can't find doc:{}", doc_id)))
+            .map(Into::into)?;
+        doc.updated_at = Set(now());
+        doc.doc_name = Set(name.clone());
+        doc.update(db).await
    }
 
     pub async fn delete_all_doc_infos(db: &DbConn) -> Result<DeleteResult, DbErr> {
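Note: find_doc_infos_by_params builds its SQL with format!, interpolating uid, keywords, tag.regx, and the caller-supplied sort expression straight into the string, so a crafted keyword can escape the like '%…%' literal. Statement::from_sql_and_values is already called above, just with an empty value list; a hedged sketch of the same call with bound parameters instead (the $n placeholders are Postgres-specific, and identifiers such as the order-by column still need an allow-list rather than binding):

    use sea_orm::{DbBackend, Statement};

    // Sketch: bind user-supplied values rather than format!-ing them into the SQL.
    fn list_stmt(uid: i64, keywords: &str) -> Statement {
        Statement::from_sql_and_values(
            DbBackend::Postgres,
            "select * from doc_info as a where a.uid = $1 and a.is_deleted = false and a.doc_name like $2",
            [uid.into(), format!("%{}%", keywords).into()],
        )
    }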
src/service/kb_info.rs
@@ -1,10 +1,14 @@
-use chrono::Local;
-use sea_orm::{ActiveModelTrait, ColumnTrait, DbConn, DbErr, DeleteResult, EntityTrait, PaginatorTrait, QueryFilter, QueryOrder};
+use chrono::{Local, FixedOffset, Utc};
+use migration::Expr;
+use sea_orm::{ActiveModelTrait, ColumnTrait, DbConn, DbErr, DeleteResult, EntityTrait, PaginatorTrait, QueryFilter, QueryOrder, UpdateResult};
 use sea_orm::ActiveValue::Set;
 use crate::entity::kb_info;
 use crate::entity::kb2_doc;
 use crate::entity::kb_info::Entity;
 
+fn now() -> chrono::DateTime<FixedOffset> {
+    Utc::now().with_timezone(&FixedOffset::east_opt(3600 * 8).unwrap())
+}
+
 pub struct Query;
 
 impl Query {
@@ -30,6 +34,14 @@ impl Query {
         .await
     }
 
+    pub async fn find_kb_by_docs(db: &DbConn, doc_ids: Vec<i64>) -> Result<Vec<kb_info::Model>, DbErr> {
+        let mut kbids = Vec::<i64>::new();
+        for k in kb2_doc::Entity::find().filter(kb2_doc::Column::Did.is_in(doc_ids)).all(db).await? {
+            kbids.push(k.kb_id);
+        }
+        Entity::find().filter(kb_info::Column::KbId.is_in(kbids)).all(db).await
+    }
+
     pub async fn find_kb_infos_in_page(
         db: &DbConn,
         page: u64,
@@ -58,8 +70,9 @@ impl Mutation {
             uid: Set(form_data.uid.to_owned()),
             kb_name: Set(form_data.kb_name.to_owned()),
             icon: Set(form_data.icon.to_owned()),
-            created_at: Set(Local::now().date_naive()),
-            updated_at: Set(Local::now().date_naive()),
+            created_at: Set(now()),
+            updated_at: Set(now()),
+            is_deleted: Default::default()
         }
         .save(db)
         .await
@@ -71,10 +84,20 @@ impl Mutation {
         doc_ids: Vec<i64>
     ) -> Result<(), DbErr> {
         for did in doc_ids {
+            let res = kb2_doc::Entity::find()
+                .filter(kb2_doc::Column::KbId.eq(kb_id))
+                .filter(kb2_doc::Column::Did.eq(did))
+                .all(db)
+                .await?;
+            if res.len() > 0 { continue; }
             let _ = kb2_doc::ActiveModel {
                 id: Default::default(),
                 kb_id: Set(kb_id),
                 did: Set(did),
+                kb_progress: Set(0.0),
+                kb_progress_msg: Set("".to_owned()),
+                updated_at: Set(now()),
+                is_deleted: Default::default()
             }
             .save(db)
             .await?;
@@ -83,6 +106,25 @@ impl Mutation {
         Ok(())
     }
 
+    pub async fn remove_docs(
+        db: &DbConn,
+        doc_ids: Vec<i64>,
+        kb_id: Option<i64>
+    ) -> Result<UpdateResult, DbErr> {
+        let update = kb2_doc::Entity::update_many()
+            .col_expr(kb2_doc::Column::IsDeleted, Expr::value(true))
+            .col_expr(kb2_doc::Column::KbProgress, Expr::value(0))
+            .col_expr(kb2_doc::Column::KbProgressMsg, Expr::value(""))
+            .filter(kb2_doc::Column::Did.is_in(doc_ids));
+        if let Some(kbid) = kb_id {
+            update.filter(kb2_doc::Column::KbId.eq(kbid))
+                .exec(db)
+                .await
+        } else {
+            update.exec(db).await
+        }
+    }
+
     pub async fn update_kb_info_by_id(
         db: &DbConn,
         id: i64,
@@ -99,21 +141,22 @@ impl Mutation {
             uid: kb_info.uid,
             kb_name: Set(form_data.kb_name.to_owned()),
             icon: Set(form_data.icon.to_owned()),
-            created_at: Default::default(),
-            updated_at: Set(Local::now().date_naive()),
+            created_at: kb_info.created_at,
+            updated_at: Set(now()),
+            is_deleted: Default::default()
         }
         .update(db)
         .await
     }
 
     pub async fn delete_kb_info(db: &DbConn, kb_id: i64) -> Result<DeleteResult, DbErr> {
-        let tag: kb_info::ActiveModel = Entity::find_by_id(kb_id)
+        let kb: kb_info::ActiveModel = Entity::find_by_id(kb_id)
             .one(db)
             .await?
             .ok_or(DbErr::Custom("Cannot find.".to_owned()))
             .map(Into::into)?;
 
-        tag.delete(db).await
+        kb.delete(db).await
     }
 
     pub async fn delete_all_kb_infos(db: &DbConn) -> Result<DeleteResult, DbErr> {
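Note: the duplicate check in add_docs_to_kb fetches whole rows only to test for existence. A lighter sketch using the count query that PaginatorTrait (already imported in this module) provides; whether this exact method is available depends on the sea-orm version the project pins:

    // Sketch: same module imports; db, kb_id and did as in the loop above.
    let exists = kb2_doc::Entity::find()
        .filter(kb2_doc::Column::KbId.eq(kb_id))
        .filter(kb2_doc::Column::Did.eq(did))
        .count(db)
        .await? > 0;
    if exists { continue; }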
src/service/tag_info.rs
@@ -1,18 +1,24 @@
-use chrono::Local;
-use sea_orm::{ActiveModelTrait, DbConn, DbErr, DeleteResult, EntityTrait, PaginatorTrait, QueryOrder};
+use chrono::{FixedOffset, Utc};
+use sea_orm::{ActiveModelTrait, DbConn, DbErr, DeleteResult, EntityTrait, PaginatorTrait, QueryOrder, ColumnTrait, QueryFilter};
 use sea_orm::ActiveValue::Set;
 use crate::entity::tag_info;
 use crate::entity::tag_info::Entity;
 
+fn now() -> chrono::DateTime<FixedOffset> {
+    Utc::now().with_timezone(&FixedOffset::east_opt(3600 * 8).unwrap())
+}
+
 pub struct Query;
 
 impl Query {
-    pub async fn find_tag_info_by_id(db: &DbConn, id: i64) -> Result<Option<tag_info::Model>, DbErr> {
+    pub async fn find_tag_info_by_id(id: i64, db: &DbConn) -> Result<Option<tag_info::Model>, DbErr> {
         Entity::find_by_id(id).one(db).await
     }
 
-    pub async fn find_tag_infos(db: &DbConn) -> Result<Vec<tag_info::Model>, DbErr> {
-        Entity::find().all(db).await
+    pub async fn find_tags_by_uid(uid: i64, db: &DbConn) -> Result<Vec<tag_info::Model>, DbErr> {
+        Entity::find()
+            .filter(tag_info::Column::Uid.eq(uid))
+            .all(db)
+            .await
     }
 
     pub async fn find_tag_infos_in_page(
@@ -45,9 +51,9 @@ impl Mutation {
             regx: Set(form_data.regx.to_owned()),
             color: Set(form_data.color.to_owned()),
             icon: Set(form_data.icon.to_owned()),
-            dir: Set(form_data.dir.to_owned()),
-            created_at: Set(Local::now().date_naive()),
-            updated_at: Set(Local::now().date_naive()),
+            folder_id: Set(form_data.folder_id.to_owned()),
+            created_at: Set(now()),
+            updated_at: Set(now()),
         }
         .save(db)
         .await
@@ -71,9 +77,9 @@ impl Mutation {
             regx: Set(form_data.regx.to_owned()),
             color: Set(form_data.color.to_owned()),
             icon: Set(form_data.icon.to_owned()),
-            dir: Set(form_data.dir.to_owned()),
+            folder_id: Set(form_data.folder_id.to_owned()),
             created_at: Default::default(),
-            updated_at: Set(Local::now().date_naive()),
+            updated_at: Set(now()),
         }
         .update(db)
         .await
src/service/user_info.rs
@@ -1,9 +1,13 @@
-use chrono::Local;
-use sea_orm::{ActiveModelTrait, ColumnTrait, DbConn, DbErr, DeleteResult, EntityTrait, PaginatorTrait, QueryFilter, QueryOrder};
+use chrono::{FixedOffset, Utc};
+use migration::Expr;
+use sea_orm::{ActiveModelTrait, ColumnTrait, DbConn, DbErr, DeleteResult, EntityTrait, PaginatorTrait, QueryFilter, QueryOrder, UpdateResult};
 use sea_orm::ActiveValue::Set;
 use crate::entity::user_info;
 use crate::entity::user_info::Entity;
 
+fn now() -> chrono::DateTime<FixedOffset> {
+    Utc::now().with_timezone(&FixedOffset::east_opt(3600 * 8).unwrap())
+}
+
 pub struct Query;
 
 impl Query {
@@ -44,52 +48,64 @@ pub struct Mutation;
 impl Mutation {
     pub async fn create_user(
         db: &DbConn,
-        form_data: user_info::Model,
+        form_data: &user_info::Model,
     ) -> Result<user_info::ActiveModel, DbErr> {
         user_info::ActiveModel {
             uid: Default::default(),
             email: Set(form_data.email.to_owned()),
             nickname: Set(form_data.nickname.to_owned()),
-            avatar_url: Set(form_data.avatar_url.to_owned()),
-            color_schema: Set(form_data.color_schema.to_owned()),
+            avatar_base64: Set(form_data.avatar_base64.to_owned()),
+            color_scheme: Set(form_data.color_scheme.to_owned()),
             list_style: Set(form_data.list_style.to_owned()),
             language: Set(form_data.language.to_owned()),
             password: Set(form_data.password.to_owned()),
-            created_at: Set(Local::now().date_naive()),
-            updated_at: Set(Local::now().date_naive()),
+            last_login_at: Set(now()),
+            created_at: Set(now()),
+            updated_at: Set(now()),
         }
         .save(db)
         .await
     }
 
-    pub async fn update_tag_by_id(
+    pub async fn update_user_by_id(
         db: &DbConn,
-        id: i64,
-        form_data: user_info::Model,
+        form_data: &user_info::Model,
     ) -> Result<user_info::Model, DbErr> {
-        let user: user_info::ActiveModel = Entity::find_by_id(id)
+        let usr: user_info::ActiveModel = Entity::find_by_id(form_data.uid)
             .one(db)
             .await?
-            .ok_or(DbErr::Custom("Cannot find tag.".to_owned()))
+            .ok_or(DbErr::Custom("Cannot find user.".to_owned()))
             .map(Into::into)?;
 
         user_info::ActiveModel {
-            uid: user.uid,
+            uid: Set(form_data.uid),
             email: Set(form_data.email.to_owned()),
             nickname: Set(form_data.nickname.to_owned()),
-            avatar_url: Set(form_data.avatar_url.to_owned()),
-            color_schema: Set(form_data.color_schema.to_owned()),
+            avatar_base64: Set(form_data.avatar_base64.to_owned()),
+            color_scheme: Set(form_data.color_scheme.to_owned()),
             list_style: Set(form_data.list_style.to_owned()),
             language: Set(form_data.language.to_owned()),
             password: Set(form_data.password.to_owned()),
-            created_at: Default::default(),
-            updated_at: Set(Local::now().date_naive()),
+            updated_at: Set(now()),
+            last_login_at: usr.last_login_at,
+            created_at: usr.created_at,
         }
         .update(db)
         .await
     }
 
-    pub async fn delete_tag(db: &DbConn, tid: i64) -> Result<DeleteResult, DbErr> {
+    pub async fn update_login_status(
+        uid: i64,
+        db: &DbConn
+    ) -> Result<UpdateResult, DbErr> {
+        Entity::update_many()
+            .col_expr(user_info::Column::LastLoginAt, Expr::value(now()))
+            .filter(user_info::Column::Uid.eq(uid))
+            .exec(db)
+            .await
+    }
+
+    pub async fn delete_user(db: &DbConn, tid: i64) -> Result<DeleteResult, DbErr> {
         let tag: user_info::ActiveModel = Entity::find_by_id(tid)
             .one(db)
             .await?
@@ -99,7 +115,7 @@ impl Mutation {
         tag.delete(db).await
     }
 
-    pub async fn delete_all_tags(db: &DbConn) -> Result<DeleteResult, DbErr> {
+    pub async fn delete_all(db: &DbConn) -> Result<DeleteResult, DbErr> {
         Entity::delete_many().exec(db).await
     }
 }
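Note: the identical UTC+8 now() helper is pasted into five service modules in this commit. A hedged consolidation sketch; the shared location (for example a service/mod.rs) is hypothetical:

    use chrono::{DateTime, FixedOffset, Utc};

    // One shared clock helper instead of five copies; UTC+8 is the offset
    // hard-coded throughout the services.
    pub fn now() -> DateTime<FixedOffset> {
        Utc::now().with_timezone(&FixedOffset::east_opt(8 * 3600).unwrap())
    }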