format code (#14)

KevinHuSh 2023-12-25 10:17:13 +08:00 committed by GitHub
parent 8a65ad888e
commit 6858ec5f38
30 changed files with 1062 additions and 771 deletions

View File

@ -1,8 +1,8 @@
use sea_orm_migration::{prelude::*, sea_orm::Statement};
use chrono::{FixedOffset, Utc};
use sea_orm_migration::{ prelude::*, sea_orm::Statement };
use chrono::{ FixedOffset, Utc };
fn now()->chrono::DateTime<FixedOffset>{
Utc::now().with_timezone(&FixedOffset::east_opt(3600*8).unwrap())
fn now() -> chrono::DateTime<FixedOffset> {
Utc::now().with_timezone(&FixedOffset::east_opt(3600 * 8).unwrap())
}
#[derive(DeriveMigrationName)]
pub struct Migration;
@ -10,8 +10,7 @@ pub struct Migration;
#[async_trait::async_trait]
impl MigrationTrait for Migration {
async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> {
manager
.create_table(
manager.create_table(
Table::create()
.table(UserInfo::Table)
.if_not_exists()
@ -20,7 +19,7 @@ impl MigrationTrait for Migration {
.big_integer()
.not_null()
.auto_increment()
.primary_key(),
.primary_key()
)
.col(ColumnDef::new(UserInfo::Email).string().not_null())
.col(ColumnDef::new(UserInfo::Nickname).string().not_null())
@ -29,16 +28,28 @@ impl MigrationTrait for Migration {
.col(ColumnDef::new(UserInfo::ListStyle).string().default("list"))
.col(ColumnDef::new(UserInfo::Language).string().default("chinese"))
.col(ColumnDef::new(UserInfo::Password).string().not_null())
.col(ColumnDef::new(UserInfo::LastLoginAt).timestamp_with_time_zone().default(Expr::current_timestamp()))
.col(ColumnDef::new(UserInfo::CreatedAt).timestamp_with_time_zone().default(Expr::current_timestamp()).not_null())
.col(ColumnDef::new(UserInfo::UpdatedAt).timestamp_with_time_zone().default(Expr::current_timestamp()).not_null())
.col(ColumnDef::new(UserInfo::IsDeleted).boolean().default(false))
.to_owned(),
.col(
ColumnDef::new(UserInfo::LastLoginAt)
.timestamp_with_time_zone()
.default(Expr::current_timestamp())
)
.await?;
.col(
ColumnDef::new(UserInfo::CreatedAt)
.timestamp_with_time_zone()
.default(Expr::current_timestamp())
.not_null()
)
.col(
ColumnDef::new(UserInfo::UpdatedAt)
.timestamp_with_time_zone()
.default(Expr::current_timestamp())
.not_null()
)
.col(ColumnDef::new(UserInfo::IsDeleted).boolean().default(false))
.to_owned()
).await?;
manager
.create_table(
manager.create_table(
Table::create()
.table(TagInfo::Table)
.if_not_exists()
@ -47,7 +58,7 @@ impl MigrationTrait for Migration {
.big_integer()
.not_null()
.auto_increment()
.primary_key(),
.primary_key()
)
.col(ColumnDef::new(TagInfo::Uid).big_integer().not_null())
.col(ColumnDef::new(TagInfo::TagName).string().not_null())
@ -55,15 +66,23 @@ impl MigrationTrait for Migration {
.col(ColumnDef::new(TagInfo::Color).tiny_unsigned().default(1))
.col(ColumnDef::new(TagInfo::Icon).tiny_unsigned().default(1))
.col(ColumnDef::new(TagInfo::FolderId).big_integer())
.col(ColumnDef::new(TagInfo::CreatedAt).timestamp_with_time_zone().default(Expr::current_timestamp()).not_null())
.col(ColumnDef::new(TagInfo::UpdatedAt).timestamp_with_time_zone().default(Expr::current_timestamp()).not_null())
.col(ColumnDef::new(TagInfo::IsDeleted).boolean().default(false))
.to_owned(),
.col(
ColumnDef::new(TagInfo::CreatedAt)
.timestamp_with_time_zone()
.default(Expr::current_timestamp())
.not_null()
)
.await?;
.col(
ColumnDef::new(TagInfo::UpdatedAt)
.timestamp_with_time_zone()
.default(Expr::current_timestamp())
.not_null()
)
.col(ColumnDef::new(TagInfo::IsDeleted).boolean().default(false))
.to_owned()
).await?;
manager
.create_table(
manager.create_table(
Table::create()
.table(Tag2Doc::Table)
.if_not_exists()
@ -72,16 +91,14 @@ impl MigrationTrait for Migration {
.big_integer()
.not_null()
.auto_increment()
.primary_key(),
.primary_key()
)
.col(ColumnDef::new(Tag2Doc::TagId).big_integer())
.col(ColumnDef::new(Tag2Doc::Did).big_integer())
.to_owned(),
)
.await?;
.to_owned()
).await?;
manager
.create_table(
manager.create_table(
Table::create()
.table(Kb2Doc::Table)
.if_not_exists()
@ -90,20 +107,23 @@ impl MigrationTrait for Migration {
.big_integer()
.not_null()
.auto_increment()
.primary_key(),
.primary_key()
)
.col(ColumnDef::new(Kb2Doc::KbId).big_integer())
.col(ColumnDef::new(Kb2Doc::Did).big_integer())
.col(ColumnDef::new(Kb2Doc::KbProgress).float().default(0))
.col(ColumnDef::new(Kb2Doc::KbProgressMsg).string().default(""))
.col(ColumnDef::new(Kb2Doc::UpdatedAt).timestamp_with_time_zone().default(Expr::current_timestamp()).not_null())
.col(ColumnDef::new(Kb2Doc::IsDeleted).boolean().default(false))
.to_owned(),
.col(
ColumnDef::new(Kb2Doc::UpdatedAt)
.timestamp_with_time_zone()
.default(Expr::current_timestamp())
.not_null()
)
.await?;
.col(ColumnDef::new(Kb2Doc::IsDeleted).boolean().default(false))
.to_owned()
).await?;
manager
.create_table(
manager.create_table(
Table::create()
.table(Dialog2Kb::Table)
.if_not_exists()
@ -112,16 +132,14 @@ impl MigrationTrait for Migration {
.big_integer()
.not_null()
.auto_increment()
.primary_key(),
.primary_key()
)
.col(ColumnDef::new(Dialog2Kb::DialogId).big_integer())
.col(ColumnDef::new(Dialog2Kb::KbId).big_integer())
.to_owned(),
)
.await?;
.to_owned()
).await?;
manager
.create_table(
manager.create_table(
Table::create()
.table(Doc2Doc::Table)
.if_not_exists()
@ -130,106 +148,157 @@ impl MigrationTrait for Migration {
.big_integer()
.not_null()
.auto_increment()
.primary_key(),
.primary_key()
)
.col(ColumnDef::new(Doc2Doc::ParentId).big_integer())
.col(ColumnDef::new(Doc2Doc::Did).big_integer())
.to_owned(),
)
.await?;
.to_owned()
).await?;
manager
.create_table(
manager.create_table(
Table::create()
.table(KbInfo::Table)
.if_not_exists()
.col(ColumnDef::new(KbInfo::KbId).big_integer()
.col(
ColumnDef::new(KbInfo::KbId)
.big_integer()
.auto_increment()
.not_null()
.primary_key())
.primary_key()
)
.col(ColumnDef::new(KbInfo::Uid).big_integer().not_null())
.col(ColumnDef::new(KbInfo::KbName).string().not_null())
.col(ColumnDef::new(KbInfo::Icon).tiny_unsigned().default(1))
.col(ColumnDef::new(KbInfo::CreatedAt).timestamp_with_time_zone().default(Expr::current_timestamp()).not_null())
.col(ColumnDef::new(KbInfo::UpdatedAt).timestamp_with_time_zone().default(Expr::current_timestamp()).not_null())
.col(ColumnDef::new(KbInfo::IsDeleted).boolean().default(false))
.to_owned(),
.col(
ColumnDef::new(KbInfo::CreatedAt)
.timestamp_with_time_zone()
.default(Expr::current_timestamp())
.not_null()
)
.await?;
.col(
ColumnDef::new(KbInfo::UpdatedAt)
.timestamp_with_time_zone()
.default(Expr::current_timestamp())
.not_null()
)
.col(ColumnDef::new(KbInfo::IsDeleted).boolean().default(false))
.to_owned()
).await?;
manager
.create_table(
manager.create_table(
Table::create()
.table(DocInfo::Table)
.if_not_exists()
.col(ColumnDef::new(DocInfo::Did).big_integer()
.col(
ColumnDef::new(DocInfo::Did)
.big_integer()
.not_null()
.auto_increment()
.primary_key())
.primary_key()
)
.col(ColumnDef::new(DocInfo::Uid).big_integer().not_null())
.col(ColumnDef::new(DocInfo::DocName).string().not_null())
.col(ColumnDef::new(DocInfo::Location).string().not_null())
.col(ColumnDef::new(DocInfo::Size).big_integer().not_null())
.col(ColumnDef::new(DocInfo::Type).string().not_null()).comment("doc|folder")
.col(ColumnDef::new(DocInfo::CreatedAt).timestamp_with_time_zone().default(Expr::current_timestamp()).not_null())
.col(ColumnDef::new(DocInfo::UpdatedAt).timestamp_with_time_zone().default(Expr::current_timestamp()).not_null())
.col(ColumnDef::new(DocInfo::IsDeleted).boolean().default(false))
.to_owned(),
.col(ColumnDef::new(DocInfo::Type).string().not_null())
.comment("doc|folder")
.col(
ColumnDef::new(DocInfo::CreatedAt)
.timestamp_with_time_zone()
.default(Expr::current_timestamp())
.not_null()
)
.await?;
.col(
ColumnDef::new(DocInfo::UpdatedAt)
.timestamp_with_time_zone()
.default(Expr::current_timestamp())
.not_null()
)
.col(ColumnDef::new(DocInfo::IsDeleted).boolean().default(false))
.to_owned()
).await?;
manager
.create_table(
manager.create_table(
Table::create()
.table(DialogInfo::Table)
.if_not_exists()
.col(ColumnDef::new(DialogInfo::DialogId)
.col(
ColumnDef::new(DialogInfo::DialogId)
.big_integer()
.not_null()
.auto_increment()
.primary_key())
.primary_key()
)
.col(ColumnDef::new(DialogInfo::Uid).big_integer().not_null())
.col(ColumnDef::new(DialogInfo::KbId).big_integer().not_null())
.col(ColumnDef::new(DialogInfo::DialogName).string().not_null())
.col(ColumnDef::new(DialogInfo::History).string().comment("json"))
.col(ColumnDef::new(DialogInfo::CreatedAt).timestamp_with_time_zone().default(Expr::current_timestamp()).not_null())
.col(ColumnDef::new(DialogInfo::UpdatedAt).timestamp_with_time_zone().default(Expr::current_timestamp()).not_null())
.col(ColumnDef::new(DialogInfo::IsDeleted).boolean().default(false))
.to_owned(),
.col(
ColumnDef::new(DialogInfo::CreatedAt)
.timestamp_with_time_zone()
.default(Expr::current_timestamp())
.not_null()
)
.await?;
.col(
ColumnDef::new(DialogInfo::UpdatedAt)
.timestamp_with_time_zone()
.default(Expr::current_timestamp())
.not_null()
)
.col(ColumnDef::new(DialogInfo::IsDeleted).boolean().default(false))
.to_owned()
).await?;
let tm = now();
let root_insert = Query::insert()
.into_table(UserInfo::Table)
.columns([UserInfo::Email, UserInfo::Nickname, UserInfo::Password])
.values_panic([
"kai.hu@infiniflow.org".into(),
"root".into(),
"123456".into()
])
.values_panic(["kai.hu@infiniflow.org".into(), "root".into(), "123456".into()])
.to_owned();
let doc_insert = Query::insert()
.into_table(DocInfo::Table)
.columns([DocInfo::Uid, DocInfo::DocName, DocInfo::Size, DocInfo::Type,
DocInfo::Location])
.values_panic([
1.into(),
"/".into(),
0.into(),
"folder".into(),
"".into()
.columns([
DocInfo::Uid,
DocInfo::DocName,
DocInfo::Size,
DocInfo::Type,
DocInfo::Location,
])
.values_panic([(1).into(), "/".into(), (0).into(), "folder".into(), "".into()])
.to_owned();
let tag_insert = Query::insert()
.into_table(TagInfo::Table)
.columns([TagInfo::Uid, TagInfo::TagName, TagInfo::Regx, TagInfo::Color, TagInfo::Icon])
.values_panic([1.into(), "视频".into(),".*\\.(mpg|mpeg|avi|rm|rmvb|mov|wmv|asf|dat|asx|wvx|mpe|mpa)".into(),1.into(),1.into()])
.values_panic([1.into(), "图片".into(),".*\\.(png|tif|gif|pcx|tga|exif|fpx|svg|psd|cdr|pcd|dxf|ufo|eps|ai|raw|WMF|webp|avif|apng)".into(),2.into(),2.into()])
.values_panic([1.into(), "音乐".into(),".*\\.(WAV|FLAC|APE|ALAC|WavPack|WV|MP3|AAC|Ogg|Vorbis|Opus)".into(),3.into(),3.into()])
.values_panic([1.into(), "文档".into(),".*\\.(pdf|doc|ppt|yml|xml|htm|json|csv|txt|ini|xsl|wps|rtf|hlp)".into(),3.into(),3.into()])
.values_panic([
(1).into(),
"视频".into(),
".*\\.(mpg|mpeg|avi|rm|rmvb|mov|wmv|asf|dat|asx|wvx|mpe|mpa)".into(),
(1).into(),
(1).into(),
])
.values_panic([
(1).into(),
"图片".into(),
".*\\.(png|tif|gif|pcx|tga|exif|fpx|svg|psd|cdr|pcd|dxf|ufo|eps|ai|raw|WMF|webp|avif|apng)".into(),
(2).into(),
(2).into(),
])
.values_panic([
(1).into(),
"音乐".into(),
".*\\.(WAV|FLAC|APE|ALAC|WavPack|WV|MP3|AAC|Ogg|Vorbis|Opus)".into(),
(3).into(),
(3).into(),
])
.values_panic([
(1).into(),
"文档".into(),
".*\\.(pdf|doc|ppt|yml|xml|htm|json|csv|txt|ini|xsl|wps|rtf|hlp)".into(),
(3).into(),
(3).into(),
])
.to_owned();
manager.exec_stmt(root_insert).await?;
@ -239,41 +308,23 @@ impl MigrationTrait for Migration {
}
async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> {
manager
.drop_table(Table::drop().table(UserInfo::Table).to_owned())
.await?;
manager.drop_table(Table::drop().table(UserInfo::Table).to_owned()).await?;
manager
.drop_table(Table::drop().table(TagInfo::Table).to_owned())
.await?;
manager.drop_table(Table::drop().table(TagInfo::Table).to_owned()).await?;
manager
.drop_table(Table::drop().table(Tag2Doc::Table).to_owned())
.await?;
manager.drop_table(Table::drop().table(Tag2Doc::Table).to_owned()).await?;
manager
.drop_table(Table::drop().table(Kb2Doc::Table).to_owned())
.await?;
manager.drop_table(Table::drop().table(Kb2Doc::Table).to_owned()).await?;
manager
.drop_table(Table::drop().table(Dialog2Kb::Table).to_owned())
.await?;
manager.drop_table(Table::drop().table(Dialog2Kb::Table).to_owned()).await?;
manager
.drop_table(Table::drop().table(Doc2Doc::Table).to_owned())
.await?;
manager.drop_table(Table::drop().table(Doc2Doc::Table).to_owned()).await?;
manager
.drop_table(Table::drop().table(KbInfo::Table).to_owned())
.await?;
manager.drop_table(Table::drop().table(KbInfo::Table).to_owned()).await?;
manager
.drop_table(Table::drop().table(DocInfo::Table).to_owned())
.await?;
manager.drop_table(Table::drop().table(DocInfo::Table).to_owned()).await?;
manager
.drop_table(Table::drop().table(DialogInfo::Table).to_owned())
.await?;
manager.drop_table(Table::drop().table(DialogInfo::Table).to_owned()).await?;
Ok(())
}

python/llm/__init__.py Normal file
View File

@ -0,0 +1 @@
from .embedding_model import HuEmbedding

View File

@ -0,0 +1,31 @@
from abc import ABC
from FlagEmbedding import FlagModel
import torch
class Base(ABC):
def encode(self, texts: list, batch_size=32):
raise NotImplementedError("Please implement encode method!")
class HuEmbedding(Base):
def __init__(self):
"""
If you have trouble downloading HuggingFace models, -_^ this might help!!
For Linux:
export HF_ENDPOINT=https://hf-mirror.com
For Windows:
Good luck
^_-
"""
self.model = FlagModel("BAAI/bge-large-zh-v1.5",
query_instruction_for_retrieval="为这个句子生成表示以用于检索相关文章:",
use_fp16=torch.cuda.is_available())
def encode(self, texts: list, batch_size=32):
res = []
for i in range(0, len(texts), batch_size):
res.extend(self.model.encode(texts[i:i + batch_size]))  # encode each batch with the underlying FlagModel
return res
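
A minimal usage sketch of the new HuEmbedding wrapper (assuming FlagEmbedding and torch are installed and the BAAI/bge-large-zh-v1.5 weights are reachable; the sample sentences and printed shape are illustrative, not taken from this diff):

from llm import HuEmbedding

model = HuEmbedding()  # loads BAAI/bge-large-zh-v1.5, fp16 only when CUDA is available
vectors = model.encode(["hello world", "how are you"], batch_size=32)
print(len(vectors))  # one embedding vector per input sentence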

View File

@ -209,10 +209,8 @@ def rm_doc_from_kb(df):
def main(comm, mod):
global model
from FlagEmbedding import FlagModel
model = FlagModel('/opt/home/kevinhu/data/bge-large-zh-v1.5/',
query_instruction_for_retrieval="为这个句子生成表示以用于检索相关文章:",
use_fp16=torch.cuda.is_available())
from llm import HuEmbedding
model = HuEmbedding()
tm_fnm = f"res/{comm}-{mod}.tm"
tm = findMaxDt(tm_fnm)
rows = collect(comm, mod, tm)

View File

@ -16,7 +16,9 @@ class Config:
def get(self, key, default=None):
global CF
return CF[self.env].get(key, default)
return os.environ.get(key.upper(), \
CF[self.env].get(key, default)
)
def init(env):
return Config(env)
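
With this change Config.get prefers an upper-cased environment variable over the value stored in CF for the active environment. A minimal sketch of that precedence (the "postgres_host" key and "dev" environment name are illustrative, not taken from this diff):

import os

cfg = init("dev")
print(cfg.get("postgres_host", "127.0.0.1"))  # falls back to CF["dev"] (or the default) while POSTGRES_HOST is unset

os.environ["POSTGRES_HOST"] = "10.0.0.5"
print(cfg.get("postgres_host"))  # the environment variable now wins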

View File

@ -49,11 +49,7 @@ class Postgres(object):
cur = self.conn.cursor()
cur.execute(sql)
updated_rows = cur.rowcount
self.conn.commit()
cur.close()
return updated_rows
except Exception as e:

View File

@ -1,11 +1,10 @@
use std::collections::HashMap;
use actix_web::{HttpResponse, post, web};
use actix_web::{ HttpResponse, post, web };
use serde::Deserialize;
use serde_json::Value;
use serde_json::json;
use crate::api::JsonResponse;
use crate::AppState;
use crate::entity::dialog_info;
use crate::errors::AppError;
use crate::service::dialog_info::Query;
use crate::service::dialog_info::Mutation;
@ -13,17 +12,23 @@ use crate::service::dialog_info::Mutation;
#[derive(Debug, Deserialize)]
pub struct ListParams {
pub uid: i64,
pub dialog_id: Option<i64>
pub dialog_id: Option<i64>,
}
#[post("/v1.0/dialogs")]
async fn list(params: web::Json<ListParams>, data: web::Data<AppState>) -> Result<HttpResponse, AppError> {
async fn list(
params: web::Json<ListParams>,
data: web::Data<AppState>
) -> Result<HttpResponse, AppError> {
let mut result = HashMap::new();
if let Some(dia_id) = params.dialog_id{
if let Some(dia_id) = params.dialog_id {
let dia = Query::find_dialog_info_by_id(&data.conn, dia_id).await?.unwrap();
let kb = crate::service::kb_info::Query::find_kb_info_by_id(&data.conn, dia.kb_id).await?.unwrap();
let kb = crate::service::kb_info::Query
::find_kb_info_by_id(&data.conn, dia.kb_id).await?
.unwrap();
print!("{:?}", dia.history);
let hist:Value = serde_json::from_str(&dia.history)?;
let detail = json!({
let hist: Value = serde_json::from_str(&dia.history)?;
let detail =
json!({
"dialog_id": dia_id,
"dialog_name": dia.dialog_name.to_owned(),
"created_at": dia.created_at.to_string().to_owned(),
@ -33,20 +38,23 @@ async fn list(params: web::Json<ListParams>, data: web::Data<AppState>) -> Resul
});
result.insert("dialogs", vec![detail]);
}
else{
} else {
let mut dias = Vec::<Value>::new();
for dia in Query::find_dialog_infos_by_uid(&data.conn, params.uid).await?{
let kb = crate::service::kb_info::Query::find_kb_info_by_id(&data.conn, dia.kb_id).await?.unwrap();
let hist:Value = serde_json::from_str(&dia.history)?;
dias.push(json!({
for dia in Query::find_dialog_infos_by_uid(&data.conn, params.uid).await? {
let kb = crate::service::kb_info::Query
::find_kb_info_by_id(&data.conn, dia.kb_id).await?
.unwrap();
let hist: Value = serde_json::from_str(&dia.history)?;
dias.push(
json!({
"dialog_id": dia.dialog_id,
"dialog_name": dia.dialog_name.to_owned(),
"created_at": dia.created_at.to_string().to_owned(),
"updated_at": dia.updated_at.to_string().to_owned(),
"history": hist,
"kb_info": kb
}));
})
);
}
result.insert("dialogs", dias);
}
@ -56,18 +64,23 @@ async fn list(params: web::Json<ListParams>, data: web::Data<AppState>) -> Resul
data: result,
};
Ok(HttpResponse::Ok()
Ok(
HttpResponse::Ok()
.content_type("application/json")
.body(serde_json::to_string(&json_response)?))
.body(serde_json::to_string(&json_response)?)
)
}
#[derive(Debug, Deserialize)]
pub struct RmParams {
pub uid: i64,
pub dialog_id: i64
pub dialog_id: i64,
}
#[post("/v1.0/delete_dialog")]
async fn delete(params: web::Json<RmParams>, data: web::Data<AppState>) -> Result<HttpResponse, AppError> {
async fn delete(
params: web::Json<RmParams>,
data: web::Data<AppState>
) -> Result<HttpResponse, AppError> {
let _ = Mutation::delete_dialog_info(&data.conn, params.dialog_id).await?;
let json_response = JsonResponse {
@ -76,9 +89,11 @@ async fn delete(params: web::Json<RmParams>, data: web::Data<AppState>) -> Resul
data: (),
};
Ok(HttpResponse::Ok()
Ok(
HttpResponse::Ok()
.content_type("application/json")
.body(serde_json::to_string(&json_response)?))
.body(serde_json::to_string(&json_response)?)
)
}
#[derive(Debug, Deserialize)]
@ -86,18 +101,30 @@ pub struct CreateParams {
pub uid: i64,
pub dialog_id: Option<i64>,
pub kb_id: i64,
pub name: String
pub name: String,
}
#[post("/v1.0/create_dialog")]
async fn create(param: web::Json<CreateParams>, data: web::Data<AppState>) -> Result<HttpResponse, AppError> {
async fn create(
param: web::Json<CreateParams>,
data: web::Data<AppState>
) -> Result<HttpResponse, AppError> {
let mut result = HashMap::new();
if let Some(dia_id) = param.dialog_id {
result.insert("dialog_id", dia_id);
let dia = Query::find_dialog_info_by_id(&data.conn, dia_id).await?;
let _ = Mutation::update_dialog_info_by_id(&data.conn, dia_id, &param.name, &dia.unwrap().history).await?;
}
else{
let dia = Mutation::create_dialog_info(&data.conn, param.uid, param.kb_id, &param.name).await?;
let _ = Mutation::update_dialog_info_by_id(
&data.conn,
dia_id,
&param.name,
&dia.unwrap().history
).await?;
} else {
let dia = Mutation::create_dialog_info(
&data.conn,
param.uid,
param.kb_id,
&param.name
).await?;
result.insert("dialog_id", dia.dialog_id.unwrap());
}
@ -107,37 +134,45 @@ async fn create(param: web::Json<CreateParams>, data: web::Data<AppState>) -> Re
data: result,
};
Ok(HttpResponse::Ok()
Ok(
HttpResponse::Ok()
.content_type("application/json")
.body(serde_json::to_string(&json_response)?))
.body(serde_json::to_string(&json_response)?)
)
}
#[derive(Debug, Deserialize)]
pub struct UpdateHistoryParams {
pub uid: i64,
pub dialog_id: i64,
pub history: Value
pub history: Value,
}
#[post("/v1.0/update_history")]
async fn update_history(param: web::Json<UpdateHistoryParams>, data: web::Data<AppState>) -> Result<HttpResponse, AppError> {
async fn update_history(
param: web::Json<UpdateHistoryParams>,
data: web::Data<AppState>
) -> Result<HttpResponse, AppError> {
let mut json_response = JsonResponse {
code: 200,
err: "".to_owned(),
data: (),
};
if let Some(dia) = Query::find_dialog_info_by_id(&data.conn, param.dialog_id).await?{
let _ = Mutation::update_dialog_info_by_id(&data.conn, param.dialog_id, &dia.dialog_name,
&param.history.to_string()).await?;
}
else{
if let Some(dia) = Query::find_dialog_info_by_id(&data.conn, param.dialog_id).await? {
let _ = Mutation::update_dialog_info_by_id(
&data.conn,
param.dialog_id,
&dia.dialog_name,
&param.history.to_string()
).await?;
} else {
json_response.code = 500;
json_response.err = "Can't find dialog data!".to_owned();
}
Ok(HttpResponse::Ok()
Ok(
HttpResponse::Ok()
.content_type("application/json")
.body(serde_json::to_string(&json_response)?))
.body(serde_json::to_string(&json_response)?)
)
}

View File

@ -1,18 +1,18 @@
use std::collections::HashMap;
use std::io::Write;
use actix_multipart_extract::{File, Multipart, MultipartForm};
use actix_web::{get, HttpResponse, post, web};
use chrono::{Utc, FixedOffset};
use actix_multipart_extract::{ File, Multipart, MultipartForm };
use actix_web::{ HttpResponse, post, web };
use chrono::{ Utc, FixedOffset };
use sea_orm::DbConn;
use crate::api::JsonResponse;
use crate::AppState;
use crate::entity::doc_info::Model;
use crate::errors::AppError;
use crate::service::doc_info::{Mutation, Query};
use crate::service::doc_info::{ Mutation, Query };
use serde::Deserialize;
fn now()->chrono::DateTime<FixedOffset>{
Utc::now().with_timezone(&FixedOffset::east_opt(3600*8).unwrap())
fn now() -> chrono::DateTime<FixedOffset> {
Utc::now().with_timezone(&FixedOffset::east_opt(3600 * 8).unwrap())
}
#[derive(Debug, Deserialize)]
@ -33,9 +33,11 @@ pub struct FilterParams {
}
#[post("/v1.0/docs")]
async fn list(params: web::Json<ListParams>, data: web::Data<AppState>) -> Result<HttpResponse, AppError> {
let docs = Query::find_doc_infos_by_params(&data.conn, params.into_inner())
.await?;
async fn list(
params: web::Json<ListParams>,
data: web::Data<AppState>
) -> Result<HttpResponse, AppError> {
let docs = Query::find_doc_infos_by_params(&data.conn, params.into_inner()).await?;
let mut result = HashMap::new();
result.insert("docs", docs);
@ -46,9 +48,11 @@ async fn list(params: web::Json<ListParams>, data: web::Data<AppState>) -> Resul
data: result,
};
Ok(HttpResponse::Ok()
Ok(
HttpResponse::Ok()
.content_type("application/json")
.body(serde_json::to_string(&json_response)?))
.body(serde_json::to_string(&json_response)?)
)
}
#[derive(Deserialize, MultipartForm, Debug)]
@ -56,35 +60,58 @@ pub struct UploadForm {
#[multipart(max_size = 512MB)]
file_field: File,
uid: i64,
did: i64
did: i64,
}
#[post("/v1.0/upload")]
async fn upload(payload: Multipart<UploadForm>, data: web::Data<AppState>) -> Result<HttpResponse, AppError> {
async fn upload(
payload: Multipart<UploadForm>,
data: web::Data<AppState>
) -> Result<HttpResponse, AppError> {
let uid = payload.uid;
async fn add_number_to_filename(file_name: String, conn:&DbConn, uid:i64, parent_id:i64) -> String {
async fn add_number_to_filename(
file_name: String,
conn: &DbConn,
uid: i64,
parent_id: i64
) -> String {
let mut i = 0;
let mut new_file_name = file_name.to_string();
let arr: Vec<&str> = file_name.split(".").collect();
let suffix = String::from(arr[arr.len()-1]);
let preffix = arr[..arr.len()-1].join(".");
let mut docs = Query::find_doc_infos_by_name(conn, uid, &new_file_name, Some(parent_id)).await.unwrap();
while docs.len()>0 {
let suffix = String::from(arr[arr.len() - 1]);
let preffix = arr[..arr.len() - 1].join(".");
let mut docs = Query::find_doc_infos_by_name(
conn,
uid,
&new_file_name,
Some(parent_id)
).await.unwrap();
while docs.len() > 0 {
i += 1;
new_file_name = format!("{}_{}.{}", preffix, i, suffix);
docs = Query::find_doc_infos_by_name(conn, uid, &new_file_name, Some(parent_id)).await.unwrap();
docs = Query::find_doc_infos_by_name(
conn,
uid,
&new_file_name,
Some(parent_id)
).await.unwrap();
}
new_file_name
}
let fnm = add_number_to_filename(payload.file_field.name.clone(), &data.conn, uid, payload.did).await;
let fnm = add_number_to_filename(
payload.file_field.name.clone(),
&data.conn,
uid,
payload.did
).await;
std::fs::create_dir_all(format!("./upload/{}/", uid));
let filepath = format!("./upload/{}/{}-{}", payload.uid, payload.did, fnm.clone());
let mut f =std::fs::File::create(&filepath)?;
let mut f = std::fs::File::create(&filepath)?;
f.write(&payload.file_field.bytes)?;
let doc = Mutation::create_doc_info(&data.conn, Model {
did:Default::default(),
did: Default::default(),
uid: uid,
doc_name: fnm,
size: payload.file_field.bytes.len() as i64,
@ -92,7 +119,7 @@ async fn upload(payload: Multipart<UploadForm>, data: web::Data<AppState>) -> Re
r#type: "doc".to_string(),
created_at: now(),
updated_at: now(),
is_deleted:Default::default(),
is_deleted: Default::default(),
}).await?;
let _ = Mutation::place_doc(&data.conn, payload.did, doc.did.unwrap()).await?;
@ -103,10 +130,13 @@ async fn upload(payload: Multipart<UploadForm>, data: web::Data<AppState>) -> Re
#[derive(Deserialize, Debug)]
pub struct RmDocsParam {
uid: i64,
dids: Vec<i64>
dids: Vec<i64>,
}
#[post("/v1.0/delete_docs")]
async fn delete(params: web::Json<RmDocsParam>, data: web::Data<AppState>) -> Result<HttpResponse, AppError> {
async fn delete(
params: web::Json<RmDocsParam>,
data: web::Data<AppState>
) -> Result<HttpResponse, AppError> {
let _ = Mutation::delete_doc_info(&data.conn, &params.dids).await?;
let json_response = JsonResponse {
@ -115,20 +145,25 @@ async fn delete(params: web::Json<RmDocsParam>, data: web::Data<AppState>) -> Re
data: (),
};
Ok(HttpResponse::Ok()
Ok(
HttpResponse::Ok()
.content_type("application/json")
.body(serde_json::to_string(&json_response)?))
.body(serde_json::to_string(&json_response)?)
)
}
#[derive(Debug, Deserialize)]
pub struct MvParams {
pub uid:i64,
pub uid: i64,
pub dids: Vec<i64>,
pub dest_did: i64,
}
#[post("/v1.0/mv_docs")]
async fn mv(params: web::Json<MvParams>, data: web::Data<AppState>) -> Result<HttpResponse, AppError> {
async fn mv(
params: web::Json<MvParams>,
data: web::Data<AppState>
) -> Result<HttpResponse, AppError> {
Mutation::mv_doc_info(&data.conn, params.dest_did, &params.dids).await?;
let json_response = JsonResponse {
@ -137,30 +172,35 @@ async fn mv(params: web::Json<MvParams>, data: web::Data<AppState>) -> Result<Ht
data: (),
};
Ok(HttpResponse::Ok()
Ok(
HttpResponse::Ok()
.content_type("application/json")
.body(serde_json::to_string(&json_response)?))
.body(serde_json::to_string(&json_response)?)
)
}
#[derive(Debug, Deserialize)]
pub struct NewFoldParams {
pub uid: i64,
pub parent_id: i64,
pub name: String
pub name: String,
}
#[post("/v1.0/new_folder")]
async fn new_folder(params: web::Json<NewFoldParams>, data: web::Data<AppState>) -> Result<HttpResponse, AppError> {
async fn new_folder(
params: web::Json<NewFoldParams>,
data: web::Data<AppState>
) -> Result<HttpResponse, AppError> {
let doc = Mutation::create_doc_info(&data.conn, Model {
did:Default::default(),
did: Default::default(),
uid: params.uid,
doc_name: params.name.to_string(),
size:0,
size: 0,
r#type: "folder".to_string(),
location: "".to_owned(),
created_at: now(),
updated_at: now(),
is_deleted:Default::default(),
is_deleted: Default::default(),
}).await?;
let _ = Mutation::place_doc(&data.conn, params.parent_id, doc.did.unwrap()).await?;
@ -171,21 +211,26 @@ async fn new_folder(params: web::Json<NewFoldParams>, data: web::Data<AppState>)
pub struct RenameParams {
pub uid: i64,
pub did: i64,
pub name: String
pub name: String,
}
#[post("/v1.0/rename")]
async fn rename(params: web::Json<RenameParams>, data: web::Data<AppState>) -> Result<HttpResponse, AppError> {
async fn rename(
params: web::Json<RenameParams>,
data: web::Data<AppState>
) -> Result<HttpResponse, AppError> {
let docs = Query::find_doc_infos_by_name(&data.conn, params.uid, &params.name, None).await?;
if docs.len()>0{
if docs.len() > 0 {
let json_response = JsonResponse {
code: 500,
err: "Name duplicated!".to_owned(),
data: (),
};
return Ok(HttpResponse::Ok()
return Ok(
HttpResponse::Ok()
.content_type("application/json")
.body(serde_json::to_string(&json_response)?));
.body(serde_json::to_string(&json_response)?)
);
}
let doc = Mutation::rename(&data.conn, params.did, &params.name).await?;
@ -195,7 +240,9 @@ async fn rename(params: web::Json<RenameParams>, data: web::Data<AppState>) -> R
data: doc,
};
Ok(HttpResponse::Ok()
Ok(
HttpResponse::Ok()
.content_type("application/json")
.body(serde_json::to_string(&json_response)?))
.body(serde_json::to_string(&json_response)?)
)
}

View File

@ -1,5 +1,5 @@
use std::collections::HashMap;
use actix_web::{get, HttpResponse, post, web};
use actix_web::{ get, HttpResponse, post, web };
use serde::Serialize;
use crate::api::JsonResponse;
use crate::AppState;
@ -16,18 +16,26 @@ pub struct AddDocs2KbParams {
pub kb_id: i64,
}
#[post("/v1.0/create_kb")]
async fn create(model: web::Json<kb_info::Model>, data: web::Data<AppState>) -> Result<HttpResponse, AppError> {
let mut docs = Query::find_kb_infos_by_name(&data.conn, model.kb_name.to_owned()).await.unwrap();
if docs.len() >0 {
async fn create(
model: web::Json<kb_info::Model>,
data: web::Data<AppState>
) -> Result<HttpResponse, AppError> {
let mut docs = Query::find_kb_infos_by_name(
&data.conn,
model.kb_name.to_owned()
).await.unwrap();
if docs.len() > 0 {
let json_response = JsonResponse {
code: 201,
err: "Duplicated name.".to_owned(),
data: ()
data: (),
};
Ok(HttpResponse::Ok()
Ok(
HttpResponse::Ok()
.content_type("application/json")
.body(serde_json::to_string(&json_response)?))
}else{
.body(serde_json::to_string(&json_response)?)
)
} else {
let model = Mutation::create_kb_info(&data.conn, model.into_inner()).await?;
let mut result = HashMap::new();
@ -39,14 +47,19 @@ async fn create(model: web::Json<kb_info::Model>, data: web::Data<AppState>) ->
data: result,
};
Ok(HttpResponse::Ok()
Ok(
HttpResponse::Ok()
.content_type("application/json")
.body(serde_json::to_string(&json_response)?))
.body(serde_json::to_string(&json_response)?)
)
}
}
#[post("/v1.0/add_docs_to_kb")]
async fn add_docs_to_kb(param: web::Json<AddDocs2KbParams>, data: web::Data<AppState>) -> Result<HttpResponse, AppError> {
async fn add_docs_to_kb(
param: web::Json<AddDocs2KbParams>,
data: web::Data<AppState>
) -> Result<HttpResponse, AppError> {
let _ = Mutation::add_docs(&data.conn, param.kb_id, param.dids.to_owned()).await?;
let json_response = JsonResponse {
@ -55,13 +68,18 @@ async fn add_docs_to_kb(param: web::Json<AddDocs2KbParams>, data: web::Data<AppS
data: (),
};
Ok(HttpResponse::Ok()
Ok(
HttpResponse::Ok()
.content_type("application/json")
.body(serde_json::to_string(&json_response)?))
.body(serde_json::to_string(&json_response)?)
)
}
#[post("/v1.0/anti_kb_docs")]
async fn anti_kb_docs(param: web::Json<AddDocs2KbParams>, data: web::Data<AppState>) -> Result<HttpResponse, AppError> {
async fn anti_kb_docs(
param: web::Json<AddDocs2KbParams>,
data: web::Data<AppState>
) -> Result<HttpResponse, AppError> {
let _ = Mutation::remove_docs(&data.conn, param.dids.to_owned(), Some(param.kb_id)).await?;
let json_response = JsonResponse {
@ -70,12 +88,17 @@ async fn anti_kb_docs(param: web::Json<AddDocs2KbParams>, data: web::Data<AppSta
data: (),
};
Ok(HttpResponse::Ok()
Ok(
HttpResponse::Ok()
.content_type("application/json")
.body(serde_json::to_string(&json_response)?))
.body(serde_json::to_string(&json_response)?)
)
}
#[get("/v1.0/kbs")]
async fn list(model: web::Json<kb_info::Model>, data: web::Data<AppState>) -> Result<HttpResponse, AppError> {
async fn list(
model: web::Json<kb_info::Model>,
data: web::Data<AppState>
) -> Result<HttpResponse, AppError> {
let kbs = Query::find_kb_infos_by_uid(&data.conn, model.uid).await?;
let mut result = HashMap::new();
@ -87,13 +110,18 @@ async fn list(model: web::Json<kb_info::Model>, data: web::Data<AppState>) -> Re
data: result,
};
Ok(HttpResponse::Ok()
Ok(
HttpResponse::Ok()
.content_type("application/json")
.body(serde_json::to_string(&json_response)?))
.body(serde_json::to_string(&json_response)?)
)
}
#[post("/v1.0/delete_kb")]
async fn delete(model: web::Json<kb_info::Model>, data: web::Data<AppState>) -> Result<HttpResponse, AppError> {
async fn delete(
model: web::Json<kb_info::Model>,
data: web::Data<AppState>
) -> Result<HttpResponse, AppError> {
let _ = Mutation::delete_kb_info(&data.conn, model.kb_id).await?;
let json_response = JsonResponse {
@ -102,19 +130,24 @@ async fn delete(model: web::Json<kb_info::Model>, data: web::Data<AppState>) ->
data: (),
};
Ok(HttpResponse::Ok()
Ok(
HttpResponse::Ok()
.content_type("application/json")
.body(serde_json::to_string(&json_response)?))
.body(serde_json::to_string(&json_response)?)
)
}
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct DocIdsParams {
pub uid: i64,
pub dids: Vec<i64>
pub dids: Vec<i64>,
}
#[post("/v1.0/all_relevents")]
async fn all_relevents(params: web::Json<DocIdsParams>, data: web::Data<AppState>) -> Result<HttpResponse, AppError> {
async fn all_relevents(
params: web::Json<DocIdsParams>,
data: web::Data<AppState>
) -> Result<HttpResponse, AppError> {
let dids = crate::service::doc_info::Query::all_descendent_ids(&data.conn, &params.dids).await?;
let mut result = HashMap::new();
let kbs = Query::find_kb_by_docs(&data.conn, dids).await?;
@ -125,8 +158,9 @@ async fn all_relevents(params: web::Json<DocIdsParams>, data: web::Data<AppState
data: result,
};
Ok(HttpResponse::Ok()
Ok(
HttpResponse::Ok()
.content_type("application/json")
.body(serde_json::to_string(&json_response)?))
.body(serde_json::to_string(&json_response)?)
)
}

View File

@ -1,4 +1,4 @@
use serde::{Deserialize, Serialize};
use serde::{ Deserialize, Serialize };
pub(crate) mod tag_info;
pub(crate) mod kb_info;

View File

@ -1,21 +1,22 @@
use std::collections::HashMap;
use actix_web::{get, HttpResponse, post, web};
use actix_web_httpauth::middleware::HttpAuthentication;
use actix_web::{ get, HttpResponse, post, web };
use serde::Deserialize;
use crate::validator;
use crate::api::JsonResponse;
use crate::AppState;
use crate::entity::tag_info;
use crate::errors::AppError;
use crate::service::tag_info::{Mutation, Query};
use crate::service::tag_info::{ Mutation, Query };
#[derive(Debug, Deserialize)]
pub struct TagListParams {
pub uid: i64
pub uid: i64,
}
#[post("/v1.0/create_tag")]
async fn create(model: web::Json<tag_info::Model>, data: web::Data<AppState>) -> Result<HttpResponse, AppError> {
async fn create(
model: web::Json<tag_info::Model>,
data: web::Data<AppState>
) -> Result<HttpResponse, AppError> {
let model = Mutation::create_tag(&data.conn, model.into_inner()).await?;
let mut result = HashMap::new();
@ -27,13 +28,18 @@ async fn create(model: web::Json<tag_info::Model>, data: web::Data<AppState>) ->
data: result,
};
Ok(HttpResponse::Ok()
Ok(
HttpResponse::Ok()
.content_type("application/json")
.body(serde_json::to_string(&json_response)?))
.body(serde_json::to_string(&json_response)?)
)
}
#[post("/v1.0/delete_tag")]
async fn delete(model: web::Json<tag_info::Model>, data: web::Data<AppState>) -> Result<HttpResponse, AppError> {
async fn delete(
model: web::Json<tag_info::Model>,
data: web::Data<AppState>
) -> Result<HttpResponse, AppError> {
let _ = Mutation::delete_tag(&data.conn, model.tid).await?;
let json_response = JsonResponse {
@ -42,16 +48,20 @@ async fn delete(model: web::Json<tag_info::Model>, data: web::Data<AppState>) ->
data: (),
};
Ok(HttpResponse::Ok()
Ok(
HttpResponse::Ok()
.content_type("application/json")
.body(serde_json::to_string(&json_response)?))
.body(serde_json::to_string(&json_response)?)
)
}
//#[get("/v1.0/tags", wrap = "HttpAuthentication::bearer(validator)")]
#[post("/v1.0/tags")]
async fn list(param: web::Json<TagListParams>, data: web::Data<AppState>) -> Result<HttpResponse, AppError> {
async fn list(
param: web::Json<TagListParams>,
data: web::Data<AppState>
) -> Result<HttpResponse, AppError> {
let tags = Query::find_tags_by_uid(param.uid, &data.conn).await?;
let mut result = HashMap::new();
@ -63,7 +73,9 @@ async fn list(param: web::Json<TagListParams>, data: web::Data<AppState>) -> Res
data: result,
};
Ok(HttpResponse::Ok()
Ok(
HttpResponse::Ok()
.content_type("application/json")
.body(serde_json::to_string(&json_response)?))
.body(serde_json::to_string(&json_response)?)
)
}

View File

@ -2,27 +2,24 @@ use std::collections::HashMap;
use std::io::SeekFrom;
use std::ptr::null;
use actix_identity::Identity;
use actix_web::{HttpResponse, post, web};
use chrono::{FixedOffset, Utc};
use actix_web::{ HttpResponse, post, web };
use chrono::{ FixedOffset, Utc };
use sea_orm::ActiveValue::NotSet;
use serde::{Deserialize, Serialize};
use serde::{ Deserialize, Serialize };
use crate::api::JsonResponse;
use crate::AppState;
use crate::entity::{doc_info, tag_info};
use crate::entity::{ doc_info, tag_info };
use crate::entity::user_info::Model;
use crate::errors::{AppError, UserError};
use crate::errors::{ AppError, UserError };
use crate::service::user_info::Mutation;
use crate::service::user_info::Query;
fn now()->chrono::DateTime<FixedOffset>{
Utc::now().with_timezone(&FixedOffset::east_opt(3600*8).unwrap())
fn now() -> chrono::DateTime<FixedOffset> {
Utc::now().with_timezone(&FixedOffset::east_opt(3600 * 8).unwrap())
}
pub(crate) fn create_auth_token(user: &Model) -> u64 {
use std::{
collections::hash_map::DefaultHasher,
hash::{Hash, Hasher},
};
use std::{ collections::hash_map::DefaultHasher, hash::{ Hash, Hasher } };
let mut hasher = DefaultHasher::new();
user.hash(&mut hasher);
@ -43,7 +40,7 @@ async fn login(
) -> Result<HttpResponse, AppError> {
match Query::login(&data.conn, &input.email, &input.password).await? {
Some(user) => {
let _ = Mutation::update_login_status(user.uid,&data.conn).await?;
let _ = Mutation::update_login_status(user.uid, &data.conn).await?;
let token = create_auth_token(&user).to_string();
identity.remember(token.clone());
@ -54,17 +51,22 @@ async fn login(
data: token.clone(),
};
Ok(HttpResponse::Ok()
Ok(
HttpResponse::Ok()
.content_type("application/json")
.append_header(("X-Auth-Token", token))
.body(serde_json::to_string(&json_response)?))
.body(serde_json::to_string(&json_response)?)
)
}
None => Err(UserError::LoginFailed.into())
None => Err(UserError::LoginFailed.into()),
}
}
#[post("/v1.0/register")]
async fn register(model: web::Json<Model>, data: web::Data<AppState>) -> Result<HttpResponse, AppError> {
async fn register(
model: web::Json<Model>,
data: web::Data<AppState>
) -> Result<HttpResponse, AppError> {
let mut result = HashMap::new();
let u = Query::find_user_infos(&data.conn, &model.email).await?;
if let Some(_) = u {
@ -73,15 +75,17 @@ async fn register(model: web::Json<Model>, data: web::Data<AppState>) -> Result<
err: "Email registered!".to_owned(),
data: (),
};
return Ok(HttpResponse::Ok()
return Ok(
HttpResponse::Ok()
.content_type("application/json")
.body(serde_json::to_string(&json_response)?));
.body(serde_json::to_string(&json_response)?)
);
}
let usr = Mutation::create_user(&data.conn, &model).await?;
result.insert("uid", usr.uid.clone().unwrap());
crate::service::doc_info::Mutation::create_doc_info(&data.conn, doc_info::Model{
did:Default::default(),
crate::service::doc_info::Mutation::create_doc_info(&data.conn, doc_info::Model {
did: Default::default(),
uid: usr.uid.clone().unwrap(),
doc_name: "/".into(),
size: 0,
@ -89,9 +93,9 @@ async fn register(model: web::Json<Model>, data: web::Data<AppState>) -> Result<
r#type: "folder".to_string(),
created_at: now(),
updated_at: now(),
is_deleted:Default::default(),
is_deleted: Default::default(),
}).await?;
let tnm = vec!["视频","图片","音乐","文档"];
let tnm = vec!["视频", "图片", "音乐", "文档"];
let tregx = vec![
".*\\.(mpg|mpeg|avi|rm|rmvb|mov|wmv|asf|dat|asx|wvx|mpe|mpa)",
".*\\.(png|tif|gif|pcx|tga|exif|fpx|svg|psd|cdr|pcd|dxf|ufo|eps|ai|raw|WMF|webp|avif|apng)",
@ -99,13 +103,13 @@ async fn register(model: web::Json<Model>, data: web::Data<AppState>) -> Result<
".*\\.(pdf|doc|ppt|yml|xml|htm|json|csv|txt|ini|xsl|wps|rtf|hlp)"
];
for i in 0..4 {
crate::service::tag_info::Mutation::create_tag(&data.conn, tag_info::Model{
crate::service::tag_info::Mutation::create_tag(&data.conn, tag_info::Model {
tid: Default::default(),
uid: usr.uid.clone().unwrap(),
tag_name: tnm[i].to_owned(),
regx: tregx[i].to_owned(),
color: (i+1).to_owned() as i16,
icon: (i+1).to_owned() as i16,
color: (i + 1).to_owned() as i16,
icon: (i + 1).to_owned() as i16,
folder_id: 0,
created_at: Default::default(),
updated_at: Default::default(),
@ -117,13 +121,18 @@ async fn register(model: web::Json<Model>, data: web::Data<AppState>) -> Result<
data: result,
};
Ok(HttpResponse::Ok()
Ok(
HttpResponse::Ok()
.content_type("application/json")
.body(serde_json::to_string(&json_response)?))
.body(serde_json::to_string(&json_response)?)
)
}
#[post("/v1.0/setting")]
async fn setting(model: web::Json<Model>, data: web::Data<AppState>) -> Result<HttpResponse, AppError> {
async fn setting(
model: web::Json<Model>,
data: web::Data<AppState>
) -> Result<HttpResponse, AppError> {
let _ = Mutation::update_user_by_id(&data.conn, &model).await?;
let json_response = JsonResponse {
code: 200,
@ -131,7 +140,9 @@ async fn setting(model: web::Json<Model>, data: web::Data<AppState>) -> Result<H
data: (),
};
Ok(HttpResponse::Ok()
Ok(
HttpResponse::Ok()
.content_type("application/json")
.body(serde_json::to_string(&json_response)?))
.body(serde_json::to_string(&json_response)?)
)
}

View File

@ -1,5 +1,5 @@
use sea_orm::entity::prelude::*;
use serde::{Deserialize, Serialize};
use serde::{ Deserialize, Serialize };
#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel, Deserialize, Serialize)]
#[sea_orm(table_name = "dialog2_kb")]
@ -21,11 +21,13 @@ pub enum Relation {
impl RelationTrait for Relation {
fn def(&self) -> RelationDef {
match self {
Self::DialogInfo => Entity::belongs_to(super::dialog_info::Entity)
Self::DialogInfo =>
Entity::belongs_to(super::dialog_info::Entity)
.from(Column::DialogId)
.to(super::dialog_info::Column::DialogId)
.into(),
Self::KbInfo => Entity::belongs_to(super::kb_info::Entity)
Self::KbInfo =>
Entity::belongs_to(super::kb_info::Entity)
.from(Column::KbId)
.to(super::kb_info::Column::KbId)
.into(),

View File

@ -1,6 +1,6 @@
use chrono::{DateTime, FixedOffset};
use chrono::{ DateTime, FixedOffset };
use sea_orm::entity::prelude::*;
use serde::{Deserialize, Serialize};
use serde::{ Deserialize, Serialize };
#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel, Deserialize, Serialize)]
#[sea_orm(table_name = "dialog_info")]
@ -19,7 +19,7 @@ pub struct Model {
#[serde(skip_deserializing)]
pub updated_at: DateTime<FixedOffset>,
#[serde(skip_deserializing)]
pub is_deleted: bool
pub is_deleted: bool,
}
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]

View File

@ -1,5 +1,5 @@
use sea_orm::entity::prelude::*;
use serde::{Deserialize, Serialize};
use serde::{ Deserialize, Serialize };
#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel, Deserialize, Serialize)]
#[sea_orm(table_name = "doc2_doc")]
@ -15,17 +15,19 @@ pub struct Model {
#[derive(Debug, Clone, Copy, EnumIter)]
pub enum Relation {
Parent,
Child
Child,
}
impl RelationTrait for Relation {
fn def(&self) -> RelationDef {
match self {
Self::Parent => Entity::belongs_to(super::doc_info::Entity)
Self::Parent =>
Entity::belongs_to(super::doc_info::Entity)
.from(Column::ParentId)
.to(super::doc_info::Column::Did)
.into(),
Self::Child => Entity::belongs_to(super::doc_info::Entity)
Self::Child =>
Entity::belongs_to(super::doc_info::Entity)
.from(Column::Did)
.to(super::doc_info::Column::Did)
.into(),

View File

@ -1,7 +1,7 @@
use sea_orm::entity::prelude::*;
use serde::{Deserialize, Serialize};
use serde::{ Deserialize, Serialize };
use crate::entity::kb_info;
use chrono::{DateTime, FixedOffset};
use chrono::{ DateTime, FixedOffset };
#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Deserialize, Serialize)]
#[sea_orm(table_name = "doc_info")]
@ -21,7 +21,7 @@ pub struct Model {
#[serde(skip_deserializing)]
pub updated_at: DateTime<FixedOffset>,
#[serde(skip_deserializing)]
pub is_deleted: bool
pub is_deleted: bool,
}
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]

View File

@ -1,6 +1,6 @@
use sea_orm::entity::prelude::*;
use serde::{Deserialize, Serialize};
use chrono::{DateTime, FixedOffset};
use serde::{ Deserialize, Serialize };
use chrono::{ DateTime, FixedOffset };
#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Deserialize, Serialize)]
#[sea_orm(table_name = "kb2_doc")]
@ -30,11 +30,13 @@ pub enum Relation {
impl RelationTrait for Relation {
fn def(&self) -> RelationDef {
match self {
Self::DocInfo => Entity::belongs_to(super::doc_info::Entity)
Self::DocInfo =>
Entity::belongs_to(super::doc_info::Entity)
.from(Column::Did)
.to(super::doc_info::Column::Did)
.into(),
Self::KbInfo => Entity::belongs_to(super::kb_info::Entity)
Self::KbInfo =>
Entity::belongs_to(super::kb_info::Entity)
.from(Column::KbId)
.to(super::kb_info::Column::KbId)
.into(),

View File

@ -1,6 +1,6 @@
use sea_orm::entity::prelude::*;
use serde::{Deserialize, Serialize};
use chrono::{DateTime, FixedOffset};
use serde::{ Deserialize, Serialize };
use chrono::{ DateTime, FixedOffset };
#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel, Deserialize, Serialize)]
#[sea_orm(table_name = "kb_info")]

View File

@ -1,5 +1,5 @@
use sea_orm::entity::prelude::*;
use serde::{Deserialize, Serialize};
use serde::{ Deserialize, Serialize };
#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel, Deserialize, Serialize)]
#[sea_orm(table_name = "tag2_doc")]
@ -21,11 +21,13 @@ pub enum Relation {
impl RelationTrait for Relation {
fn def(&self) -> sea_orm::RelationDef {
match self {
Self::DocInfo => Entity::belongs_to(super::doc_info::Entity)
Self::DocInfo =>
Entity::belongs_to(super::doc_info::Entity)
.from(Column::Uid)
.to(super::doc_info::Column::Uid)
.into(),
Self::Tag => Entity::belongs_to(super::tag_info::Entity)
Self::Tag =>
Entity::belongs_to(super::tag_info::Entity)
.from(Column::TagId)
.to(super::tag_info::Column::Tid)
.into(),

View File

@ -1,6 +1,6 @@
use sea_orm::entity::prelude::*;
use serde::{Deserialize, Serialize};
use chrono::{DateTime, FixedOffset};
use serde::{ Deserialize, Serialize };
use chrono::{ DateTime, FixedOffset };
#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Deserialize, Serialize)]
#[sea_orm(table_name = "tag_info")]

View File

@ -1,6 +1,6 @@
use sea_orm::entity::prelude::*;
use serde::{Deserialize, Serialize};
use chrono::{DateTime, FixedOffset};
use serde::{ Deserialize, Serialize };
use chrono::{ DateTime, FixedOffset };
#[derive(Clone, Debug, PartialEq, Eq, Hash, DeriveEntityModel, Deserialize, Serialize)]
#[sea_orm(table_name = "user_info")]
@ -27,5 +27,4 @@ pub struct Model {
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {}
impl ActiveModelBehavior for ActiveModel {}

View File

@ -1,22 +1,17 @@
use actix_web::{HttpResponse, ResponseError};
use actix_web::{ HttpResponse, ResponseError };
use thiserror::Error;
#[derive(Debug, Error)]
pub(crate) enum AppError {
#[error("`{0}`")]
User(#[from] UserError),
#[error("`{0}`")] User(#[from] UserError),
#[error("`{0}`")]
Json(#[from] serde_json::Error),
#[error("`{0}`")] Json(#[from] serde_json::Error),
#[error("`{0}`")]
Actix(#[from] actix_web::Error),
#[error("`{0}`")] Actix(#[from] actix_web::Error),
#[error("`{0}`")]
Db(#[from] sea_orm::DbErr),
#[error("`{0}`")] Db(#[from] sea_orm::DbErr),
#[error("`{0}`")]
Std(#[from] std::io::Error),
#[error("`{0}`")] Std(#[from] std::io::Error),
}
#[derive(Debug, Error)]
@ -33,8 +28,7 @@ pub(crate) enum UserError {
#[error("`password` field of `User` cannot contain whitespaces!")]
PasswordInvalidCharacter,
#[error("Could not find any `User` for id: `{0}`!")]
NotFound(i64),
#[error("Could not find any `User` for id: `{0}`!")] NotFound(i64),
#[error("Failed to login user!")]
LoginFailed,
@ -52,7 +46,8 @@ pub(crate) enum UserError {
impl ResponseError for AppError {
fn status_code(&self) -> actix_web::http::StatusCode {
match self {
AppError::User(user_error) => match user_error {
AppError::User(user_error) =>
match user_error {
UserError::EmptyUsername => actix_web::http::StatusCode::UNPROCESSABLE_ENTITY,
UserError::UsernameInvalidCharacter => {
actix_web::http::StatusCode::UNPROCESSABLE_ENTITY
@ -66,7 +61,7 @@ impl ResponseError for AppError {
UserError::Empty => actix_web::http::StatusCode::NOT_FOUND,
UserError::LoginFailed => actix_web::http::StatusCode::NOT_FOUND,
UserError::InvalidToken => actix_web::http::StatusCode::UNAUTHORIZED,
},
}
AppError::Json(_) => actix_web::http::StatusCode::INTERNAL_SERVER_ERROR,
AppError::Actix(fail) => fail.as_response_error().status_code(),
AppError::Db(_) => actix_web::http::StatusCode::INTERNAL_SERVER_ERROR,

View File

@ -5,16 +5,16 @@ mod errors;
use std::env;
use actix_files::Files;
use actix_identity::{CookieIdentityPolicy, IdentityService, RequestIdentity};
use actix_identity::{ CookieIdentityPolicy, IdentityService, RequestIdentity };
use actix_session::CookieSession;
use actix_web::{web, App, HttpServer, middleware, Error};
use actix_web::{ web, App, HttpServer, middleware, Error };
use actix_web::cookie::time::Duration;
use actix_web::dev::ServiceRequest;
use actix_web::error::ErrorUnauthorized;
use actix_web_httpauth::extractors::bearer::BearerAuth;
use listenfd::ListenFd;
use sea_orm::{Database, DatabaseConnection};
use migration::{Migrator, MigratorTrait};
use sea_orm::{ Database, DatabaseConnection };
use migration::{ Migrator, MigratorTrait };
use crate::errors::UserError;
#[derive(Debug, Clone)]
@ -24,10 +24,10 @@ struct AppState {
pub(crate) async fn validator(
req: ServiceRequest,
credentials: BearerAuth,
credentials: BearerAuth
) -> Result<ServiceRequest, Error> {
if let Some(token) = req.get_identity() {
println!("{}, {}",credentials.token(), token);
println!("{}, {}", credentials.token(), token);
(credentials.token() == token)
.then(|| req)
.ok_or(ErrorUnauthorized(UserError::InvalidToken))
@ -61,18 +61,20 @@ async fn main() -> std::io::Result<()> {
App::new()
.service(Files::new("/static", "./static"))
.app_data(web::Data::new(state.clone()))
.wrap(IdentityService::new(
.wrap(
IdentityService::new(
CookieIdentityPolicy::new(&[0; 32])
.name("auth-cookie")
.login_deadline(Duration::seconds(120))
.secure(false),
))
.secure(false)
)
)
.wrap(
CookieSession::signed(&[0; 32])
.name("session-cookie")
.secure(false)
// WARNING(alex): This uses the `time` crate, not `std::time`!
.expires_in_time(Duration::seconds(60)),
.expires_in_time(Duration::seconds(60))
)
.wrap(middleware::Logger::default())
.configure(init)

View File

@ -1,19 +1,31 @@
use chrono::{Local, FixedOffset, Utc};
use chrono::{ Local, FixedOffset, Utc };
use migration::Expr;
use sea_orm::{ActiveModelTrait, DbConn, DbErr, DeleteResult, EntityTrait, PaginatorTrait, QueryOrder, UpdateResult};
use sea_orm::{
ActiveModelTrait,
DbConn,
DbErr,
DeleteResult,
EntityTrait,
PaginatorTrait,
QueryOrder,
UpdateResult,
};
use sea_orm::ActiveValue::Set;
use sea_orm::QueryFilter;
use sea_orm::ColumnTrait;
use crate::entity::dialog_info;
use crate::entity::dialog_info::Entity;
fn now()->chrono::DateTime<FixedOffset>{
Utc::now().with_timezone(&FixedOffset::east_opt(3600*8).unwrap())
fn now() -> chrono::DateTime<FixedOffset> {
Utc::now().with_timezone(&FixedOffset::east_opt(3600 * 8).unwrap())
}
pub struct Query;
impl Query {
pub async fn find_dialog_info_by_id(db: &DbConn, id: i64) -> Result<Option<dialog_info::Model>, DbErr> {
pub async fn find_dialog_info_by_id(
db: &DbConn,
id: i64
) -> Result<Option<dialog_info::Model>, DbErr> {
Entity::find_by_id(id).one(db).await
}
@ -21,18 +33,20 @@ impl Query {
Entity::find().all(db).await
}
pub async fn find_dialog_infos_by_uid(db: &DbConn, uid: i64) -> Result<Vec<dialog_info::Model>, DbErr> {
pub async fn find_dialog_infos_by_uid(
db: &DbConn,
uid: i64
) -> Result<Vec<dialog_info::Model>, DbErr> {
Entity::find()
.filter(dialog_info::Column::Uid.eq(uid))
.filter(dialog_info::Column::IsDeleted.eq(false))
.all(db)
.await
.all(db).await
}
pub async fn find_dialog_infos_in_page(
db: &DbConn,
page: u64,
posts_per_page: u64,
posts_per_page: u64
) -> Result<(Vec<dialog_info::Model>, u64), DbErr> {
// Setup paginator
let paginator = Entity::find()
@ -54,7 +68,7 @@ impl Mutation {
kb_id: i64,
name: &String
) -> Result<dialog_info::ActiveModel, DbErr> {
dialog_info::ActiveModel {
(dialog_info::ActiveModel {
dialog_id: Default::default(),
uid: Set(uid),
kb_id: Set(kb_id),
@ -62,16 +76,14 @@ impl Mutation {
history: Set("".to_owned()),
created_at: Set(now()),
updated_at: Set(now()),
is_deleted: Default::default()
}
.save(db)
.await
is_deleted: Default::default(),
}).save(db).await
}
pub async fn update_dialog_info_by_id(
db: &DbConn,
dialog_id: i64,
dialog_name:&String,
dialog_name: &String,
history: &String
) -> Result<UpdateResult, DbErr> {
Entity::update_many()
@ -79,16 +91,14 @@ impl Mutation {
.col_expr(dialog_info::Column::History, Expr::value(history))
.col_expr(dialog_info::Column::UpdatedAt, Expr::value(now()))
.filter(dialog_info::Column::DialogId.eq(dialog_id))
.exec(db)
.await
.exec(db).await
}
pub async fn delete_dialog_info(db: &DbConn, dialog_id: i64) -> Result<UpdateResult, DbErr> {
Entity::update_many()
.col_expr(dialog_info::Column::IsDeleted, Expr::value(true))
.filter(dialog_info::Column::DialogId.eq(dialog_id))
.exec(db)
.await
.exec(db).await
}
pub async fn delete_all_dialog_infos(db: &DbConn) -> Result<DeleteResult, DbErr> {

View File

@ -1,20 +1,41 @@
use chrono::{Utc, FixedOffset};
use sea_orm::{ActiveModelTrait, ColumnTrait, DbConn, DbErr, DeleteResult, EntityTrait, PaginatorTrait, QueryOrder, Unset, Unchanged, ConditionalStatement, QuerySelect, JoinType, RelationTrait, DbBackend, Statement, UpdateResult};
use chrono::{ Utc, FixedOffset };
use sea_orm::{
ActiveModelTrait,
ColumnTrait,
DbConn,
DbErr,
DeleteResult,
EntityTrait,
PaginatorTrait,
QueryOrder,
Unset,
Unchanged,
ConditionalStatement,
QuerySelect,
JoinType,
RelationTrait,
DbBackend,
Statement,
UpdateResult,
};
use sea_orm::ActiveValue::Set;
use sea_orm::QueryFilter;
use crate::api::doc_info::ListParams;
use crate::entity::{doc2_doc, doc_info};
use crate::entity::{ doc2_doc, doc_info };
use crate::entity::doc_info::Entity;
use crate::service;
fn now()->chrono::DateTime<FixedOffset>{
Utc::now().with_timezone(&FixedOffset::east_opt(3600*8).unwrap())
fn now() -> chrono::DateTime<FixedOffset> {
Utc::now().with_timezone(&FixedOffset::east_opt(3600 * 8).unwrap())
}
pub struct Query;
impl Query {
pub async fn find_doc_info_by_id(db: &DbConn, id: i64) -> Result<Option<doc_info::Model>, DbErr> {
pub async fn find_doc_info_by_id(
db: &DbConn,
id: i64
) -> Result<Option<doc_info::Model>, DbErr> {
Entity::find_by_id(id).one(db).await
}
@ -22,34 +43,46 @@ impl Query {
Entity::find().all(db).await
}
pub async fn find_doc_infos_by_uid(db: &DbConn, uid: i64) -> Result<Vec<doc_info::Model>, DbErr> {
Entity::find()
.filter(doc_info::Column::Uid.eq(uid))
.all(db)
.await
pub async fn find_doc_infos_by_uid(
db: &DbConn,
uid: i64
) -> Result<Vec<doc_info::Model>, DbErr> {
Entity::find().filter(doc_info::Column::Uid.eq(uid)).all(db).await
}
pub async fn find_doc_infos_by_name(db: &DbConn, uid: i64, name: &String, parent_id:Option<i64>) -> Result<Vec<doc_info::Model>, DbErr> {
pub async fn find_doc_infos_by_name(
db: &DbConn,
uid: i64,
name: &String,
parent_id: Option<i64>
) -> Result<Vec<doc_info::Model>, DbErr> {
let mut dids = Vec::<i64>::new();
if let Some(pid) = parent_id {
for d2d in doc2_doc::Entity::find().filter(doc2_doc::Column::ParentId.eq(pid)).all(db).await?{
for d2d in doc2_doc::Entity
::find()
.filter(doc2_doc::Column::ParentId.eq(pid))
.all(db).await? {
dids.push(d2d.did);
}
}
else{
} else {
let doc = Entity::find()
.filter(doc_info::Column::DocName.eq(name.clone()))
.filter(doc_info::Column::Uid.eq(uid))
.all(db)
.await?;
if doc.len() == 0{
.all(db).await?;
if doc.len() == 0 {
return Ok(vec![]);
}
assert!(doc.len()>0);
let d2d = doc2_doc::Entity::find().filter(doc2_doc::Column::Did.eq(doc[0].did)).all(db).await?;
assert!(doc.len() > 0);
let d2d = doc2_doc::Entity
::find()
.filter(doc2_doc::Column::Did.eq(doc[0].did))
.all(db).await?;
assert!(d2d.len() <= 1, "Did: {}->{}", doc[0].did, d2d.len());
if d2d.len()>0{
for d2d_ in doc2_doc::Entity::find().filter(doc2_doc::Column::ParentId.eq(d2d[0].parent_id)).all(db).await?{
if d2d.len() > 0 {
for d2d_ in doc2_doc::Entity
::find()
.filter(doc2_doc::Column::ParentId.eq(d2d[0].parent_id))
.all(db).await? {
dids.push(d2d_.did);
}
}
@ -60,19 +93,21 @@ impl Query {
.filter(doc_info::Column::Uid.eq(uid))
.filter(doc_info::Column::Did.is_in(dids))
.filter(doc_info::Column::IsDeleted.eq(false))
.all(db)
.await
.all(db).await
}
pub async fn all_descendent_ids(db: &DbConn, doc_ids: &Vec<i64>) -> Result<Vec<i64>, DbErr> {
let mut dids = doc_ids.clone();
let mut i:usize = 0;
let mut i: usize = 0;
loop {
if dids.len() == i {
break;
}
for d in doc2_doc::Entity::find().filter(doc2_doc::Column::ParentId.eq(dids[i])).all(db).await?{
for d in doc2_doc::Entity
::find()
.filter(doc2_doc::Column::ParentId.eq(dids[i]))
.all(db).await? {
dids.push(d.did);
}
i += 1;
@ -80,9 +115,13 @@ impl Query {
Ok(dids)
}
pub async fn find_doc_infos_by_params(db: &DbConn, params: ListParams) -> Result<Vec<doc_info::Model>, DbErr> {
pub async fn find_doc_infos_by_params(
db: &DbConn,
params: ListParams
) -> Result<Vec<doc_info::Model>, DbErr> {
// Setup paginator
let mut sql:String = "
let mut sql: String =
"
select
a.did,
a.uid,
@ -97,21 +136,33 @@ impl Query {
doc_info as a
".to_owned();
let mut cond:String = format!(" a.uid={} and a.is_deleted=False ", params.uid);
let mut cond: String = format!(" a.uid={} and a.is_deleted=False ", params.uid);
if let Some(kb_id) = params.filter.kb_id {
sql.push_str(&format!(" inner join kb2_doc on kb2_doc.did = a.did and kb2_doc.kb_id={}", kb_id));
sql.push_str(
&format!(" inner join kb2_doc on kb2_doc.did = a.did and kb2_doc.kb_id={}", kb_id)
);
}
if let Some(folder_id) = params.filter.folder_id {
sql.push_str(&format!(" inner join doc2_doc on a.did = doc2_doc.did and doc2_doc.parent_id={}", folder_id));
sql.push_str(
&format!(" inner join doc2_doc on a.did = doc2_doc.did and doc2_doc.parent_id={}", folder_id)
);
}
// Fetch paginated posts
if let Some(tag_id) = params.filter.tag_id {
let tag = service::tag_info::Query::find_tag_info_by_id(tag_id, &db).await.unwrap().unwrap();
if tag.folder_id > 0{
sql.push_str(&format!(" inner join doc2_doc on a.did = doc2_doc.did and doc2_doc.parent_id={}", tag.folder_id));
let tag = service::tag_info::Query
::find_tag_info_by_id(tag_id, &db).await
.unwrap()
.unwrap();
if tag.folder_id > 0 {
sql.push_str(
&format!(
" inner join doc2_doc on a.did = doc2_doc.did and doc2_doc.parent_id={}",
tag.folder_id
)
);
}
if tag.regx.len()>0{
if tag.regx.len() > 0 {
cond.push_str(&format!(" and doc_name ~ '{}'", tag.regx));
}
}
@ -119,7 +170,7 @@ impl Query {
if let Some(keywords) = params.filter.keywords {
cond.push_str(&format!(" and doc_name like '%{}%'", keywords));
}
if cond.len() > 0{
if cond.len() > 0 {
sql.push_str(&" where ");
sql.push_str(&cond);
}
@ -128,28 +179,26 @@ impl Query {
orderby = "updated_at desc".to_owned();
}
sql.push_str(&format!(" order by {}", orderby));
let mut page_size:u32 = 30;
let mut page_size: u32 = 30;
if let Some(pg_sz) = params.per_page {
page_size = pg_sz;
}
let mut page:u32 = 0;
let mut page: u32 = 0;
if let Some(pg) = params.page {
page = pg;
}
sql.push_str(&format!(" limit {} offset {} ;", page_size, page*page_size));
sql.push_str(&format!(" limit {} offset {} ;", page_size, page * page_size));
print!("{}", sql);
Entity::find()
.from_raw_sql(
Statement::from_sql_and_values(DbBackend::Postgres,sql,vec![])
).all(db).await
.from_raw_sql(Statement::from_sql_and_values(DbBackend::Postgres, sql, vec![]))
.all(db).await
}
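    // A minimal sketch, not part of this commit: the same uid/keyword filter as the
    // raw query above, but with the values bound through Statement::from_sql_and_values
    // rather than formatted into the SQL string with format!, so quoting and injection
    // are handled by the driver. The function name and the reduced column list are
    // hypothetical; Entity, doc_info, DbBackend and Statement are the items already
    // imported at the top of this file.
    pub async fn find_doc_infos_by_keyword_bound(
        db: &DbConn,
        uid: i64,
        keyword: &str
    ) -> Result<Vec<doc_info::Model>, DbErr> {
        let sql = "
            select a.*
            from doc_info as a
            where a.uid = $1
              and a.is_deleted = false
              and a.doc_name like '%' || $2 || '%'
            order by a.updated_at desc";
        Entity::find()
            .from_raw_sql(
                Statement::from_sql_and_values(DbBackend::Postgres, sql, vec![
                    uid.into(),
                    keyword.into(),
                ])
            )
            .all(db).await
    }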
pub async fn find_doc_infos_in_page(
db: &DbConn,
page: u64,
posts_per_page: u64,
posts_per_page: u64
) -> Result<(Vec<doc_info::Model>, u64), DbErr> {
// Setup paginator
let paginator = Entity::find()
@ -165,22 +214,18 @@ impl Query {
pub struct Mutation;
impl Mutation {
pub async fn mv_doc_info(
db: &DbConn,
dest_did: i64,
dids: &[i64]
) -> Result<(), DbErr> {
pub async fn mv_doc_info(db: &DbConn, dest_did: i64, dids: &[i64]) -> Result<(), DbErr> {
for did in dids {
let d = doc2_doc::Entity::find().filter(doc2_doc::Column::Did.eq(did.to_owned())).all(db).await?;
let d = doc2_doc::Entity
::find()
.filter(doc2_doc::Column::Did.eq(did.to_owned()))
.all(db).await?;
let _ = doc2_doc::ActiveModel {
let _ = (doc2_doc::ActiveModel {
id: Set(d[0].id),
did: Set(did.to_owned()),
parent_id: Set(dest_did)
}
.update(db)
.await?;
parent_id: Set(dest_did),
}).update(db).await?;
}
Ok(())
@ -191,20 +236,18 @@ impl Mutation {
dest_did: i64,
did: i64
) -> Result<doc2_doc::ActiveModel, DbErr> {
doc2_doc::ActiveModel {
(doc2_doc::ActiveModel {
id: Default::default(),
parent_id: Set(dest_did),
did: Set(did),
}
.save(db)
.await
}).save(db).await
}
pub async fn create_doc_info(
db: &DbConn,
form_data: doc_info::Model,
form_data: doc_info::Model
) -> Result<doc_info::ActiveModel, DbErr> {
doc_info::ActiveModel {
(doc_info::ActiveModel {
did: Default::default(),
uid: Set(form_data.uid.to_owned()),
doc_name: Set(form_data.doc_name.to_owned()),
@ -213,24 +256,21 @@ impl Mutation {
location: Set(form_data.location.to_owned()),
created_at: Set(form_data.created_at.to_owned()),
updated_at: Set(form_data.updated_at.to_owned()),
is_deleted:Default::default()
}
.save(db)
.await
is_deleted: Default::default(),
}).save(db).await
}
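    // A minimal usage sketch, not part of this commit: create the document row, then
    // attach it to a parent folder through create_doc2_doc. The helper name and the
    // folder_id argument are hypothetical; the two calls are the functions defined in
    // this impl.
    pub async fn create_doc_in_folder(
        db: &DbConn,
        folder_id: i64,
        form_data: doc_info::Model
    ) -> Result<i64, DbErr> {
        let doc = Mutation::create_doc_info(db, form_data).await?;
        // After the auto-increment insert the primary key comes back as Set(...).
        let did = doc.did.unwrap();
        Mutation::create_doc2_doc(db, folder_id, did).await?;
        Ok(did)
    }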
pub async fn update_doc_info_by_id(
db: &DbConn,
id: i64,
form_data: doc_info::Model,
form_data: doc_info::Model
) -> Result<doc_info::Model, DbErr> {
let doc_info: doc_info::ActiveModel = Entity::find_by_id(id)
.one(db)
.await?
.one(db).await?
.ok_or(DbErr::Custom("Cannot find.".to_owned()))
.map(Into::into)?;
doc_info::ActiveModel {
(doc_info::ActiveModel {
did: doc_info.did,
uid: Set(form_data.uid.to_owned()),
doc_name: Set(form_data.doc_name.to_owned()),
@ -240,41 +280,46 @@ impl Mutation {
created_at: doc_info.created_at,
updated_at: Set(now()),
is_deleted: Default::default(),
}
.update(db)
.await
}).update(db).await
}
pub async fn delete_doc_info(db: &DbConn, doc_ids: &Vec<i64>) -> Result<UpdateResult, DbErr> {
let mut dids = doc_ids.clone();
let mut i:usize = 0;
let mut i: usize = 0;
loop {
if dids.len() == i {
break;
}
let mut doc: doc_info::ActiveModel = Entity::find_by_id(dids[i])
.one(db)
.await?
.one(db).await?
.ok_or(DbErr::Custom(format!("Can't find doc:{}", dids[i])))
.map(Into::into)?;
doc.updated_at = Set(now());
doc.is_deleted = Set(true);
let _ = doc.update(db).await?;
for d in doc2_doc::Entity::find().filter(doc2_doc::Column::ParentId.eq(dids[i])).all(db).await?{
for d in doc2_doc::Entity
::find()
.filter(doc2_doc::Column::ParentId.eq(dids[i]))
.all(db).await? {
dids.push(d.did);
}
let _ = doc2_doc::Entity::delete_many().filter(doc2_doc::Column::ParentId.eq(dids[i])).exec(db).await?;
let _ = doc2_doc::Entity::delete_many().filter(doc2_doc::Column::Did.eq(dids[i])).exec(db).await?;
let _ = doc2_doc::Entity
::delete_many()
.filter(doc2_doc::Column::ParentId.eq(dids[i]))
.exec(db).await?;
let _ = doc2_doc::Entity
::delete_many()
.filter(doc2_doc::Column::Did.eq(dids[i]))
.exec(db).await?;
i += 1;
}
crate::service::kb_info::Mutation::remove_docs(&db, dids,None).await
crate::service::kb_info::Mutation::remove_docs(&db, dids, None).await
}
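    // A minimal usage sketch, not part of this commit: because delete_doc_info above
    // keeps appending the child ids it finds in doc2_doc while walking `dids`, passing
    // a single folder id soft-deletes the whole subtree and detaches it from any
    // knowledge base. The helper name is hypothetical.
    pub async fn delete_folder_tree(db: &DbConn, folder_did: i64) -> Result<UpdateResult, DbErr> {
        Mutation::delete_doc_info(db, &vec![folder_did]).await
    }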
pub async fn rename(db: &DbConn, doc_id: i64, name: &String) -> Result<doc_info::Model, DbErr> {
let mut doc: doc_info::ActiveModel = Entity::find_by_id(doc_id)
.one(db)
.await?
.one(db).await?
.ok_or(DbErr::Custom(format!("Can't find doc:{}", doc_id)))
.map(Into::into)?;
doc.updated_at = Set(now());
View File
@ -1,13 +1,24 @@
use chrono::{Local, FixedOffset, Utc};
use chrono::{ Local, FixedOffset, Utc };
use migration::Expr;
use sea_orm::{ActiveModelTrait, ColumnTrait, DbConn, DbErr, DeleteResult, EntityTrait, PaginatorTrait, QueryFilter, QueryOrder, UpdateResult};
use sea_orm::{
ActiveModelTrait,
ColumnTrait,
DbConn,
DbErr,
DeleteResult,
EntityTrait,
PaginatorTrait,
QueryFilter,
QueryOrder,
UpdateResult,
};
use sea_orm::ActiveValue::Set;
use crate::entity::kb_info;
use crate::entity::kb2_doc;
use crate::entity::kb_info::Entity;
fn now()->chrono::DateTime<FixedOffset>{
Utc::now().with_timezone(&FixedOffset::east_opt(3600*8).unwrap())
fn now() -> chrono::DateTime<FixedOffset> {
Utc::now().with_timezone(&FixedOffset::east_opt(3600 * 8).unwrap())
}
pub struct Query;
@ -21,22 +32,25 @@ impl Query {
}
pub async fn find_kb_infos_by_uid(db: &DbConn, uid: i64) -> Result<Vec<kb_info::Model>, DbErr> {
Entity::find()
.filter(kb_info::Column::Uid.eq(uid))
.all(db)
.await
Entity::find().filter(kb_info::Column::Uid.eq(uid)).all(db).await
}
pub async fn find_kb_infos_by_name(db: &DbConn, name: String) -> Result<Vec<kb_info::Model>, DbErr> {
Entity::find()
.filter(kb_info::Column::KbName.eq(name))
.all(db)
.await
pub async fn find_kb_infos_by_name(
db: &DbConn,
name: String
) -> Result<Vec<kb_info::Model>, DbErr> {
Entity::find().filter(kb_info::Column::KbName.eq(name)).all(db).await
}
pub async fn find_kb_by_docs(db: &DbConn, doc_ids: Vec<i64>) -> Result<Vec<kb_info::Model>, DbErr> {
pub async fn find_kb_by_docs(
db: &DbConn,
doc_ids: Vec<i64>
) -> Result<Vec<kb_info::Model>, DbErr> {
let mut kbids = Vec::<i64>::new();
for k in kb2_doc::Entity::find().filter(kb2_doc::Column::Did.is_in(doc_ids)).all(db).await?{
for k in kb2_doc::Entity
::find()
.filter(kb2_doc::Column::Did.is_in(doc_ids))
.all(db).await? {
kbids.push(k.kb_id);
}
Entity::find().filter(kb_info::Column::KbId.is_in(kbids)).all(db).await
@ -45,7 +59,7 @@ impl Query {
pub async fn find_kb_infos_in_page(
db: &DbConn,
page: u64,
posts_per_page: u64,
posts_per_page: u64
) -> Result<(Vec<kb_info::Model>, u64), DbErr> {
// Setup paginator
let paginator = Entity::find()
@ -63,44 +77,38 @@ pub struct Mutation;
impl Mutation {
pub async fn create_kb_info(
db: &DbConn,
form_data: kb_info::Model,
form_data: kb_info::Model
) -> Result<kb_info::ActiveModel, DbErr> {
kb_info::ActiveModel {
(kb_info::ActiveModel {
kb_id: Default::default(),
uid: Set(form_data.uid.to_owned()),
kb_name: Set(form_data.kb_name.to_owned()),
icon: Set(form_data.icon.to_owned()),
created_at: Set(now()),
updated_at: Set(now()),
is_deleted:Default::default()
}
.save(db)
.await
is_deleted: Default::default(),
}).save(db).await
}
pub async fn add_docs(
db: &DbConn,
kb_id: i64,
doc_ids: Vec<i64>
)-> Result<(), DbErr> {
for did in doc_ids{
let res = kb2_doc::Entity::find()
pub async fn add_docs(db: &DbConn, kb_id: i64, doc_ids: Vec<i64>) -> Result<(), DbErr> {
for did in doc_ids {
let res = kb2_doc::Entity
::find()
.filter(kb2_doc::Column::KbId.eq(kb_id))
.filter(kb2_doc::Column::Did.eq(did))
.all(db)
.await?;
if res.len()>0{continue;}
let _ = kb2_doc::ActiveModel {
.all(db).await?;
if res.len() > 0 {
continue;
}
let _ = (kb2_doc::ActiveModel {
id: Default::default(),
kb_id: Set(kb_id),
did: Set(did),
kb_progress: Set(0.0),
kb_progress_msg: Set("".to_owned()),
updated_at: Set(now()),
is_deleted:Default::default()
}
.save(db)
.await?;
is_deleted: Default::default(),
}).save(db).await?;
}
Ok(())
@ -110,17 +118,16 @@ impl Mutation {
db: &DbConn,
doc_ids: Vec<i64>,
kb_id: Option<i64>
)-> Result<UpdateResult, DbErr> {
let update = kb2_doc::Entity::update_many()
) -> Result<UpdateResult, DbErr> {
let update = kb2_doc::Entity
::update_many()
.col_expr(kb2_doc::Column::IsDeleted, Expr::value(true))
.col_expr(kb2_doc::Column::KbProgress, Expr::value(0))
.col_expr(kb2_doc::Column::KbProgressMsg, Expr::value(""))
.filter(kb2_doc::Column::Did.is_in(doc_ids));
if let Some(kbid) = kb_id{
update.filter(kb2_doc::Column::KbId.eq(kbid))
.exec(db)
.await
}else{
if let Some(kbid) = kb_id {
update.filter(kb2_doc::Column::KbId.eq(kbid)).exec(db).await
} else {
update.exec(db).await
}
}
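    // A minimal usage sketch, not part of this commit: Some(kb_id) limits the soft
    // delete in remove_docs above to one knowledge base, while None detaches the
    // documents everywhere. The helper name is hypothetical; both calls are the
    // functions defined in this impl.
    pub async fn move_docs_between_kbs(
        db: &DbConn,
        from_kb: i64,
        to_kb: i64,
        doc_ids: Vec<i64>
    ) -> Result<(), DbErr> {
        // Detach only from the source knowledge base...
        Mutation::remove_docs(db, doc_ids.clone(), Some(from_kb)).await?;
        // ...then register the same documents under the destination.
        Mutation::add_docs(db, to_kb, doc_ids).await
    }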
@ -128,31 +135,27 @@ impl Mutation {
pub async fn update_kb_info_by_id(
db: &DbConn,
id: i64,
form_data: kb_info::Model,
form_data: kb_info::Model
) -> Result<kb_info::Model, DbErr> {
let kb_info: kb_info::ActiveModel = Entity::find_by_id(id)
.one(db)
.await?
.one(db).await?
.ok_or(DbErr::Custom("Cannot find.".to_owned()))
.map(Into::into)?;
kb_info::ActiveModel {
(kb_info::ActiveModel {
kb_id: kb_info.kb_id,
uid: kb_info.uid,
kb_name: Set(form_data.kb_name.to_owned()),
icon: Set(form_data.icon.to_owned()),
created_at: kb_info.created_at,
updated_at: Set(now()),
is_deleted: Default::default()
}
.update(db)
.await
is_deleted: Default::default(),
}).update(db).await
}
pub async fn delete_kb_info(db: &DbConn, kb_id: i64) -> Result<DeleteResult, DbErr> {
let kb: kb_info::ActiveModel = Entity::find_by_id(kb_id)
.one(db)
.await?
.one(db).await?
.ok_or(DbErr::Custom("Cannot find.".to_owned()))
.map(Into::into)?;
View File
@ -1,30 +1,40 @@
use chrono::{FixedOffset, Utc};
use sea_orm::{ActiveModelTrait, DbConn, DbErr, DeleteResult, EntityTrait, PaginatorTrait, QueryOrder, ColumnTrait, QueryFilter};
use sea_orm::ActiveValue::{Set, NotSet};
use chrono::{ FixedOffset, Utc };
use sea_orm::{
ActiveModelTrait,
DbConn,
DbErr,
DeleteResult,
EntityTrait,
PaginatorTrait,
QueryOrder,
ColumnTrait,
QueryFilter,
};
use sea_orm::ActiveValue::{ Set, NotSet };
use crate::entity::tag_info;
use crate::entity::tag_info::Entity;
fn now()->chrono::DateTime<FixedOffset>{
Utc::now().with_timezone(&FixedOffset::east_opt(3600*8).unwrap())
fn now() -> chrono::DateTime<FixedOffset> {
Utc::now().with_timezone(&FixedOffset::east_opt(3600 * 8).unwrap())
}
pub struct Query;
impl Query {
pub async fn find_tag_info_by_id(id: i64, db: &DbConn) -> Result<Option<tag_info::Model>, DbErr> {
pub async fn find_tag_info_by_id(
id: i64,
db: &DbConn
) -> Result<Option<tag_info::Model>, DbErr> {
Entity::find_by_id(id).one(db).await
}
pub async fn find_tags_by_uid(uid:i64, db: &DbConn) -> Result<Vec<tag_info::Model>, DbErr> {
Entity::find()
.filter(tag_info::Column::Uid.eq(uid))
.all(db)
.await
pub async fn find_tags_by_uid(uid: i64, db: &DbConn) -> Result<Vec<tag_info::Model>, DbErr> {
Entity::find().filter(tag_info::Column::Uid.eq(uid)).all(db).await
}
pub async fn find_tag_infos_in_page(
db: &DbConn,
page: u64,
posts_per_page: u64,
posts_per_page: u64
) -> Result<(Vec<tag_info::Model>, u64), DbErr> {
// Setup paginator
let paginator = Entity::find()
@ -42,9 +52,9 @@ pub struct Mutation;
impl Mutation {
pub async fn create_tag(
db: &DbConn,
form_data: tag_info::Model,
form_data: tag_info::Model
) -> Result<tag_info::ActiveModel, DbErr> {
tag_info::ActiveModel {
(tag_info::ActiveModel {
tid: Default::default(),
uid: Set(form_data.uid.to_owned()),
tag_name: Set(form_data.tag_name.to_owned()),
@ -53,27 +63,24 @@ impl Mutation {
icon: Set(form_data.icon.to_owned()),
folder_id: match form_data.folder_id {
0 => NotSet,
_ => Set(form_data.folder_id.to_owned())
_ => Set(form_data.folder_id.to_owned()),
},
created_at: Set(now()),
updated_at: Set(now()),
}
.save(db)
.await
}).save(db).await
}
pub async fn update_tag_by_id(
db: &DbConn,
id: i64,
form_data: tag_info::Model,
form_data: tag_info::Model
) -> Result<tag_info::Model, DbErr> {
let tag: tag_info::ActiveModel = Entity::find_by_id(id)
.one(db)
.await?
.one(db).await?
.ok_or(DbErr::Custom("Cannot find tag.".to_owned()))
.map(Into::into)?;
tag_info::ActiveModel {
(tag_info::ActiveModel {
tid: tag.tid,
uid: tag.uid,
tag_name: Set(form_data.tag_name.to_owned()),
@ -83,15 +90,12 @@ impl Mutation {
folder_id: Set(form_data.folder_id.to_owned()),
created_at: Default::default(),
updated_at: Set(now()),
}
.update(db)
.await
}).update(db).await
}
pub async fn delete_tag(db: &DbConn, tid: i64) -> Result<DeleteResult, DbErr> {
let tag: tag_info::ActiveModel = Entity::find_by_id(tid)
.one(db)
.await?
.one(db).await?
.ok_or(DbErr::Custom("Cannot find tag.".to_owned()))
.map(Into::into)?;
View File
@ -1,39 +1,56 @@
use chrono::{FixedOffset, Utc};
use chrono::{ FixedOffset, Utc };
use migration::Expr;
use sea_orm::{ActiveModelTrait, ColumnTrait, DbConn, DbErr, DeleteResult, EntityTrait, PaginatorTrait, QueryFilter, QueryOrder, UpdateResult};
use sea_orm::{
ActiveModelTrait,
ColumnTrait,
DbConn,
DbErr,
DeleteResult,
EntityTrait,
PaginatorTrait,
QueryFilter,
QueryOrder,
UpdateResult,
};
use sea_orm::ActiveValue::Set;
use crate::entity::user_info;
use crate::entity::user_info::Entity;
fn now()->chrono::DateTime<FixedOffset>{
Utc::now().with_timezone(&FixedOffset::east_opt(3600*8).unwrap())
fn now() -> chrono::DateTime<FixedOffset> {
Utc::now().with_timezone(&FixedOffset::east_opt(3600 * 8).unwrap())
}
pub struct Query;
impl Query {
pub async fn find_user_info_by_id(db: &DbConn, id: i64) -> Result<Option<user_info::Model>, DbErr> {
pub async fn find_user_info_by_id(
db: &DbConn,
id: i64
) -> Result<Option<user_info::Model>, DbErr> {
Entity::find_by_id(id).one(db).await
}
pub async fn login(db: &DbConn, email: &str, password: &str) -> Result<Option<user_info::Model>, DbErr> {
pub async fn login(
db: &DbConn,
email: &str,
password: &str
) -> Result<Option<user_info::Model>, DbErr> {
Entity::find()
.filter(user_info::Column::Email.eq(email))
.filter(user_info::Column::Password.eq(password))
.one(db)
.await
.one(db).await
}
pub async fn find_user_infos(db: &DbConn, email:&String) -> Result<Option<user_info::Model>, DbErr> {
Entity::find()
.filter(user_info::Column::Email.eq(email))
.one(db)
.await
pub async fn find_user_infos(
db: &DbConn,
email: &String
) -> Result<Option<user_info::Model>, DbErr> {
Entity::find().filter(user_info::Column::Email.eq(email)).one(db).await
}
pub async fn find_user_infos_in_page(
db: &DbConn,
page: u64,
posts_per_page: u64,
posts_per_page: u64
) -> Result<(Vec<user_info::Model>, u64), DbErr> {
// Setup paginator
let paginator = Entity::find()
@ -51,9 +68,9 @@ pub struct Mutation;
impl Mutation {
pub async fn create_user(
db: &DbConn,
form_data: &user_info::Model,
form_data: &user_info::Model
) -> Result<user_info::ActiveModel, DbErr> {
user_info::ActiveModel {
(user_info::ActiveModel {
uid: Default::default(),
email: Set(form_data.email.to_owned()),
nickname: Set(form_data.nickname.to_owned()),
@ -65,22 +82,19 @@ impl Mutation {
last_login_at: Set(now()),
created_at: Set(now()),
updated_at: Set(now()),
}
.save(db)
.await
}).save(db).await
}
pub async fn update_user_by_id(
db: &DbConn,
form_data: &user_info::Model,
form_data: &user_info::Model
) -> Result<user_info::Model, DbErr> {
let usr: user_info::ActiveModel = Entity::find_by_id(form_data.uid)
.one(db)
.await?
.one(db).await?
.ok_or(DbErr::Custom("Cannot find user.".to_owned()))
.map(Into::into)?;
user_info::ActiveModel {
(user_info::ActiveModel {
uid: Set(form_data.uid),
email: Set(form_data.email.to_owned()),
nickname: Set(form_data.nickname.to_owned()),
@ -91,27 +105,20 @@ impl Mutation {
password: Set(form_data.password.to_owned()),
updated_at: Set(now()),
last_login_at: usr.last_login_at,
created_at:usr.created_at,
}
.update(db)
.await
created_at: usr.created_at,
}).update(db).await
}
pub async fn update_login_status(
uid: i64,
db: &DbConn
) -> Result<UpdateResult, DbErr> {
pub async fn update_login_status(uid: i64, db: &DbConn) -> Result<UpdateResult, DbErr> {
Entity::update_many()
.col_expr(user_info::Column::LastLoginAt, Expr::value(now()))
.filter(user_info::Column::Uid.eq(uid))
.exec(db)
.await
.exec(db).await
}
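    // A minimal sketch, not part of this commit, of the expected login flow: verify the
    // credentials with Query::login above, then stamp last_login_at through
    // update_login_status. The helper name is hypothetical. Note that login compares
    // the stored password string directly, so any hashing has to happen before this call.
    pub async fn login_and_touch(
        db: &DbConn,
        email: &str,
        password: &str
    ) -> Result<Option<user_info::Model>, DbErr> {
        let usr = Query::login(db, email, password).await?;
        if let Some(u) = &usr {
            Mutation::update_login_status(u.uid, db).await?;
        }
        Ok(usr)
    }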
pub async fn delete_user(db: &DbConn, tid: i64) -> Result<DeleteResult, DbErr> {
let tag: user_info::ActiveModel = Entity::find_by_id(tid)
.one(db)
.await?
.one(db).await?
.ok_or(DbErr::Custom("Cannot find tag.".to_owned()))
.map(Into::into)?;