commit e039b4ec54
Merge branch 'open-webui:dev' into dev
@@ -170,7 +170,7 @@ docker run --rm --volume /var/run/docker.sock:/var/run/docker.sock containrrr/wa
 
 In the last part of the command, replace `open-webui` with your container name if it is different.
 
-Check our Migration Guide available in our [Open WebUI Documentation](https://docs.openwebui.com/migration/).
+Check our Migration Guide available in our [Open WebUI Documentation](https://docs.openwebui.com/tutorials/migration/).
 
 ### Using the Dev Branch 🌙
 
@@ -27,7 +27,6 @@ from fastapi.responses import FileResponse, StreamingResponse
 from pydantic import BaseModel
 from starlette.background import BackgroundTask
 
-
 from open_webui.utils.payload import (
     apply_model_params_to_body_openai,
     apply_model_system_prompt_to_body,
@@ -47,7 +46,6 @@ app.add_middleware(
     allow_headers=["*"],
 )
 
-
 app.state.config = AppConfig()
 
 app.state.config.ENABLE_MODEL_FILTER = ENABLE_MODEL_FILTER
@@ -407,20 +405,25 @@ async def generate_chat_completion(
 
     url = app.state.config.OPENAI_API_BASE_URLS[idx]
     key = app.state.config.OPENAI_API_KEYS[idx]
+    is_o1 = payload["model"].lower().startswith("o1-")
 
     # Change max_completion_tokens to max_tokens (Backward compatible)
-    if "api.openai.com" not in url and not payload["model"].lower().startswith("o1-"):
+    if "api.openai.com" not in url and not is_o1:
         if "max_completion_tokens" in payload:
             # Remove "max_completion_tokens" from the payload
             payload["max_tokens"] = payload["max_completion_tokens"]
             del payload["max_completion_tokens"]
     else:
-        if payload["model"].lower().startswith("o1-") and "max_tokens" in payload:
+        if is_o1 and "max_tokens" in payload:
             payload["max_completion_tokens"] = payload["max_tokens"]
             del payload["max_tokens"]
         if "max_tokens" in payload and "max_completion_tokens" in payload:
             del payload["max_tokens"]
 
+    # Fix: O1 does not support the "system" parameter, Modify "system" to "user"
+    if is_o1 and payload["messages"][0]["role"] == "system":
+        payload["messages"][0]["role"] = "user"
+
     # Convert the modified body back to JSON
     payload = json.dumps(payload)
 
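The remapping above is easier to follow in isolation. A minimal sketch of the same logic as a standalone function, assuming a plain-dict payload (`remap_token_params` is a name invented for illustration, not the actual Open WebUI handler):

```python
# Standalone sketch of the token-parameter remapping in the hunk above.
def remap_token_params(payload: dict, url: str) -> dict:
    is_o1 = payload["model"].lower().startswith("o1-")

    if "api.openai.com" not in url and not is_o1:
        # Non-OpenAI backends only understand "max_tokens".
        if "max_completion_tokens" in payload:
            payload["max_tokens"] = payload.pop("max_completion_tokens")
    else:
        # o1 models accept only "max_completion_tokens".
        if is_o1 and "max_tokens" in payload:
            payload["max_completion_tokens"] = payload.pop("max_tokens")
        if "max_tokens" in payload and "max_completion_tokens" in payload:
            del payload["max_tokens"]

    # o1 models reject the "system" role, so it is downgraded to "user".
    if is_o1 and payload["messages"][0]["role"] == "system":
        payload["messages"][0]["role"] = "user"

    return payload


payload = remap_token_params(
    {
        "model": "o1-mini",
        "max_tokens": 256,
        "messages": [{"role": "system", "content": "Be terse."}],
    },
    "https://api.openai.com/v1",
)
assert payload["max_completion_tokens"] == 256
assert payload["messages"][0]["role"] == "user"
```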
backend/open_webui/apps/retrieval/loader/main.py (new file, 190 lines)
@@ -0,0 +1,190 @@
+import requests
+import logging
+import ftfy
+
+from langchain_community.document_loaders import (
+    BSHTMLLoader,
+    CSVLoader,
+    Docx2txtLoader,
+    OutlookMessageLoader,
+    PyPDFLoader,
+    TextLoader,
+    UnstructuredEPubLoader,
+    UnstructuredExcelLoader,
+    UnstructuredMarkdownLoader,
+    UnstructuredPowerPointLoader,
+    UnstructuredRSTLoader,
+    UnstructuredXMLLoader,
+    YoutubeLoader,
+)
+from langchain_core.documents import Document
+from open_webui.env import SRC_LOG_LEVELS
+
+log = logging.getLogger(__name__)
+log.setLevel(SRC_LOG_LEVELS["RAG"])
+
+known_source_ext = [
+    "go",
+    "py",
+    "java",
+    "sh",
+    "bat",
+    "ps1",
+    "cmd",
+    "js",
+    "ts",
+    "css",
+    "cpp",
+    "hpp",
+    "h",
+    "c",
+    "cs",
+    "sql",
+    "log",
+    "ini",
+    "pl",
+    "pm",
+    "r",
+    "dart",
+    "dockerfile",
+    "env",
+    "php",
+    "hs",
+    "hsc",
+    "lua",
+    "nginxconf",
+    "conf",
+    "m",
+    "mm",
+    "plsql",
+    "perl",
+    "rb",
+    "rs",
+    "db2",
+    "scala",
+    "bash",
+    "swift",
+    "vue",
+    "svelte",
+    "msg",
+    "ex",
+    "exs",
+    "erl",
+    "tsx",
+    "jsx",
+    "hs",
+    "lhs",
+]
+
+
+class TikaLoader:
+    def __init__(self, url, file_path, mime_type=None):
+        self.url = url
+        self.file_path = file_path
+        self.mime_type = mime_type
+
+    def load(self) -> list[Document]:
+        with open(self.file_path, "rb") as f:
+            data = f.read()
+
+        if self.mime_type is not None:
+            headers = {"Content-Type": self.mime_type}
+        else:
+            headers = {}
+
+        endpoint = self.url
+        if not endpoint.endswith("/"):
+            endpoint += "/"
+        endpoint += "tika/text"
+
+        r = requests.put(endpoint, data=data, headers=headers)
+
+        if r.ok:
+            raw_metadata = r.json()
+            text = raw_metadata.get("X-TIKA:content", "<No text content found>")
+
+            if "Content-Type" in raw_metadata:
+                headers["Content-Type"] = raw_metadata["Content-Type"]
+
+            log.info("Tika extracted text: %s", text)
+
+            return [Document(page_content=text, metadata=headers)]
+        else:
+            raise Exception(f"Error calling Tika: {r.reason}")
+
+
+class Loader:
+    def __init__(self, engine: str = "", **kwargs):
+        self.engine = engine
+        self.kwargs = kwargs
+
+    def load(
+        self, filename: str, file_content_type: str, file_path: str
+    ) -> list[Document]:
+        loader = self._get_loader(filename, file_content_type, file_path)
+        docs = loader.load()
+
+        return [
+            Document(
+                page_content=ftfy.fix_text(doc.page_content), metadata=doc.metadata
+            )
+            for doc in docs
+        ]
+
+    def _get_loader(self, filename: str, file_content_type: str, file_path: str):
+        file_ext = filename.split(".")[-1].lower()
+
+        if self.engine == "tika" and self.kwargs.get("TIKA_SERVER_URL"):
+            if file_ext in known_source_ext or (
+                file_content_type and file_content_type.find("text/") >= 0
+            ):
+                loader = TextLoader(file_path, autodetect_encoding=True)
+            else:
+                loader = TikaLoader(
+                    url=self.kwargs.get("TIKA_SERVER_URL"),
+                    file_path=file_path,
+                    mime_type=file_content_type,
+                )
+        else:
+            if file_ext == "pdf":
+                loader = PyPDFLoader(
+                    file_path, extract_images=self.kwargs.get("PDF_EXTRACT_IMAGES")
+                )
+            elif file_ext == "csv":
+                loader = CSVLoader(file_path)
+            elif file_ext == "rst":
+                loader = UnstructuredRSTLoader(file_path, mode="elements")
+            elif file_ext == "xml":
+                loader = UnstructuredXMLLoader(file_path)
+            elif file_ext in ["htm", "html"]:
+                loader = BSHTMLLoader(file_path, open_encoding="unicode_escape")
+            elif file_ext == "md":
+                loader = UnstructuredMarkdownLoader(file_path)
+            elif file_content_type == "application/epub+zip":
+                loader = UnstructuredEPubLoader(file_path)
+            elif (
+                file_content_type
+                == "application/vnd.openxmlformats-officedocument.wordprocessingml.document"
+                or file_ext == "docx"
+            ):
+                loader = Docx2txtLoader(file_path)
+            elif file_content_type in [
+                "application/vnd.ms-excel",
+                "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet",
+            ] or file_ext in ["xls", "xlsx"]:
+                loader = UnstructuredExcelLoader(file_path)
+            elif file_content_type in [
+                "application/vnd.ms-powerpoint",
+                "application/vnd.openxmlformats-officedocument.presentationml.presentation",
+            ] or file_ext in ["ppt", "pptx"]:
+                loader = UnstructuredPowerPointLoader(file_path)
+            elif file_ext == "msg":
+                loader = OutlookMessageLoader(file_path)
+            elif file_ext in known_source_ext or (
+                file_content_type and file_content_type.find("text/") >= 0
+            ):
+                loader = TextLoader(file_path, autodetect_encoding=True)
+            else:
+                loader = TextLoader(file_path, autodetect_encoding=True)
+
+        return loader
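A short usage sketch for the new `Loader` (paths and the Tika URL are illustrative; `TIKA_SERVER_URL` and `PDF_EXTRACT_IMAGES` are the kwargs the class reads above):

```python
from open_webui.apps.retrieval.loader.main import Loader

# Default engine: pick a langchain loader from the extension / content type.
docs = Loader(PDF_EXTRACT_IMAGES=False).load(
    "report.pdf", "application/pdf", "/tmp/report.pdf"
)

# Tika engine: source/plain-text files still use TextLoader; everything
# else is sent via HTTP PUT to <TIKA_SERVER_URL>/tika/text.
docs = Loader(engine="tika", TIKA_SERVER_URL="http://localhost:9998").load(
    "report.pdf", "application/pdf", "/tmp/report.pdf"
)

print(docs[0].page_content[:200])  # text is cleaned with ftfy.fix_text
```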
(File diff suppressed because it is too large.)
backend/open_webui/apps/retrieval/model/colbert.py (new file, 81 lines)
@@ -0,0 +1,81 @@
+import os
+import torch
+import numpy as np
+from colbert.infra import ColBERTConfig
+from colbert.modeling.checkpoint import Checkpoint
+
+
+class ColBERT:
+    def __init__(self, name, **kwargs) -> None:
+        print("ColBERT: Loading model", name)
+        self.device = "cuda" if torch.cuda.is_available() else "cpu"
+
+        DOCKER = kwargs.get("env") == "docker"
+        if DOCKER:
+            # This is a workaround for the issue with the docker container
+            # where the torch extension is not loaded properly
+            # and the following error is thrown:
+            # /root/.cache/torch_extensions/py311_cpu/segmented_maxsim_cpp/segmented_maxsim_cpp.so: cannot open shared object file: No such file or directory
+
+            lock_file = (
+                "/root/.cache/torch_extensions/py311_cpu/segmented_maxsim_cpp/lock"
+            )
+            if os.path.exists(lock_file):
+                os.remove(lock_file)
+
+        self.ckpt = Checkpoint(
+            name,
+            colbert_config=ColBERTConfig(model_name=name),
+        ).to(self.device)
+        pass
+
+    def calculate_similarity_scores(self, query_embeddings, document_embeddings):
+
+        query_embeddings = query_embeddings.to(self.device)
+        document_embeddings = document_embeddings.to(self.device)
+
+        # Validate dimensions to ensure compatibility
+        if query_embeddings.dim() != 3:
+            raise ValueError(
+                f"Expected query embeddings to have 3 dimensions, but got {query_embeddings.dim()}."
+            )
+        if document_embeddings.dim() != 3:
+            raise ValueError(
+                f"Expected document embeddings to have 3 dimensions, but got {document_embeddings.dim()}."
+            )
+        if query_embeddings.size(0) not in [1, document_embeddings.size(0)]:
+            raise ValueError(
+                "There should be either one query or queries equal to the number of documents."
+            )
+
+        # Transpose the query embeddings to align for matrix multiplication
+        transposed_query_embeddings = query_embeddings.permute(0, 2, 1)
+        # Compute similarity scores using batch matrix multiplication
+        computed_scores = torch.matmul(document_embeddings, transposed_query_embeddings)
+        # Apply max pooling to extract the highest semantic similarity across each document's sequence
+        maximum_scores = torch.max(computed_scores, dim=1).values
+
+        # Sum up the maximum scores across features to get the overall document relevance scores
+        final_scores = maximum_scores.sum(dim=1)
+
+        normalized_scores = torch.softmax(final_scores, dim=0)
+
+        return normalized_scores.detach().cpu().numpy().astype(np.float32)
+
+    def predict(self, sentences):
+
+        query = sentences[0][0]
+        docs = [i[1] for i in sentences]
+
+        # Embedding the documents
+        embedded_docs = self.ckpt.docFromText(docs, bsize=32)[0]
+        # Embedding the queries
+        embedded_queries = self.ckpt.queryFromText([query], bsize=32)
+        embedded_query = embedded_queries[0]
+
+        # Calculate retrieval scores for the query against all documents
+        scores = self.calculate_similarity_scores(
+            embedded_query.unsqueeze(0), embedded_docs
+        )
+
+        return scores
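`predict()` follows the sentence-transformers CrossEncoder convention of scoring (query, document) pairs, with one query shared across all pairs. A hedged usage sketch (the checkpoint name is illustrative; `colbert-ai` and `torch` must be installed):

```python
query = "what is open webui"
pairs = [
    (query, "Open WebUI is a self-hosted web interface for LLMs."),
    (query, "Milvus is a vector database."),
    (query, "ColBERT ranks documents by late interaction (MaxSim) scoring."),
]

ranker = ColBERT("colbert-ir/colbertv2.0")  # illustrative checkpoint name
scores = ranker.predict(pairs)  # softmax-normalised float32 array, one per doc
best_doc = pairs[scores.argmax()][1]
```

The score itself is the classic late-interaction recipe: for every query token, take the maximum similarity over all document tokens (the `torch.max(..., dim=1)` above), then sum those maxima over the query tokens.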
@@ -15,7 +15,7 @@ from open_webui.apps.ollama.main import (
     GenerateEmbeddingsForm,
     generate_ollama_embeddings,
 )
-from open_webui.apps.rag.vector.connector import VECTOR_DB_CLIENT
+from open_webui.apps.retrieval.vector.connector import VECTOR_DB_CLIENT
 from open_webui.utils.misc import get_last_user_message
 
 from open_webui.env import SRC_LOG_LEVELS
@@ -1,5 +1,5 @@
-from open_webui.apps.rag.vector.dbs.chroma import ChromaClient
-from open_webui.apps.rag.vector.dbs.milvus import MilvusClient
+from open_webui.apps.retrieval.vector.dbs.chroma import ChromaClient
+from open_webui.apps.retrieval.vector.dbs.milvus import MilvusClient
 
 
 from open_webui.config import VECTOR_DB
@@ -4,7 +4,7 @@ from chromadb.utils.batch_utils import create_batches
 
 from typing import Optional
 
-from open_webui.apps.rag.vector.main import VectorItem, SearchResult, GetResult
+from open_webui.apps.retrieval.vector.main import VectorItem, SearchResult, GetResult
 from open_webui.config import (
     CHROMA_DATA_PATH,
     CHROMA_HTTP_HOST,
@@ -4,7 +4,7 @@ import json
 
 from typing import Optional
 
-from open_webui.apps.rag.vector.main import VectorItem, SearchResult, GetResult
+from open_webui.apps.retrieval.vector.main import VectorItem, SearchResult, GetResult
 from open_webui.config import (
     MILVUS_URI,
 )
@@ -98,7 +98,10 @@ class MilvusClient:
 
         index_params = self.client.prepare_index_params()
         index_params.add_index(
-            field_name="vector", index_type="HNSW", metric_type="COSINE", params={}
+            field_name="vector",
+            index_type="HNSW",
+            metric_type="COSINE",
+            params={"M": 16, "efConstruction": 100},
         )
 
         self.client.create_collection(
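The hunk above pins explicit HNSW build parameters instead of an empty `params` dict: `M` caps the number of graph edges per node and `efConstruction` the candidate-list width used while building, trading index size and build time for recall. A hedged sketch of the same call pattern outside the class (pymilvus `MilvusClient` API as used in the diff; the URI is a placeholder):

```python
from pymilvus import MilvusClient

client = MilvusClient("milvus_demo.db")  # placeholder local URI

index_params = client.prepare_index_params()
index_params.add_index(
    field_name="vector",
    index_type="HNSW",
    metric_type="COSINE",
    # M: max out-degree per graph node; efConstruction: build-time search width.
    params={"M": 16, "efConstruction": 100},
)
```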
@@ -2,7 +2,7 @@ import logging
 from typing import Optional
 
 import requests
-from open_webui.apps.rag.search.main import SearchResult, get_filtered_results
+from open_webui.apps.retrieval.web.main import SearchResult, get_filtered_results
 from open_webui.env import SRC_LOG_LEVELS
 
 log = logging.getLogger(__name__)
@@ -1,7 +1,7 @@
 import logging
 from typing import Optional
 
-from open_webui.apps.rag.search.main import SearchResult, get_filtered_results
+from open_webui.apps.retrieval.web.main import SearchResult, get_filtered_results
 from duckduckgo_search import DDGS
 from open_webui.env import SRC_LOG_LEVELS
 
@@ -2,7 +2,7 @@ import logging
 from typing import Optional
 
 import requests
-from open_webui.apps.rag.search.main import SearchResult, get_filtered_results
+from open_webui.apps.retrieval.web.main import SearchResult, get_filtered_results
 from open_webui.env import SRC_LOG_LEVELS
 
 log = logging.getLogger(__name__)
@@ -1,7 +1,7 @@
 import logging
 
 import requests
-from open_webui.apps.rag.search.main import SearchResult
+from open_webui.apps.retrieval.web.main import SearchResult
 from open_webui.env import SRC_LOG_LEVELS
 from yarl import URL
 
@@ -3,7 +3,7 @@ from typing import Optional
 from urllib.parse import urlencode
 
 import requests
-from open_webui.apps.rag.search.main import SearchResult, get_filtered_results
+from open_webui.apps.retrieval.web.main import SearchResult, get_filtered_results
 from open_webui.env import SRC_LOG_LEVELS
 
 log = logging.getLogger(__name__)
@@ -2,7 +2,7 @@ import logging
 from typing import Optional
 
 import requests
-from open_webui.apps.rag.search.main import SearchResult, get_filtered_results
+from open_webui.apps.retrieval.web.main import SearchResult, get_filtered_results
 from open_webui.env import SRC_LOG_LEVELS
 
 log = logging.getLogger(__name__)
@@ -3,7 +3,7 @@ import logging
 from typing import Optional
 
 import requests
-from open_webui.apps.rag.search.main import SearchResult, get_filtered_results
+from open_webui.apps.retrieval.web.main import SearchResult, get_filtered_results
 from open_webui.env import SRC_LOG_LEVELS
 
 log = logging.getLogger(__name__)
@@ -3,7 +3,7 @@ from typing import Optional
 from urllib.parse import urlencode
 
 import requests
-from open_webui.apps.rag.search.main import SearchResult, get_filtered_results
+from open_webui.apps.retrieval.web.main import SearchResult, get_filtered_results
 from open_webui.env import SRC_LOG_LEVELS
 
 log = logging.getLogger(__name__)
@@ -2,7 +2,7 @@ import logging
 from typing import Optional
 
 import requests
-from open_webui.apps.rag.search.main import SearchResult, get_filtered_results
+from open_webui.apps.retrieval.web.main import SearchResult, get_filtered_results
 from open_webui.env import SRC_LOG_LEVELS
 
 log = logging.getLogger(__name__)
@@ -1,7 +1,7 @@
 import logging
 
 import requests
-from open_webui.apps.rag.search.main import SearchResult
+from open_webui.apps.retrieval.web.main import SearchResult
 from open_webui.env import SRC_LOG_LEVELS
 
 log = logging.getLogger(__name__)
backend/open_webui/apps/retrieval/web/utils.py (new file, 97 lines)
@@ -0,0 +1,97 @@
+import socket
+import urllib.parse
+import validators
+from typing import Union, Sequence, Iterator
+
+from langchain_community.document_loaders import (
+    WebBaseLoader,
+)
+from langchain_core.documents import Document
+
+
+from open_webui.constants import ERROR_MESSAGES
+from open_webui.config import ENABLE_RAG_LOCAL_WEB_FETCH
+from open_webui.env import SRC_LOG_LEVELS
+
+import logging
+
+log = logging.getLogger(__name__)
+log.setLevel(SRC_LOG_LEVELS["RAG"])
+
+
+def validate_url(url: Union[str, Sequence[str]]):
+    if isinstance(url, str):
+        if isinstance(validators.url(url), validators.ValidationError):
+            raise ValueError(ERROR_MESSAGES.INVALID_URL)
+        if not ENABLE_RAG_LOCAL_WEB_FETCH:
+            # Local web fetch is disabled, filter out any URLs that resolve to private IP addresses
+            parsed_url = urllib.parse.urlparse(url)
+            # Get IPv4 and IPv6 addresses
+            ipv4_addresses, ipv6_addresses = resolve_hostname(parsed_url.hostname)
+            # Check if any of the resolved addresses are private
+            # This is technically still vulnerable to DNS rebinding attacks, as we don't control WebBaseLoader
+            for ip in ipv4_addresses:
+                if validators.ipv4(ip, private=True):
+                    raise ValueError(ERROR_MESSAGES.INVALID_URL)
+            for ip in ipv6_addresses:
+                if validators.ipv6(ip, private=True):
+                    raise ValueError(ERROR_MESSAGES.INVALID_URL)
+        return True
+    elif isinstance(url, Sequence):
+        return all(validate_url(u) for u in url)
+    else:
+        return False
+
+
+def resolve_hostname(hostname):
+    # Get address information
+    addr_info = socket.getaddrinfo(hostname, None)
+
+    # Extract IP addresses from address information
+    ipv4_addresses = [info[4][0] for info in addr_info if info[0] == socket.AF_INET]
+    ipv6_addresses = [info[4][0] for info in addr_info if info[0] == socket.AF_INET6]
+
+    return ipv4_addresses, ipv6_addresses
+
+
+class SafeWebBaseLoader(WebBaseLoader):
+    """WebBaseLoader with enhanced error handling for URLs."""
+
+    def lazy_load(self) -> Iterator[Document]:
+        """Lazy load text from the url(s) in web_path with error handling."""
+        for path in self.web_paths:
+            try:
+                soup = self._scrape(path, bs_kwargs=self.bs_kwargs)
+                text = soup.get_text(**self.bs_get_text_kwargs)
+
+                # Build metadata
+                metadata = {"source": path}
+                if title := soup.find("title"):
+                    metadata["title"] = title.get_text()
+                if description := soup.find("meta", attrs={"name": "description"}):
+                    metadata["description"] = description.get(
+                        "content", "No description found."
+                    )
+                if html := soup.find("html"):
+                    metadata["language"] = html.get("lang", "No language found.")
+
+                yield Document(page_content=text, metadata=metadata)
+            except Exception as e:
+                # Log the error and continue with the next URL
+                log.error(f"Error loading {path}: {e}")
+
+
+def get_web_loader(
+    url: Union[str, Sequence[str]],
+    verify_ssl: bool = True,
+    requests_per_second: int = 2,
+):
+    # Check if the URL is valid
+    if not validate_url(url):
+        raise ValueError(ERROR_MESSAGES.INVALID_URL)
+    return SafeWebBaseLoader(
+        url,
+        verify_ssl=verify_ssl,
+        requests_per_second=requests_per_second,
+        continue_on_failure=True,
+    )
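The intended call path for the new module, as a short sketch (URLs illustrative): `get_web_loader` gates everything through `validate_url`, and `SafeWebBaseLoader` then logs and skips per-URL scrape failures instead of aborting the whole batch.

```python
from open_webui.apps.retrieval.web.utils import get_web_loader

# Raises ValueError for malformed URLs and, when local web fetch is
# disabled, for hostnames resolving to private IPv4/IPv6 addresses.
loader = get_web_loader(
    ["https://example.com", "https://docs.openwebui.com"],
    verify_ssl=True,
    requests_per_second=2,
)

docs = loader.load()  # failing URLs are logged and skipped, not raised
```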
@@ -97,6 +97,17 @@ class FilesTable:
             for file in db.query(File).filter_by(user_id=user_id).all()
         ]
 
+    def update_files_metadata_by_id(self, id: str, meta: dict) -> Optional[FileModel]:
+        with get_db() as db:
+            try:
+                file = db.query(File).filter_by(id=id).first()
+                file.meta = {**file.meta, **meta}
+                db.commit()
+
+                return FileModel.model_validate(file)
+            except Exception:
+                return None
+
     def delete_file_by_id(self, id: str) -> bool:
         with get_db() as db:
             try:
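`update_files_metadata_by_id` merges rather than replaces: `{**file.meta, **meta}` keeps existing keys and lets incoming keys win. The merge is shallow, which is worth knowing when `meta` carries nested structures:

```python
existing = {"name": "report.pdf", "content": {"text": "old"}}
incoming = {"content": {"text": "new extraction"}}

merged = {**existing, **incoming}
# {'name': 'report.pdf', 'content': {'text': 'new extraction'}}
# Shallow merge: the whole "content" value is replaced, not deep-merged.
```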
@@ -171,6 +171,19 @@ async def get_file_content_by_id(id: str, user=Depends(get_verified_user)):
         )
 
 
+@router.get("/{id}/content/text")
+async def get_file_text_content_by_id(id: str, user=Depends(get_verified_user)):
+    file = Files.get_file_by_id(id)
+
+    if file and (file.user_id == user.id or user.role == "admin"):
+        return {"text": file.meta.get("content", {}).get("text", None)}
+    else:
+        raise HTTPException(
+            status_code=status.HTTP_404_NOT_FOUND,
+            detail=ERROR_MESSAGES.NOT_FOUND,
+        )
+
+
 @router.get("/{id}/content/{file_name}", response_model=Optional[FileModel])
 async def get_file_content_by_id(id: str, user=Depends(get_verified_user)):
     file = Files.get_file_by_id(id)
@@ -4,7 +4,7 @@ import logging
 from typing import Optional
 
 from open_webui.apps.webui.models.memories import Memories, MemoryModel
-from open_webui.apps.rag.vector.connector import VECTOR_DB_CLIENT
+from open_webui.apps.retrieval.vector.connector import VECTOR_DB_CLIENT
 from open_webui.utils.utils import get_verified_user
 from open_webui.env import SRC_LOG_LEVELS
 
@@ -921,7 +921,7 @@ CHROMA_HTTP_SSL = os.environ.get("CHROMA_HTTP_SSL", "false").lower() == "true"
 MILVUS_URI = os.environ.get("MILVUS_URI", f"{DATA_DIR}/vector_db/milvus.db")
 
 ####################################
-# RAG
+# Information Retrieval (RAG)
 ####################################
 
 # RAG Content Extraction
@@ -16,37 +16,45 @@ from typing import Optional
 import aiohttp
 import requests
 
 
-from open_webui.apps.audio.main import app as audio_app
-from open_webui.apps.images.main import app as images_app
-from open_webui.apps.ollama.main import app as ollama_app
 from open_webui.apps.ollama.main import (
-    GenerateChatCompletionForm,
+    app as ollama_app,
+    get_all_models as get_ollama_models,
     generate_chat_completion as generate_ollama_chat_completion,
     generate_openai_chat_completion as generate_ollama_openai_chat_completion,
+    GenerateChatCompletionForm,
 )
-from open_webui.apps.ollama.main import get_all_models as get_ollama_models
-from open_webui.apps.openai.main import app as openai_app
 from open_webui.apps.openai.main import (
+    app as openai_app,
     generate_chat_completion as generate_openai_chat_completion,
+    get_all_models as get_openai_models,
 )
-from open_webui.apps.openai.main import get_all_models as get_openai_models
-from open_webui.apps.rag.main import app as rag_app
-from open_webui.apps.rag.utils import get_rag_context, rag_template
-from open_webui.apps.socket.main import app as socket_app, periodic_usage_pool_cleanup
-from open_webui.apps.socket.main import get_event_call, get_event_emitter
-from open_webui.apps.webui.internal.db import Session
-from open_webui.apps.webui.main import app as webui_app
+from open_webui.apps.retrieval.main import app as retrieval_app
+from open_webui.apps.retrieval.utils import get_rag_context, rag_template
+from open_webui.apps.socket.main import (
+    app as socket_app,
+    periodic_usage_pool_cleanup,
+    get_event_call,
+    get_event_emitter,
+)
+
 from open_webui.apps.webui.main import (
+    app as webui_app,
     generate_function_chat_completion,
     get_pipe_models,
 )
+from open_webui.apps.webui.internal.db import Session
 
 from open_webui.apps.webui.models.auths import Auths
 from open_webui.apps.webui.models.functions import Functions
 from open_webui.apps.webui.models.models import Models
 from open_webui.apps.webui.models.users import UserModel, Users
 
 from open_webui.apps.webui.utils import load_function_module_by_id
 
+from open_webui.apps.audio.main import app as audio_app
+from open_webui.apps.images.main import app as images_app
 
 from authlib.integrations.starlette_client import OAuth
 from authlib.oidc.core import UserInfo
@@ -492,11 +500,11 @@ async def chat_completion_files_handler(body) -> tuple[dict, dict[str, list]]:
         contexts, citations = get_rag_context(
             files=files,
             messages=body["messages"],
-            embedding_function=rag_app.state.EMBEDDING_FUNCTION,
-            k=rag_app.state.config.TOP_K,
-            reranking_function=rag_app.state.sentence_transformer_rf,
-            r=rag_app.state.config.RELEVANCE_THRESHOLD,
-            hybrid_search=rag_app.state.config.ENABLE_RAG_HYBRID_SEARCH,
+            embedding_function=retrieval_app.state.EMBEDDING_FUNCTION,
+            k=retrieval_app.state.config.TOP_K,
+            reranking_function=retrieval_app.state.sentence_transformer_rf,
+            r=retrieval_app.state.config.RELEVANCE_THRESHOLD,
+            hybrid_search=retrieval_app.state.config.ENABLE_RAG_HYBRID_SEARCH,
         )
 
         log.debug(f"rag_contexts: {contexts}, citations: {citations}")
@@ -609,7 +617,7 @@ class ChatCompletionMiddleware(BaseHTTPMiddleware):
             if prompt is None:
                 raise Exception("No user message found")
             if (
-                rag_app.state.config.RELEVANCE_THRESHOLD == 0
+                retrieval_app.state.config.RELEVANCE_THRESHOLD == 0
                 and context_string.strip() == ""
             ):
                 log.debug(
@@ -621,14 +629,14 @@ class ChatCompletionMiddleware(BaseHTTPMiddleware):
             if model["owned_by"] == "ollama":
                 body["messages"] = prepend_to_first_user_message_content(
                     rag_template(
-                        rag_app.state.config.RAG_TEMPLATE, context_string, prompt
+                        retrieval_app.state.config.RAG_TEMPLATE, context_string, prompt
                     ),
                     body["messages"],
                 )
             else:
                 body["messages"] = add_or_update_system_message(
                     rag_template(
-                        rag_app.state.config.RAG_TEMPLATE, context_string, prompt
+                        retrieval_app.state.config.RAG_TEMPLATE, context_string, prompt
                     ),
                     body["messages"],
                 )
@@ -762,10 +770,22 @@ class PipelineMiddleware(BaseHTTPMiddleware):
         # Parse string to JSON
         data = json.loads(body_str) if body_str else {}
 
+        try:
             user = get_current_user(
                 request,
                 get_http_authorization_cred(request.headers["Authorization"]),
             )
+        except KeyError as e:
+            if len(e.args) > 1:
+                return JSONResponse(
+                    status_code=e.args[0],
+                    content={"detail": e.args[1]},
+                )
+            else:
+                return JSONResponse(
+                    status_code=status.HTTP_401_UNAUTHORIZED,
+                    content={"detail": "Not authenticated"},
+                )
 
         try:
             data = filter_pipeline(data, user)
@@ -838,7 +858,7 @@ async def check_url(request: Request, call_next):
 async def update_embedding_function(request: Request, call_next):
     response = await call_next(request)
     if "/embedding/update" in request.url.path:
-        webui_app.state.EMBEDDING_FUNCTION = rag_app.state.EMBEDDING_FUNCTION
+        webui_app.state.EMBEDDING_FUNCTION = retrieval_app.state.EMBEDDING_FUNCTION
     return response
 
 
@@ -866,11 +886,12 @@ app.mount("/openai", openai_app)
 
 app.mount("/images/api/v1", images_app)
 app.mount("/audio/api/v1", audio_app)
-app.mount("/rag/api/v1", rag_app)
+app.mount("/retrieval/api/v1", retrieval_app)
 
 app.mount("/api/v1", webui_app)
 
-webui_app.state.EMBEDDING_FUNCTION = rag_app.state.EMBEDDING_FUNCTION
+webui_app.state.EMBEDDING_FUNCTION = retrieval_app.state.EMBEDDING_FUNCTION
 
 
 async def get_all_models():
@@ -2055,7 +2076,7 @@ async def get_app_config(request: Request):
             "enable_login_form": webui_app.state.config.ENABLE_LOGIN_FORM,
             **(
                 {
-                    "enable_web_search": rag_app.state.config.ENABLE_RAG_WEB_SEARCH,
+                    "enable_web_search": retrieval_app.state.config.ENABLE_RAG_WEB_SEARCH,
                     "enable_image_generation": images_app.state.config.ENABLED,
                     "enable_community_sharing": webui_app.state.config.ENABLE_COMMUNITY_SHARING,
                     "enable_message_rating": webui_app.state.config.ENABLE_MESSAGE_RATING,
@@ -2081,8 +2102,8 @@ async def get_app_config(request: Request):
             },
         },
         "file": {
-            "max_size": rag_app.state.config.FILE_MAX_SIZE,
-            "max_count": rag_app.state.config.FILE_MAX_COUNT,
+            "max_size": retrieval_app.state.config.FILE_MAX_SIZE,
+            "max_count": retrieval_app.state.config.FILE_MAX_COUNT,
         },
         "permissions": {**webui_app.state.config.USER_PERMISSIONS},
     }
@@ -2154,7 +2175,8 @@ async def get_app_changelog():
 @app.get("/api/version/updates")
 async def get_app_latest_release_version():
     try:
-        async with aiohttp.ClientSession(trust_env=True) as session:
+        timeout = aiohttp.ClientTimeout(total=1)
+        async with aiohttp.ClientSession(timeout=timeout, trust_env=True) as session:
             async with session.get(
                 "https://api.github.com/repos/open-webui/open-webui/releases/latest"
             ) as response:
@@ -2164,10 +2186,7 @@ async def get_app_latest_release_version():
 
             return {"current": VERSION, "latest": latest_version[1:]}
     except aiohttp.ClientError:
-        raise HTTPException(
-            status_code=status.HTTP_503_SERVICE_UNAVAILABLE,
-            detail=ERROR_MESSAGES.RATE_LIMIT_EXCEEDED,
-        )
+        return {"current": VERSION, "latest": VERSION}
 
 
 ############################
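The update check now degrades gracefully: a one-second `aiohttp.ClientTimeout` plus returning the running version on `ClientError` means a slow or unreachable GitHub API no longer surfaces a 503. The pattern in isolation, as a sketch (note that an expired timeout raises `asyncio.TimeoutError`, which the diff's `except aiohttp.ClientError` alone would not catch, so it is caught explicitly here):

```python
import asyncio
import aiohttp

async def latest_release(current: str) -> dict:
    try:
        timeout = aiohttp.ClientTimeout(total=1)
        async with aiohttp.ClientSession(timeout=timeout, trust_env=True) as session:
            async with session.get(
                "https://api.github.com/repos/open-webui/open-webui/releases/latest"
            ) as response:
                data = await response.json()
                # Release tags look like "v0.3.x"; strip the leading "v".
                return {"current": current, "latest": data["tag_name"][1:]}
    except (aiohttp.ClientError, asyncio.TimeoutError):
        # Fall back to reporting the running version as the latest.
        return {"current": current, "latest": current}
```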
@@ -46,6 +46,8 @@ sentence-transformers==3.0.1
 colbert-ai==0.2.21
 einops==0.8.0
 
+ftfy==6.2.3
 pypdf==4.3.1
 docx2txt==0.8
 python-pptx==1.0.0
@@ -53,6 +53,8 @@ dependencies = [
     "colbert-ai==0.2.21",
     "einops==0.8.0",
 
+    "ftfy==6.2.3",
     "pypdf==4.3.1",
     "docx2txt==0.8",
     "python-pptx==1.0.0",
@@ -170,284 +170,6 @@ export const updateQuerySettings = async (token: string, settings: QuerySettings
 	return res;
 };
 
-export const processDocToVectorDB = async (token: string, file_id: string) => {
-	let error = null;
-
-	const res = await fetch(`${RAG_API_BASE_URL}/process/doc`, {
-		method: 'POST',
-		headers: {
-			Accept: 'application/json',
-			'Content-Type': 'application/json',
-			authorization: `Bearer ${token}`
-		},
-		body: JSON.stringify({
-			file_id: file_id
-		})
-	})
-		.then(async (res) => {
-			if (!res.ok) throw await res.json();
-			return res.json();
-		})
-		.catch((err) => {
-			error = err.detail;
-			console.log(err);
-			return null;
-		});
-
-	if (error) {
-		throw error;
-	}
-
-	return res;
-};
-
-export const uploadDocToVectorDB = async (token: string, collection_name: string, file: File) => {
-	const data = new FormData();
-	data.append('file', file);
-	data.append('collection_name', collection_name);
-
-	let error = null;
-
-	const res = await fetch(`${RAG_API_BASE_URL}/doc`, {
-		method: 'POST',
-		headers: {
-			Accept: 'application/json',
-			authorization: `Bearer ${token}`
-		},
-		body: data
-	})
-		.then(async (res) => {
-			if (!res.ok) throw await res.json();
-			return res.json();
-		})
-		.catch((err) => {
-			error = err.detail;
-			console.log(err);
-			return null;
-		});
-
-	if (error) {
-		throw error;
-	}
-
-	return res;
-};
-
-export const uploadWebToVectorDB = async (token: string, collection_name: string, url: string) => {
-	let error = null;
-
-	const res = await fetch(`${RAG_API_BASE_URL}/web`, {
-		method: 'POST',
-		headers: {
-			Accept: 'application/json',
-			'Content-Type': 'application/json',
-			authorization: `Bearer ${token}`
-		},
-		body: JSON.stringify({
-			url: url,
-			collection_name: collection_name
-		})
-	})
-		.then(async (res) => {
-			if (!res.ok) throw await res.json();
-			return res.json();
-		})
-		.catch((err) => {
-			error = err.detail;
-			console.log(err);
-			return null;
-		});
-
-	if (error) {
-		throw error;
-	}
-
-	return res;
-};
-
-export const uploadYoutubeTranscriptionToVectorDB = async (token: string, url: string) => {
-	let error = null;
-
-	const res = await fetch(`${RAG_API_BASE_URL}/youtube`, {
-		method: 'POST',
-		headers: {
-			Accept: 'application/json',
-			'Content-Type': 'application/json',
-			authorization: `Bearer ${token}`
-		},
-		body: JSON.stringify({
-			url: url
-		})
-	})
-		.then(async (res) => {
-			if (!res.ok) throw await res.json();
-			return res.json();
-		})
-		.catch((err) => {
-			error = err.detail;
-			console.log(err);
-			return null;
-		});
-
-	if (error) {
-		throw error;
-	}
-
-	return res;
-};
-
-export const queryDoc = async (
-	token: string,
-	collection_name: string,
-	query: string,
-	k: number | null = null
-) => {
-	let error = null;
-
-	const res = await fetch(`${RAG_API_BASE_URL}/query/doc`, {
-		method: 'POST',
-		headers: {
-			Accept: 'application/json',
-			'Content-Type': 'application/json',
-			authorization: `Bearer ${token}`
-		},
-		body: JSON.stringify({
-			collection_name: collection_name,
-			query: query,
-			k: k
-		})
-	})
-		.then(async (res) => {
-			if (!res.ok) throw await res.json();
-			return res.json();
-		})
-		.catch((err) => {
-			error = err.detail;
-			return null;
-		});
-
-	if (error) {
-		throw error;
-	}
-
-	return res;
-};
-
-export const queryCollection = async (
-	token: string,
-	collection_names: string,
-	query: string,
-	k: number | null = null
-) => {
-	let error = null;
-
-	const res = await fetch(`${RAG_API_BASE_URL}/query/collection`, {
-		method: 'POST',
-		headers: {
-			Accept: 'application/json',
-			'Content-Type': 'application/json',
-			authorization: `Bearer ${token}`
-		},
-		body: JSON.stringify({
-			collection_names: collection_names,
-			query: query,
-			k: k
-		})
-	})
-		.then(async (res) => {
-			if (!res.ok) throw await res.json();
-			return res.json();
-		})
-		.catch((err) => {
-			error = err.detail;
-			return null;
-		});
-
-	if (error) {
-		throw error;
-	}
-
-	return res;
-};
-
-export const scanDocs = async (token: string) => {
-	let error = null;
-
-	const res = await fetch(`${RAG_API_BASE_URL}/scan`, {
-		method: 'GET',
-		headers: {
-			Accept: 'application/json',
-			authorization: `Bearer ${token}`
-		}
-	})
-		.then(async (res) => {
-			if (!res.ok) throw await res.json();
-			return res.json();
-		})
-		.catch((err) => {
-			error = err.detail;
-			return null;
-		});
-
-	if (error) {
-		throw error;
-	}
-
-	return res;
-};
-
-export const resetUploadDir = async (token: string) => {
-	let error = null;
-
-	const res = await fetch(`${RAG_API_BASE_URL}/reset/uploads`, {
-		method: 'POST',
-		headers: {
-			Accept: 'application/json',
-			authorization: `Bearer ${token}`
-		}
-	})
-		.then(async (res) => {
-			if (!res.ok) throw await res.json();
-			return res.json();
-		})
-		.catch((err) => {
-			error = err.detail;
-			return null;
-		});
-
-	if (error) {
-		throw error;
-	}
-
-	return res;
-};
-
-export const resetVectorDB = async (token: string) => {
-	let error = null;
-
-	const res = await fetch(`${RAG_API_BASE_URL}/reset/db`, {
-		method: 'POST',
-		headers: {
-			Accept: 'application/json',
-			authorization: `Bearer ${token}`
-		}
-	})
-		.then(async (res) => {
-			if (!res.ok) throw await res.json();
-			return res.json();
-		})
-		.catch((err) => {
-			error = err.detail;
-			return null;
-		});
-
-	if (error) {
-		throw error;
-	}
-
-	return res;
-};
-
 export const getEmbeddingConfig = async (token: string) => {
 	let error = null;
 
@@ -578,14 +300,140 @@ export const updateRerankingConfig = async (token: string, payload: RerankingMod
 	return res;
 };
 
-export const runWebSearch = async (
+export interface SearchDocument {
+	status: boolean;
+	collection_name: string;
+	filenames: string[];
+}
+
+export const processFile = async (token: string, file_id: string) => {
+	let error = null;
+
+	const res = await fetch(`${RAG_API_BASE_URL}/process/file`, {
+		method: 'POST',
+		headers: {
+			Accept: 'application/json',
+			'Content-Type': 'application/json',
+			authorization: `Bearer ${token}`
+		},
+		body: JSON.stringify({
+			file_id: file_id
+		})
+	})
+		.then(async (res) => {
+			if (!res.ok) throw await res.json();
+			return res.json();
+		})
+		.catch((err) => {
+			error = err.detail;
+			console.log(err);
+			return null;
+		});
+
+	if (error) {
+		throw error;
+	}
+
+	return res;
+};
+
+export const processDocsDir = async (token: string) => {
+	let error = null;
+
+	const res = await fetch(`${RAG_API_BASE_URL}/process/dir`, {
+		method: 'GET',
+		headers: {
+			Accept: 'application/json',
+			authorization: `Bearer ${token}`
+		}
+	})
+		.then(async (res) => {
+			if (!res.ok) throw await res.json();
+			return res.json();
+		})
+		.catch((err) => {
+			error = err.detail;
+			return null;
+		});
+
+	if (error) {
+		throw error;
+	}
+
+	return res;
+};
+
+export const processYoutubeVideo = async (token: string, url: string) => {
+	let error = null;
+
+	const res = await fetch(`${RAG_API_BASE_URL}/process/youtube`, {
+		method: 'POST',
+		headers: {
+			Accept: 'application/json',
+			'Content-Type': 'application/json',
+			authorization: `Bearer ${token}`
+		},
+		body: JSON.stringify({
+			url: url
+		})
+	})
+		.then(async (res) => {
+			if (!res.ok) throw await res.json();
+			return res.json();
+		})
+		.catch((err) => {
+			error = err.detail;
+			console.log(err);
+			return null;
+		});
+
+	if (error) {
+		throw error;
+	}
+
+	return res;
+};
+
+export const processWeb = async (token: string, collection_name: string, url: string) => {
+	let error = null;
+
+	const res = await fetch(`${RAG_API_BASE_URL}/process/web`, {
+		method: 'POST',
+		headers: {
+			Accept: 'application/json',
+			'Content-Type': 'application/json',
+			authorization: `Bearer ${token}`
+		},
+		body: JSON.stringify({
+			url: url,
+			collection_name: collection_name
+		})
+	})
+		.then(async (res) => {
+			if (!res.ok) throw await res.json();
+			return res.json();
+		})
+		.catch((err) => {
+			error = err.detail;
+			console.log(err);
+			return null;
+		});
+
+	if (error) {
+		throw error;
+	}
+
+	return res;
+};
+
+export const processWebSearch = async (
 	token: string,
 	query: string,
 	collection_name?: string
 ): Promise<SearchDocument | null> => {
 	let error = null;
 
-	const res = await fetch(`${RAG_API_BASE_URL}/web/search`, {
+	const res = await fetch(`${RAG_API_BASE_URL}/process/web/search`, {
 		method: 'POST',
 		headers: {
 			'Content-Type': 'application/json',
@@ -613,8 +461,128 @@ export const runWebSearch = async (
 	return res;
 };
 
-export interface SearchDocument {
-	status: boolean;
-	collection_name: string;
-	filenames: string[];
-}
+export const queryDoc = async (
+	token: string,
+	collection_name: string,
+	query: string,
+	k: number | null = null
+) => {
+	let error = null;
+
+	const res = await fetch(`${RAG_API_BASE_URL}/query/doc`, {
+		method: 'POST',
+		headers: {
+			Accept: 'application/json',
+			'Content-Type': 'application/json',
+			authorization: `Bearer ${token}`
+		},
+		body: JSON.stringify({
+			collection_name: collection_name,
+			query: query,
+			k: k
+		})
+	})
+		.then(async (res) => {
+			if (!res.ok) throw await res.json();
+			return res.json();
+		})
+		.catch((err) => {
+			error = err.detail;
+			return null;
+		});
+
+	if (error) {
+		throw error;
+	}
+
+	return res;
+};
+
+export const queryCollection = async (
+	token: string,
+	collection_names: string,
+	query: string,
+	k: number | null = null
+) => {
+	let error = null;
+
+	const res = await fetch(`${RAG_API_BASE_URL}/query/collection`, {
+		method: 'POST',
+		headers: {
+			Accept: 'application/json',
+			'Content-Type': 'application/json',
+			authorization: `Bearer ${token}`
+		},
+		body: JSON.stringify({
+			collection_names: collection_names,
+			query: query,
+			k: k
+		})
+	})
+		.then(async (res) => {
+			if (!res.ok) throw await res.json();
+			return res.json();
+		})
+		.catch((err) => {
+			error = err.detail;
+			return null;
+		});
+
+	if (error) {
+		throw error;
+	}
+
+	return res;
+};
+
+export const resetUploadDir = async (token: string) => {
+	let error = null;
+
+	const res = await fetch(`${RAG_API_BASE_URL}/reset/uploads`, {
+		method: 'POST',
+		headers: {
+			Accept: 'application/json',
+			authorization: `Bearer ${token}`
+		}
+	})
+		.then(async (res) => {
+			if (!res.ok) throw await res.json();
+			return res.json();
+		})
+		.catch((err) => {
+			error = err.detail;
+			return null;
+		});
+
+	if (error) {
+		throw error;
+	}
+
+	return res;
+};
+
+export const resetVectorDB = async (token: string) => {
+	let error = null;
+
+	const res = await fetch(`${RAG_API_BASE_URL}/reset/db`, {
+		method: 'POST',
+		headers: {
+			Accept: 'application/json',
+			authorization: `Bearer ${token}`
+		}
+	})
+		.then(async (res) => {
+			if (!res.ok) throw await res.json();
+			return res.json();
+		})
+		.catch((err) => {
+			error = err.detail;
+			return null;
+		});
+
+	if (error) {
+		throw error;
+	}
+
+	return res;
+};
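With the frontend API module renamed from `rag.ts` to `retrieval.ts`, the backend routes move too (`app.mount("/retrieval/api/v1", retrieval_app)` above). The renamed search route can also be exercised outside the UI; a hedged sketch (origin and token are placeholders):

```python
import requests

BASE = "http://localhost:8080"  # placeholder Open WebUI origin
TOKEN = "sk-..."                # placeholder API token

# Formerly POST /rag/api/v1/web/search; now under /retrieval/api/v1/process/.
r = requests.post(
    f"{BASE}/retrieval/api/v1/process/web/search",
    headers={"Authorization": f"Bearer {TOKEN}"},
    json={"query": "open webui dev branch"},
)
r.raise_for_status()
print(r.json())  # SearchDocument: {"status": ..., "collection_name": ..., "filenames": [...]}
```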
@ -7,7 +7,7 @@
|
|||||||
import { deleteAllFiles, deleteFileById } from '$lib/apis/files';
|
import { deleteAllFiles, deleteFileById } from '$lib/apis/files';
|
||||||
import {
|
import {
|
||||||
getQuerySettings,
|
getQuerySettings,
|
||||||
scanDocs,
|
processDocsDir,
|
||||||
updateQuerySettings,
|
updateQuerySettings,
|
||||||
resetVectorDB,
|
resetVectorDB,
|
||||||
getEmbeddingConfig,
|
getEmbeddingConfig,
|
||||||
@ -17,7 +17,7 @@
|
|||||||
resetUploadDir,
|
resetUploadDir,
|
||||||
getRAGConfig,
|
getRAGConfig,
|
||||||
updateRAGConfig
|
updateRAGConfig
|
||||||
} from '$lib/apis/rag';
|
} from '$lib/apis/retrieval';
|
||||||
import ResetUploadDirConfirmDialog from '$lib/components/common/ConfirmDialog.svelte';
|
import ResetUploadDirConfirmDialog from '$lib/components/common/ConfirmDialog.svelte';
|
||||||
import ResetVectorDBConfirmDialog from '$lib/components/common/ConfirmDialog.svelte';
|
import ResetVectorDBConfirmDialog from '$lib/components/common/ConfirmDialog.svelte';
|
||||||
|
|
||||||
@ -63,7 +63,7 @@
|
|||||||
|
|
||||||
const scanHandler = async () => {
|
const scanHandler = async () => {
|
||||||
scanDirLoading = true;
|
scanDirLoading = true;
|
||||||
const res = await scanDocs(localStorage.token);
|
const res = await processDocsDir(localStorage.token);
|
||||||
scanDirLoading = false;
|
scanDirLoading = false;
|
||||||
|
|
||||||
if (res) {
|
if (res) {
|
||||||
|
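The renamed helper keeps the old calling convention, so callers only swap the import and the function name. A minimal sketch of the pattern; the toast messages are illustrative assumptions, with `toast` imported from svelte-sonner as elsewhere in the app:

// Hypothetical handler (illustrative): same shape as scanHandler above.
import { toast } from 'svelte-sonner';
import { processDocsDir } from '$lib/apis/retrieval';

const rescan = async () => {
	const res = await processDocsDir(localStorage.token).catch((detail) => {
		toast.error(detail); // helpers reject with the backend's `detail` string
		return null;
	});
	if (res) {
		toast.success('Document directory processed.');
	}
};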
@@ -1,5 +1,5 @@
 <script lang="ts">
-	import { getRAGConfig, updateRAGConfig } from '$lib/apis/rag';
+	import { getRAGConfig, updateRAGConfig } from '$lib/apis/retrieval';
 	import Switch from '$lib/components/common/Switch.svelte';

 	import { documents, models } from '$lib/stores';
@@ -52,7 +52,7 @@
 		updateChatById
 	} from '$lib/apis/chats';
 	import { generateOpenAIChatCompletion } from '$lib/apis/openai';
-	import { runWebSearch } from '$lib/apis/rag';
+	import { processWebSearch } from '$lib/apis/retrieval';
 	import { createOpenAITextStream } from '$lib/apis/streaming';
 	import { queryMemory } from '$lib/apis/memories';
 	import { getAndUpdateUserLocation, getUserSettings } from '$lib/apis/users';

@@ -1737,7 +1737,7 @@
 		});
 		history.messages[responseMessageId] = responseMessage;

-		const results = await runWebSearch(localStorage.token, searchQuery).catch((error) => {
+		const results = await processWebSearch(localStorage.token, searchQuery).catch((error) => {
 			console.log(error);
 			toast.error(error);

@@ -46,6 +46,9 @@
 					chatFiles.splice(fileIdx, 1);
 					chatFiles = chatFiles;
 				}}
+				on:click={() => {
+					console.log(file);
+				}}
 			/>
 		{/each}
 	</div>
@@ -17,7 +17,8 @@
 	import { blobToFile, findWordIndices } from '$lib/utils';

 	import { transcribeAudio } from '$lib/apis/audio';
-	import { processDocToVectorDB } from '$lib/apis/rag';
+
+	import { processFile } from '$lib/apis/retrieval';
 	import { uploadFile } from '$lib/apis/files';

 	import {
@@ -158,17 +159,14 @@

 	const processFileItem = async (fileItem) => {
 		try {
-			const res = await processDocToVectorDB(localStorage.token, fileItem.id);
+			const res = await processFile(localStorage.token, fileItem.id);

 			if (res) {
 				fileItem.status = 'processed';
 				fileItem.collection_name = res.collection_name;
 				files = files;
 			}
 		} catch (e) {
-			// Remove the failed doc from the files array
-			// files = files.filter((f) => f.id !== fileItem.id);
-			toast.error(e);
+			// We keep the file in the files list even if it fails to process
 			fileItem.status = 'processed';
 			files = files;
 		}
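The upload flow is now two explicit steps: upload the raw file via `$lib/apis/files`, then index it with `processFile`. A minimal sketch of that pipeline; the `uploadFile(token, file)` signature and the error handling shown here are assumptions, not part of this diff:

import { uploadFile } from '$lib/apis/files';
import { processFile } from '$lib/apis/retrieval';

// Hypothetical pipeline: upload a File, then index it for retrieval.
const uploadAndIndex = async (token: string, file: File) => {
	const uploaded = await uploadFile(token, file);
	if (!uploaded) return null;

	// processFile resolves with the collection the file was indexed into.
	const res = await processFile(token, uploaded.id).catch(() => null);
	return res ? res.collection_name : null;
};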
@@ -9,7 +9,7 @@
 	import Models from './Commands/Models.svelte';

 	import { removeLastWordFromString } from '$lib/utils';
-	import { uploadWebToVectorDB, uploadYoutubeTranscriptionToVectorDB } from '$lib/apis/rag';
+	import { processWeb, processYoutubeVideo } from '$lib/apis/retrieval';

 	export let prompt = '';
 	export let files = [];
@@ -41,7 +41,7 @@

 	try {
 		files = [...files, doc];
-		const res = await uploadWebToVectorDB(localStorage.token, '', url);
+		const res = await processWeb(localStorage.token, '', url);

 		if (res) {
 			doc.status = 'processed';
@@ -69,7 +69,7 @@

 	try {
 		files = [...files, doc];
-		const res = await uploadYoutubeTranscriptionToVectorDB(localStorage.token, url);
+		const res = await processYoutubeVideo(localStorage.token, url);

 		if (res) {
 			doc.status = 'processed';
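As the hunks above show, both helpers keep their old argument order: `processWeb(token, collection_name, url)` and `processYoutubeVideo(token, url)`. A minimal combined sketch; the wrapper function is illustrative, and the meaning of the empty collection name is my assumption:

import { processWeb, processYoutubeVideo } from '$lib/apis/retrieval';

// Hypothetical usage (illustrative): index a web page and a YouTube video.
const indexSources = async (token: string, pageUrl: string, videoUrl: string) => {
	// '' for collection_name presumably lets the backend derive one.
	const web = await processWeb(token, '', pageUrl).catch(() => null);
	const video = await processYoutubeVideo(token, videoUrl).catch(() => null);
	return { web, video };
};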
@@ -8,8 +8,6 @@
 	export let colorClassName = 'bg-white dark:bg-gray-800';
 	export let url: string | null = null;

-	export let clickHandler: Function | null = null;
-
 	export let dismissible = false;
 	export let status = 'processed';

@@ -17,7 +15,7 @@
 	export let type: string;
 	export let size: number;

-	function formatSize(size) {
+	const formatSize = (size) => {
 		if (size == null) return 'Unknown size';
 		if (typeof size !== 'number' || size < 0) return 'Invalid size';
 		if (size === 0) return '0 B';
@@ -29,7 +27,7 @@
 			unitIndex++;
 		}
 		return `${size.toFixed(1)} ${units[unitIndex]}`;
-	}
+	};
 </script>

 <div class="relative group">
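For reference, a few sample outputs of `formatSize`, assuming the preceding `units` array and divisor (not shown in this diff) follow the usual convention:

// Assuming: const units = ['B', 'KB', 'MB', ...]; and division by 1024.
formatSize(0); // '0 B'
formatSize(1536); // '1.5 KB'
formatSize(-1); // 'Invalid size'
formatSize(undefined); // 'Unknown size'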
@@ -37,17 +35,7 @@
 		class="h-14 {className} flex items-center space-x-3 {colorClassName} rounded-xl border border-gray-100 dark:border-gray-800 text-left"
 		type="button"
 		on:click={async () => {
-			if (clickHandler === null) {
-				if (url) {
-					if (type === 'file') {
-						window.open(`${url}/content`, '_blank').focus();
-					} else {
-						window.open(`${url}`, '_blank').focus();
-					}
-				}
-			} else {
-				clickHandler();
-			}
+			dispatch('click');
 		}}
 	>
 		<div class="p-4 py-[1.1rem] bg-red-400 text-white rounded-l-xl">
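With the URL-opening logic removed, the component now just dispatches a `click` event (via Svelte's `createEventDispatcher`) and leaves the behavior to its parent. A minimal sketch of a parent reacting to it; the parent markup and the `file` variable are illustrative assumptions:

<!-- Hypothetical parent (illustrative): the parent decides what a click means. -->
<FileItem
	type="file"
	size={file.size}
	on:click={() => {
		// e.g. open a preview, navigate, or log the selection.
		console.log('file clicked');
	}}
/>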
@@ -3,16 +3,13 @@
 	import dayjs from 'dayjs';
 	import { onMount, getContext } from 'svelte';

-	import { createNewDoc, getDocs, tagDocByName, updateDocByName } from '$lib/apis/documents';
+	import { getDocs } from '$lib/apis/documents';
 	import Modal from '../common/Modal.svelte';
 	import { documents } from '$lib/stores';
-	import TagInput from '../common/Tags/TagInput.svelte';
-	import Tags from '../common/Tags.svelte';
-	import { addTagById } from '$lib/apis/chats';
-	import { uploadDocToVectorDB } from '$lib/apis/rag';
-	import { transformFileName } from '$lib/utils';
 	import { SUPPORTED_FILE_EXTENSIONS, SUPPORTED_FILE_TYPE } from '$lib/constants';

+	import Tags from '../common/Tags.svelte';
+
 	const i18n = getContext('i18n');

 	export let show = false;
@@ -8,7 +8,7 @@
 	import { createNewDoc, deleteDocByName, getDocs } from '$lib/apis/documents';

 	import { SUPPORTED_FILE_TYPE, SUPPORTED_FILE_EXTENSIONS } from '$lib/constants';
-	import { processDocToVectorDB, uploadDocToVectorDB } from '$lib/apis/rag';
+	import { processFile } from '$lib/apis/retrieval';
 	import { blobToFile, transformFileName } from '$lib/utils';

 	import Checkbox from '$lib/components/common/Checkbox.svelte';
@@ -74,7 +74,7 @@
 		return null;
 	});

-	const res = await processDocToVectorDB(localStorage.token, uploadedFile.id).catch((error) => {
+	const res = await processFile(localStorage.token, uploadedFile.id).catch((error) => {
 		toast.error(error);
 		return null;
 	});
@@ -11,7 +11,7 @@ export const OLLAMA_API_BASE_URL = `${WEBUI_BASE_URL}/ollama`;
 export const OPENAI_API_BASE_URL = `${WEBUI_BASE_URL}/openai`;
 export const AUDIO_API_BASE_URL = `${WEBUI_BASE_URL}/audio/api/v1`;
 export const IMAGES_API_BASE_URL = `${WEBUI_BASE_URL}/images/api/v1`;
-export const RAG_API_BASE_URL = `${WEBUI_BASE_URL}/rag/api/v1`;
+export const RAG_API_BASE_URL = `${WEBUI_BASE_URL}/retrieval/api/v1`;

 export const WEBUI_VERSION = APP_VERSION;
 export const WEBUI_BUILD_HASH = APP_BUILD_HASH;
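Only the path segment changes; the exported constant keeps its old name, so existing imports of `RAG_API_BASE_URL` continue to work. For example, the `queryCollection` helper above now resolves to:

// Endpoint composition after this change (illustrative):
const endpoint = `${RAG_API_BASE_URL}/query/collection`;
// -> `${WEBUI_BASE_URL}/retrieval/api/v1/query/collection`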
@@ -9,7 +9,7 @@
     "{{user}}'s Chats": "Els xats de {{user}}",
     "{{webUIName}} Backend Required": "El Backend de {{webUIName}} és necessari",
     "*Prompt node ID(s) are required for image generation": "*Els identificadors de nodes d'indicacions són necessaris per a la generació d'imatges",
-    "A new version (v{{LATEST_VERSION}}) is now available.": "",
+    "A new version (v{{LATEST_VERSION}}) is now available.": "Hi ha una nova versió disponible (v{{LATEST_VERSION}}).",
     "A task model is used when performing tasks such as generating titles for chats and web search queries": "Un model de tasca s'utilitza quan es realitzen tasques com ara generar títols per a xats i consultes de cerca per a la web",
     "a user": "un usuari",
     "About": "Sobre",
@@ -466,7 +466,7 @@
     "Oops! Looks like the URL is invalid. Please double-check and try again.": "Ui! Sembla que l'URL no és vàlida. Si us plau, revisa-la i torna-ho a provar.",
     "Oops! There was an error in the previous response. Please try again or contact admin.": "Ui! Hi ha hagut un error en la resposta anterior. Torna a provar-ho o contacta amb un administrador",
     "Oops! You're using an unsupported method (frontend only). Please serve the WebUI from the backend.": "Ui! Estàs utilitzant un mètode no suportat (només frontend). Si us plau, serveix la WebUI des del backend.",
-    "Open file": "",
+    "Open file": "Obrir arxiu",
     "Open new chat": "Obre un xat nou",
     "Open WebUI version (v{{OPEN_WEBUI_VERSION}}) is lower than required version (v{{REQUIRED_VERSION}})": "La versió d'Open WebUI (v{{OPEN_WEBUI_VERSION}}) és inferior a la versió requerida (v{{REQUIRED_VERSION}})",
     "OpenAI": "OpenAI",
@@ -478,7 +478,7 @@
     "Other": "Altres",
     "Output format": "Format de sortida",
     "Overview": "Vista general",
-    "page": "",
+    "page": "pàgina",
     "Password": "Contrasenya",
     "PDF document (.pdf)": "Document PDF (.pdf)",
     "PDF Extract Images (OCR)": "Extreu imatges del PDF (OCR)",
@@ -497,7 +497,7 @@
     "Plain text (.txt)": "Text pla (.txt)",
     "Playground": "Zona de jocs",
     "Please carefully review the following warnings:": "Si us plau, revisa els següents avisos amb cura:",
-    "Please select a reason": "",
+    "Please select a reason": "Si us plau, selecciona una raó",
     "Positive attitude": "Actitud positiva",
     "Previous 30 days": "30 dies anteriors",
     "Previous 7 days": "7 dies anteriors",
@@ -704,7 +704,7 @@
     "Unpin": "Alliberar",
     "Update": "Actualitzar",
     "Update and Copy Link": "Actualitzar i copiar l'enllaç",
-    "Update for the latest features and improvements.": "",
+    "Update for the latest features and improvements.": "Actualitza per a les darreres característiques i millores.",
     "Update password": "Actualitzar la contrasenya",
     "Updated at": "Actualitzat",
     "Upload": "Pujar",
@@ -1,4 +1,4 @@
-import { getRAGTemplate } from '$lib/apis/rag';
+import { getRAGTemplate } from '$lib/apis/retrieval';

 export const RAGTemplate = async (token: string, context: string, query: string) => {
 	let template = await getRAGTemplate(token).catch(() => {
@@ -206,10 +206,10 @@
 			const now = new Date();

 			if (now - dismissedUpdateToast > 24 * 60 * 60 * 1000) {
-				await checkForVersionUpdates();
+				checkForVersionUpdates();
 			}
 		} else {
-			await checkForVersionUpdates();
+			checkForVersionUpdates();
 		}
 	}
 	await tick();
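Dropping the `await` turns the update check into a fire-and-forget call, so layout initialization no longer blocks on the network round-trip; any rejection must therefore be handled inside `checkForVersionUpdates` itself. A minimal sketch of that pattern; the function body and endpoint shown are illustrative assumptions, not taken from this diff:

// Illustrative fire-and-forget pattern; the real checkForVersionUpdates
// body is not shown in this commit.
const checkForVersionUpdates = async () => {
	try {
		const res = await fetch('/api/version/updates'); // hypothetical endpoint
		console.log(await res.json());
	} catch (e) {
		console.error(e); // handle errors here: nobody awaits this promise
	}
};

checkForVersionUpdates(); // not awaited: UI setup continues immediately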