diff --git a/Dockerfile b/Dockerfile index 5102afd28..d7de72f01 100644 --- a/Dockerfile +++ b/Dockerfile @@ -26,6 +26,9 @@ ARG BUILD_HASH WORKDIR /app +# to store git revision in build +RUN apk add --no-cache git + COPY package.json package-lock.json ./ RUN npm ci diff --git a/backend/open_webui/config.py b/backend/open_webui/config.py index 38bd709f1..b1955b056 100644 --- a/backend/open_webui/config.py +++ b/backend/open_webui/config.py @@ -989,6 +989,26 @@ DEFAULT_USER_ROLE = PersistentConfig( os.getenv("DEFAULT_USER_ROLE", "pending"), ) +PENDING_USER_OVERLAY_TITLE = PersistentConfig( + "PENDING_USER_OVERLAY_TITLE", + "ui.pending_user_overlay_title", + os.environ.get("PENDING_USER_OVERLAY_TITLE", ""), +) + +PENDING_USER_OVERLAY_CONTENT = PersistentConfig( + "PENDING_USER_OVERLAY_CONTENT", + "ui.pending_user_overlay_content", + os.environ.get("PENDING_USER_OVERLAY_CONTENT", ""), +) + + +RESPONSE_WATERMARK = PersistentConfig( + "RESPONSE_WATERMARK", + "ui.watermark", + os.environ.get("RESPONSE_WATERMARK", ""), +) + + USER_PERMISSIONS_WORKSPACE_MODELS_ACCESS = ( os.environ.get("USER_PERMISSIONS_WORKSPACE_MODELS_ACCESS", "False").lower() == "true" @@ -1731,6 +1751,9 @@ QDRANT_API_KEY = os.environ.get("QDRANT_API_KEY", None) QDRANT_ON_DISK = os.environ.get("QDRANT_ON_DISK", "false").lower() == "true" QDRANT_PREFER_GRPC = os.environ.get("QDRANT_PREFER_GRPC", "False").lower() == "true" QDRANT_GRPC_PORT = int(os.environ.get("QDRANT_GRPC_PORT", "6334")) +ENABLE_QDRANT_MULTITENANCY_MODE = ( + os.environ.get("ENABLE_QDRANT_MULTITENANCY_MODE", "false").lower() == "true" +) # OpenSearch OPENSEARCH_URI = os.environ.get("OPENSEARCH_URI", "https://localhost:9200") @@ -1825,6 +1848,18 @@ CONTENT_EXTRACTION_ENGINE = PersistentConfig( os.environ.get("CONTENT_EXTRACTION_ENGINE", "").lower(), ) +EXTERNAL_DOCUMENT_LOADER_URL = PersistentConfig( + "EXTERNAL_DOCUMENT_LOADER_URL", + "rag.external_document_loader_url", + os.environ.get("EXTERNAL_DOCUMENT_LOADER_URL", ""), +) + +EXTERNAL_DOCUMENT_LOADER_API_KEY = PersistentConfig( + "EXTERNAL_DOCUMENT_LOADER_API_KEY", + "rag.external_document_loader_api_key", + os.environ.get("EXTERNAL_DOCUMENT_LOADER_API_KEY", ""), +) + TIKA_SERVER_URL = PersistentConfig( "TIKA_SERVER_URL", "rag.tika_server_url", @@ -1849,6 +1884,12 @@ DOCLING_OCR_LANG = PersistentConfig( os.getenv("DOCLING_OCR_LANG", "eng,fra,deu,spa"), ) +DOCLING_DO_PICTURE_DESCRIPTION = PersistentConfig( + "DOCLING_DO_PICTURE_DESCRIPTION", + "rag.docling_do_picture_description", + os.getenv("DOCLING_DO_PICTURE_DESCRIPTION", "False").lower() == "true", +) + DOCUMENT_INTELLIGENCE_ENDPOINT = PersistentConfig( "DOCUMENT_INTELLIGENCE_ENDPOINT", "rag.document_intelligence_endpoint", @@ -1920,6 +1961,16 @@ RAG_FILE_MAX_SIZE = PersistentConfig( ), ) +RAG_ALLOWED_FILE_EXTENSIONS = PersistentConfig( + "RAG_ALLOWED_FILE_EXTENSIONS", + "rag.file.allowed_extensions", + [ + ext.strip() + for ext in os.environ.get("RAG_ALLOWED_FILE_EXTENSIONS", "").split(",") + if ext.strip() + ], +) + RAG_EMBEDDING_ENGINE = PersistentConfig( "RAG_EMBEDDING_ENGINE", "rag.embedding_engine", @@ -2839,6 +2890,12 @@ LDAP_CA_CERT_FILE = PersistentConfig( os.environ.get("LDAP_CA_CERT_FILE", ""), ) +LDAP_VALIDATE_CERT = PersistentConfig( + "LDAP_VALIDATE_CERT", + "ldap.server.validate_cert", + os.environ.get("LDAP_VALIDATE_CERT", "True").lower() == "true", +) + LDAP_CIPHERS = PersistentConfig( "LDAP_CIPHERS", "ldap.server.ciphers", os.environ.get("LDAP_CIPHERS", "ALL") ) diff --git a/backend/open_webui/main.py b/backend/open_webui/main.py index 
e5fdace6d..a5aee4bb8 100644 --- a/backend/open_webui/main.py +++ b/backend/open_webui/main.py @@ -197,6 +197,7 @@ from open_webui.config import ( RAG_EMBEDDING_ENGINE, RAG_EMBEDDING_BATCH_SIZE, RAG_RELEVANCE_THRESHOLD, + RAG_ALLOWED_FILE_EXTENSIONS, RAG_FILE_MAX_COUNT, RAG_FILE_MAX_SIZE, RAG_OPENAI_API_BASE_URL, @@ -206,10 +207,13 @@ from open_webui.config import ( CHUNK_OVERLAP, CHUNK_SIZE, CONTENT_EXTRACTION_ENGINE, + EXTERNAL_DOCUMENT_LOADER_URL, + EXTERNAL_DOCUMENT_LOADER_API_KEY, TIKA_SERVER_URL, DOCLING_SERVER_URL, DOCLING_OCR_ENGINE, DOCLING_OCR_LANG, + DOCLING_DO_PICTURE_DESCRIPTION, DOCUMENT_INTELLIGENCE_ENDPOINT, DOCUMENT_INTELLIGENCE_KEY, MISTRAL_OCR_API_KEY, @@ -291,6 +295,8 @@ from open_webui.config import ( ENABLE_EVALUATION_ARENA_MODELS, USER_PERMISSIONS, DEFAULT_USER_ROLE, + PENDING_USER_OVERLAY_CONTENT, + PENDING_USER_OVERLAY_TITLE, DEFAULT_PROMPT_SUGGESTIONS, DEFAULT_MODELS, DEFAULT_ARENA_MODEL, @@ -317,6 +323,7 @@ from open_webui.config import ( LDAP_APP_PASSWORD, LDAP_USE_TLS, LDAP_CA_CERT_FILE, + LDAP_VALIDATE_CERT, LDAP_CIPHERS, # Misc ENV, @@ -327,6 +334,7 @@ from open_webui.config import ( DEFAULT_LOCALE, OAUTH_PROVIDERS, WEBUI_URL, + RESPONSE_WATERMARK, # Admin ENABLE_ADMIN_CHAT_ACCESS, ENABLE_ADMIN_EXPORT, @@ -373,6 +381,7 @@ from open_webui.env import ( OFFLINE_MODE, ENABLE_OTEL, EXTERNAL_PWA_MANIFEST_URL, + AIOHTTP_CLIENT_SESSION_SSL, ) @@ -573,6 +582,11 @@ app.state.config.DEFAULT_MODELS = DEFAULT_MODELS app.state.config.DEFAULT_PROMPT_SUGGESTIONS = DEFAULT_PROMPT_SUGGESTIONS app.state.config.DEFAULT_USER_ROLE = DEFAULT_USER_ROLE +app.state.config.PENDING_USER_OVERLAY_CONTENT = PENDING_USER_OVERLAY_CONTENT +app.state.config.PENDING_USER_OVERLAY_TITLE = PENDING_USER_OVERLAY_TITLE + +app.state.config.RESPONSE_WATERMARK = RESPONSE_WATERMARK + app.state.config.USER_PERMISSIONS = USER_PERMISSIONS app.state.config.WEBHOOK_URL = WEBHOOK_URL app.state.config.BANNERS = WEBUI_BANNERS @@ -609,6 +623,7 @@ app.state.config.LDAP_SEARCH_BASE = LDAP_SEARCH_BASE app.state.config.LDAP_SEARCH_FILTERS = LDAP_SEARCH_FILTERS app.state.config.LDAP_USE_TLS = LDAP_USE_TLS app.state.config.LDAP_CA_CERT_FILE = LDAP_CA_CERT_FILE +app.state.config.LDAP_VALIDATE_CERT = LDAP_VALIDATE_CERT app.state.config.LDAP_CIPHERS = LDAP_CIPHERS @@ -631,6 +646,7 @@ app.state.FUNCTIONS = {} app.state.config.TOP_K = RAG_TOP_K app.state.config.TOP_K_RERANKER = RAG_TOP_K_RERANKER app.state.config.RELEVANCE_THRESHOLD = RAG_RELEVANCE_THRESHOLD +app.state.config.ALLOWED_FILE_EXTENSIONS = RAG_ALLOWED_FILE_EXTENSIONS app.state.config.FILE_MAX_SIZE = RAG_FILE_MAX_SIZE app.state.config.FILE_MAX_COUNT = RAG_FILE_MAX_COUNT @@ -641,10 +657,13 @@ app.state.config.ENABLE_RAG_HYBRID_SEARCH = ENABLE_RAG_HYBRID_SEARCH app.state.config.ENABLE_WEB_LOADER_SSL_VERIFICATION = ENABLE_WEB_LOADER_SSL_VERIFICATION app.state.config.CONTENT_EXTRACTION_ENGINE = CONTENT_EXTRACTION_ENGINE +app.state.config.EXTERNAL_DOCUMENT_LOADER_URL = EXTERNAL_DOCUMENT_LOADER_URL +app.state.config.EXTERNAL_DOCUMENT_LOADER_API_KEY = EXTERNAL_DOCUMENT_LOADER_API_KEY app.state.config.TIKA_SERVER_URL = TIKA_SERVER_URL app.state.config.DOCLING_SERVER_URL = DOCLING_SERVER_URL app.state.config.DOCLING_OCR_ENGINE = DOCLING_OCR_ENGINE app.state.config.DOCLING_OCR_LANG = DOCLING_OCR_LANG +app.state.config.DOCLING_DO_PICTURE_DESCRIPTION = DOCLING_DO_PICTURE_DESCRIPTION app.state.config.DOCUMENT_INTELLIGENCE_ENDPOINT = DOCUMENT_INTELLIGENCE_ENDPOINT app.state.config.DOCUMENT_INTELLIGENCE_KEY = DOCUMENT_INTELLIGENCE_KEY app.state.config.MISTRAL_OCR_API_KEY = 
MISTRAL_OCR_API_KEY @@ -1167,11 +1186,12 @@ async def chat_completion( "chat_id": form_data.pop("chat_id", None), "message_id": form_data.pop("id", None), "session_id": form_data.pop("session_id", None), + "filter_ids": form_data.pop("filter_ids", []), "tool_ids": form_data.get("tool_ids", None), "tool_servers": form_data.pop("tool_servers", None), "files": form_data.get("files", None), - "features": form_data.get("features", None), - "variables": form_data.get("variables", None), + "features": form_data.get("features", {}), + "variables": form_data.get("variables", {}), "model": model, "direct": model_item.get("direct", False), **( @@ -1395,6 +1415,11 @@ async def get_app_config(request: Request): "sharepoint_url": ONEDRIVE_SHAREPOINT_URL.value, "sharepoint_tenant_id": ONEDRIVE_SHAREPOINT_TENANT_ID.value, }, + "ui": { + "pending_user_overlay_title": app.state.config.PENDING_USER_OVERLAY_TITLE, + "pending_user_overlay_content": app.state.config.PENDING_USER_OVERLAY_CONTENT, + "response_watermark": app.state.config.RESPONSE_WATERMARK, + }, "license_metadata": app.state.LICENSE_METADATA, **( { @@ -1446,7 +1471,8 @@ async def get_app_latest_release_version(user=Depends(get_verified_user)): timeout = aiohttp.ClientTimeout(total=1) async with aiohttp.ClientSession(timeout=timeout, trust_env=True) as session: async with session.get( - "https://api.github.com/repos/open-webui/open-webui/releases/latest" + "https://api.github.com/repos/open-webui/open-webui/releases/latest", + ssl=AIOHTTP_CLIENT_SESSION_SSL, ) as response: response.raise_for_status() data = await response.json() diff --git a/backend/open_webui/retrieval/loaders/external_document.py b/backend/open_webui/retrieval/loaders/external_document.py new file mode 100644 index 000000000..6119da379 --- /dev/null +++ b/backend/open_webui/retrieval/loaders/external_document.py @@ -0,0 +1,58 @@ +import requests +import logging +from typing import Iterator, List, Union + +from langchain_core.document_loaders import BaseLoader +from langchain_core.documents import Document +from open_webui.env import SRC_LOG_LEVELS + +log = logging.getLogger(__name__) +log.setLevel(SRC_LOG_LEVELS["RAG"]) + + +class ExternalDocumentLoader(BaseLoader): + def __init__( + self, + file_path, + url: str, + api_key: str, + mime_type=None, + **kwargs, + ) -> None: + self.url = url + self.api_key = api_key + + self.file_path = file_path + self.mime_type = mime_type + + def load(self) -> list[Document]: + with open(self.file_path, "rb") as f: + data = f.read() + + headers = {} + if self.mime_type is not None: + headers["Content-Type"] = self.mime_type + + if self.api_key is not None: + headers["Authorization"] = f"Bearer {self.api_key}" + + url = self.url + if url.endswith("/"): + url = url[:-1] + + r = requests.put(f"{url}/process", data=data, headers=headers) + + if r.ok: + res = r.json() + + if res: + return [ + Document( + page_content=res.get("page_content"), + metadata=res.get("metadata"), + ) + ] + else: + raise Exception("Error loading document: No content returned") + else: + raise Exception(f"Error loading document: {r.status_code} {r.text}") diff --git a/backend/open_webui/retrieval/loaders/external.py b/backend/open_webui/retrieval/loaders/external_web.py similarity index 95% rename from backend/open_webui/retrieval/loaders/external.py rename to backend/open_webui/retrieval/loaders/external_web.py index 642cfd3a5..68ed66162 100644 --- a/backend/open_webui/retrieval/loaders/external.py +++ b/backend/open_webui/retrieval/loaders/external_web.py @@ -10,7 +10,7 @@ 
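The new ExternalDocumentLoader above defines a simple contract: the raw file bytes are PUT to `{EXTERNAL_DOCUMENT_LOADER_URL}/process` with optional Content-Type and Bearer-token headers, and a JSON object with `page_content` and `metadata` is expected back. A minimal sketch of a service that would satisfy that contract (the FastAPI app and its placeholder "extraction" are illustrative only, not part of this change):

```python
# Illustrative sketch of a service compatible with ExternalDocumentLoader:
# it accepts PUT /process with the raw file bytes and returns the JSON shape
# the loader reads ("page_content" and "metadata").
from fastapi import FastAPI, Request

app = FastAPI()


@app.put("/process")
async def process(request: Request):
    data = await request.body()                      # raw file bytes sent by the loader
    mime_type = request.headers.get("content-type")  # forwarded when known
    return {
        "page_content": data.decode("utf-8", errors="ignore"),  # placeholder extraction
        "metadata": {"mime_type": mime_type, "size": len(data)},
    }
```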
log = logging.getLogger(__name__) log.setLevel(SRC_LOG_LEVELS["RAG"]) -class ExternalLoader(BaseLoader): +class ExternalWebLoader(BaseLoader): def __init__( self, web_paths: Union[str, List[str]], @@ -32,7 +32,7 @@ class ExternalLoader(BaseLoader): response = requests.post( self.external_url, headers={ - "User-Agent": "Open WebUI (https://github.com/open-webui/open-webui) RAG Bot", + "User-Agent": "Open WebUI (https://github.com/open-webui/open-webui) External Web Loader", "Authorization": f"Bearer {self.external_api_key}", }, json={ diff --git a/backend/open_webui/retrieval/loaders/main.py b/backend/open_webui/retrieval/loaders/main.py index 8e7b5a3da..c5f0b4e5e 100644 --- a/backend/open_webui/retrieval/loaders/main.py +++ b/backend/open_webui/retrieval/loaders/main.py @@ -21,6 +21,8 @@ from langchain_community.document_loaders import ( ) from langchain_core.documents import Document + +from open_webui.retrieval.loaders.external_document import ExternalDocumentLoader from open_webui.retrieval.loaders.mistral import MistralLoader from open_webui.env import SRC_LOG_LEVELS, GLOBAL_LOG_LEVEL @@ -126,14 +128,12 @@ class TikaLoader: class DoclingLoader: - def __init__( - self, url, file_path=None, mime_type=None, ocr_engine=None, ocr_lang=None - ): + def __init__(self, url, file_path=None, mime_type=None, params=None): self.url = url.rstrip("/") self.file_path = file_path self.mime_type = mime_type - self.ocr_engine = ocr_engine - self.ocr_lang = ocr_lang + + self.params = params or {} def load(self) -> list[Document]: with open(self.file_path, "rb") as f: @@ -150,11 +150,19 @@ class DoclingLoader: "table_mode": "accurate", } - if self.ocr_engine and self.ocr_lang: - params["ocr_engine"] = self.ocr_engine - params["ocr_lang"] = [ - lang.strip() for lang in self.ocr_lang.split(",") if lang.strip() - ] + if self.params: + if self.params.get("do_picture_classification"): + params["do_picture_classification"] = self.params.get( + "do_picture_classification" + ) + + if self.params.get("ocr_engine") and self.params.get("ocr_lang"): + params["ocr_engine"] = self.params.get("ocr_engine") + params["ocr_lang"] = [ + lang.strip() + for lang in self.params.get("ocr_lang").split(",") + if lang.strip() + ] endpoint = f"{self.url}/v1alpha/convert/file" r = requests.post(endpoint, files=files, data=params) @@ -207,6 +215,17 @@ class Loader: def _get_loader(self, filename: str, file_content_type: str, file_path: str): file_ext = filename.split(".")[-1].lower() + if ( + self.engine == "external" + and self.kwargs.get("EXTERNAL_DOCUMENT_LOADER_URL") + and self.kwargs.get("EXTERNAL_DOCUMENT_LOADER_API_KEY") + ): + loader = ExternalDocumentLoader( + file_path=file_path, + url=self.kwargs.get("EXTERNAL_DOCUMENT_LOADER_URL"), + api_key=self.kwargs.get("EXTERNAL_DOCUMENT_LOADER_API_KEY"), + mime_type=file_content_type, + ) if self.engine == "tika" and self.kwargs.get("TIKA_SERVER_URL"): if self._is_text_file(file_ext, file_content_type): loader = TextLoader(file_path, autodetect_encoding=True) @@ -225,8 +244,13 @@ class Loader: url=self.kwargs.get("DOCLING_SERVER_URL"), file_path=file_path, mime_type=file_content_type, - ocr_engine=self.kwargs.get("DOCLING_OCR_ENGINE"), - ocr_lang=self.kwargs.get("DOCLING_OCR_LANG"), + params={ + "ocr_engine": self.kwargs.get("DOCLING_OCR_ENGINE"), + "ocr_lang": self.kwargs.get("DOCLING_OCR_LANG"), + "do_picture_classification": self.kwargs.get( + "DOCLING_DO_PICTURE_DESCRIPTION" + ), + }, ) elif ( self.engine == "document_intelligence" @@ -258,6 +282,15 @@ class Loader: loader = 
MistralLoader( api_key=self.kwargs.get("MISTRAL_OCR_API_KEY"), file_path=file_path ) + elif ( + self.engine == "external" + and self.kwargs.get("MISTRAL_OCR_API_KEY") != "" + and file_ext + in ["pdf"] # Mistral OCR currently only supports PDF and images + ): + loader = MistralLoader( + api_key=self.kwargs.get("MISTRAL_OCR_API_KEY"), file_path=file_path + ) else: if file_ext == "pdf": loader = PyPDFLoader( diff --git a/backend/open_webui/retrieval/utils.py b/backend/open_webui/retrieval/utils.py index 2df6a0ab5..a132d7201 100644 --- a/backend/open_webui/retrieval/utils.py +++ b/backend/open_webui/retrieval/utils.py @@ -12,7 +12,7 @@ from langchain_community.retrievers import BM25Retriever from langchain_core.documents import Document from open_webui.config import VECTOR_DB -from open_webui.retrieval.vector.connector import VECTOR_DB_CLIENT +from open_webui.retrieval.vector.factory import VECTOR_DB_CLIENT from open_webui.models.users import UserModel from open_webui.models.files import Files diff --git a/backend/open_webui/retrieval/vector/connector.py b/backend/open_webui/retrieval/vector/connector.py deleted file mode 100644 index 198e6f176..000000000 --- a/backend/open_webui/retrieval/vector/connector.py +++ /dev/null @@ -1,30 +0,0 @@ -from open_webui.config import VECTOR_DB - -if VECTOR_DB == "milvus": - from open_webui.retrieval.vector.dbs.milvus import MilvusClient - - VECTOR_DB_CLIENT = MilvusClient() -elif VECTOR_DB == "qdrant": - from open_webui.retrieval.vector.dbs.qdrant import QdrantClient - - VECTOR_DB_CLIENT = QdrantClient() -elif VECTOR_DB == "opensearch": - from open_webui.retrieval.vector.dbs.opensearch import OpenSearchClient - - VECTOR_DB_CLIENT = OpenSearchClient() -elif VECTOR_DB == "pgvector": - from open_webui.retrieval.vector.dbs.pgvector import PgvectorClient - - VECTOR_DB_CLIENT = PgvectorClient() -elif VECTOR_DB == "elasticsearch": - from open_webui.retrieval.vector.dbs.elasticsearch import ElasticsearchClient - - VECTOR_DB_CLIENT = ElasticsearchClient() -elif VECTOR_DB == "pinecone": - from open_webui.retrieval.vector.dbs.pinecone import PineconeClient - - VECTOR_DB_CLIENT = PineconeClient() -else: - from open_webui.retrieval.vector.dbs.chroma import ChromaClient - - VECTOR_DB_CLIENT = ChromaClient() diff --git a/backend/open_webui/retrieval/vector/dbs/qdrant_multitenancy.py b/backend/open_webui/retrieval/vector/dbs/qdrant_multitenancy.py new file mode 100644 index 000000000..e83c437ef --- /dev/null +++ b/backend/open_webui/retrieval/vector/dbs/qdrant_multitenancy.py @@ -0,0 +1,712 @@ +import logging +from typing import Optional, Tuple +from urllib.parse import urlparse + +import grpc +from open_webui.config import ( + QDRANT_API_KEY, + QDRANT_GRPC_PORT, + QDRANT_ON_DISK, + QDRANT_PREFER_GRPC, + QDRANT_URI, +) +from open_webui.env import SRC_LOG_LEVELS +from open_webui.retrieval.vector.main import ( + GetResult, + SearchResult, + VectorDBBase, + VectorItem, +) +from qdrant_client import QdrantClient as Qclient +from qdrant_client.http.exceptions import UnexpectedResponse +from qdrant_client.http.models import PointStruct +from qdrant_client.models import models + +NO_LIMIT = 999999999 + +log = logging.getLogger(__name__) +log.setLevel(SRC_LOG_LEVELS["RAG"]) + + +class QdrantClient(VectorDBBase): + def __init__(self): + self.collection_prefix = "open-webui" + self.QDRANT_URI = QDRANT_URI + self.QDRANT_API_KEY = QDRANT_API_KEY + self.QDRANT_ON_DISK = QDRANT_ON_DISK + self.PREFER_GRPC = QDRANT_PREFER_GRPC + self.GRPC_PORT = QDRANT_GRPC_PORT + + if not 
self.QDRANT_URI: + self.client = None + return + + # Unified handling for either scheme + parsed = urlparse(self.QDRANT_URI) + host = parsed.hostname or self.QDRANT_URI + http_port = parsed.port or 6333 # default REST port + + if self.PREFER_GRPC: + self.client = Qclient( + host=host, + port=http_port, + grpc_port=self.GRPC_PORT, + prefer_grpc=self.PREFER_GRPC, + api_key=self.QDRANT_API_KEY, + ) + else: + self.client = Qclient(url=self.QDRANT_URI, api_key=self.QDRANT_API_KEY) + + # Main collection types for multi-tenancy + self.MEMORY_COLLECTION = f"{self.collection_prefix}_memories" + self.KNOWLEDGE_COLLECTION = f"{self.collection_prefix}_knowledge" + self.FILE_COLLECTION = f"{self.collection_prefix}_files" + self.WEB_SEARCH_COLLECTION = f"{self.collection_prefix}_web-search" + self.HASH_BASED_COLLECTION = f"{self.collection_prefix}_hash-based" + + def _result_to_get_result(self, points) -> GetResult: + ids = [] + documents = [] + metadatas = [] + + for point in points: + payload = point.payload + ids.append(point.id) + documents.append(payload["text"]) + metadatas.append(payload["metadata"]) + + return GetResult( + **{ + "ids": [ids], + "documents": [documents], + "metadatas": [metadatas], + } + ) + + def _get_collection_and_tenant_id(self, collection_name: str) -> Tuple[str, str]: + """ + Maps the traditional collection name to multi-tenant collection and tenant ID. + + Returns: + tuple: (collection_name, tenant_id) + """ + # Check for user memory collections + tenant_id = collection_name + + if collection_name.startswith("user-memory-"): + return self.MEMORY_COLLECTION, tenant_id + + # Check for file collections + elif collection_name.startswith("file-"): + return self.FILE_COLLECTION, tenant_id + + # Check for web search collections + elif collection_name.startswith("web-search-"): + return self.WEB_SEARCH_COLLECTION, tenant_id + + # Handle hash-based collections (YouTube and web URLs) + elif len(collection_name) == 63 and all( + c in "0123456789abcdef" for c in collection_name + ): + return self.HASH_BASED_COLLECTION, tenant_id + + else: + return self.KNOWLEDGE_COLLECTION, tenant_id + + def _extract_error_message(self, exception): + """ + Extract error message from either HTTP or gRPC exceptions + + Returns: + tuple: (status_code, error_message) + """ + # Check if it's an HTTP exception + if isinstance(exception, UnexpectedResponse): + try: + error_data = exception.structured() + error_msg = error_data.get("status", {}).get("error", "") + return exception.status_code, error_msg + except Exception as inner_e: + log.error(f"Failed to parse HTTP error: {inner_e}") + return exception.status_code, str(exception) + + # Check if it's a gRPC exception + elif isinstance(exception, grpc.RpcError): + # Extract status code from gRPC error + status_code = None + if hasattr(exception, "code") and callable(exception.code): + status_code = exception.code().value[0] + + # Extract error message + error_msg = str(exception) + if "details =" in error_msg: + # Parse the details line which contains the actual error message + try: + details_line = [ + line.strip() + for line in error_msg.split("\n") + if "details =" in line + ][0] + error_msg = details_line.split("details =")[1].strip(' "') + except (IndexError, AttributeError): + # Fall back to full message if parsing fails + pass + + return status_code, error_msg + + # For any other type of exception + return None, str(exception) + + def _is_collection_not_found_error(self, exception): + """ + Check if the exception is due to collection not found, 
supporting both HTTP and gRPC + """ + status_code, error_msg = self._extract_error_message(exception) + + # HTTP error (404) + if ( + status_code == 404 + and "Collection" in error_msg + and "doesn't exist" in error_msg + ): + return True + + # gRPC error (NOT_FOUND status) + if ( + isinstance(exception, grpc.RpcError) + and exception.code() == grpc.StatusCode.NOT_FOUND + ): + return True + + return False + + def _is_dimension_mismatch_error(self, exception): + """ + Check if the exception is due to dimension mismatch, supporting both HTTP and gRPC + """ + status_code, error_msg = self._extract_error_message(exception) + + # Common patterns in both HTTP and gRPC + return ( + "Vector dimension error" in error_msg + or "dimensions mismatch" in error_msg + or "invalid vector size" in error_msg + ) + + def _create_multi_tenant_collection_if_not_exists( + self, mt_collection_name: str, dimension: int = 384 + ): + """ + Creates a collection with multi-tenancy configuration if it doesn't exist. + Default dimension is set to 384 which corresponds to 'sentence-transformers/all-MiniLM-L6-v2'. + When creating collections dynamically (insert/upsert), the actual vector dimensions will be used. + """ + try: + # Try to create the collection directly - will fail if it already exists + self.client.create_collection( + collection_name=mt_collection_name, + vectors_config=models.VectorParams( + size=dimension, + distance=models.Distance.COSINE, + on_disk=self.QDRANT_ON_DISK, + ), + hnsw_config=models.HnswConfigDiff( + payload_m=16, # Enable per-tenant indexing + m=0, + on_disk=self.QDRANT_ON_DISK, + ), + ) + + # Create tenant ID payload index + self.client.create_payload_index( + collection_name=mt_collection_name, + field_name="tenant_id", + field_schema=models.KeywordIndexParams( + type=models.KeywordIndexType.KEYWORD, + is_tenant=True, + on_disk=self.QDRANT_ON_DISK, + ), + wait=True, + ) + + log.info( + f"Multi-tenant collection {mt_collection_name} created with dimension {dimension}!" + ) + except (UnexpectedResponse, grpc.RpcError) as e: + # Check for the specific error indicating collection already exists + status_code, error_msg = self._extract_error_message(e) + + # HTTP status code 409 or gRPC ALREADY_EXISTS + if (isinstance(e, UnexpectedResponse) and status_code == 409) or ( + isinstance(e, grpc.RpcError) + and e.code() == grpc.StatusCode.ALREADY_EXISTS + ): + if "already exists" in error_msg: + log.debug(f"Collection {mt_collection_name} already exists") + return + # If it's not an already exists error, re-raise + raise e + except Exception as e: + raise e + + def _create_points(self, items: list[VectorItem], tenant_id: str): + """ + Create point structs from vector items with tenant ID. + """ + return [ + PointStruct( + id=item["id"], + vector=item["vector"], + payload={ + "text": item["text"], + "metadata": item["metadata"], + "tenant_id": tenant_id, + }, + ) + for item in items + ] + + def has_collection(self, collection_name: str) -> bool: + """ + Check if a logical collection exists by checking for any points with the tenant ID. 
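For reference, the multitenancy client routes every logical collection name through `_get_collection_and_tenant_id`, which selects one of a handful of shared physical collections and reuses the original name as the tenant id. A rough illustration, with example names, assuming QDRANT_URI is configured so the client initializes:

```python
# Logical name -> (physical collection, tenant_id) mapping; names are examples.
client = QdrantClient()

client._get_collection_and_tenant_id("user-memory-42")
# -> ("open-webui_memories", "user-memory-42")
client._get_collection_and_tenant_id("file-abc123")
# -> ("open-webui_files", "file-abc123")
client._get_collection_and_tenant_id("3f2c" + "a" * 59)  # 63-char hex name
# -> ("open-webui_hash-based", "3f2caaa...")
client._get_collection_and_tenant_id("my-knowledge-base")
# -> ("open-webui_knowledge", "my-knowledge-base")
```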
+ """ + if not self.client: + return False + + # Map to multi-tenant collection and tenant ID + mt_collection, tenant_id = self._get_collection_and_tenant_id(collection_name) + + # Create tenant filter + tenant_filter = models.FieldCondition( + key="tenant_id", match=models.MatchValue(value=tenant_id) + ) + + try: + # Try directly querying - most of the time collection should exist + response = self.client.query_points( + collection_name=mt_collection, + query_filter=models.Filter(must=[tenant_filter]), + limit=1, + ) + + # Collection exists with this tenant ID if there are points + return len(response.points) > 0 + except (UnexpectedResponse, grpc.RpcError) as e: + if self._is_collection_not_found_error(e): + log.debug(f"Collection {mt_collection} doesn't exist") + return False + else: + # For other API errors, log and return False + _, error_msg = self._extract_error_message(e) + log.warning(f"Unexpected Qdrant error: {error_msg}") + return False + except Exception as e: + # For any other errors, log and return False + log.debug(f"Error checking collection {mt_collection}: {e}") + return False + + def delete( + self, + collection_name: str, + ids: Optional[list[str]] = None, + filter: Optional[dict] = None, + ): + """ + Delete vectors by ID or filter from a collection with tenant isolation. + """ + if not self.client: + return None + + # Map to multi-tenant collection and tenant ID + mt_collection, tenant_id = self._get_collection_and_tenant_id(collection_name) + + # Create tenant filter + tenant_filter = models.FieldCondition( + key="tenant_id", match=models.MatchValue(value=tenant_id) + ) + + must_conditions = [tenant_filter] + should_conditions = [] + + if ids: + for id_value in ids: + should_conditions.append( + models.FieldCondition( + key="metadata.id", + match=models.MatchValue(value=id_value), + ), + ) + elif filter: + for key, value in filter.items(): + must_conditions.append( + models.FieldCondition( + key=f"metadata.{key}", + match=models.MatchValue(value=value), + ), + ) + + try: + # Try to delete directly - most of the time collection should exist + update_result = self.client.delete( + collection_name=mt_collection, + points_selector=models.FilterSelector( + filter=models.Filter(must=must_conditions, should=should_conditions) + ), + ) + + return update_result + except (UnexpectedResponse, grpc.RpcError) as e: + if self._is_collection_not_found_error(e): + log.debug( + f"Collection {mt_collection} doesn't exist, nothing to delete" + ) + return None + else: + # For other API errors, log and re-raise + _, error_msg = self._extract_error_message(e) + log.warning(f"Unexpected Qdrant error: {error_msg}") + raise + except Exception as e: + # For non-Qdrant exceptions, re-raise + raise + + def search( + self, collection_name: str, vectors: list[list[float | int]], limit: int + ) -> Optional[SearchResult]: + """ + Search for the nearest neighbor items based on the vectors with tenant isolation. 
+ """ + if not self.client: + return None + + # Map to multi-tenant collection and tenant ID + mt_collection, tenant_id = self._get_collection_and_tenant_id(collection_name) + + # Get the vector dimension from the query vector + dimension = len(vectors[0]) if vectors and len(vectors) > 0 else None + + try: + # Try the search operation directly - most of the time collection should exist + + # Create tenant filter + tenant_filter = models.FieldCondition( + key="tenant_id", match=models.MatchValue(value=tenant_id) + ) + + # Ensure vector dimensions match the collection + collection_dim = self.client.get_collection( + mt_collection + ).config.params.vectors.size + + if collection_dim != dimension: + if collection_dim < dimension: + vectors = [vector[:collection_dim] for vector in vectors] + else: + vectors = [ + vector + [0] * (collection_dim - dimension) + for vector in vectors + ] + + # Search with tenant filter + prefetch_query = models.Prefetch( + filter=models.Filter(must=[tenant_filter]), + limit=NO_LIMIT, + ) + query_response = self.client.query_points( + collection_name=mt_collection, + query=vectors[0], + prefetch=prefetch_query, + limit=limit, + ) + + get_result = self._result_to_get_result(query_response.points) + return SearchResult( + ids=get_result.ids, + documents=get_result.documents, + metadatas=get_result.metadatas, + # qdrant distance is [-1, 1], normalize to [0, 1] + distances=[ + [(point.score + 1.0) / 2.0 for point in query_response.points] + ], + ) + except (UnexpectedResponse, grpc.RpcError) as e: + if self._is_collection_not_found_error(e): + log.debug( + f"Collection {mt_collection} doesn't exist, search returns None" + ) + return None + else: + # For other API errors, log and re-raise + _, error_msg = self._extract_error_message(e) + log.warning(f"Unexpected Qdrant error during search: {error_msg}") + raise + except Exception as e: + # For non-Qdrant exceptions, log and return None + log.exception(f"Error searching collection '{collection_name}': {e}") + return None + + def query(self, collection_name: str, filter: dict, limit: Optional[int] = None): + """ + Query points with filters and tenant isolation. 
+ """ + if not self.client: + return None + + # Map to multi-tenant collection and tenant ID + mt_collection, tenant_id = self._get_collection_and_tenant_id(collection_name) + + # Set default limit if not provided + if limit is None: + limit = NO_LIMIT + + # Create tenant filter + tenant_filter = models.FieldCondition( + key="tenant_id", match=models.MatchValue(value=tenant_id) + ) + + # Create metadata filters + field_conditions = [] + for key, value in filter.items(): + field_conditions.append( + models.FieldCondition( + key=f"metadata.{key}", match=models.MatchValue(value=value) + ) + ) + + # Combine tenant filter with metadata filters + combined_filter = models.Filter(must=[tenant_filter, *field_conditions]) + + try: + # Try the query directly - most of the time collection should exist + points = self.client.query_points( + collection_name=mt_collection, + query_filter=combined_filter, + limit=limit, + ) + + return self._result_to_get_result(points.points) + except (UnexpectedResponse, grpc.RpcError) as e: + if self._is_collection_not_found_error(e): + log.debug( + f"Collection {mt_collection} doesn't exist, query returns None" + ) + return None + else: + # For other API errors, log and re-raise + _, error_msg = self._extract_error_message(e) + log.warning(f"Unexpected Qdrant error during query: {error_msg}") + raise + except Exception as e: + # For non-Qdrant exceptions, log and re-raise + log.exception(f"Error querying collection '{collection_name}': {e}") + return None + + def get(self, collection_name: str) -> Optional[GetResult]: + """ + Get all items in a collection with tenant isolation. + """ + if not self.client: + return None + + # Map to multi-tenant collection and tenant ID + mt_collection, tenant_id = self._get_collection_and_tenant_id(collection_name) + + # Create tenant filter + tenant_filter = models.FieldCondition( + key="tenant_id", match=models.MatchValue(value=tenant_id) + ) + + try: + # Try to get points directly - most of the time collection should exist + points = self.client.query_points( + collection_name=mt_collection, + query_filter=models.Filter(must=[tenant_filter]), + limit=NO_LIMIT, + ) + + return self._result_to_get_result(points.points) + except (UnexpectedResponse, grpc.RpcError) as e: + if self._is_collection_not_found_error(e): + log.debug(f"Collection {mt_collection} doesn't exist, get returns None") + return None + else: + # For other API errors, log and re-raise + _, error_msg = self._extract_error_message(e) + log.warning(f"Unexpected Qdrant error during get: {error_msg}") + raise + except Exception as e: + # For non-Qdrant exceptions, log and return None + log.exception(f"Error getting collection '{collection_name}': {e}") + return None + + def _handle_operation_with_error_retry( + self, operation_name, mt_collection, points, dimension + ): + """ + Private helper to handle common error cases for insert and upsert operations. + + Args: + operation_name: 'insert' or 'upsert' + mt_collection: The multi-tenant collection name + points: The vector points to insert/upsert + dimension: The dimension of the vectors + + Returns: + The operation result (for upsert) or None (for insert) + """ + try: + if operation_name == "insert": + self.client.upload_points(mt_collection, points) + return None + else: # upsert + return self.client.upsert(mt_collection, points) + except (UnexpectedResponse, grpc.RpcError) as e: + # Handle collection not found + if self._is_collection_not_found_error(e): + log.info( + f"Collection {mt_collection} doesn't exist. 
Creating it with dimension {dimension}." + ) + # Create collection with correct dimensions from our vectors + self._create_multi_tenant_collection_if_not_exists( + mt_collection_name=mt_collection, dimension=dimension + ) + # Try operation again - no need for dimension adjustment since we just created with correct dimensions + if operation_name == "insert": + self.client.upload_points(mt_collection, points) + return None + else: # upsert + return self.client.upsert(mt_collection, points) + + # Handle dimension mismatch + elif self._is_dimension_mismatch_error(e): + # For dimension errors, the collection must exist, so get its configuration + mt_collection_info = self.client.get_collection(mt_collection) + existing_size = mt_collection_info.config.params.vectors.size + + log.info( + f"Dimension mismatch: Collection {mt_collection} expects {existing_size}, got {dimension}" + ) + + if existing_size < dimension: + # Truncate vectors to fit + log.info( + f"Truncating vectors from {dimension} to {existing_size} dimensions" + ) + points = [ + PointStruct( + id=point.id, + vector=point.vector[:existing_size], + payload=point.payload, + ) + for point in points + ] + elif existing_size > dimension: + # Pad vectors with zeros + log.info( + f"Padding vectors from {dimension} to {existing_size} dimensions with zeros" + ) + points = [ + PointStruct( + id=point.id, + vector=point.vector + + [0] * (existing_size - len(point.vector)), + payload=point.payload, + ) + for point in points + ] + # Try operation again with adjusted dimensions + if operation_name == "insert": + self.client.upload_points(mt_collection, points) + return None + else: # upsert + return self.client.upsert(mt_collection, points) + else: + # Not a known error we can handle, log and re-raise + _, error_msg = self._extract_error_message(e) + log.warning(f"Unhandled Qdrant error: {error_msg}") + raise + except Exception as e: + # For non-Qdrant exceptions, re-raise + raise + + def insert(self, collection_name: str, items: list[VectorItem]): + """ + Insert items with tenant ID. + """ + if not self.client or not items: + return None + + # Map to multi-tenant collection and tenant ID + mt_collection, tenant_id = self._get_collection_and_tenant_id(collection_name) + + # Get dimensions from the actual vectors + dimension = len(items[0]["vector"]) if items else None + + # Create points with tenant ID + points = self._create_points(items, tenant_id) + + # Handle the operation with error retry + return self._handle_operation_with_error_retry( + "insert", mt_collection, points, dimension + ) + + def upsert(self, collection_name: str, items: list[VectorItem]): + """ + Upsert items with tenant ID. + """ + if not self.client or not items: + return None + + # Map to multi-tenant collection and tenant ID + mt_collection, tenant_id = self._get_collection_and_tenant_id(collection_name) + + # Get dimensions from the actual vectors + dimension = len(items[0]["vector"]) if items else None + + # Create points with tenant ID + points = self._create_points(items, tenant_id) + + # Handle the operation with error retry + return self._handle_operation_with_error_retry( + "upsert", mt_collection, points, dimension + ) + + def reset(self): + """ + Reset the database by deleting all collections. 
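A hedged usage sketch of the client above: `upsert()` accepts the same item shape `_create_points` reads (`id`, `vector`, `text`, `metadata`), maps the logical name to a shared collection plus tenant id, and creates the collection on first use with the vectors' dimension. The collection name, id, vector, and metadata below are made up, and QDRANT_URI is assumed to be configured:

```python
# Upserting one item into a logical "file-..." collection; all values are examples.
client = QdrantClient()
client.upsert(
    "file-abc123",
    [
        {
            "id": "7d1f0d1e-0000-4000-8000-000000000001",
            "vector": [0.01] * 384,          # embedding; dimension drives collection creation
            "text": "hello world",
            "metadata": {"name": "example.txt"},
        }
    ],
)
```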
+ """ + if not self.client: + return None + + collection_names = self.client.get_collections().collections + for collection_name in collection_names: + if collection_name.name.startswith(self.collection_prefix): + self.client.delete_collection(collection_name=collection_name.name) + + def delete_collection(self, collection_name: str): + """ + Delete a collection. + """ + if not self.client: + return None + + # Map to multi-tenant collection and tenant ID + mt_collection, tenant_id = self._get_collection_and_tenant_id(collection_name) + + tenant_filter = models.FieldCondition( + key="tenant_id", match=models.MatchValue(value=tenant_id) + ) + + field_conditions = [tenant_filter] + + update_result = self.client.delete( + collection_name=mt_collection, + points_selector=models.FilterSelector( + filter=models.Filter(must=field_conditions) + ), + ) + + if self.client.get_collection(mt_collection).points_count == 0: + self.client.delete_collection(mt_collection) + + return update_result diff --git a/backend/open_webui/retrieval/vector/factory.py b/backend/open_webui/retrieval/vector/factory.py new file mode 100644 index 000000000..72a3f6ceb --- /dev/null +++ b/backend/open_webui/retrieval/vector/factory.py @@ -0,0 +1,55 @@ +from open_webui.retrieval.vector.main import VectorDBBase +from open_webui.retrieval.vector.type import VectorType +from open_webui.config import VECTOR_DB, ENABLE_QDRANT_MULTITENANCY_MODE + + +class Vector: + + @staticmethod + def get_vector(vector_type: str) -> VectorDBBase: + """ + get vector db instance by vector type + """ + match vector_type: + case VectorType.MILVUS: + from open_webui.retrieval.vector.dbs.milvus import MilvusClient + + return MilvusClient() + case VectorType.QDRANT: + if ENABLE_QDRANT_MULTITENANCY_MODE: + from open_webui.retrieval.vector.dbs.qdrant_multitenancy import ( + QdrantClient, + ) + + return QdrantClient() + else: + from open_webui.retrieval.vector.dbs.qdrant import QdrantClient + + return QdrantClient() + case VectorType.PINECONE: + from open_webui.retrieval.vector.dbs.pinecone import PineconeClient + + return PineconeClient() + case VectorType.OPENSEARCH: + from open_webui.retrieval.vector.dbs.opensearch import OpenSearchClient + + return OpenSearchClient() + case VectorType.PGVECTOR: + from open_webui.retrieval.vector.dbs.pgvector import PgvectorClient + + return PgvectorClient() + case VectorType.ELASTICSEARCH: + from open_webui.retrieval.vector.dbs.elasticsearch import ( + ElasticsearchClient, + ) + + return ElasticsearchClient() + case VectorType.CHROMA: + from open_webui.retrieval.vector.dbs.chroma import ChromaClient + + return ChromaClient() + case _: + raise ValueError(f"Unsupported vector type: {vector_type}") + + +VECTOR_DB_CLIENT = Vector.get_vector(VECTOR_DB) diff --git a/backend/open_webui/retrieval/vector/type.py b/backend/open_webui/retrieval/vector/type.py new file mode 100644 index 000000000..b03bcb482 --- /dev/null +++ b/backend/open_webui/retrieval/vector/type.py @@ -0,0 +1,11 @@ +from enum import StrEnum + + +class VectorType(StrEnum): + MILVUS = "milvus" + QDRANT = "qdrant" + CHROMA = "chroma" + PINECONE = "pinecone" + ELASTICSEARCH = "elasticsearch" + OPENSEARCH = "opensearch" + PGVECTOR = "pgvector" diff --git a/backend/open_webui/retrieval/web/utils.py b/backend/open_webui/retrieval/web/utils.py index 78c962f15..b8ec538d3 100644 --- a/backend/open_webui/retrieval/web/utils.py +++ b/backend/open_webui/retrieval/web/utils.py @@ -25,7 +25,7 @@ from langchain_community.document_loaders.firecrawl import FireCrawlLoader from 
langchain_community.document_loaders.base import BaseLoader from langchain_core.documents import Document from open_webui.retrieval.loaders.tavily import TavilyLoader -from open_webui.retrieval.loaders.external import ExternalLoader +from open_webui.retrieval.loaders.external_web import ExternalWebLoader from open_webui.constants import ERROR_MESSAGES from open_webui.config import ( ENABLE_RAG_LOCAL_WEB_FETCH, @@ -39,7 +39,7 @@ from open_webui.config import ( EXTERNAL_WEB_LOADER_URL, EXTERNAL_WEB_LOADER_API_KEY, ) -from open_webui.env import SRC_LOG_LEVELS +from open_webui.env import SRC_LOG_LEVELS, AIOHTTP_CLIENT_SESSION_SSL log = logging.getLogger(__name__) log.setLevel(SRC_LOG_LEVELS["RAG"]) @@ -515,7 +515,9 @@ class SafeWebBaseLoader(WebBaseLoader): kwargs["ssl"] = False async with session.get( - url, **(self.requests_kwargs | kwargs) + url, + **(self.requests_kwargs | kwargs), + ssl=AIOHTTP_CLIENT_SESSION_SSL, ) as response: if self.raise_for_status: response.raise_for_status() @@ -628,7 +630,7 @@ def get_web_loader( web_loader_args["extract_depth"] = TAVILY_EXTRACT_DEPTH.value if WEB_LOADER_ENGINE.value == "external": - WebLoaderClass = ExternalLoader + WebLoaderClass = ExternalWebLoader web_loader_args["external_url"] = EXTERNAL_WEB_LOADER_URL.value web_loader_args["external_api_key"] = EXTERNAL_WEB_LOADER_API_KEY.value diff --git a/backend/open_webui/routers/audio.py b/backend/open_webui/routers/audio.py index 445857c88..484bc138a 100644 --- a/backend/open_webui/routers/audio.py +++ b/backend/open_webui/routers/audio.py @@ -7,6 +7,7 @@ from functools import lru_cache from pathlib import Path from pydub import AudioSegment from pydub.silence import split_on_silence +from concurrent.futures import ThreadPoolExecutor import aiohttp import aiofiles @@ -38,6 +39,7 @@ from open_webui.config import ( from open_webui.constants import ERROR_MESSAGES from open_webui.env import ( + AIOHTTP_CLIENT_SESSION_SSL, AIOHTTP_CLIENT_TIMEOUT, ENV, SRC_LOG_LEVELS, @@ -49,7 +51,7 @@ from open_webui.env import ( router = APIRouter() # Constants -MAX_FILE_SIZE_MB = 25 +MAX_FILE_SIZE_MB = 20 MAX_FILE_SIZE = MAX_FILE_SIZE_MB * 1024 * 1024 # Convert MB to bytes AZURE_MAX_FILE_SIZE_MB = 200 AZURE_MAX_FILE_SIZE = AZURE_MAX_FILE_SIZE_MB * 1024 * 1024 # Convert MB to bytes @@ -86,8 +88,6 @@ def get_audio_convert_format(file_path): and info.get("codec_tag_string") == "mp4a" ): return "mp4" - elif info.get("format_name") == "ogg": - return "ogg" except Exception as e: log.error(f"Error getting audio format: {e}") return False @@ -326,6 +326,7 @@ async def speech(request: Request, user=Depends(get_verified_user)): else {} ), }, + ssl=AIOHTTP_CLIENT_SESSION_SSL, ) as r: r.raise_for_status() @@ -381,6 +382,7 @@ async def speech(request: Request, user=Depends(get_verified_user)): "Content-Type": "application/json", "xi-api-key": request.app.state.config.TTS_API_KEY, }, + ssl=AIOHTTP_CLIENT_SESSION_SSL, ) as r: r.raise_for_status() @@ -439,6 +441,7 @@ async def speech(request: Request, user=Depends(get_verified_user)): "X-Microsoft-OutputFormat": output_format, }, data=data, + ssl=AIOHTTP_CLIENT_SESSION_SSL, ) as r: r.raise_for_status() @@ -507,8 +510,7 @@ async def speech(request: Request, user=Depends(get_verified_user)): return FileResponse(file_path) -def transcribe(request: Request, file_path): - log.info(f"transcribe: {file_path}") +def transcription_handler(request, file_path): filename = os.path.basename(file_path) file_dir = os.path.dirname(file_path) id = filename.split(".")[0] @@ -771,24 +773,119 @@ def 
transcribe(request: Request, file_path): ) +def transcribe(request: Request, file_path): + log.info(f"transcribe: {file_path}") + + try: + file_path = compress_audio(file_path) + except Exception as e: + log.exception(e) + + # Always produce a list of chunk paths (could be one entry if small) + try: + chunk_paths = split_audio(file_path, MAX_FILE_SIZE) + print(f"Chunk paths: {chunk_paths}") + except Exception as e: + log.exception(e) + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail=ERROR_MESSAGES.DEFAULT(e), + ) + + results = [] + try: + with ThreadPoolExecutor() as executor: + # Submit tasks for each chunk_path + futures = [ + executor.submit(transcription_handler, request, chunk_path) + for chunk_path in chunk_paths + ] + # Gather results as they complete + for future in futures: + try: + results.append(future.result()) + except Exception as transcribe_exc: + log.exception(f"Error transcribing chunk: {transcribe_exc}") + raise HTTPException( + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, + detail="Error during transcription.", + ) + finally: + # Clean up only the temporary chunks, never the original file + for chunk_path in chunk_paths: + if chunk_path != file_path and os.path.isfile(chunk_path): + try: + os.remove(chunk_path) + except Exception: + pass + + return { + "text": " ".join([result["text"] for result in results]), + } + + def compress_audio(file_path): if os.path.getsize(file_path) > MAX_FILE_SIZE: + id = os.path.splitext(os.path.basename(file_path))[ + 0 + ] # Handles names with multiple dots file_dir = os.path.dirname(file_path) + audio = AudioSegment.from_file(file_path) audio = audio.set_frame_rate(16000).set_channels(1) # Compress audio - compressed_path = f"{file_dir}/{id}_compressed.opus" - audio.export(compressed_path, format="opus", bitrate="32k") - log.debug(f"Compressed audio to {compressed_path}") - if ( - os.path.getsize(compressed_path) > MAX_FILE_SIZE - ): # Still larger than MAX_FILE_SIZE after compression - raise Exception(ERROR_MESSAGES.FILE_TOO_LARGE(size=f"{MAX_FILE_SIZE_MB}MB")) + compressed_path = os.path.join(file_dir, f"{id}_compressed.mp3") + audio.export(compressed_path, format="mp3", bitrate="32k") + # log.debug(f"Compressed audio to {compressed_path}") # Uncomment if log is defined + return compressed_path else: return file_path +def split_audio(file_path, max_bytes, format="mp3", bitrate="32k"): + """ + Splits audio into chunks not exceeding max_bytes. + Returns a list of chunk file paths. If audio fits, returns list with original path. 
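The reworked `transcribe()` above compresses the upload, splits it into size-bounded chunks, transcribes the chunks on a thread pool, and joins the partial texts in submission order. The fan-out/join pattern in isolation, with a stand-in handler and made-up chunk paths:

```python
# Minimal sketch of the chunked-transcription fan-out/join used by transcribe();
# fake_handler stands in for transcription_handler.
from concurrent.futures import ThreadPoolExecutor


def fake_handler(path: str) -> dict:
    return {"text": f"transcript of {path}"}


chunk_paths = ["audio_chunk_0.mp3", "audio_chunk_1.mp3"]

with ThreadPoolExecutor() as executor:
    futures = [executor.submit(fake_handler, p) for p in chunk_paths]
    results = [f.result() for f in futures]  # preserves submission order

print(" ".join(r["text"] for r in results))
```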
+ """ + file_size = os.path.getsize(file_path) + if file_size <= max_bytes: + return [file_path] # Nothing to split + + audio = AudioSegment.from_file(file_path) + duration_ms = len(audio) + orig_size = file_size + + approx_chunk_ms = max(int(duration_ms * (max_bytes / orig_size)) - 1000, 1000) + chunks = [] + start = 0 + i = 0 + + base, _ = os.path.splitext(file_path) + + while start < duration_ms: + end = min(start + approx_chunk_ms, duration_ms) + chunk = audio[start:end] + chunk_path = f"{base}_chunk_{i}.{format}" + chunk.export(chunk_path, format=format, bitrate=bitrate) + + # Reduce chunk duration if still too large + while os.path.getsize(chunk_path) > max_bytes and (end - start) > 5000: + end = start + ((end - start) // 2) + chunk = audio[start:end] + chunk.export(chunk_path, format=format, bitrate=bitrate) + + if os.path.getsize(chunk_path) > max_bytes: + os.remove(chunk_path) + raise Exception("Audio chunk cannot be reduced below max file size.") + + chunks.append(chunk_path) + start = end + i += 1 + + return chunks + + @router.post("/transcriptions") def transcription( request: Request, @@ -803,6 +900,7 @@ def transcription( "audio/ogg", "audio/x-m4a", "audio/webm", + "video/webm", ) if not file.content_type.startswith(supported_filetypes): @@ -826,19 +924,13 @@ def transcription( f.write(contents) try: - try: - file_path = compress_audio(file_path) - except Exception as e: - log.exception(e) + result = transcribe(request, file_path) - raise HTTPException( - status_code=status.HTTP_400_BAD_REQUEST, - detail=ERROR_MESSAGES.DEFAULT(e), - ) + return { + **result, + "filename": os.path.basename(file_path), + } - data = transcribe(request, file_path) - file_path = file_path.split("/")[-1] - return {**data, "filename": file_path} except Exception as e: log.exception(e) diff --git a/backend/open_webui/routers/auths.py b/backend/open_webui/routers/auths.py index 309862ed5..793bdfd30 100644 --- a/backend/open_webui/routers/auths.py +++ b/backend/open_webui/routers/auths.py @@ -31,7 +31,7 @@ from open_webui.env import ( SRC_LOG_LEVELS, ) from fastapi import APIRouter, Depends, HTTPException, Request, status -from fastapi.responses import RedirectResponse, Response +from fastapi.responses import RedirectResponse, Response, JSONResponse from open_webui.config import OPENID_PROVIDER_URL, ENABLE_OAUTH_SIGNUP, ENABLE_LDAP from pydantic import BaseModel @@ -51,7 +51,7 @@ from open_webui.utils.access_control import get_permissions from typing import Optional, List -from ssl import CERT_REQUIRED, PROTOCOL_TLS +from ssl import CERT_NONE, CERT_REQUIRED, PROTOCOL_TLS if ENABLE_LDAP.value: from ldap3 import Server, Connection, NONE, Tls @@ -186,6 +186,9 @@ async def ldap_auth(request: Request, response: Response, form_data: LdapForm): LDAP_APP_PASSWORD = request.app.state.config.LDAP_APP_PASSWORD LDAP_USE_TLS = request.app.state.config.LDAP_USE_TLS LDAP_CA_CERT_FILE = request.app.state.config.LDAP_CA_CERT_FILE + LDAP_VALIDATE_CERT = ( + CERT_REQUIRED if request.app.state.config.LDAP_VALIDATE_CERT else CERT_NONE + ) LDAP_CIPHERS = ( request.app.state.config.LDAP_CIPHERS if request.app.state.config.LDAP_CIPHERS @@ -197,7 +200,7 @@ async def ldap_auth(request: Request, response: Response, form_data: LdapForm): try: tls = Tls( - validate=CERT_REQUIRED, + validate=LDAP_VALIDATE_CERT, version=PROTOCOL_TLS, ca_certs_file=LDAP_CA_CERT_FILE, ciphers=LDAP_CIPHERS, @@ -478,10 +481,6 @@ async def signup(request: Request, response: Response, form_data: SignupForm): "admin" if user_count == 0 else 
request.app.state.config.DEFAULT_USER_ROLE ) - if user_count == 0: - # Disable signup after the first user is created - request.app.state.config.ENABLE_SIGNUP = False - # The password passed to bcrypt must be 72 bytes or fewer. If it is longer, it will be truncated before hashing. if len(form_data.password.encode("utf-8")) > 72: raise HTTPException( @@ -541,6 +540,10 @@ async def signup(request: Request, response: Response, form_data: SignupForm): user.id, request.app.state.config.USER_PERMISSIONS ) + if user_count == 0: + # Disable signup after the first user is created + request.app.state.config.ENABLE_SIGNUP = False + return { "token": token, "token_type": "Bearer", @@ -574,9 +577,14 @@ async def signout(request: Request, response: Response): logout_url = openid_data.get("end_session_endpoint") if logout_url: response.delete_cookie("oauth_id_token") - return RedirectResponse( + + return JSONResponse( + status_code=200, + content={ + "status": True, + "redirect_url": f"{logout_url}?id_token_hint={oauth_id_token}", + }, headers=response.headers, - url=f"{logout_url}?id_token_hint={oauth_id_token}", ) else: raise HTTPException( @@ -591,12 +599,18 @@ async def signout(request: Request, response: Response): ) if WEBUI_AUTH_SIGNOUT_REDIRECT_URL: - return RedirectResponse( + return JSONResponse( + status_code=200, + content={ + "status": True, + "redirect_url": WEBUI_AUTH_SIGNOUT_REDIRECT_URL, + }, headers=response.headers, - url=WEBUI_AUTH_SIGNOUT_REDIRECT_URL, ) - return {"status": True} + return JSONResponse( + status_code=200, content={"status": True}, headers=response.headers + ) ############################ @@ -696,6 +710,9 @@ async def get_admin_config(request: Request, user=Depends(get_admin_user)): "ENABLE_CHANNELS": request.app.state.config.ENABLE_CHANNELS, "ENABLE_NOTES": request.app.state.config.ENABLE_NOTES, "ENABLE_USER_WEBHOOKS": request.app.state.config.ENABLE_USER_WEBHOOKS, + "PENDING_USER_OVERLAY_TITLE": request.app.state.config.PENDING_USER_OVERLAY_TITLE, + "PENDING_USER_OVERLAY_CONTENT": request.app.state.config.PENDING_USER_OVERLAY_CONTENT, + "RESPONSE_WATERMARK": request.app.state.config.RESPONSE_WATERMARK, } @@ -713,6 +730,9 @@ class AdminConfig(BaseModel): ENABLE_CHANNELS: bool ENABLE_NOTES: bool ENABLE_USER_WEBHOOKS: bool + PENDING_USER_OVERLAY_TITLE: Optional[str] = None + PENDING_USER_OVERLAY_CONTENT: Optional[str] = None + RESPONSE_WATERMARK: Optional[str] = None @router.post("/admin/config") @@ -750,6 +770,15 @@ async def update_admin_config( request.app.state.config.ENABLE_USER_WEBHOOKS = form_data.ENABLE_USER_WEBHOOKS + request.app.state.config.PENDING_USER_OVERLAY_TITLE = ( + form_data.PENDING_USER_OVERLAY_TITLE + ) + request.app.state.config.PENDING_USER_OVERLAY_CONTENT = ( + form_data.PENDING_USER_OVERLAY_CONTENT + ) + + request.app.state.config.RESPONSE_WATERMARK = form_data.RESPONSE_WATERMARK + return { "SHOW_ADMIN_DETAILS": request.app.state.config.SHOW_ADMIN_DETAILS, "WEBUI_URL": request.app.state.config.WEBUI_URL, @@ -764,6 +793,9 @@ async def update_admin_config( "ENABLE_CHANNELS": request.app.state.config.ENABLE_CHANNELS, "ENABLE_NOTES": request.app.state.config.ENABLE_NOTES, "ENABLE_USER_WEBHOOKS": request.app.state.config.ENABLE_USER_WEBHOOKS, + "PENDING_USER_OVERLAY_TITLE": request.app.state.config.PENDING_USER_OVERLAY_TITLE, + "PENDING_USER_OVERLAY_CONTENT": request.app.state.config.PENDING_USER_OVERLAY_CONTENT, + "RESPONSE_WATERMARK": request.app.state.config.RESPONSE_WATERMARK, } @@ -779,6 +811,7 @@ class LdapServerConfig(BaseModel): 
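A small sketch of how the new `LDAP_VALIDATE_CERT` flag feeds ldap3's `Tls()` call in `ldap_auth` above; the flag value, CA path, and cipher string are placeholders:

```python
# Mapping LDAP_VALIDATE_CERT onto ldap3 TLS validation; values are placeholders.
from ssl import CERT_NONE, CERT_REQUIRED, PROTOCOL_TLS
from ldap3 import Tls

validate_cert = False  # e.g. request.app.state.config.LDAP_VALIDATE_CERT

tls = Tls(
    validate=CERT_REQUIRED if validate_cert else CERT_NONE,
    version=PROTOCOL_TLS,
    ca_certs_file=None,  # LDAP_CA_CERT_FILE, if provided
    ciphers="ALL",
)
```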
search_filters: str = "" use_tls: bool = True certificate_path: Optional[str] = None + validate_cert: bool = True ciphers: Optional[str] = "ALL" @@ -796,6 +829,7 @@ async def get_ldap_server(request: Request, user=Depends(get_admin_user)): "search_filters": request.app.state.config.LDAP_SEARCH_FILTERS, "use_tls": request.app.state.config.LDAP_USE_TLS, "certificate_path": request.app.state.config.LDAP_CA_CERT_FILE, + "validate_cert": request.app.state.config.LDAP_VALIDATE_CERT, "ciphers": request.app.state.config.LDAP_CIPHERS, } @@ -831,6 +865,7 @@ async def update_ldap_server( request.app.state.config.LDAP_SEARCH_FILTERS = form_data.search_filters request.app.state.config.LDAP_USE_TLS = form_data.use_tls request.app.state.config.LDAP_CA_CERT_FILE = form_data.certificate_path + request.app.state.config.LDAP_VALIDATE_CERT = form_data.validate_cert request.app.state.config.LDAP_CIPHERS = form_data.ciphers return { @@ -845,6 +880,7 @@ async def update_ldap_server( "search_filters": request.app.state.config.LDAP_SEARCH_FILTERS, "use_tls": request.app.state.config.LDAP_USE_TLS, "certificate_path": request.app.state.config.LDAP_CA_CERT_FILE, + "validate_cert": request.app.state.config.LDAP_VALIDATE_CERT, "ciphers": request.app.state.config.LDAP_CIPHERS, } diff --git a/backend/open_webui/routers/evaluations.py b/backend/open_webui/routers/evaluations.py index 36320b6fc..164f3c40b 100644 --- a/backend/open_webui/routers/evaluations.py +++ b/backend/open_webui/routers/evaluations.py @@ -74,13 +74,17 @@ class FeedbackUserResponse(FeedbackResponse): @router.get("/feedbacks/all", response_model=list[FeedbackUserResponse]) async def get_all_feedbacks(user=Depends(get_admin_user)): feedbacks = Feedbacks.get_all_feedbacks() - return [ - FeedbackUserResponse( - **feedback.model_dump(), - user=UserResponse(**Users.get_user_by_id(feedback.user_id).model_dump()), + + feedback_list = [] + for feedback in feedbacks: + user = Users.get_user_by_id(feedback.user_id) + feedback_list.append( + FeedbackUserResponse( + **feedback.model_dump(), + user=UserResponse(**user.model_dump()) if user else None, + ) ) - for feedback in feedbacks - ] + return feedback_list @router.delete("/feedbacks/all") @@ -92,12 +96,7 @@ async def delete_all_feedbacks(user=Depends(get_admin_user)): @router.get("/feedbacks/all/export", response_model=list[FeedbackModel]) async def get_all_feedbacks(user=Depends(get_admin_user)): feedbacks = Feedbacks.get_all_feedbacks() - return [ - FeedbackModel( - **feedback.model_dump(), user=Users.get_user_by_id(feedback.user_id) - ) - for feedback in feedbacks - ] + return feedbacks @router.get("/feedbacks/user", response_model=list[FeedbackUserResponse]) diff --git a/backend/open_webui/routers/files.py b/backend/open_webui/routers/files.py index 475905da1..4ba57f56b 100644 --- a/backend/open_webui/routers/files.py +++ b/backend/open_webui/routers/files.py @@ -95,6 +95,20 @@ def upload_file( unsanitized_filename = file.filename filename = os.path.basename(unsanitized_filename) + file_extension = os.path.splitext(filename)[1] + if request.app.state.config.ALLOWED_FILE_EXTENSIONS: + request.app.state.config.ALLOWED_FILE_EXTENSIONS = [ + ext for ext in request.app.state.config.ALLOWED_FILE_EXTENSIONS if ext + ] + + if file_extension not in request.app.state.config.ALLOWED_FILE_EXTENSIONS: + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail=ERROR_MESSAGES.DEFAULT( + f"File type {file_extension} is not allowed" + ), + ) + # replace filename with uuid id = str(uuid.uuid4()) name = 
filename @@ -125,33 +139,38 @@ def upload_file( ) if process: try: + if file.content_type: + if file.content_type.startswith( + ( + "audio/mpeg", + "audio/wav", + "audio/ogg", + "audio/x-m4a", + "audio/webm", + "video/webm", + ) + ): + file_path = Storage.get_file(file_path) + result = transcribe(request, file_path) - if file.content_type.startswith( - ( - "audio/mpeg", - "audio/wav", - "audio/ogg", - "audio/x-m4a", - "audio/webm", - "video/webm", + process_file( + request, + ProcessFileForm(file_id=id, content=result.get("text", "")), + user=user, + ) + elif file.content_type not in [ + "image/png", + "image/jpeg", + "image/gif", + "video/mp4", + "video/ogg", + "video/quicktime", + ]: + process_file(request, ProcessFileForm(file_id=id), user=user) + else: + log.info( + f"File type {file.content_type} is not provided, but trying to process anyway" ) - ): - file_path = Storage.get_file(file_path) - result = transcribe(request, file_path) - - process_file( - request, - ProcessFileForm(file_id=id, content=result.get("text", "")), - user=user, - ) - elif file.content_type not in [ - "image/png", - "image/jpeg", - "image/gif", - "video/mp4", - "video/ogg", - "video/quicktime", - ]: process_file(request, ProcessFileForm(file_id=id), user=user) file_item = Files.get_file_by_id(id=id) diff --git a/backend/open_webui/routers/knowledge.py b/backend/open_webui/routers/knowledge.py index 920130858..e6e55f4d3 100644 --- a/backend/open_webui/routers/knowledge.py +++ b/backend/open_webui/routers/knowledge.py @@ -10,7 +10,7 @@ from open_webui.models.knowledge import ( KnowledgeUserResponse, ) from open_webui.models.files import Files, FileModel, FileMetadataResponse -from open_webui.retrieval.vector.connector import VECTOR_DB_CLIENT +from open_webui.retrieval.vector.factory import VECTOR_DB_CLIENT from open_webui.routers.retrieval import ( process_file, ProcessFileForm, diff --git a/backend/open_webui/routers/memories.py b/backend/open_webui/routers/memories.py index 6d54c9c17..333e9ecc6 100644 --- a/backend/open_webui/routers/memories.py +++ b/backend/open_webui/routers/memories.py @@ -4,7 +4,7 @@ import logging from typing import Optional from open_webui.models.memories import Memories, MemoryModel -from open_webui.retrieval.vector.connector import VECTOR_DB_CLIENT +from open_webui.retrieval.vector.factory import VECTOR_DB_CLIENT from open_webui.utils.auth import get_verified_user from open_webui.env import SRC_LOG_LEVELS diff --git a/backend/open_webui/routers/ollama.py b/backend/open_webui/routers/ollama.py index 790f7dece..7c313ea97 100644 --- a/backend/open_webui/routers/ollama.py +++ b/backend/open_webui/routers/ollama.py @@ -340,6 +340,8 @@ async def get_all_models(request: Request, user: UserModel = None): ), # Legacy support ) + connection_type = api_config.get("connection_type", "local") + prefix_id = api_config.get("prefix_id", None) tags = api_config.get("tags", []) model_ids = api_config.get("model_ids", []) @@ -352,14 +354,16 @@ async def get_all_models(request: Request, user: UserModel = None): ) ) - if prefix_id: - for model in response.get("models", []): + for model in response.get("models", []): + if prefix_id: model["model"] = f"{prefix_id}.{model['model']}" - if tags: - for model in response.get("models", []): + if tags: model["tags"] = tags + if connection_type: + model["connection_type"] = connection_type + def merge_models_lists(model_lists): merged_models = {} @@ -1585,7 +1589,9 @@ async def upload_model( if url_idx is None: url_idx = 0 ollama_url = 
request.app.state.config.OLLAMA_BASE_URLS[url_idx] - file_path = os.path.join(UPLOAD_DIR, file.filename) + + filename = os.path.basename(file.filename) + file_path = os.path.join(UPLOAD_DIR, filename) os.makedirs(UPLOAD_DIR, exist_ok=True) # --- P1: save file locally --- @@ -1630,13 +1636,13 @@ async def upload_model( os.remove(file_path) # Create model in ollama - model_name, ext = os.path.splitext(file.filename) + model_name, ext = os.path.splitext(filename) log.info(f"Created Model: {model_name}") # DEBUG create_payload = { "model": model_name, # Reference the file by its original name => the uploaded blob's digest - "files": {file.filename: f"sha256:{file_hash}"}, + "files": {filename: f"sha256:{file_hash}"}, } log.info(f"Model Payload: {create_payload}") # DEBUG @@ -1653,7 +1659,7 @@ async def upload_model( done_msg = { "done": True, "blob": f"sha256:{file_hash}", - "name": file.filename, + "name": filename, "model_created": model_name, } yield f"data: {json.dumps(done_msg)}\n\n" diff --git a/backend/open_webui/routers/openai.py b/backend/open_webui/routers/openai.py index 02a81209c..a196eca26 100644 --- a/backend/open_webui/routers/openai.py +++ b/backend/open_webui/routers/openai.py @@ -353,21 +353,22 @@ async def get_all_models_responses(request: Request, user: UserModel) -> list: ), # Legacy support ) + connection_type = api_config.get("connection_type", "external") prefix_id = api_config.get("prefix_id", None) tags = api_config.get("tags", []) - if prefix_id: - for model in ( - response if isinstance(response, list) else response.get("data", []) - ): + for model in ( + response if isinstance(response, list) else response.get("data", []) + ): + if prefix_id: model["id"] = f"{prefix_id}.{model['id']}" - if tags: - for model in ( - response if isinstance(response, list) else response.get("data", []) - ): + if tags: model["tags"] = tags + if connection_type: + model["connection_type"] = connection_type + log.debug(f"get_all_models:responses() {responses}") return responses @@ -415,6 +416,7 @@ async def get_all_models(request: Request, user: UserModel) -> dict[str, list]: "name": model.get("name", model["id"]), "owned_by": "openai", "openai": model, + "connection_type": model.get("connection_type", "external"), "urlIdx": idx, } for model in models diff --git a/backend/open_webui/routers/pipelines.py b/backend/open_webui/routers/pipelines.py index f14002502..f80ea91f8 100644 --- a/backend/open_webui/routers/pipelines.py +++ b/backend/open_webui/routers/pipelines.py @@ -18,7 +18,7 @@ from pydantic import BaseModel from starlette.responses import FileResponse from typing import Optional -from open_webui.env import SRC_LOG_LEVELS +from open_webui.env import SRC_LOG_LEVELS, AIOHTTP_CLIENT_SESSION_SSL from open_webui.config import CACHE_DIR from open_webui.constants import ERROR_MESSAGES @@ -69,7 +69,10 @@ async def process_pipeline_inlet_filter(request, payload, user, models): async with aiohttp.ClientSession(trust_env=True) as session: for filter in sorted_filters: urlIdx = filter.get("urlIdx") - if urlIdx is None: + + try: + urlIdx = int(urlIdx) + except: continue url = request.app.state.config.OPENAI_API_BASE_URLS[urlIdx] @@ -89,6 +92,7 @@ async def process_pipeline_inlet_filter(request, payload, user, models): f"{url}/{filter['id']}/filter/inlet", headers=headers, json=request_data, + ssl=AIOHTTP_CLIENT_SESSION_SSL, ) as response: payload = await response.json() response.raise_for_status() @@ -118,7 +122,10 @@ async def process_pipeline_outlet_filter(request, payload, user, models): 
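Both upload endpoints touched in this patch (`upload_model` in ollama.py above and `upload_pipeline` in pipelines.py just below) now pass the client-supplied filename through `os.path.basename` before joining it to a server-side directory. A minimal standalone sketch of that sanitization step, assuming an illustrative helper name and example paths that are not part of the patch:

```python
import os

def safe_upload_path(client_filename: str, upload_dir: str) -> str:
    """Drop any directory components from a client-supplied filename
    before joining it to the server-side upload directory."""
    filename = os.path.basename(client_filename)  # "../../etc/passwd" -> "passwd"
    if not filename:
        raise ValueError("empty filename after sanitization")
    return os.path.join(upload_dir, filename)

# A traversal attempt is reduced to its basename inside the upload directory.
print(safe_upload_path("../../etc/passwd", "/app/backend/data/uploads"))
# -> /app/backend/data/uploads/passwd
```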
async with aiohttp.ClientSession(trust_env=True) as session: for filter in sorted_filters: urlIdx = filter.get("urlIdx") - if urlIdx is None: + + try: + urlIdx = int(urlIdx) + except: continue url = request.app.state.config.OPENAI_API_BASE_URLS[urlIdx] @@ -138,6 +145,7 @@ async def process_pipeline_outlet_filter(request, payload, user, models): f"{url}/{filter['id']}/filter/outlet", headers=headers, json=request_data, + ssl=AIOHTTP_CLIENT_SESSION_SSL, ) as response: payload = await response.json() response.raise_for_status() @@ -197,8 +205,10 @@ async def upload_pipeline( user=Depends(get_admin_user), ): log.info(f"upload_pipeline: urlIdx={urlIdx}, filename={file.filename}") + filename = os.path.basename(file.filename) + # Check if the uploaded file is a python file - if not (file.filename and file.filename.endswith(".py")): + if not (filename and filename.endswith(".py")): raise HTTPException( status_code=status.HTTP_400_BAD_REQUEST, detail="Only Python (.py) files are allowed.", @@ -206,7 +216,7 @@ async def upload_pipeline( upload_folder = f"{CACHE_DIR}/pipelines" os.makedirs(upload_folder, exist_ok=True) - file_path = os.path.join(upload_folder, file.filename) + file_path = os.path.join(upload_folder, filename) r = None try: diff --git a/backend/open_webui/routers/retrieval.py b/backend/open_webui/routers/retrieval.py index efefa12fc..5cb47373f 100644 --- a/backend/open_webui/routers/retrieval.py +++ b/backend/open_webui/routers/retrieval.py @@ -36,7 +36,7 @@ from open_webui.models.knowledge import Knowledges from open_webui.storage.provider import Storage -from open_webui.retrieval.vector.connector import VECTOR_DB_CLIENT +from open_webui.retrieval.vector.factory import VECTOR_DB_CLIENT # Document loaders from open_webui.retrieval.loaders.main import Loader @@ -352,10 +352,13 @@ async def get_rag_config(request: Request, user=Depends(get_admin_user)): # Content extraction settings "CONTENT_EXTRACTION_ENGINE": request.app.state.config.CONTENT_EXTRACTION_ENGINE, "PDF_EXTRACT_IMAGES": request.app.state.config.PDF_EXTRACT_IMAGES, + "EXTERNAL_DOCUMENT_LOADER_URL": request.app.state.config.EXTERNAL_DOCUMENT_LOADER_URL, + "EXTERNAL_DOCUMENT_LOADER_API_KEY": request.app.state.config.EXTERNAL_DOCUMENT_LOADER_API_KEY, "TIKA_SERVER_URL": request.app.state.config.TIKA_SERVER_URL, "DOCLING_SERVER_URL": request.app.state.config.DOCLING_SERVER_URL, "DOCLING_OCR_ENGINE": request.app.state.config.DOCLING_OCR_ENGINE, "DOCLING_OCR_LANG": request.app.state.config.DOCLING_OCR_LANG, + "DOCLING_DO_PICTURE_DESCRIPTION": request.app.state.config.DOCLING_DO_PICTURE_DESCRIPTION, "DOCUMENT_INTELLIGENCE_ENDPOINT": request.app.state.config.DOCUMENT_INTELLIGENCE_ENDPOINT, "DOCUMENT_INTELLIGENCE_KEY": request.app.state.config.DOCUMENT_INTELLIGENCE_KEY, "MISTRAL_OCR_API_KEY": request.app.state.config.MISTRAL_OCR_API_KEY, @@ -371,6 +374,7 @@ async def get_rag_config(request: Request, user=Depends(get_admin_user)): # File upload settings "FILE_MAX_SIZE": request.app.state.config.FILE_MAX_SIZE, "FILE_MAX_COUNT": request.app.state.config.FILE_MAX_COUNT, + "ALLOWED_FILE_EXTENSIONS": request.app.state.config.ALLOWED_FILE_EXTENSIONS, # Integration settings "ENABLE_GOOGLE_DRIVE_INTEGRATION": request.app.state.config.ENABLE_GOOGLE_DRIVE_INTEGRATION, "ENABLE_ONEDRIVE_INTEGRATION": request.app.state.config.ENABLE_ONEDRIVE_INTEGRATION, @@ -492,10 +496,14 @@ class ConfigForm(BaseModel): # Content extraction settings CONTENT_EXTRACTION_ENGINE: Optional[str] = None PDF_EXTRACT_IMAGES: Optional[bool] = None + 
EXTERNAL_DOCUMENT_LOADER_URL: Optional[str] = None + EXTERNAL_DOCUMENT_LOADER_API_KEY: Optional[str] = None + TIKA_SERVER_URL: Optional[str] = None DOCLING_SERVER_URL: Optional[str] = None DOCLING_OCR_ENGINE: Optional[str] = None DOCLING_OCR_LANG: Optional[str] = None + DOCLING_DO_PICTURE_DESCRIPTION: Optional[bool] = None DOCUMENT_INTELLIGENCE_ENDPOINT: Optional[str] = None DOCUMENT_INTELLIGENCE_KEY: Optional[str] = None MISTRAL_OCR_API_KEY: Optional[str] = None @@ -514,6 +522,7 @@ class ConfigForm(BaseModel): # File upload settings FILE_MAX_SIZE: Optional[int] = None FILE_MAX_COUNT: Optional[int] = None + ALLOWED_FILE_EXTENSIONS: Optional[List[str]] = None # Integration settings ENABLE_GOOGLE_DRIVE_INTEGRATION: Optional[bool] = None @@ -581,6 +590,16 @@ async def update_rag_config( if form_data.PDF_EXTRACT_IMAGES is not None else request.app.state.config.PDF_EXTRACT_IMAGES ) + request.app.state.config.EXTERNAL_DOCUMENT_LOADER_URL = ( + form_data.EXTERNAL_DOCUMENT_LOADER_URL + if form_data.EXTERNAL_DOCUMENT_LOADER_URL is not None + else request.app.state.config.EXTERNAL_DOCUMENT_LOADER_URL + ) + request.app.state.config.EXTERNAL_DOCUMENT_LOADER_API_KEY = ( + form_data.EXTERNAL_DOCUMENT_LOADER_API_KEY + if form_data.EXTERNAL_DOCUMENT_LOADER_API_KEY is not None + else request.app.state.config.EXTERNAL_DOCUMENT_LOADER_API_KEY + ) request.app.state.config.TIKA_SERVER_URL = ( form_data.TIKA_SERVER_URL if form_data.TIKA_SERVER_URL is not None @@ -601,6 +620,13 @@ async def update_rag_config( if form_data.DOCLING_OCR_LANG is not None else request.app.state.config.DOCLING_OCR_LANG ) + + request.app.state.config.DOCLING_DO_PICTURE_DESCRIPTION = ( + form_data.DOCLING_DO_PICTURE_DESCRIPTION + if form_data.DOCLING_DO_PICTURE_DESCRIPTION is not None + else request.app.state.config.DOCLING_DO_PICTURE_DESCRIPTION + ) + request.app.state.config.DOCUMENT_INTELLIGENCE_ENDPOINT = ( form_data.DOCUMENT_INTELLIGENCE_ENDPOINT if form_data.DOCUMENT_INTELLIGENCE_ENDPOINT is not None @@ -688,6 +714,11 @@ async def update_rag_config( if form_data.FILE_MAX_COUNT is not None else request.app.state.config.FILE_MAX_COUNT ) + request.app.state.config.ALLOWED_FILE_EXTENSIONS = ( + form_data.ALLOWED_FILE_EXTENSIONS + if form_data.ALLOWED_FILE_EXTENSIONS is not None + else request.app.state.config.ALLOWED_FILE_EXTENSIONS + ) # Integration settings request.app.state.config.ENABLE_GOOGLE_DRIVE_INTEGRATION = ( @@ -809,10 +840,13 @@ async def update_rag_config( # Content extraction settings "CONTENT_EXTRACTION_ENGINE": request.app.state.config.CONTENT_EXTRACTION_ENGINE, "PDF_EXTRACT_IMAGES": request.app.state.config.PDF_EXTRACT_IMAGES, + "EXTERNAL_DOCUMENT_LOADER_URL": request.app.state.config.EXTERNAL_DOCUMENT_LOADER_URL, + "EXTERNAL_DOCUMENT_LOADER_API_KEY": request.app.state.config.EXTERNAL_DOCUMENT_LOADER_API_KEY, "TIKA_SERVER_URL": request.app.state.config.TIKA_SERVER_URL, "DOCLING_SERVER_URL": request.app.state.config.DOCLING_SERVER_URL, "DOCLING_OCR_ENGINE": request.app.state.config.DOCLING_OCR_ENGINE, "DOCLING_OCR_LANG": request.app.state.config.DOCLING_OCR_LANG, + "DOCLING_DO_PICTURE_DESCRIPTION": request.app.state.config.DOCLING_DO_PICTURE_DESCRIPTION, "DOCUMENT_INTELLIGENCE_ENDPOINT": request.app.state.config.DOCUMENT_INTELLIGENCE_ENDPOINT, "DOCUMENT_INTELLIGENCE_KEY": request.app.state.config.DOCUMENT_INTELLIGENCE_KEY, "MISTRAL_OCR_API_KEY": request.app.state.config.MISTRAL_OCR_API_KEY, @@ -828,6 +862,7 @@ async def update_rag_config( # File upload settings "FILE_MAX_SIZE": request.app.state.config.FILE_MAX_SIZE, 
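Every new field wired into `update_rag_config` above (`EXTERNAL_DOCUMENT_LOADER_URL`, `DOCLING_DO_PICTURE_DESCRIPTION`, `ALLOWED_FILE_EXTENSIONS`, ...) follows the same partial-update convention: `None` means "leave the stored value alone", while any explicit value, including an empty list or `False`, overwrites it. A condensed sketch of that convention with assumed field values; the loop-based helper is an illustration, not the patch's field-by-field assignments:

```python
from typing import List, Optional
from pydantic import BaseModel

class RagConfigForm(BaseModel):
    # None = "not provided, keep the current setting"
    ALLOWED_FILE_EXTENSIONS: Optional[List[str]] = None
    DOCLING_DO_PICTURE_DESCRIPTION: Optional[bool] = None

class AppConfig:
    ALLOWED_FILE_EXTENSIONS: List[str] = ["pdf", "txt"]   # illustrative values
    DOCLING_DO_PICTURE_DESCRIPTION: bool = False

def apply_partial_update(config: AppConfig, form: RagConfigForm) -> None:
    for field, value in form.model_dump().items():
        if value is not None:               # only explicitly provided fields win
            setattr(config, field, value)

config = AppConfig()
apply_partial_update(config, RagConfigForm(ALLOWED_FILE_EXTENSIONS=[]))
print(config.ALLOWED_FILE_EXTENSIONS)          # [] -- explicit empty list overrides
print(config.DOCLING_DO_PICTURE_DESCRIPTION)   # False -- untouched default
```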
"FILE_MAX_COUNT": request.app.state.config.FILE_MAX_COUNT, + "ALLOWED_FILE_EXTENSIONS": request.app.state.config.ALLOWED_FILE_EXTENSIONS, # Integration settings "ENABLE_GOOGLE_DRIVE_INTEGRATION": request.app.state.config.ENABLE_GOOGLE_DRIVE_INTEGRATION, "ENABLE_ONEDRIVE_INTEGRATION": request.app.state.config.ENABLE_ONEDRIVE_INTEGRATION, @@ -1129,10 +1164,13 @@ def process_file( file_path = Storage.get_file(file_path) loader = Loader( engine=request.app.state.config.CONTENT_EXTRACTION_ENGINE, + EXTERNAL_DOCUMENT_LOADER_URL=request.app.state.config.EXTERNAL_DOCUMENT_LOADER_URL, + EXTERNAL_DOCUMENT_LOADER_API_KEY=request.app.state.config.EXTERNAL_DOCUMENT_LOADER_API_KEY, TIKA_SERVER_URL=request.app.state.config.TIKA_SERVER_URL, DOCLING_SERVER_URL=request.app.state.config.DOCLING_SERVER_URL, DOCLING_OCR_ENGINE=request.app.state.config.DOCLING_OCR_ENGINE, DOCLING_OCR_LANG=request.app.state.config.DOCLING_OCR_LANG, + DOCLING_DO_PICTURE_DESCRIPTION=request.app.state.config.DOCLING_DO_PICTURE_DESCRIPTION, PDF_EXTRACT_IMAGES=request.app.state.config.PDF_EXTRACT_IMAGES, DOCUMENT_INTELLIGENCE_ENDPOINT=request.app.state.config.DOCUMENT_INTELLIGENCE_ENDPOINT, DOCUMENT_INTELLIGENCE_KEY=request.app.state.config.DOCUMENT_INTELLIGENCE_KEY, diff --git a/backend/open_webui/routers/tasks.py b/backend/open_webui/routers/tasks.py index 14a6c4286..8b34c8630 100644 --- a/backend/open_webui/routers/tasks.py +++ b/backend/open_webui/routers/tasks.py @@ -20,10 +20,7 @@ from open_webui.utils.auth import get_admin_user, get_verified_user from open_webui.constants import TASKS from open_webui.routers.pipelines import process_pipeline_inlet_filter -from open_webui.utils.filter import ( - get_sorted_filter_ids, - process_filter_functions, -) + from open_webui.utils.task import get_task_model_id from open_webui.config import ( diff --git a/backend/open_webui/utils/auth.py b/backend/open_webui/utils/auth.py index 118ac049e..2db0da7e5 100644 --- a/backend/open_webui/utils/auth.py +++ b/backend/open_webui/utils/auth.py @@ -13,6 +13,8 @@ import pytz from pytz import UTC from typing import Optional, Union, List, Dict +from opentelemetry import trace + from open_webui.models.users import Users from open_webui.constants import ERROR_MESSAGES @@ -194,7 +196,17 @@ def get_current_user( status.HTTP_403_FORBIDDEN, detail=ERROR_MESSAGES.API_KEY_NOT_ALLOWED ) - return get_current_user_by_api_key(token) + user = get_current_user_by_api_key(token) + + # Add user info to current span + current_span = trace.get_current_span() + if current_span: + current_span.set_attribute("client.user.id", user.id) + current_span.set_attribute("client.user.email", user.email) + current_span.set_attribute("client.user.role", user.role) + current_span.set_attribute("client.auth.type", "api_key") + + return user # auth by jwt token try: @@ -213,6 +225,14 @@ def get_current_user( detail=ERROR_MESSAGES.INVALID_TOKEN, ) else: + # Add user info to current span + current_span = trace.get_current_span() + if current_span: + current_span.set_attribute("client.user.id", user.id) + current_span.set_attribute("client.user.email", user.email) + current_span.set_attribute("client.user.role", user.role) + current_span.set_attribute("client.auth.type", "jwt") + # Refresh the user's last active timestamp asynchronously # to prevent blocking the request if background_tasks: @@ -234,6 +254,14 @@ def get_current_user_by_api_key(api_key: str): detail=ERROR_MESSAGES.INVALID_TOKEN, ) else: + # Add user info to current span + current_span = trace.get_current_span() + if 
current_span: + current_span.set_attribute("client.user.id", user.id) + current_span.set_attribute("client.user.email", user.email) + current_span.set_attribute("client.user.role", user.role) + current_span.set_attribute("client.auth.type", "api_key") + Users.update_user_last_active_by_id(user.id) return user diff --git a/backend/open_webui/utils/chat.py b/backend/open_webui/utils/chat.py index a6a06c522..ce86811d4 100644 --- a/backend/open_webui/utils/chat.py +++ b/backend/open_webui/utils/chat.py @@ -309,6 +309,7 @@ async def chat_completed(request: Request, form_data: dict, user: Any): metadata = { "chat_id": data["chat_id"], "message_id": data["id"], + "filter_ids": data.get("filter_ids", []), "session_id": data["session_id"], "user_id": user.id, } @@ -330,7 +331,9 @@ async def chat_completed(request: Request, form_data: dict, user: Any): try: filter_functions = [ Functions.get_function_by_id(filter_id) - for filter_id in get_sorted_filter_ids(model) + for filter_id in get_sorted_filter_ids( + request, model, metadata.get("filter_ids", []) + ) ] result, _ = await process_filter_functions( diff --git a/backend/open_webui/utils/filter.py b/backend/open_webui/utils/filter.py index 76c9db9eb..02e504765 100644 --- a/backend/open_webui/utils/filter.py +++ b/backend/open_webui/utils/filter.py @@ -9,7 +9,20 @@ log = logging.getLogger(__name__) log.setLevel(SRC_LOG_LEVELS["MAIN"]) -def get_sorted_filter_ids(model: dict): +def get_function_module(request, function_id): + """ + Get the function module by its ID. + """ + if function_id in request.app.state.FUNCTIONS: + function_module = request.app.state.FUNCTIONS[function_id] + else: + function_module, _, _ = load_function_module_by_id(function_id) + request.app.state.FUNCTIONS[function_id] = function_module + + return function_module + + +def get_sorted_filter_ids(request, model: dict, enabled_filter_ids: list = None): def get_priority(function_id): function = Functions.get_function_by_id(function_id) if function is not None: @@ -21,14 +34,23 @@ def get_sorted_filter_ids(model: dict): if "info" in model and "meta" in model["info"]: filter_ids.extend(model["info"]["meta"].get("filterIds", [])) filter_ids = list(set(filter_ids)) - - enabled_filter_ids = [ + active_filter_ids = [ function.id for function in Functions.get_functions_by_type("filter", active_only=True) ] - filter_ids = [fid for fid in filter_ids if fid in enabled_filter_ids] + for filter_id in active_filter_ids: + function_module = get_function_module(request, filter_id) + + if getattr(function_module, "toggle", None) and ( + filter_id not in enabled_filter_ids + ): + active_filter_ids.remove(filter_id) + continue + + filter_ids = [fid for fid in filter_ids if fid in active_filter_ids] filter_ids.sort(key=get_priority) + return filter_ids @@ -43,12 +65,7 @@ async def process_filter_functions( if not filter: continue - if filter_id in request.app.state.FUNCTIONS: - function_module = request.app.state.FUNCTIONS[filter_id] - else: - function_module, _, _ = load_function_module_by_id(filter_id) - request.app.state.FUNCTIONS[filter_id] = function_module - + function_module = get_function_module(request, filter_id) # Prepare handler function handler = getattr(function_module, filter_type, None) if not handler: diff --git a/backend/open_webui/utils/middleware.py b/backend/open_webui/utils/middleware.py index 442dfba76..c9095f931 100644 --- a/backend/open_webui/utils/middleware.py +++ b/backend/open_webui/utils/middleware.py @@ -340,6 +340,11 @@ async def chat_web_search_handler( 
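`get_sorted_filter_ids` now receives the request plus the chat's `filter_ids` from metadata, and skips any active filter whose loaded module defines `toggle` unless the chat explicitly enabled it. A self-contained sketch of that selection rule (priority sorting omitted; the stub module table stands in for `Functions` and the `request.app.state.FUNCTIONS` cache, and it iterates over a copy instead of mutating the active list in place):

```python
from types import SimpleNamespace

# Stand-ins for loaded filter modules: `toggle` marks a filter the user must
# switch on per chat; modules without it apply whenever the filter is active.
FUNCTION_MODULES = {
    "watermark": SimpleNamespace(toggle=True),
    "guardrail": SimpleNamespace(),          # no toggle -> always applied
}

def select_filter_ids(model_filter_ids, active_filter_ids, enabled_filter_ids):
    """Keep model-assigned filters that are active, skipping toggleable
    filters the chat did not explicitly enable."""
    allowed = []
    for filter_id in list(active_filter_ids):   # copy, so skipping is safe
        module = FUNCTION_MODULES.get(filter_id)
        if getattr(module, "toggle", None) and filter_id not in enabled_filter_ids:
            continue
        allowed.append(filter_id)
    return [fid for fid in model_filter_ids if fid in allowed]

# "watermark" is toggleable and not enabled for this chat, so it is dropped.
print(select_filter_ids(["watermark", "guardrail"], ["watermark", "guardrail"], []))
# -> ['guardrail']
```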
log.exception(e) queries = [user_message] + # Check if generated queries are empty + if len(queries) == 1 and queries[0].strip() == "": + queries = [user_message] + + # Check if queries are not found if len(queries) == 0: await event_emitter( { @@ -651,7 +656,7 @@ def apply_params_to_form_data(form_data, model): convert_logit_bias_input_to_json(params["logit_bias"]) ) except Exception as e: - print(f"Error parsing logit_bias: {e}") + log.exception(f"Error parsing logit_bias: {e}") return form_data @@ -749,9 +754,12 @@ async def process_chat_payload(request, form_data, user, metadata, model): raise e try: + filter_functions = [ Functions.get_function_by_id(filter_id) - for filter_id in get_sorted_filter_ids(model) + for filter_id in get_sorted_filter_ids( + request, model, metadata.get("filter_ids", []) + ) ] form_data, flags = await process_filter_functions( @@ -942,21 +950,35 @@ async def process_chat_response( message = message_map.get(metadata["message_id"]) if message_map else None if message: - messages = get_message_list(message_map, message.get("id")) + message_list = get_message_list(message_map, message.get("id")) - # Remove reasoning details and files from the messages. + # Remove details tags and files from the messages. # as get_message_list creates a new list, it does not affect # the original messages outside of this handler - for message in messages: - message["content"] = re.sub( - r"]*>.*?<\/details>", - "", - message["content"], - flags=re.S, - ).strip() - if message.get("files"): - message["files"] = [] + messages = [] + for message in message_list: + content = message.get("content", "") + if isinstance(content, list): + for item in content: + if item.get("type") == "text": + content = item["text"] + break + + if isinstance(content, str): + content = re.sub( + r"]*>.*?<\/details>", + "", + content, + flags=re.S | re.I, + ).strip() + + messages.append( + { + "role": message["role"], + "content": content, + } + ) if tasks and messages: if TASKS.TITLE_GENERATION in tasks: @@ -1169,7 +1191,9 @@ async def process_chat_response( } filter_functions = [ Functions.get_function_by_id(filter_id) - for filter_id in get_sorted_filter_ids(model) + for filter_id in get_sorted_filter_ids( + request, model, metadata.get("filter_ids", []) + ) ] # Streaming response diff --git a/backend/open_webui/utils/models.py b/backend/open_webui/utils/models.py index 245eaf874..77ff0c932 100644 --- a/backend/open_webui/utils/models.py +++ b/backend/open_webui/utils/models.py @@ -49,6 +49,7 @@ async def get_all_base_models(request: Request, user: UserModel = None): "created": int(time.time()), "owned_by": "ollama", "ollama": model, + "connection_type": model.get("connection_type", "local"), "tags": model.get("tags", []), } for model in ollama_models["models"] @@ -110,6 +111,14 @@ async def get_all_models(request, user: UserModel = None): for function in Functions.get_functions_by_type("action", active_only=True) ] + global_filter_ids = [ + function.id for function in Functions.get_global_filter_functions() + ] + enabled_filter_ids = [ + function.id + for function in Functions.get_functions_by_type("filter", active_only=True) + ] + custom_models = Models.get_all_models() for custom_model in custom_models: if custom_model.base_model_id is None: @@ -125,13 +134,20 @@ async def get_all_models(request, user: UserModel = None): model["name"] = custom_model.name model["info"] = custom_model.model_dump() + # Set action_ids and filter_ids action_ids = [] + filter_ids = [] + if "info" in model and "meta" in 
model["info"]: action_ids.extend( model["info"]["meta"].get("actionIds", []) ) + filter_ids.extend( + model["info"]["meta"].get("filterIds", []) + ) model["action_ids"] = action_ids + model["filter_ids"] = filter_ids else: models.remove(model) @@ -140,7 +156,9 @@ async def get_all_models(request, user: UserModel = None): ): owned_by = "openai" pipe = None + action_ids = [] + filter_ids = [] for model in models: if ( @@ -154,9 +172,13 @@ async def get_all_models(request, user: UserModel = None): if custom_model.meta: meta = custom_model.meta.model_dump() + if "actionIds" in meta: action_ids.extend(meta["actionIds"]) + if "filterIds" in meta: + filter_ids.extend(meta["filterIds"]) + models.append( { "id": f"{custom_model.id}", @@ -168,6 +190,7 @@ async def get_all_models(request, user: UserModel = None): "preset": True, **({"pipe": pipe} if pipe is not None else {}), "action_ids": action_ids, + "filter_ids": filter_ids, } ) @@ -181,8 +204,11 @@ async def get_all_models(request, user: UserModel = None): "id": f"{function.id}.{action['id']}", "name": action.get("name", f"{function.name} ({action['id']})"), "description": function.meta.description, - "icon_url": action.get( - "icon_url", function.meta.manifest.get("icon_url", None) + "icon": action.get( + "icon_url", + function.meta.manifest.get("icon_url", None) + or getattr(module, "icon_url", None) + or getattr(module, "icon", None), ), } for action in actions @@ -193,10 +219,25 @@ async def get_all_models(request, user: UserModel = None): "id": function.id, "name": function.name, "description": function.meta.description, - "icon_url": function.meta.manifest.get("icon_url", None), + "icon": function.meta.manifest.get("icon_url", None) + or getattr(module, "icon_url", None) + or getattr(module, "icon", None), } ] + # Process filter_ids to get the filters + def get_filter_items_from_module(function, module): + return [ + { + "id": function.id, + "name": function.name, + "description": function.meta.description, + "icon": function.meta.manifest.get("icon_url", None) + or getattr(module, "icon_url", None) + or getattr(module, "icon", None), + } + ] + def get_function_module_by_id(function_id): if function_id in request.app.state.FUNCTIONS: function_module = request.app.state.FUNCTIONS[function_id] @@ -211,6 +252,11 @@ async def get_all_models(request, user: UserModel = None): for action_id in list(set(model.pop("action_ids", []) + global_action_ids)) if action_id in enabled_action_ids ] + filter_ids = [ + filter_id + for filter_id in list(set(model.pop("filter_ids", []) + global_filter_ids)) + if filter_id in enabled_filter_ids + ] model["actions"] = [] for action_id in action_ids: @@ -222,6 +268,20 @@ async def get_all_models(request, user: UserModel = None): model["actions"].extend( get_action_items_from_module(action_function, function_module) ) + + model["filters"] = [] + for filter_id in filter_ids: + filter_function = Functions.get_function_by_id(filter_id) + if filter_function is None: + raise Exception(f"Filter not found: {filter_id}") + + function_module = get_function_module_by_id(filter_id) + + if getattr(function_module, "toggle", None): + model["filters"].extend( + get_filter_items_from_module(filter_function, function_module) + ) + log.debug(f"get_all_models() returned {len(models)} models") request.app.state.MODELS = {model["id"]: model for model in models} diff --git a/backend/open_webui/utils/oauth.py b/backend/open_webui/utils/oauth.py index 0bd82b577..f6004515f 100644 --- a/backend/open_webui/utils/oauth.py +++ 
b/backend/open_webui/utils/oauth.py @@ -41,6 +41,7 @@ from open_webui.config import ( ) from open_webui.constants import ERROR_MESSAGES, WEBHOOK_MESSAGES from open_webui.env import ( + AIOHTTP_CLIENT_SESSION_SSL, WEBUI_NAME, WEBUI_AUTH_COOKIE_SAME_SITE, WEBUI_AUTH_COOKIE_SECURE, @@ -305,8 +306,10 @@ class OAuthManager: get_kwargs["headers"] = { "Authorization": f"Bearer {access_token}", } - async with aiohttp.ClientSession() as session: - async with session.get(picture_url, **get_kwargs) as resp: + async with aiohttp.ClientSession(trust_env=True) as session: + async with session.get( + picture_url, **get_kwargs, ssl=AIOHTTP_CLIENT_SESSION_SSL + ) as resp: if resp.ok: picture = await resp.read() base64_encoded_picture = base64.b64encode(picture).decode( @@ -371,7 +374,9 @@ class OAuthManager: headers = {"Authorization": f"Bearer {access_token}"} async with aiohttp.ClientSession(trust_env=True) as session: async with session.get( - "https://api.github.com/user/emails", headers=headers + "https://api.github.com/user/emails", + headers=headers, + ssl=AIOHTTP_CLIENT_SESSION_SSL, ) as resp: if resp.ok: emails = await resp.json() diff --git a/backend/open_webui/utils/tools.py b/backend/open_webui/utils/tools.py index 123ec5fb9..f0b37b605 100644 --- a/backend/open_webui/utils/tools.py +++ b/backend/open_webui/utils/tools.py @@ -37,6 +37,7 @@ from open_webui.models.tools import Tools from open_webui.models.users import UserModel from open_webui.utils.plugin import load_tool_module_by_id from open_webui.env import ( + SRC_LOG_LEVELS, AIOHTTP_CLIENT_TIMEOUT_TOOL_SERVER_DATA, AIOHTTP_CLIENT_SESSION_TOOL_SERVER_SSL, ) @@ -44,6 +45,7 @@ from open_webui.env import ( import copy log = logging.getLogger(__name__) +log.setLevel(SRC_LOG_LEVELS["MODELS"]) def get_async_tool_function_and_apply_extra_params( @@ -477,7 +479,7 @@ async def get_tool_server_data(token: str, url: str) -> Dict[str, Any]: "specs": convert_openapi_to_tool_payload(res), } - print("Fetched data:", data) + log.info("Fetched data:", data) return data @@ -510,7 +512,7 @@ async def get_tool_servers_data( results = [] for (idx, server, url, _), response in zip(server_entries, responses): if isinstance(response, Exception): - print(f"Failed to connect to {url} OpenAPI tool server") + log.error(f"Failed to connect to {url} OpenAPI tool server") continue results.append( @@ -620,5 +622,5 @@ async def execute_tool_server( except Exception as err: error = str(err) - print("API Request Error:", error) + log.exception("API Request Error:", error) return {"error": error} diff --git a/backend/requirements.txt b/backend/requirements.txt index ce55d2d34..07dc09be6 100644 --- a/backend/requirements.txt +++ b/backend/requirements.txt @@ -37,7 +37,8 @@ asgiref==3.8.1 # AI libraries openai anthropic -google-generativeai==0.8.4 +google-genai==1.15.0 +google-generativeai==0.8.5 tiktoken langchain==0.3.24 @@ -98,7 +99,7 @@ pytube==15.0.0 extract_msg pydub -duckduckgo-search~=8.0.0 +duckduckgo-search==8.0.2 ## Google Drive google-api-python-client diff --git a/package-lock.json b/package-lock.json index ff0414895..1c3bc8571 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1,12 +1,12 @@ { "name": "open-webui", - "version": "0.6.9", + "version": "0.6.10", "lockfileVersion": 3, "requires": true, "packages": { "": { "name": "open-webui", - "version": "0.6.9", + "version": "0.6.10", "dependencies": { "@azure/msal-browser": "^4.5.0", "@codemirror/lang-javascript": "^6.2.2", diff --git a/package.json b/package.json index e7229fb5c..744315c3b 100644 --- 
a/package.json +++ b/package.json @@ -1,11 +1,12 @@ { "name": "open-webui", - "version": "0.6.9", + "version": "0.6.10", "private": true, "scripts": { "dev": "npm run pyodide:fetch && vite dev --host", "dev:5050": "npm run pyodide:fetch && vite dev --port 5050", "build": "npm run pyodide:fetch && vite build", + "build:watch": "npm run pyodide:fetch && vite build --watch", "preview": "vite preview", "check": "svelte-kit sync && svelte-check --tsconfig ./tsconfig.json", "check:watch": "svelte-kit sync && svelte-check --tsconfig ./tsconfig.json --watch", diff --git a/pyproject.toml b/pyproject.toml index bc04c4bf4..01e6bd72c 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -45,7 +45,8 @@ dependencies = [ "openai", "anthropic", - "google-generativeai==0.8.4", + "google-genai==1.15.0", + "google-generativeai==0.8.5", "tiktoken", "langchain==0.3.24", @@ -105,7 +106,7 @@ dependencies = [ "extract_msg", "pydub", - "duckduckgo-search~=8.0.0", + "duckduckgo-search==8.0.2", "google-api-python-client", "google-auth-httplib2", diff --git a/src/app.css b/src/app.css index 5cfdd8df0..925b9c52d 100644 --- a/src/app.css +++ b/src/app.css @@ -314,12 +314,20 @@ input[type='number'] { .ProseMirror p.is-editor-empty:first-child::before { content: attr(data-placeholder); float: left; - color: #adb5bd; + /* Below color is from tailwind, and has the proper contrast + text-gray-600 from: https://tailwindcss.com/docs/color */ + color: #676767; pointer-events: none; @apply line-clamp-1 absolute; } +@media (prefers-color-scheme: dark) { + .ProseMirror p.is-editor-empty:first-child::before { + color: #757575; + } +} + .ai-autocompletion::after { color: #a0a0a0; diff --git a/src/lib/apis/audio/index.ts b/src/lib/apis/audio/index.ts index 5cd6ab949..f6354da77 100644 --- a/src/lib/apis/audio/index.ts +++ b/src/lib/apis/audio/index.ts @@ -15,7 +15,7 @@ export const getAudioConfig = async (token: string) => { return res.json(); }) .catch((err) => { - console.log(err); + console.error(err); error = err.detail; return null; }); @@ -52,7 +52,7 @@ export const updateAudioConfig = async (token: string, payload: OpenAIConfigForm return res.json(); }) .catch((err) => { - console.log(err); + console.error(err); error = err.detail; return null; }); @@ -83,7 +83,7 @@ export const transcribeAudio = async (token: string, file: File) => { }) .catch((err) => { error = err.detail; - console.log(err); + console.error(err); return null; }); @@ -120,7 +120,7 @@ export const synthesizeOpenAISpeech = async ( }) .catch((err) => { error = err.detail; - console.log(err); + console.error(err); return null; }); @@ -152,7 +152,7 @@ export const getModels = async (token: string = ''): Promise { error = err.detail; - console.log(err); + console.error(err); return null; }); @@ -180,7 +180,7 @@ export const getVoices = async (token: string = '') => { }) .catch((err) => { error = err.detail; - console.log(err); + console.error(err); return null; }); diff --git a/src/lib/apis/auths/index.ts b/src/lib/apis/auths/index.ts index 75252fd71..169a6c14f 100644 --- a/src/lib/apis/auths/index.ts +++ b/src/lib/apis/auths/index.ts @@ -15,7 +15,7 @@ export const getAdminDetails = async (token: string) => { return res.json(); }) .catch((err) => { - console.log(err); + console.error(err); error = err.detail; return null; }); @@ -42,7 +42,7 @@ export const getAdminConfig = async (token: string) => { return res.json(); }) .catch((err) => { - console.log(err); + console.error(err); error = err.detail; return null; }); @@ -70,7 +70,7 @@ export const updateAdminConfig = 
async (token: string, body: object) => { return res.json(); }) .catch((err) => { - console.log(err); + console.error(err); error = err.detail; return null; }); @@ -98,7 +98,7 @@ export const getSessionUser = async (token: string) => { return res.json(); }) .catch((err) => { - console.log(err); + console.error(err); error = err.detail; return null; }); @@ -129,7 +129,7 @@ export const ldapUserSignIn = async (user: string, password: string) => { return res.json(); }) .catch((err) => { - console.log(err); + console.error(err); error = err.detail; return null; @@ -157,7 +157,7 @@ export const getLdapConfig = async (token: string = '') => { return res.json(); }) .catch((err) => { - console.log(err); + console.error(err); error = err.detail; return null; }); @@ -187,7 +187,7 @@ export const updateLdapConfig = async (token: string = '', enable_ldap: boolean) return res.json(); }) .catch((err) => { - console.log(err); + console.error(err); error = err.detail; return null; }); @@ -214,7 +214,7 @@ export const getLdapServer = async (token: string = '') => { return res.json(); }) .catch((err) => { - console.log(err); + console.error(err); error = err.detail; return null; }); @@ -242,7 +242,7 @@ export const updateLdapServer = async (token: string = '', body: object) => { return res.json(); }) .catch((err) => { - console.log(err); + console.error(err); error = err.detail; return null; }); @@ -273,7 +273,7 @@ export const userSignIn = async (email: string, password: string) => { return res.json(); }) .catch((err) => { - console.log(err); + console.error(err); error = err.detail; return null; @@ -312,7 +312,7 @@ export const userSignUp = async ( return res.json(); }) .catch((err) => { - console.log(err); + console.error(err); error = err.detail; return null; }); @@ -339,7 +339,7 @@ export const userSignOut = async () => { return res; }) .catch((err) => { - console.log(err); + console.error(err); error = err.detail; return null; }); @@ -347,6 +347,7 @@ export const userSignOut = async () => { if (error) { throw error; } + return res; }; export const addUser = async ( @@ -378,7 +379,7 @@ export const addUser = async ( return res.json(); }) .catch((err) => { - console.log(err); + console.error(err); error = err.detail; return null; }); @@ -409,7 +410,7 @@ export const updateUserProfile = async (token: string, name: string, profileImag return res.json(); }) .catch((err) => { - console.log(err); + console.error(err); error = err.detail; return null; }); @@ -440,7 +441,7 @@ export const updateUserPassword = async (token: string, password: string, newPas return res.json(); }) .catch((err) => { - console.log(err); + console.error(err); error = err.detail; return null; }); @@ -467,7 +468,7 @@ export const getSignUpEnabledStatus = async (token: string) => { return res.json(); }) .catch((err) => { - console.log(err); + console.error(err); error = err.detail; return null; }); @@ -494,7 +495,7 @@ export const getDefaultUserRole = async (token: string) => { return res.json(); }) .catch((err) => { - console.log(err); + console.error(err); error = err.detail; return null; }); @@ -524,7 +525,7 @@ export const updateDefaultUserRole = async (token: string, role: string) => { return res.json(); }) .catch((err) => { - console.log(err); + console.error(err); error = err.detail; return null; }); @@ -551,7 +552,7 @@ export const toggleSignUpEnabledStatus = async (token: string) => { return res.json(); }) .catch((err) => { - console.log(err); + console.error(err); error = err.detail; return null; }); @@ -578,7 +579,7 @@ export 
const getJWTExpiresDuration = async (token: string) => { return res.json(); }) .catch((err) => { - console.log(err); + console.error(err); error = err.detail; return null; }); @@ -608,7 +609,7 @@ export const updateJWTExpiresDuration = async (token: string, duration: string) return res.json(); }) .catch((err) => { - console.log(err); + console.error(err); error = err.detail; return null; }); @@ -635,7 +636,7 @@ export const createAPIKey = async (token: string) => { return res.json(); }) .catch((err) => { - console.log(err); + console.error(err); error = err.detail; return null; }); @@ -660,7 +661,7 @@ export const getAPIKey = async (token: string) => { return res.json(); }) .catch((err) => { - console.log(err); + console.error(err); error = err.detail; return null; }); @@ -685,7 +686,7 @@ export const deleteAPIKey = async (token: string) => { return res.json(); }) .catch((err) => { - console.log(err); + console.error(err); error = err.detail; return null; }); diff --git a/src/lib/apis/channels/index.ts b/src/lib/apis/channels/index.ts index cd46410c7..548572c6f 100644 --- a/src/lib/apis/channels/index.ts +++ b/src/lib/apis/channels/index.ts @@ -28,7 +28,7 @@ export const createNewChannel = async (token: string = '', channel: ChannelForm) }) .catch((err) => { error = err.detail; - console.log(err); + console.error(err); return null; }); @@ -59,7 +59,7 @@ export const getChannels = async (token: string = '') => { }) .catch((err) => { error = err.detail; - console.log(err); + console.error(err); return null; }); @@ -90,7 +90,7 @@ export const getChannelById = async (token: string = '', channel_id: string) => }) .catch((err) => { error = err.detail; - console.log(err); + console.error(err); return null; }); @@ -126,7 +126,7 @@ export const updateChannelById = async ( }) .catch((err) => { error = err.detail; - console.log(err); + console.error(err); return null; }); @@ -157,7 +157,7 @@ export const deleteChannelById = async (token: string = '', channel_id: string) }) .catch((err) => { error = err.detail; - console.log(err); + console.error(err); return null; }); @@ -196,7 +196,7 @@ export const getChannelMessages = async ( }) .catch((err) => { error = err.detail; - console.log(err); + console.error(err); return null; }); @@ -236,7 +236,7 @@ export const getChannelThreadMessages = async ( }) .catch((err) => { error = err.detail; - console.log(err); + console.error(err); return null; }); @@ -275,7 +275,7 @@ export const sendMessage = async (token: string = '', channel_id: string, messag }) .catch((err) => { error = err.detail; - console.log(err); + console.error(err); return null; }); @@ -315,7 +315,7 @@ export const updateMessage = async ( }) .catch((err) => { error = err.detail; - console.log(err); + console.error(err); return null; }); @@ -355,7 +355,7 @@ export const addReaction = async ( }) .catch((err) => { error = err.detail; - console.log(err); + console.error(err); return null; }); @@ -395,7 +395,7 @@ export const removeReaction = async ( }) .catch((err) => { error = err.detail; - console.log(err); + console.error(err); return null; }); @@ -429,7 +429,7 @@ export const deleteMessage = async (token: string = '', channel_id: string, mess }) .catch((err) => { error = err.detail; - console.log(err); + console.error(err); return null; }); diff --git a/src/lib/apis/chats/index.ts b/src/lib/apis/chats/index.ts index 02bdd4eb3..0ff56ea23 100644 --- a/src/lib/apis/chats/index.ts +++ b/src/lib/apis/chats/index.ts @@ -21,7 +21,7 @@ export const createNewChat = async (token: string, chat: object) => 
{ }) .catch((err) => { error = err; - console.log(err); + console.error(err); return null; }); @@ -61,7 +61,7 @@ export const importChat = async ( }) .catch((err) => { error = err; - console.log(err); + console.error(err); return null; }); @@ -97,7 +97,7 @@ export const getChatList = async (token: string = '', page: number | null = null }) .catch((err) => { error = err; - console.log(err); + console.error(err); return null; }); @@ -131,7 +131,7 @@ export const getChatListByUserId = async (token: string = '', userId: string) => }) .catch((err) => { error = err; - console.log(err); + console.error(err); return null; }); @@ -165,7 +165,7 @@ export const getArchivedChatList = async (token: string = '') => { }) .catch((err) => { error = err; - console.log(err); + console.error(err); return null; }); @@ -196,7 +196,7 @@ export const getAllChats = async (token: string) => { }) .catch((err) => { error = err; - console.log(err); + console.error(err); return null; }); @@ -231,7 +231,7 @@ export const getChatListBySearchText = async (token: string, text: string, page: }) .catch((err) => { error = err; - console.log(err); + console.error(err); return null; }); @@ -265,7 +265,7 @@ export const getChatsByFolderId = async (token: string, folderId: string) => { }) .catch((err) => { error = err; - console.log(err); + console.error(err); return null; }); @@ -296,7 +296,7 @@ export const getAllArchivedChats = async (token: string) => { }) .catch((err) => { error = err; - console.log(err); + console.error(err); return null; }); @@ -327,7 +327,7 @@ export const getAllUserChats = async (token: string) => { }) .catch((err) => { error = err; - console.log(err); + console.error(err); return null; }); @@ -358,7 +358,7 @@ export const getAllTags = async (token: string) => { }) .catch((err) => { error = err; - console.log(err); + console.error(err); return null; }); @@ -389,7 +389,7 @@ export const getPinnedChatList = async (token: string = '') => { }) .catch((err) => { error = err; - console.log(err); + console.error(err); return null; }); @@ -426,7 +426,7 @@ export const getChatListByTagName = async (token: string = '', tagName: string) }) .catch((err) => { error = err; - console.log(err); + console.error(err); return null; }); @@ -461,7 +461,7 @@ export const getChatById = async (token: string, id: string) => { .catch((err) => { error = err.detail; - console.log(err); + console.error(err); return null; }); @@ -493,7 +493,7 @@ export const getChatByShareId = async (token: string, share_id: string) => { .catch((err) => { error = err; - console.log(err); + console.error(err); return null; }); @@ -531,7 +531,7 @@ export const getChatPinnedStatusById = async (token: string, id: string) => { error = err; } - console.log(err); + console.error(err); return null; }); @@ -569,7 +569,7 @@ export const toggleChatPinnedStatusById = async (token: string, id: string) => { error = err; } - console.log(err); + console.error(err); return null; }); @@ -610,7 +610,7 @@ export const cloneChatById = async (token: string, id: string, title?: string) = error = err; } - console.log(err); + console.error(err); return null; }); @@ -648,7 +648,7 @@ export const cloneSharedChatById = async (token: string, id: string) => { error = err; } - console.log(err); + console.error(err); return null; }); @@ -680,7 +680,7 @@ export const shareChatById = async (token: string, id: string) => { .catch((err) => { error = err; - console.log(err); + console.error(err); return null; }); @@ -715,7 +715,7 @@ export const updateChatFolderIdById = async (token: 
string, id: string, folderId .catch((err) => { error = err; - console.log(err); + console.error(err); return null; }); @@ -747,7 +747,7 @@ export const archiveChatById = async (token: string, id: string) => { .catch((err) => { error = err; - console.log(err); + console.error(err); return null; }); @@ -779,7 +779,7 @@ export const deleteSharedChatById = async (token: string, id: string) => { .catch((err) => { error = err; - console.log(err); + console.error(err); return null; }); @@ -814,7 +814,7 @@ export const updateChatById = async (token: string, id: string, chat: object) => .catch((err) => { error = err; - console.log(err); + console.error(err); return null; }); @@ -846,7 +846,7 @@ export const deleteChatById = async (token: string, id: string) => { .catch((err) => { error = err.detail; - console.log(err); + console.error(err); return null; }); @@ -878,7 +878,7 @@ export const getTagsById = async (token: string, id: string) => { .catch((err) => { error = err; - console.log(err); + console.error(err); return null; }); @@ -912,7 +912,7 @@ export const addTagById = async (token: string, id: string, tagName: string) => }) .catch((err) => { error = err.detail; - console.log(err); + console.error(err); return null; }); @@ -947,7 +947,7 @@ export const deleteTagById = async (token: string, id: string, tagName: string) .catch((err) => { error = err; - console.log(err); + console.error(err); return null; }); @@ -978,7 +978,7 @@ export const deleteTagsById = async (token: string, id: string) => { .catch((err) => { error = err; - console.log(err); + console.error(err); return null; }); @@ -1010,7 +1010,7 @@ export const deleteAllChats = async (token: string) => { .catch((err) => { error = err.detail; - console.log(err); + console.error(err); return null; }); @@ -1042,7 +1042,7 @@ export const archiveAllChats = async (token: string) => { .catch((err) => { error = err.detail; - console.log(err); + console.error(err); return null; }); diff --git a/src/lib/apis/configs/index.ts b/src/lib/apis/configs/index.ts index 5872303f6..26dec26c9 100644 --- a/src/lib/apis/configs/index.ts +++ b/src/lib/apis/configs/index.ts @@ -19,7 +19,7 @@ export const importConfig = async (token: string, config) => { return res.json(); }) .catch((err) => { - console.log(err); + console.error(err); error = err.detail; return null; }); @@ -46,7 +46,7 @@ export const exportConfig = async (token: string) => { return res.json(); }) .catch((err) => { - console.log(err); + console.error(err); error = err.detail; return null; }); @@ -73,7 +73,7 @@ export const getDirectConnectionsConfig = async (token: string) => { return res.json(); }) .catch((err) => { - console.log(err); + console.error(err); error = err.detail; return null; }); @@ -103,7 +103,7 @@ export const setDirectConnectionsConfig = async (token: string, config: object) return res.json(); }) .catch((err) => { - console.log(err); + console.error(err); error = err.detail; return null; }); @@ -130,7 +130,7 @@ export const getToolServerConnections = async (token: string) => { return res.json(); }) .catch((err) => { - console.log(err); + console.error(err); error = err.detail; return null; }); @@ -160,7 +160,7 @@ export const setToolServerConnections = async (token: string, connections: objec return res.json(); }) .catch((err) => { - console.log(err); + console.error(err); error = err.detail; return null; }); @@ -190,7 +190,7 @@ export const verifyToolServerConnection = async (token: string, connection: obje return res.json(); }) .catch((err) => { - console.log(err); + 
console.error(err); error = err.detail; return null; }); @@ -217,7 +217,7 @@ export const getCodeExecutionConfig = async (token: string) => { return res.json(); }) .catch((err) => { - console.log(err); + console.error(err); error = err.detail; return null; }); @@ -247,7 +247,7 @@ export const setCodeExecutionConfig = async (token: string, config: object) => { return res.json(); }) .catch((err) => { - console.log(err); + console.error(err); error = err.detail; return null; }); @@ -274,7 +274,7 @@ export const getModelsConfig = async (token: string) => { return res.json(); }) .catch((err) => { - console.log(err); + console.error(err); error = err.detail; return null; }); @@ -304,7 +304,7 @@ export const setModelsConfig = async (token: string, config: object) => { return res.json(); }) .catch((err) => { - console.log(err); + console.error(err); error = err.detail; return null; }); @@ -334,7 +334,7 @@ export const setDefaultPromptSuggestions = async (token: string, promptSuggestio return res.json(); }) .catch((err) => { - console.log(err); + console.error(err); error = err.detail; return null; }); @@ -361,7 +361,7 @@ export const getBanners = async (token: string): Promise => { return res.json(); }) .catch((err) => { - console.log(err); + console.error(err); error = err.detail; return null; }); @@ -391,7 +391,7 @@ export const setBanners = async (token: string, banners: Banner[]) => { return res.json(); }) .catch((err) => { - console.log(err); + console.error(err); error = err.detail; return null; }); diff --git a/src/lib/apis/evaluations/index.ts b/src/lib/apis/evaluations/index.ts index f6f35f7c1..96a689fcb 100644 --- a/src/lib/apis/evaluations/index.ts +++ b/src/lib/apis/evaluations/index.ts @@ -20,7 +20,7 @@ export const getConfig = async (token: string = '') => { }) .catch((err) => { error = err.detail; - console.log(err); + console.error(err); return null; }); @@ -51,7 +51,7 @@ export const updateConfig = async (token: string, config: object) => { }) .catch((err) => { error = err.detail; - console.log(err); + console.error(err); return null; }); @@ -82,7 +82,7 @@ export const getAllFeedbacks = async (token: string = '') => { }) .catch((err) => { error = err.detail; - console.log(err); + console.error(err); return null; }); @@ -113,7 +113,7 @@ export const exportAllFeedbacks = async (token: string = '') => { }) .catch((err) => { error = err.detail; - console.log(err); + console.error(err); return null; }); @@ -144,7 +144,7 @@ export const createNewFeedback = async (token: string, feedback: object) => { }) .catch((err) => { error = err.detail; - console.log(err); + console.error(err); return null; }); @@ -175,7 +175,7 @@ export const getFeedbackById = async (token: string, feedbackId: string) => { }) .catch((err) => { error = err.detail; - console.log(err); + console.error(err); return null; }); @@ -206,7 +206,7 @@ export const updateFeedbackById = async (token: string, feedbackId: string, feed }) .catch((err) => { error = err.detail; - console.log(err); + console.error(err); return null; }); @@ -234,7 +234,7 @@ export const deleteFeedbackById = async (token: string, feedbackId: string) => { }) .catch((err) => { error = err.detail; - console.log(err); + console.error(err); return null; }); diff --git a/src/lib/apis/files/index.ts b/src/lib/apis/files/index.ts index 6a42ec614..261fe56db 100644 --- a/src/lib/apis/files/index.ts +++ b/src/lib/apis/files/index.ts @@ -19,7 +19,7 @@ export const uploadFile = async (token: string, file: File) => { }) .catch((err) => { error = err.detail; - 
console.log(err); + console.error(err); return null; }); @@ -76,7 +76,7 @@ export const getFiles = async (token: string = '') => { }) .catch((err) => { error = err.detail; - console.log(err); + console.error(err); return null; }); @@ -107,7 +107,7 @@ export const getFileById = async (token: string, id: string) => { }) .catch((err) => { error = err.detail; - console.log(err); + console.error(err); return null; }); @@ -141,7 +141,7 @@ export const updateFileDataContentById = async (token: string, id: string, conte }) .catch((err) => { error = err.detail; - console.log(err); + console.error(err); return null; }); @@ -168,7 +168,7 @@ export const getFileContentById = async (id: string) => { }) .catch((err) => { error = err.detail; - console.log(err); + console.error(err); return null; }); @@ -200,7 +200,7 @@ export const deleteFileById = async (token: string, id: string) => { }) .catch((err) => { error = err.detail; - console.log(err); + console.error(err); return null; }); @@ -231,7 +231,7 @@ export const deleteAllFiles = async (token: string) => { }) .catch((err) => { error = err.detail; - console.log(err); + console.error(err); return null; }); diff --git a/src/lib/apis/folders/index.ts b/src/lib/apis/folders/index.ts index f1a1f5b48..21ec426b0 100644 --- a/src/lib/apis/folders/index.ts +++ b/src/lib/apis/folders/index.ts @@ -50,7 +50,7 @@ export const getFolders = async (token: string = '') => { }) .catch((err) => { error = err.detail; - console.log(err); + console.error(err); return null; }); @@ -81,7 +81,7 @@ export const getFolderById = async (token: string, id: string) => { }) .catch((err) => { error = err.detail; - console.log(err); + console.error(err); return null; }); @@ -115,7 +115,7 @@ export const updateFolderNameById = async (token: string, id: string, name: stri }) .catch((err) => { error = err.detail; - console.log(err); + console.error(err); return null; }); @@ -153,7 +153,7 @@ export const updateFolderIsExpandedById = async ( }) .catch((err) => { error = err.detail; - console.log(err); + console.error(err); return null; }); @@ -187,7 +187,7 @@ export const updateFolderParentIdById = async (token: string, id: string, parent }) .catch((err) => { error = err.detail; - console.log(err); + console.error(err); return null; }); @@ -226,7 +226,7 @@ export const updateFolderItemsById = async (token: string, id: string, items: Fo }) .catch((err) => { error = err.detail; - console.log(err); + console.error(err); return null; }); @@ -257,7 +257,7 @@ export const deleteFolderById = async (token: string, id: string) => { }) .catch((err) => { error = err.detail; - console.log(err); + console.error(err); return null; }); diff --git a/src/lib/apis/functions/index.ts b/src/lib/apis/functions/index.ts index ed3306b32..f1a9bf5a0 100644 --- a/src/lib/apis/functions/index.ts +++ b/src/lib/apis/functions/index.ts @@ -20,7 +20,7 @@ export const createNewFunction = async (token: string, func: object) => { }) .catch((err) => { error = err.detail; - console.log(err); + console.error(err); return null; }); @@ -51,7 +51,7 @@ export const getFunctions = async (token: string = '') => { }) .catch((err) => { error = err.detail; - console.log(err); + console.error(err); return null; }); @@ -82,7 +82,7 @@ export const exportFunctions = async (token: string = '') => { }) .catch((err) => { error = err.detail; - console.log(err); + console.error(err); return null; }); @@ -114,7 +114,7 @@ export const getFunctionById = async (token: string, id: string) => { .catch((err) => { error = err.detail; - 
console.log(err); + console.error(err); return null; }); @@ -149,7 +149,7 @@ export const updateFunctionById = async (token: string, id: string, func: object .catch((err) => { error = err.detail; - console.log(err); + console.error(err); return null; }); @@ -181,7 +181,7 @@ export const deleteFunctionById = async (token: string, id: string) => { .catch((err) => { error = err.detail; - console.log(err); + console.error(err); return null; }); @@ -213,7 +213,7 @@ export const toggleFunctionById = async (token: string, id: string) => { .catch((err) => { error = err.detail; - console.log(err); + console.error(err); return null; }); @@ -245,7 +245,7 @@ export const toggleGlobalById = async (token: string, id: string) => { .catch((err) => { error = err.detail; - console.log(err); + console.error(err); return null; }); @@ -277,7 +277,7 @@ export const getFunctionValvesById = async (token: string, id: string) => { .catch((err) => { error = err.detail; - console.log(err); + console.error(err); return null; }); @@ -309,7 +309,7 @@ export const getFunctionValvesSpecById = async (token: string, id: string) => { .catch((err) => { error = err.detail; - console.log(err); + console.error(err); return null; }); @@ -344,7 +344,7 @@ export const updateFunctionValvesById = async (token: string, id: string, valves .catch((err) => { error = err.detail; - console.log(err); + console.error(err); return null; }); @@ -376,7 +376,7 @@ export const getUserValvesById = async (token: string, id: string) => { .catch((err) => { error = err.detail; - console.log(err); + console.error(err); return null; }); @@ -408,7 +408,7 @@ export const getUserValvesSpecById = async (token: string, id: string) => { .catch((err) => { error = err.detail; - console.log(err); + console.error(err); return null; }); @@ -443,7 +443,7 @@ export const updateUserValvesById = async (token: string, id: string, valves: ob .catch((err) => { error = err.detail; - console.log(err); + console.error(err); return null; }); diff --git a/src/lib/apis/groups/index.ts b/src/lib/apis/groups/index.ts index b7d4f8ef9..c55f477af 100644 --- a/src/lib/apis/groups/index.ts +++ b/src/lib/apis/groups/index.ts @@ -20,7 +20,7 @@ export const createNewGroup = async (token: string, group: object) => { }) .catch((err) => { error = err.detail; - console.log(err); + console.error(err); return null; }); @@ -51,7 +51,7 @@ export const getGroups = async (token: string = '') => { }) .catch((err) => { error = err.detail; - console.log(err); + console.error(err); return null; }); @@ -83,7 +83,7 @@ export const getGroupById = async (token: string, id: string) => { .catch((err) => { error = err.detail; - console.log(err); + console.error(err); return null; }); @@ -118,7 +118,7 @@ export const updateGroupById = async (token: string, id: string, group: object) .catch((err) => { error = err.detail; - console.log(err); + console.error(err); return null; }); @@ -150,7 +150,7 @@ export const deleteGroupById = async (token: string, id: string) => { .catch((err) => { error = err.detail; - console.log(err); + console.error(err); return null; }); diff --git a/src/lib/apis/images/index.ts b/src/lib/apis/images/index.ts index 2e6510437..a58d16085 100644 --- a/src/lib/apis/images/index.ts +++ b/src/lib/apis/images/index.ts @@ -16,7 +16,7 @@ export const getConfig = async (token: string = '') => { return res.json(); }) .catch((err) => { - console.log(err); + console.error(err); if ('detail' in err) { error = err.detail; } else { @@ -51,7 +51,7 @@ export const updateConfig = async (token: string = 
'', config: object) => { return res.json(); }) .catch((err) => { - console.log(err); + console.error(err); if ('detail' in err) { error = err.detail; } else { @@ -83,7 +83,7 @@ export const verifyConfigUrl = async (token: string = '') => { return res.json(); }) .catch((err) => { - console.log(err); + console.error(err); if ('detail' in err) { error = err.detail; } else { @@ -115,7 +115,7 @@ export const getImageGenerationConfig = async (token: string = '') => { return res.json(); }) .catch((err) => { - console.log(err); + console.error(err); if ('detail' in err) { error = err.detail; } else { @@ -148,7 +148,7 @@ export const updateImageGenerationConfig = async (token: string = '', config: ob return res.json(); }) .catch((err) => { - console.log(err); + console.error(err); if ('detail' in err) { error = err.detail; } else { @@ -180,7 +180,7 @@ export const getImageGenerationModels = async (token: string = '') => { return res.json(); }) .catch((err) => { - console.log(err); + console.error(err); if ('detail' in err) { error = err.detail; } else { @@ -215,7 +215,7 @@ export const imageGenerations = async (token: string = '', prompt: string) => { return res.json(); }) .catch((err) => { - console.log(err); + console.error(err); if ('detail' in err) { error = err.detail; } else { diff --git a/src/lib/apis/index.ts b/src/lib/apis/index.ts index 3892afeb8..710179c12 100644 --- a/src/lib/apis/index.ts +++ b/src/lib/apis/index.ts @@ -25,7 +25,7 @@ export const getModels = async ( }) .catch((err) => { error = err; - console.log(err); + console.error(err); return null; }); @@ -173,7 +173,7 @@ export const chatCompleted = async (token: string, body: ChatCompletedForm) => { return res.json(); }) .catch((err) => { - console.log(err); + console.error(err); if ('detail' in err) { error = err.detail; } else { @@ -212,7 +212,7 @@ export const chatAction = async (token: string, action_id: string, body: ChatAct return res.json(); }) .catch((err) => { - console.log(err); + console.error(err); if ('detail' in err) { error = err.detail; } else { @@ -244,7 +244,7 @@ export const stopTask = async (token: string, id: string) => { return res.json(); }) .catch((err) => { - console.log(err); + console.error(err); if ('detail' in err) { error = err.detail; } else { @@ -276,7 +276,7 @@ export const getTaskIdsByChatId = async (token: string, chat_id: string) => { return res.json(); }) .catch((err) => { - console.log(err); + console.error(err); if ('detail' in err) { error = err.detail; } else { @@ -315,7 +315,7 @@ export const getToolServerData = async (token: string, url: string) => { } }) .catch((err) => { - console.log(err); + console.error(err); if ('detail' in err) { error = err.detail; } else { @@ -491,7 +491,7 @@ export const getTaskConfig = async (token: string = '') => { return res.json(); }) .catch((err) => { - console.log(err); + console.error(err); error = err; return null; }); @@ -520,7 +520,7 @@ export const updateTaskConfig = async (token: string, config: object) => { return res.json(); }) .catch((err) => { - console.log(err); + console.error(err); if ('detail' in err) { error = err.detail; } else { @@ -562,7 +562,7 @@ export const generateTitle = async ( return res.json(); }) .catch((err) => { - console.log(err); + console.error(err); if ('detail' in err) { error = err.detail; } @@ -634,7 +634,7 @@ export const generateTags = async ( return res.json(); }) .catch((err) => { - console.log(err); + console.error(err); if ('detail' in err) { error = err.detail; } @@ -706,7 +706,7 @@ export const generateEmoji = 
async ( return res.json(); }) .catch((err) => { - console.log(err); + console.error(err); if ('detail' in err) { error = err.detail; } @@ -756,7 +756,7 @@ export const generateQueries = async ( return res.json(); }) .catch((err) => { - console.log(err); + console.error(err); if ('detail' in err) { error = err.detail; } @@ -828,7 +828,7 @@ export const generateAutoCompletion = async ( return res.json(); }) .catch((err) => { - console.log(err); + console.error(err); if ('detail' in err) { error = err.detail; } @@ -892,7 +892,7 @@ export const generateMoACompletion = async ( stream: true }) }).catch((err) => { - console.log(err); + console.error(err); error = err; return null; }); @@ -920,7 +920,7 @@ export const getPipelinesList = async (token: string = '') => { return res.json(); }) .catch((err) => { - console.log(err); + console.error(err); error = err; return null; }); @@ -954,7 +954,7 @@ export const uploadPipeline = async (token: string, file: File, urlIdx: string) return res.json(); }) .catch((err) => { - console.log(err); + console.error(err); if ('detail' in err) { error = err.detail; } else { @@ -990,7 +990,7 @@ export const downloadPipeline = async (token: string, url: string, urlIdx: strin return res.json(); }) .catch((err) => { - console.log(err); + console.error(err); if ('detail' in err) { error = err.detail; } else { @@ -1026,7 +1026,7 @@ export const deletePipeline = async (token: string, id: string, urlIdx: string) return res.json(); }) .catch((err) => { - console.log(err); + console.error(err); if ('detail' in err) { error = err.detail; } else { @@ -1063,7 +1063,7 @@ export const getPipelines = async (token: string, urlIdx?: string) => { return res.json(); }) .catch((err) => { - console.log(err); + console.error(err); error = err; return null; }); @@ -1100,7 +1100,7 @@ export const getPipelineValves = async (token: string, pipeline_id: string, urlI return res.json(); }) .catch((err) => { - console.log(err); + console.error(err); error = err; return null; }); @@ -1136,7 +1136,7 @@ export const getPipelineValvesSpec = async (token: string, pipeline_id: string, return res.json(); }) .catch((err) => { - console.log(err); + console.error(err); error = err; return null; }); @@ -1178,7 +1178,7 @@ export const updatePipelineValves = async ( return res.json(); }) .catch((err) => { - console.log(err); + console.error(err); if ('detail' in err) { error = err.detail; @@ -1210,7 +1210,7 @@ export const getBackendConfig = async () => { return res.json(); }) .catch((err) => { - console.log(err); + console.error(err); error = err; return null; }); @@ -1236,7 +1236,7 @@ export const getChangelog = async () => { return res.json(); }) .catch((err) => { - console.log(err); + console.error(err); error = err; return null; }); @@ -1263,7 +1263,7 @@ export const getVersionUpdates = async (token: string) => { return res.json(); }) .catch((err) => { - console.log(err); + console.error(err); error = err; return null; }); @@ -1290,7 +1290,7 @@ export const getModelFilterConfig = async (token: string) => { return res.json(); }) .catch((err) => { - console.log(err); + console.error(err); error = err; return null; }); @@ -1325,7 +1325,7 @@ export const updateModelFilterConfig = async ( return res.json(); }) .catch((err) => { - console.log(err); + console.error(err); error = err; return null; }); @@ -1352,7 +1352,7 @@ export const getWebhookUrl = async (token: string) => { return res.json(); }) .catch((err) => { - console.log(err); + console.error(err); error = err; return null; }); @@ -1382,7 +1382,7 @@ 
export const updateWebhookUrl = async (token: string, url: string) => { return res.json(); }) .catch((err) => { - console.log(err); + console.error(err); error = err; return null; }); @@ -1409,7 +1409,7 @@ export const getCommunitySharingEnabledStatus = async (token: string) => { return res.json(); }) .catch((err) => { - console.log(err); + console.error(err); error = err; return null; }); @@ -1436,7 +1436,7 @@ export const toggleCommunitySharingEnabledStatus = async (token: string) => { return res.json(); }) .catch((err) => { - console.log(err); + console.error(err); error = err.detail; return null; }); @@ -1463,7 +1463,7 @@ export const getModelConfig = async (token: string): Promise return res.json(); }) .catch((err) => { - console.log(err); + console.error(err); error = err; return null; }); @@ -1511,7 +1511,7 @@ export const updateModelConfig = async (token: string, config: GlobalModelConfig return res.json(); }) .catch((err) => { - console.log(err); + console.error(err); error = err; return null; }); diff --git a/src/lib/apis/knowledge/index.ts b/src/lib/apis/knowledge/index.ts index 92fda2a95..c01c986a2 100644 --- a/src/lib/apis/knowledge/index.ts +++ b/src/lib/apis/knowledge/index.ts @@ -27,7 +27,7 @@ export const createNewKnowledge = async ( }) .catch((err) => { error = err.detail; - console.log(err); + console.error(err); return null; }); @@ -58,7 +58,7 @@ export const getKnowledgeBases = async (token: string = '') => { }) .catch((err) => { error = err.detail; - console.log(err); + console.error(err); return null; }); @@ -89,7 +89,7 @@ export const getKnowledgeBaseList = async (token: string = '') => { }) .catch((err) => { error = err.detail; - console.log(err); + console.error(err); return null; }); @@ -121,7 +121,7 @@ export const getKnowledgeById = async (token: string, id: string) => { .catch((err) => { error = err.detail; - console.log(err); + console.error(err); return null; }); @@ -166,7 +166,7 @@ export const updateKnowledgeById = async (token: string, id: string, form: Knowl .catch((err) => { error = err.detail; - console.log(err); + console.error(err); return null; }); @@ -201,7 +201,7 @@ export const addFileToKnowledgeById = async (token: string, id: string, fileId: .catch((err) => { error = err.detail; - console.log(err); + console.error(err); return null; }); @@ -236,7 +236,7 @@ export const updateFileFromKnowledgeById = async (token: string, id: string, fil .catch((err) => { error = err.detail; - console.log(err); + console.error(err); return null; }); @@ -271,7 +271,7 @@ export const removeFileFromKnowledgeById = async (token: string, id: string, fil .catch((err) => { error = err.detail; - console.log(err); + console.error(err); return null; }); @@ -303,7 +303,7 @@ export const resetKnowledgeById = async (token: string, id: string) => { .catch((err) => { error = err.detail; - console.log(err); + console.error(err); return null; }); @@ -335,7 +335,7 @@ export const deleteKnowledgeById = async (token: string, id: string) => { .catch((err) => { error = err.detail; - console.log(err); + console.error(err); return null; }); @@ -363,7 +363,7 @@ export const reindexKnowledgeFiles = async (token: string) => { }) .catch((err) => { error = err.detail; - console.log(err); + console.error(err); return null; }); diff --git a/src/lib/apis/memories/index.ts b/src/lib/apis/memories/index.ts index 3fd83ca9e..d8fdc638f 100644 --- a/src/lib/apis/memories/index.ts +++ b/src/lib/apis/memories/index.ts @@ -17,7 +17,7 @@ export const getMemories = async (token: string) => { }) 
.catch((err) => { error = err.detail; - console.log(err); + console.error(err); return null; }); @@ -48,7 +48,7 @@ export const addNewMemory = async (token: string, content: string) => { }) .catch((err) => { error = err.detail; - console.log(err); + console.error(err); return null; }); @@ -79,7 +79,7 @@ export const updateMemoryById = async (token: string, id: string, content: strin }) .catch((err) => { error = err.detail; - console.log(err); + console.error(err); return null; }); @@ -110,7 +110,7 @@ export const queryMemory = async (token: string, content: string) => { }) .catch((err) => { error = err.detail; - console.log(err); + console.error(err); return null; }); @@ -142,7 +142,7 @@ export const deleteMemoryById = async (token: string, id: string) => { .catch((err) => { error = err.detail; - console.log(err); + console.error(err); return null; }); @@ -174,7 +174,7 @@ export const deleteMemoriesByUserId = async (token: string) => { .catch((err) => { error = err.detail; - console.log(err); + console.error(err); return null; }); diff --git a/src/lib/apis/models/index.ts b/src/lib/apis/models/index.ts index 9cf625d03..3e6e0d0c0 100644 --- a/src/lib/apis/models/index.ts +++ b/src/lib/apis/models/index.ts @@ -20,7 +20,7 @@ export const getModels = async (token: string = '') => { }) .catch((err) => { error = err; - console.log(err); + console.error(err); return null; }); @@ -51,7 +51,7 @@ export const getBaseModels = async (token: string = '') => { }) .catch((err) => { error = err; - console.log(err); + console.error(err); return null; }); @@ -80,7 +80,7 @@ export const createNewModel = async (token: string, model: object) => { }) .catch((err) => { error = err.detail; - console.log(err); + console.error(err); return null; }); @@ -115,7 +115,7 @@ export const getModelById = async (token: string, id: string) => { .catch((err) => { error = err; - console.log(err); + console.error(err); return null; }); @@ -150,7 +150,7 @@ export const toggleModelById = async (token: string, id: string) => { .catch((err) => { error = err; - console.log(err); + console.error(err); return null; }); @@ -186,7 +186,7 @@ export const updateModelById = async (token: string, id: string, model: object) .catch((err) => { error = err; - console.log(err); + console.error(err); return null; }); @@ -221,7 +221,7 @@ export const deleteModelById = async (token: string, id: string) => { .catch((err) => { error = err.detail; - console.log(err); + console.error(err); return null; }); @@ -253,7 +253,7 @@ export const deleteAllModels = async (token: string) => { .catch((err) => { error = err; - console.log(err); + console.error(err); return null; }); diff --git a/src/lib/apis/notes/index.ts b/src/lib/apis/notes/index.ts index 23bec36f2..df0be7262 100644 --- a/src/lib/apis/notes/index.ts +++ b/src/lib/apis/notes/index.ts @@ -28,7 +28,7 @@ export const createNewNote = async (token: string, note: NoteItem) => { }) .catch((err) => { error = err.detail; - console.log(err); + console.error(err); return null; }); @@ -59,7 +59,7 @@ export const getNotes = async (token: string = '') => { }) .catch((err) => { error = err.detail; - console.log(err); + console.error(err); return null; }); @@ -108,7 +108,7 @@ export const getNoteById = async (token: string, id: string) => { .catch((err) => { error = err.detail; - console.log(err); + console.error(err); return null; }); @@ -143,7 +143,7 @@ export const updateNoteById = async (token: string, id: string, note: NoteItem) .catch((err) => { error = err.detail; - console.log(err); + 
console.error(err); return null; }); @@ -175,7 +175,7 @@ export const deleteNoteById = async (token: string, id: string) => { .catch((err) => { error = err.detail; - console.log(err); + console.error(err); return null; }); diff --git a/src/lib/apis/ollama/index.ts b/src/lib/apis/ollama/index.ts index b96567e63..2f6278fe9 100644 --- a/src/lib/apis/ollama/index.ts +++ b/src/lib/apis/ollama/index.ts @@ -51,7 +51,7 @@ export const getOllamaConfig = async (token: string = '') => { return res.json(); }) .catch((err) => { - console.log(err); + console.error(err); if ('detail' in err) { error = err.detail; } else { @@ -92,7 +92,7 @@ export const updateOllamaConfig = async (token: string = '', config: OllamaConfi return res.json(); }) .catch((err) => { - console.log(err); + console.error(err); if ('detail' in err) { error = err.detail; } else { @@ -124,7 +124,7 @@ export const getOllamaUrls = async (token: string = '') => { return res.json(); }) .catch((err) => { - console.log(err); + console.error(err); if ('detail' in err) { error = err.detail; } else { @@ -159,7 +159,7 @@ export const updateOllamaUrls = async (token: string = '', urls: string[]) => { return res.json(); }) .catch((err) => { - console.log(err); + console.error(err); if ('detail' in err) { error = err.detail; } else { @@ -191,7 +191,7 @@ export const getOllamaVersion = async (token: string, urlIdx?: number) => { return res.json(); }) .catch((err) => { - console.log(err); + console.error(err); if ('detail' in err) { error = err.detail; } else { @@ -223,7 +223,7 @@ export const getOllamaModels = async (token: string = '', urlIdx: null | number return res.json(); }) .catch((err) => { - console.log(err); + console.error(err); if ('detail' in err) { error = err.detail; } else { @@ -268,7 +268,7 @@ export const generatePrompt = async (token: string = '', model: string, conversa ` }) }).catch((err) => { - console.log(err); + console.error(err); if ('detail' in err) { error = err.detail; } @@ -408,11 +408,11 @@ export const deleteModel = async (token: string, tagName: string, urlIdx: string return res.json(); }) .then((json) => { - console.log(json); + console.debug(json); return true; }) .catch((err) => { - console.log(err); + console.error(err); error = err; if ('detail' in err) { @@ -445,7 +445,7 @@ export const pullModel = async (token: string, tagName: string, urlIdx: number | name: tagName }) }).catch((err) => { - console.log(err); + console.error(err); error = err; if ('detail' in err) { @@ -481,7 +481,7 @@ export const downloadModel = async ( }) } ).catch((err) => { - console.log(err); + console.error(err); error = err; if ('detail' in err) { @@ -512,7 +512,7 @@ export const uploadModel = async (token: string, file: File, urlIdx: string | nu body: formData } ).catch((err) => { - console.log(err); + console.error(err); error = err; if ('detail' in err) { diff --git a/src/lib/apis/openai/index.ts b/src/lib/apis/openai/index.ts index bab2d6e36..f6cf76a73 100644 --- a/src/lib/apis/openai/index.ts +++ b/src/lib/apis/openai/index.ts @@ -16,7 +16,7 @@ export const getOpenAIConfig = async (token: string = '') => { return res.json(); }) .catch((err) => { - console.log(err); + console.error(err); if ('detail' in err) { error = err.detail; } else { @@ -58,7 +58,7 @@ export const updateOpenAIConfig = async (token: string = '', config: OpenAIConfi return res.json(); }) .catch((err) => { - console.log(err); + console.error(err); if ('detail' in err) { error = err.detail; } else { @@ -90,7 +90,7 @@ export const getOpenAIUrls = async (token: 
string = '') => { return res.json(); }) .catch((err) => { - console.log(err); + console.error(err); if ('detail' in err) { error = err.detail; } else { @@ -125,7 +125,7 @@ export const updateOpenAIUrls = async (token: string = '', urls: string[]) => { return res.json(); }) .catch((err) => { - console.log(err); + console.error(err); if ('detail' in err) { error = err.detail; } else { @@ -157,7 +157,7 @@ export const getOpenAIKeys = async (token: string = '') => { return res.json(); }) .catch((err) => { - console.log(err); + console.error(err); if ('detail' in err) { error = err.detail; } else { @@ -192,7 +192,7 @@ export const updateOpenAIKeys = async (token: string = '', keys: string[]) => { return res.json(); }) .catch((err) => { - console.log(err); + console.error(err); if ('detail' in err) { error = err.detail; } else { @@ -346,7 +346,7 @@ export const chatCompletion = async ( }, body: JSON.stringify(body) }).catch((err) => { - console.log(err); + console.error(err); error = err; return null; }); @@ -409,7 +409,7 @@ export const synthesizeOpenAISpeech = async ( voice: speaker }) }).catch((err) => { - console.log(err); + console.error(err); error = err; return null; }); diff --git a/src/lib/apis/prompts/index.ts b/src/lib/apis/prompts/index.ts index f1c54b109..4129ea62a 100644 --- a/src/lib/apis/prompts/index.ts +++ b/src/lib/apis/prompts/index.ts @@ -28,7 +28,7 @@ export const createNewPrompt = async (token: string, prompt: PromptItem) => { }) .catch((err) => { error = err.detail; - console.log(err); + console.error(err); return null; }); @@ -59,7 +59,7 @@ export const getPrompts = async (token: string = '') => { }) .catch((err) => { error = err.detail; - console.log(err); + console.error(err); return null; }); @@ -90,7 +90,7 @@ export const getPromptList = async (token: string = '') => { }) .catch((err) => { error = err.detail; - console.log(err); + console.error(err); return null; }); @@ -122,7 +122,7 @@ export const getPromptByCommand = async (token: string, command: string) => { .catch((err) => { error = err.detail; - console.log(err); + console.error(err); return null; }); @@ -158,7 +158,7 @@ export const updatePromptByCommand = async (token: string, prompt: PromptItem) = .catch((err) => { error = err.detail; - console.log(err); + console.error(err); return null; }); @@ -192,7 +192,7 @@ export const deletePromptByCommand = async (token: string, command: string) => { .catch((err) => { error = err.detail; - console.log(err); + console.error(err); return null; }); diff --git a/src/lib/apis/retrieval/index.ts b/src/lib/apis/retrieval/index.ts index f4b937b68..8fa6578ed 100644 --- a/src/lib/apis/retrieval/index.ts +++ b/src/lib/apis/retrieval/index.ts @@ -15,7 +15,7 @@ export const getRAGConfig = async (token: string) => { return res.json(); }) .catch((err) => { - console.log(err); + console.error(err); error = err.detail; return null; }); @@ -77,7 +77,7 @@ export const updateRAGConfig = async (token: string, payload: RAGConfigForm) => return res.json(); }) .catch((err) => { - console.log(err); + console.error(err); error = err.detail; return null; }); @@ -104,7 +104,7 @@ export const getQuerySettings = async (token: string) => { return res.json(); }) .catch((err) => { - console.log(err); + console.error(err); error = err.detail; return null; }); @@ -140,7 +140,7 @@ export const updateQuerySettings = async (token: string, settings: QuerySettings return res.json(); }) .catch((err) => { - console.log(err); + console.error(err); error = err.detail; return null; }); @@ -167,7 +167,7 @@ 
export const getEmbeddingConfig = async (token: string) => { return res.json(); }) .catch((err) => { - console.log(err); + console.error(err); error = err.detail; return null; }); @@ -209,7 +209,7 @@ export const updateEmbeddingConfig = async (token: string, payload: EmbeddingMod return res.json(); }) .catch((err) => { - console.log(err); + console.error(err); error = err.detail; return null; }); @@ -236,7 +236,7 @@ export const getRerankingConfig = async (token: string) => { return res.json(); }) .catch((err) => { - console.log(err); + console.error(err); error = err.detail; return null; }); @@ -270,7 +270,7 @@ export const updateRerankingConfig = async (token: string, payload: RerankingMod return res.json(); }) .catch((err) => { - console.log(err); + console.error(err); error = err.detail; return null; }); @@ -313,7 +313,7 @@ export const processFile = async ( }) .catch((err) => { error = err.detail; - console.log(err); + console.error(err); return null; }); @@ -344,7 +344,7 @@ export const processYoutubeVideo = async (token: string, url: string) => { }) .catch((err) => { error = err.detail; - console.log(err); + console.error(err); return null; }); @@ -376,7 +376,7 @@ export const processWeb = async (token: string, collection_name: string, url: st }) .catch((err) => { error = err.detail; - console.log(err); + console.error(err); return null; }); @@ -410,7 +410,7 @@ export const processWebSearch = async ( return res.json(); }) .catch((err) => { - console.log(err); + console.error(err); error = err.detail; return null; }); diff --git a/src/lib/apis/tools/index.ts b/src/lib/apis/tools/index.ts index d1dc11c16..52501a0e0 100644 --- a/src/lib/apis/tools/index.ts +++ b/src/lib/apis/tools/index.ts @@ -20,7 +20,7 @@ export const createNewTool = async (token: string, tool: object) => { }) .catch((err) => { error = err.detail; - console.log(err); + console.error(err); return null; }); @@ -51,7 +51,7 @@ export const getTools = async (token: string = '') => { }) .catch((err) => { error = err.detail; - console.log(err); + console.error(err); return null; }); @@ -82,7 +82,7 @@ export const getToolList = async (token: string = '') => { }) .catch((err) => { error = err.detail; - console.log(err); + console.error(err); return null; }); @@ -113,7 +113,7 @@ export const exportTools = async (token: string = '') => { }) .catch((err) => { error = err.detail; - console.log(err); + console.error(err); return null; }); @@ -145,7 +145,7 @@ export const getToolById = async (token: string, id: string) => { .catch((err) => { error = err.detail; - console.log(err); + console.error(err); return null; }); @@ -180,7 +180,7 @@ export const updateToolById = async (token: string, id: string, tool: object) => .catch((err) => { error = err.detail; - console.log(err); + console.error(err); return null; }); @@ -212,7 +212,7 @@ export const deleteToolById = async (token: string, id: string) => { .catch((err) => { error = err.detail; - console.log(err); + console.error(err); return null; }); @@ -244,7 +244,7 @@ export const getToolValvesById = async (token: string, id: string) => { .catch((err) => { error = err.detail; - console.log(err); + console.error(err); return null; }); @@ -276,7 +276,7 @@ export const getToolValvesSpecById = async (token: string, id: string) => { .catch((err) => { error = err.detail; - console.log(err); + console.error(err); return null; }); @@ -311,7 +311,7 @@ export const updateToolValvesById = async (token: string, id: string, valves: ob .catch((err) => { error = err.detail; - console.log(err); + 
console.error(err); return null; }); @@ -343,7 +343,7 @@ export const getUserValvesById = async (token: string, id: string) => { .catch((err) => { error = err.detail; - console.log(err); + console.error(err); return null; }); @@ -375,7 +375,7 @@ export const getUserValvesSpecById = async (token: string, id: string) => { .catch((err) => { error = err.detail; - console.log(err); + console.error(err); return null; }); @@ -410,7 +410,7 @@ export const updateUserValvesById = async (token: string, id: string, valves: ob .catch((err) => { error = err.detail; - console.log(err); + console.error(err); return null; }); diff --git a/src/lib/apis/users/index.ts b/src/lib/apis/users/index.ts index be82454c2..f8ab88ff5 100644 --- a/src/lib/apis/users/index.ts +++ b/src/lib/apis/users/index.ts @@ -16,7 +16,7 @@ export const getUserGroups = async (token: string) => { return res.json(); }) .catch((err) => { - console.log(err); + console.error(err); error = err.detail; return null; }); @@ -43,7 +43,7 @@ export const getUserDefaultPermissions = async (token: string) => { return res.json(); }) .catch((err) => { - console.log(err); + console.error(err); error = err.detail; return null; }); @@ -73,7 +73,7 @@ export const updateUserDefaultPermissions = async (token: string, permissions: o return res.json(); }) .catch((err) => { - console.log(err); + console.error(err); error = err.detail; return null; }); @@ -104,7 +104,7 @@ export const updateUserRole = async (token: string, id: string, role: string) => return res.json(); }) .catch((err) => { - console.log(err); + console.error(err); error = err.detail; return null; }); @@ -154,7 +154,7 @@ export const getUsers = async ( return res.json(); }) .catch((err) => { - console.log(err); + console.error(err); error = err.detail; return null; }); @@ -182,7 +182,7 @@ export const getAllUsers = async (token: string) => { return res.json(); }) .catch((err) => { - console.log(err); + console.error(err); error = err.detail; return null; }); @@ -208,7 +208,7 @@ export const getUserSettings = async (token: string) => { return res.json(); }) .catch((err) => { - console.log(err); + console.error(err); error = err.detail; return null; }); @@ -238,7 +238,7 @@ export const updateUserSettings = async (token: string, settings: object) => { return res.json(); }) .catch((err) => { - console.log(err); + console.error(err); error = err.detail; return null; }); @@ -265,7 +265,7 @@ export const getUserById = async (token: string, userId: string) => { return res.json(); }) .catch((err) => { - console.log(err); + console.error(err); error = err.detail; return null; }); @@ -291,7 +291,7 @@ export const getUserInfo = async (token: string) => { return res.json(); }) .catch((err) => { - console.log(err); + console.error(err); error = err.detail; return null; }); @@ -321,7 +321,7 @@ export const updateUserInfo = async (token: string, info: object) => { return res.json(); }) .catch((err) => { - console.log(err); + console.error(err); error = err.detail; return null; }); @@ -335,7 +335,7 @@ export const updateUserInfo = async (token: string, info: object) => { export const getAndUpdateUserLocation = async (token: string) => { const location = await getUserPosition().catch((err) => { - console.log(err); + console.error(err); return null; }); @@ -343,7 +343,7 @@ export const getAndUpdateUserLocation = async (token: string) => { await updateUserInfo(token, { location: location }); return location; } else { - console.log('Failed to get user location'); + console.info('Failed to get user location'); 
return null; } }; @@ -363,7 +363,7 @@ export const deleteUserById = async (token: string, userId: string) => { return res.json(); }) .catch((err) => { - console.log(err); + console.error(err); error = err.detail; return null; }); @@ -403,7 +403,7 @@ export const updateUserById = async (token: string, userId: string, user: UserUp return res.json(); }) .catch((err) => { - console.log(err); + console.error(err); error = err.detail; return null; }); diff --git a/src/lib/apis/utils/index.ts b/src/lib/apis/utils/index.ts index 64db56124..1fc30ddbb 100644 --- a/src/lib/apis/utils/index.ts +++ b/src/lib/apis/utils/index.ts @@ -15,7 +15,7 @@ export const getGravatarUrl = async (token: string, email: string) => { return res.json(); }) .catch((err) => { - console.log(err); + console.error(err); error = err; return null; }); @@ -41,7 +41,7 @@ export const executeCode = async (token: string, code: string) => { return res.json(); }) .catch((err) => { - console.log(err); + console.error(err); error = err; if (err.detail) { @@ -75,7 +75,7 @@ export const formatPythonCode = async (token: string, code: string) => { return res.json(); }) .catch((err) => { - console.log(err); + console.error(err); error = err; if (err.detail) { @@ -110,7 +110,7 @@ export const downloadChatAsPDF = async (token: string, title: string, messages: return res.blob(); }) .catch((err) => { - console.log(err); + console.error(err); error = err; return null; }); @@ -136,7 +136,7 @@ export const getHTMLFromMarkdown = async (token: string, md: string) => { return res.json(); }) .catch((err) => { - console.log(err); + console.error(err); error = err; return null; }); @@ -170,7 +170,7 @@ export const downloadDatabase = async (token: string) => { window.URL.revokeObjectURL(url); }) .catch((err) => { - console.log(err); + console.error(err); error = err.detail; return null; }); @@ -206,7 +206,7 @@ export const downloadLiteLLMConfig = async (token: string) => { window.URL.revokeObjectURL(url); }) .catch((err) => { - console.log(err); + console.error(err); error = err.detail; return null; }); diff --git a/src/lib/components/AddConnectionModal.svelte b/src/lib/components/AddConnectionModal.svelte index 864d850a6..629f19f1b 100644 --- a/src/lib/components/AddConnectionModal.svelte +++ b/src/lib/components/AddConnectionModal.svelte @@ -30,6 +30,9 @@ let url = ''; let key = ''; + let connectionType = 'external'; + let azure = false; + let prefixId = ''; let enable = true; let tags = []; @@ -95,7 +98,9 @@ enable: enable, tags: tags, prefix_id: prefixId, - model_ids: modelIds + model_ids: modelIds, + connection_type: connectionType, + ...(!ollama && azure ? { azure: true } : {}) } }; @@ -120,6 +125,13 @@ tags = connection.config?.tags ?? []; prefixId = connection.config?.prefix_id ?? ''; modelIds = connection.config?.model_ids ?? []; + + if (ollama) { + connectionType = connection.config?.connection_type ?? 'local'; + } else { + connectionType = connection.config?.connection_type ?? 'external'; + azure = connection.config?.azure ?? false; + } } }; @@ -134,7 +146,7 @@
[AddConnectionModal.svelte template hunks; markup not preserved. Hunk @@ -134,7 +146,7 @@ swaps a single wrapper line near the {#if edit} {$i18n.t('Edit Connection')} header, and hunk @@ -172,6 +184,28 @@ inserts a new block labelled {$i18n.t('Connection Type')} above the existing {$i18n.t('URL')} field.]
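For orientation, the connection-type handling introduced in this file's script section can be read as the standalone TypeScript sketch below. It is not the component code itself, only an illustration of the rules visible in the diff: connection_type falls back to 'local' for Ollama connections and 'external' otherwise, and the azure flag is only attached to non-Ollama connections. The ConnectionConfig shape and the buildConnectionConfig helper are assumptions made for the example.

    // Minimal sketch (not the actual component) of the config payload logic above.
    interface ConnectionConfig {
      enable: boolean;
      tags: string[];
      prefix_id: string;
      model_ids: string[];
      connection_type?: string;
      azure?: boolean;
    }

    // Hypothetical helper: attach connection_type, and azure only for non-Ollama connections.
    const buildConnectionConfig = (
      ollama: boolean,
      connectionType: string,
      azure: boolean,
      base: Omit<ConnectionConfig, 'connection_type' | 'azure'>
    ): ConnectionConfig => ({
      ...base,
      connection_type: connectionType,
      ...(!ollama && azure ? { azure: true } : {})
    });

    // When editing an existing connection, missing values fall back per connection kind.
    const resolveConnectionType = (ollama: boolean, saved?: string): string =>
      saved ?? (ollama ? 'local' : 'external');

    // resolveConnectionType(true, undefined)  -> 'local'
    // resolveConnectionType(false, undefined) -> 'external'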
diff --git a/src/lib/components/admin/Functions/FunctionEditor.svelte b/src/lib/components/admin/Functions/FunctionEditor.svelte index 9b2355fe4..1ef7bddc1 100644 --- a/src/lib/components/admin/Functions/FunctionEditor.svelte +++ b/src/lib/components/admin/Functions/FunctionEditor.svelte @@ -277,7 +277,7 @@ class Pipe: await tick(); if (res) { - console.log('Code formatted successfully'); + console.info('Code formatted successfully'); saveHandler(); } diff --git a/src/lib/components/admin/Settings/Documents.svelte b/src/lib/components/admin/Settings/Documents.svelte index cc56356fa..d1ba533c0 100644 --- a/src/lib/components/admin/Settings/Documents.svelte +++ b/src/lib/components/admin/Settings/Documents.svelte @@ -91,7 +91,7 @@ return; } - console.log('Update embedding model attempt:', embeddingModel); + console.debug('Update embedding model attempt:', embeddingModel); updateEmbeddingModelLoading = true; const res = await updateEmbeddingConfig(localStorage.token, { @@ -114,7 +114,7 @@ updateEmbeddingModelLoading = false; if (res) { - console.log('embeddingModelUpdateHandler:', res); + console.debug('embeddingModelUpdateHandler:', res); if (res.status === true) { toast.success($i18n.t('Embedding model set to "{{embedding_model}}"', res), { duration: 1000 * 10 @@ -124,6 +124,13 @@ }; const submitHandler = async () => { + if ( + RAGConfig.CONTENT_EXTRACTION_ENGINE === 'external' && + RAGConfig.EXTERNAL_DOCUMENT_LOADER_URL === '' + ) { + toast.error($i18n.t('External Document Loader URL required.')); + return; + } if (RAGConfig.CONTENT_EXTRACTION_ENGINE === 'tika' && RAGConfig.TIKA_SERVER_URL === '') { toast.error($i18n.t('Tika Server URL required.')); return; @@ -163,6 +170,10 @@ await embeddingModelUpdateHandler(); } + RAGConfig.ALLOWED_FILE_EXTENSIONS = RAGConfig.ALLOWED_FILE_EXTENSIONS.split(',') + .map((ext) => ext.trim()) + .filter((ext) => ext !== ''); + const res = await updateRAGConfig(localStorage.token, RAGConfig); dispatch('save'); }; @@ -185,7 +196,10 @@ onMount(async () => { await setEmbeddingConfig(); - RAGConfig = await getRAGConfig(localStorage.token); + const config = await getRAGConfig(localStorage.token); + config.ALLOWED_FILE_EXTENSIONS = config.ALLOWED_FILE_EXTENSIONS.join(', '); + + RAGConfig = config; }); @@ -246,7 +260,7 @@
[Documents.svelte template hunks; markup not preserved. Hunk @@ -246,7 +260,7 @@ adjusts one line around the {$i18n.t('Content Extraction Engine')} label, hunk @@ -256,6 +270,7 @@ adds one option to the select bound to RAGConfig.CONTENT_EXTRACTION_ENGINE, and hunk @@ -275,11 +290,24 @@ inserts a {:else if RAGConfig.CONTENT_EXTRACTION_ENGINE === 'external'} branch with external document loader input fields ahead of the existing 'tika' branch.]
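As a reading aid, the guard added to submitHandler earlier in this file's diff amounts to the check sketched below. The field names mirror the diff, but the snippet is a simplified standalone version rather than the Svelte component, and toastError stands in for the toast/i18n calls.

    // Sketch of the save-time validation for the content extraction engines.
    type RAGConfigLike = {
      CONTENT_EXTRACTION_ENGINE: string;
      EXTERNAL_DOCUMENT_LOADER_URL: string;
      TIKA_SERVER_URL: string;
    };

    const validateContentExtraction = (
      config: RAGConfigLike,
      toastError: (msg: string) => void
    ): boolean => {
      // The new 'external' engine requires a loader URL, mirroring the existing Tika check.
      if (config.CONTENT_EXTRACTION_ENGINE === 'external' && config.EXTERNAL_DOCUMENT_LOADER_URL === '') {
        toastError('External Document Loader URL required.');
        return false;
      }
      if (config.CONTENT_EXTRACTION_ENGINE === 'tika' && config.TIKA_SERVER_URL === '') {
        toastError('Tika Server URL required.');
        return false;
      }
      return true;
    };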
[Hunk @@ -288,27 +316,38 @@, inside the {:else if RAGConfig.CONTENT_EXTRACTION_ENGINE === 'docling'} branch; markup not preserved. It adds a new setting labelled {$i18n.t('Describe Pictures in Documents')}, followed by the existing {:else if RAGConfig.CONTENT_EXTRACTION_ENGINE === 'document_intelligence'} branch.]
[Further Documents.svelte hunks; markup not preserved. A one-line change at @@ -437,7 +476,7 @@ near {#if embeddingEngine === 'openai'}, context around the {$i18n.t('Top K')} and {$i18n.t('Top K Reranker')} fields, and a newly added {$i18n.t('Allowed File Extensions')} field inserted ahead of the existing {$i18n.t('Max Upload Size')} field.]
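The round-tripping of the allowed-extensions value between the comma-separated text field and the array the backend stores, added in this file's script section, boils down to the two helpers sketched here. They are illustrative only and assume the field holds a plain string on save and an array on load, as the diff suggests.

    // Sketch: the UI edits a comma-separated string, the API expects a clean string array.
    const parseAllowedExtensions = (value: string): string[] =>
      value
        .split(',')
        .map((ext) => ext.trim())
        .filter((ext) => ext !== '');

    // Inverse direction when loading the saved config into the form field.
    const formatAllowedExtensions = (extensions: string[]): string => extensions.join(', ');

    // Example: parseAllowedExtensions('pdf, docx, ,md') -> ['pdf', 'docx', 'md']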
[Remaining hunks; markup not preserved. A one-line change at @@ -770,7 +829,7 @@ near a placement="top-start" tooltip, then hunk @@ -305,6 +306,31 @@ in a subsequent file, which adds a new block labelled {$i18n.t('Pending User Overlay Title')}.]