Mirror of https://git.mirrors.martin98.com/https://github.com/open-webui/open-webui (synced 2025-08-17 17:05:56 +08:00)

Commit 13b0e7d64a

.github/workflows/integration-test.yml (vendored): 7 lines changed
@@ -15,6 +15,13 @@ jobs:
    name: Run Cypress Integration Tests
    runs-on: ubuntu-latest
    steps:
+      - name: Maximize build space
+        uses: AdityaGarg8/remove-unwanted-software@v4.1
+        with:
+          remove-android: 'true'
+          remove-haskell: 'true'
+          remove-codeql: 'true'
+
      - name: Checkout Repository
        uses: actions/checkout@v4
CHANGELOG.md: 27 lines changed

@@ -5,6 +5,33 @@ All notable changes to this project will be documented in this file.
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.1.0/),
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).

+## [0.3.13] - 2024-08-14
+
+### Added
+
+- **🎨 Enhanced Markdown Rendering**: Significant improvements in rendering markdown, ensuring smooth and reliable display of LaTeX and Mermaid charts, enhancing user experience with more robust visual content.
+- **🔄 Auto-Install Tools & Functions Python Dependencies**: For 'Tools' and 'Functions', Open WebUI now automatically installs extra Python requirements specified in the frontmatter, streamlining setup processes and customization.
+- **🌀 OAuth Email Claim Customization**: Introduced an 'OAUTH_EMAIL_CLAIM' variable to allow customization of the default "email" claim within OAuth configurations, providing greater flexibility in authentication processes.
+- **📶 Websocket Reconnection**: Enhanced reliability with the capability to automatically reconnect when a websocket is closed, ensuring consistent and stable communication.
+- **🤳 Haptic Feedback on Supported Devices**: Android devices now support haptic feedback for an immersive tactile experience during certain interactions.
+
+### Fixed
+
+- **🛠️ ComfyUI Performance Improvement**: Addressed an issue causing FastAPI to stall when ComfyUI image generation was active; it now runs in a separate thread to prevent UI unresponsiveness.
+- **🔀 Session Handling**: Fixed an issue mandating session_id on the client side to ensure smoother session management and transitions.
+- **🖋️ Minor Bug Fixes and Format Corrections**: Various minor fixes including typo corrections, backend formatting improvements, and test amendments enhancing overall system stability and performance.
+
+### Changed
+
+- **🚀 Migration to SvelteKit 2**: Upgraded the underlying framework to SvelteKit version 2, offering enhanced speed, better code structure, and improved deployment capabilities.
+- **🧹 General Cleanup and Refactoring**: Performed broad cleanup and refactoring across the platform, improving code efficiency and maintaining high standards of code health.
+- **🚧 Integration Testing Improvements**: Modified how Cypress integration tests detect chat messages and updated sharing tests for better reliability and accuracy.
+- **📁 Standardized '.safetensors' File Extension**: Renamed the '.sft' file extension to '.safetensors' for ComfyUI workflows, standardizing file formats across the platform.
+
+### Removed
+
+- **🗑️ Deprecated Frontend Functions**: Removed frontend functions that were migrated to the backend to declutter the codebase and reduce redundancy.
+
## [0.3.12] - 2024-08-07

### Added
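The "Auto-Install Tools & Functions Python Dependencies" entry above refers to extra requirements declared in a tool's or function's frontmatter. A minimal sketch of how such a frontmatter line could be parsed and installed; the `requirements:` key name and the subprocess-based pip call are assumptions for illustration, not the exact Open WebUI implementation:

import re
import subprocess
import sys

# Hypothetical frontmatter at the top of a Tool/Function module:
# """
# title: Example Tool
# requirements: requests, beautifulsoup4
# """

def extract_frontmatter_requirements(source: str) -> list[str]:
    """Pull a comma-separated 'requirements:' line out of the module source (assumed format)."""
    match = re.search(r"^requirements:\s*(.+)$", source, flags=re.MULTILINE)
    if not match:
        return []
    return [pkg.strip() for pkg in match.group(1).split(",") if pkg.strip()]

def install_requirements(packages: list[str]) -> None:
    """Install each declared package with the running interpreter's pip."""
    for package in packages:
        subprocess.check_call([sys.executable, "-m", "pip", "install", package])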
@@ -15,7 +15,7 @@ from fastapi.responses import StreamingResponse, JSONResponse, FileResponse
from fastapi.middleware.cors import CORSMiddleware
from pydantic import BaseModel

-from typing import List

import uuid
import requests
import hashlib
@@ -244,7 +244,7 @@ async def speech(request: Request, user=Depends(get_verified_user)):
                res = r.json()
                if "error" in res:
                    error_detail = f"External: {res['error']['message']}"
-            except:
+            except Exception:
                error_detail = f"External: {e}"

        raise HTTPException(
@@ -299,7 +299,7 @@ async def speech(request: Request, user=Depends(get_verified_user)):
                res = r.json()
                if "error" in res:
                    error_detail = f"External: {res['error']['message']}"
-            except:
+            except Exception:
                error_detail = f"External: {e}"

        raise HTTPException(
@@ -353,7 +353,7 @@ def transcribe(

    try:
        model = WhisperModel(**whisper_kwargs)
-    except:
+    except Exception:
        log.warning(
            "WhisperModel initialization failed, attempting download with local_files_only=False"
        )
@@ -421,7 +421,7 @@ def transcribe(
                res = r.json()
                if "error" in res:
                    error_detail = f"External: {res['error']['message']}"
-            except:
+            except Exception:
                error_detail = f"External: {e}"

        raise HTTPException(
@@ -438,7 +438,7 @@ def transcribe(
    )


-def get_available_models() -> List[dict]:
+def get_available_models() -> list[dict]:
    if app.state.config.TTS_ENGINE == "openai":
        return [{"id": "tts-1"}, {"id": "tts-1-hd"}]
    elif app.state.config.TTS_ENGINE == "elevenlabs":
@@ -466,7 +466,7 @@ async def get_models(user=Depends(get_verified_user)):
    return {"models": get_available_models()}


-def get_available_voices() -> List[dict]:
+def get_available_voices() -> list[dict]:
    if app.state.config.TTS_ENGINE == "openai":
        return [
            {"name": "alloy", "id": "alloy"},
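The recurring change in the hunks above swaps a bare `except:` for `except Exception:` around the code that extracts an upstream error message; a bare except also traps SystemExit and KeyboardInterrupt, which is rarely intended. A condensed, self-contained sketch of that error-reporting pattern (the helper wrapper and status-code fallback are illustrative, not the router's literal code):

from typing import Optional

import requests
from fastapi import HTTPException

def forward_error(r: Optional[requests.Response], e: Exception) -> None:
    """Turn an upstream failure into an HTTPException, preferring the provider's own message."""
    error_detail = "Open WebUI: Server Connection Error"
    if r is not None:
        try:
            res = r.json()
            if "error" in res:
                error_detail = f"External: {res['error']['message']}"
        except Exception:  # a bare 'except:' would also swallow SystemExit/KeyboardInterrupt
            error_detail = f"External: {e}"
    raise HTTPException(
        status_code=r.status_code if r is not None else 500,
        detail=error_detail,
    )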
@@ -94,7 +94,7 @@ app.state.config.COMFYUI_FLUX_FP8_CLIP = COMFYUI_FLUX_FP8_CLIP


def get_automatic1111_api_auth():
-    if app.state.config.AUTOMATIC1111_API_AUTH == None:
+    if app.state.config.AUTOMATIC1111_API_AUTH is None:
        return ""
    else:
        auth1111_byte_string = app.state.config.AUTOMATIC1111_API_AUTH.encode("utf-8")
@@ -145,28 +145,30 @@ async def get_engine_url(user=Depends(get_admin_user)):
async def update_engine_url(
    form_data: EngineUrlUpdateForm, user=Depends(get_admin_user)
):
-    if form_data.AUTOMATIC1111_BASE_URL == None:
+    if form_data.AUTOMATIC1111_BASE_URL is None:
        app.state.config.AUTOMATIC1111_BASE_URL = AUTOMATIC1111_BASE_URL
    else:
        url = form_data.AUTOMATIC1111_BASE_URL.strip("/")
        try:
            r = requests.head(url)
+            r.raise_for_status()
            app.state.config.AUTOMATIC1111_BASE_URL = url
        except Exception as e:
-            raise HTTPException(status_code=400, detail="Invalid URL provided.")
+            raise HTTPException(status_code=400, detail=ERROR_MESSAGES.INVALID_URL)

-    if form_data.COMFYUI_BASE_URL == None:
+    if form_data.COMFYUI_BASE_URL is None:
        app.state.config.COMFYUI_BASE_URL = COMFYUI_BASE_URL
    else:
        url = form_data.COMFYUI_BASE_URL.strip("/")

        try:
            r = requests.head(url)
+            r.raise_for_status()
            app.state.config.COMFYUI_BASE_URL = url
        except Exception as e:
-            raise HTTPException(status_code=400, detail=ERROR_MESSAGES.DEFAULT(e))
+            raise HTTPException(status_code=400, detail=ERROR_MESSAGES.INVALID_URL)

-    if form_data.AUTOMATIC1111_API_AUTH == None:
+    if form_data.AUTOMATIC1111_API_AUTH is None:
        app.state.config.AUTOMATIC1111_API_AUTH = AUTOMATIC1111_API_AUTH
    else:
        app.state.config.AUTOMATIC1111_API_AUTH = form_data.AUTOMATIC1111_API_AUTH
@@ -514,7 +516,7 @@ async def image_generations(

        data = ImageGenerationPayload(**data)

-        res = comfyui_generate_image(
+        res = await comfyui_generate_image(
            app.state.config.MODEL,
            data,
            user.id,
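The update_engine_url hunk above now validates both base URLs the same way: strip the trailing slash, probe the host with a HEAD request, and reject unreachable or error-returning URLs with the shared ERROR_MESSAGES.INVALID_URL detail. A standalone sketch of that check; the helper wrapper and the timeout value are additions for illustration, not the router's literal code:

import requests
from fastapi import HTTPException

def validate_base_url(url: str) -> str:
    """Return the normalized URL if it answers a HEAD request, otherwise raise a 400."""
    url = url.strip("/")
    try:
        r = requests.head(url, timeout=5)
        r.raise_for_status()  # treat 4xx/5xx responses as invalid, matching the stricter check in the diff
    except Exception:
        raise HTTPException(status_code=400, detail="Invalid URL provided.")
    return url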
@@ -1,5 +1,5 @@
+import asyncio
import websocket  # NOTE: websocket-client (https://github.com/websocket-client/websocket-client)
import uuid
import json
import urllib.request
import urllib.parse
@@ -170,7 +170,7 @@ FLUX_DEFAULT_PROMPT = """
    },
    "10": {
        "inputs": {
-            "vae_name": "ae.sft"
+            "vae_name": "ae.safetensors"
        },
        "class_type": "VAELoader"
    },
@@ -184,7 +184,7 @@ FLUX_DEFAULT_PROMPT = """
    },
    "12": {
        "inputs": {
-            "unet_name": "flux1-dev.sft",
+            "unet_name": "flux1-dev.safetensors",
            "weight_dtype": "default"
        },
        "class_type": "UNETLoader"
@@ -328,7 +328,7 @@ class ImageGenerationPayload(BaseModel):
    flux_fp8_clip: Optional[bool] = None


-def comfyui_generate_image(
+async def comfyui_generate_image(
    model: str, payload: ImageGenerationPayload, client_id, base_url
):
    ws_url = base_url.replace("http://", "ws://").replace("https://", "wss://")
@@ -397,7 +397,9 @@ def comfyui_generate_image(
        return None

    try:
-        images = get_images(ws, comfyui_prompt, client_id, base_url)
+        images = await asyncio.to_thread(
+            get_images, ws, comfyui_prompt, client_id, base_url
+        )
    except Exception as e:
        log.exception(f"Error while receiving images: {e}")
        images = None
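This comfyui.py change is the fix the changelog describes: collecting images over the websocket is blocking, so calling it directly from a coroutine stalls FastAPI's event loop for the whole duration of the generation. Wrapping the blocking call in asyncio.to_thread runs it on a worker thread while the coroutine yields. A self-contained sketch of the same pattern, with slow_fetch standing in for get_images:

import asyncio
import time

def slow_fetch(prompt_id: str) -> list[bytes]:
    """Stand-in for the blocking websocket loop that collects generated images."""
    time.sleep(2)  # simulates waiting on ComfyUI
    return [b"fake-image-bytes"]

async def generate(prompt_id: str) -> list[bytes]:
    # asyncio.to_thread (Python 3.9+) runs the blocking call in a thread,
    # so the event loop keeps serving other requests while we wait.
    return await asyncio.to_thread(slow_fetch, prompt_id)

if __name__ == "__main__":
    images = asyncio.run(generate("demo"))
    print(len(images), "image(s) received")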
@@ -1,47 +1,36 @@
from fastapi import (
    FastAPI,
    Request,
    Response,
    HTTPException,
    Depends,
    status,
    UploadFile,
    File,
    BackgroundTasks,
)
from fastapi.middleware.cors import CORSMiddleware
from fastapi.responses import StreamingResponse
from fastapi.concurrency import run_in_threadpool

from pydantic import BaseModel, ConfigDict

import os
import re
import copy
import random
import requests
import json
import uuid
import aiohttp
import asyncio
import logging
import time
from urllib.parse import urlparse
-from typing import Optional, List, Union
+from typing import Optional, Union

from starlette.background import BackgroundTask

from apps.webui.models.models import Models
from apps.webui.models.users import Users
from constants import ERROR_MESSAGES
from utils.utils import (
    decode_token,
    get_current_user,
    get_verified_user,
    get_admin_user,
)
from utils.task import prompt_template


from config import (
    SRC_LOG_LEVELS,
@@ -53,7 +42,12 @@ from config (
    UPLOAD_DIR,
    AppConfig,
)
-from utils.misc import calculate_sha256, add_or_update_system_message
+from utils.misc import (
+    calculate_sha256,
+    apply_model_params_to_body_ollama,
+    apply_model_params_to_body_openai,
+    apply_model_system_prompt_to_body,
+)

log = logging.getLogger(__name__)
log.setLevel(SRC_LOG_LEVELS["OLLAMA"])
@ -120,7 +114,7 @@ async def get_ollama_api_urls(user=Depends(get_admin_user)):
|
||||
|
||||
|
||||
class UrlUpdateForm(BaseModel):
|
||||
urls: List[str]
|
||||
urls: list[str]
|
||||
|
||||
|
||||
@app.post("/urls/update")
|
||||
@ -183,7 +177,7 @@ async def post_streaming_url(url: str, payload: str, stream: bool = True):
|
||||
res = await r.json()
|
||||
if "error" in res:
|
||||
error_detail = f"Ollama: {res['error']}"
|
||||
except:
|
||||
except Exception:
|
||||
error_detail = f"Ollama: {e}"
|
||||
|
||||
raise HTTPException(
|
||||
@ -238,7 +232,7 @@ async def get_all_models():
|
||||
async def get_ollama_tags(
|
||||
url_idx: Optional[int] = None, user=Depends(get_verified_user)
|
||||
):
|
||||
if url_idx == None:
|
||||
if url_idx is None:
|
||||
models = await get_all_models()
|
||||
|
||||
if app.state.config.ENABLE_MODEL_FILTER:
|
||||
@ -269,7 +263,7 @@ async def get_ollama_tags(
|
||||
res = r.json()
|
||||
if "error" in res:
|
||||
error_detail = f"Ollama: {res['error']}"
|
||||
except:
|
||||
except Exception:
|
||||
error_detail = f"Ollama: {e}"
|
||||
|
||||
raise HTTPException(
|
||||
@ -282,8 +276,7 @@ async def get_ollama_tags(
|
||||
@app.get("/api/version/{url_idx}")
|
||||
async def get_ollama_versions(url_idx: Optional[int] = None):
|
||||
if app.state.config.ENABLE_OLLAMA_API:
|
||||
if url_idx == None:
|
||||
|
||||
if url_idx is None:
|
||||
# returns lowest version
|
||||
tasks = [
|
||||
fetch_url(f"{url}/api/version")
|
||||
@ -323,7 +316,7 @@ async def get_ollama_versions(url_idx: Optional[int] = None):
|
||||
res = r.json()
|
||||
if "error" in res:
|
||||
error_detail = f"Ollama: {res['error']}"
|
||||
except:
|
||||
except Exception:
|
||||
error_detail = f"Ollama: {e}"
|
||||
|
||||
raise HTTPException(
|
||||
@ -346,8 +339,6 @@ async def pull_model(
|
||||
url = app.state.config.OLLAMA_BASE_URLS[url_idx]
|
||||
log.info(f"url: {url}")
|
||||
|
||||
r = None
|
||||
|
||||
# Admin should be able to pull models from any source
|
||||
payload = {**form_data.model_dump(exclude_none=True), "insecure": True}
|
||||
|
||||
@ -367,7 +358,7 @@ async def push_model(
|
||||
url_idx: Optional[int] = None,
|
||||
user=Depends(get_admin_user),
|
||||
):
|
||||
if url_idx == None:
|
||||
if url_idx is None:
|
||||
if form_data.name in app.state.MODELS:
|
||||
url_idx = app.state.MODELS[form_data.name]["urls"][0]
|
||||
else:
|
||||
@ -417,7 +408,7 @@ async def copy_model(
|
||||
url_idx: Optional[int] = None,
|
||||
user=Depends(get_admin_user),
|
||||
):
|
||||
if url_idx == None:
|
||||
if url_idx is None:
|
||||
if form_data.source in app.state.MODELS:
|
||||
url_idx = app.state.MODELS[form_data.source]["urls"][0]
|
||||
else:
|
||||
@ -428,13 +419,13 @@ async def copy_model(
|
||||
|
||||
url = app.state.config.OLLAMA_BASE_URLS[url_idx]
|
||||
log.info(f"url: {url}")
|
||||
|
||||
try:
|
||||
r = requests.request(
|
||||
method="POST",
|
||||
url=f"{url}/api/copy",
|
||||
data=form_data.model_dump_json(exclude_none=True).encode(),
|
||||
)
|
||||
|
||||
try:
|
||||
r.raise_for_status()
|
||||
|
||||
log.debug(f"r.text: {r.text}")
|
||||
@ -448,7 +439,7 @@ async def copy_model(
|
||||
res = r.json()
|
||||
if "error" in res:
|
||||
error_detail = f"Ollama: {res['error']}"
|
||||
except:
|
||||
except Exception:
|
||||
error_detail = f"Ollama: {e}"
|
||||
|
||||
raise HTTPException(
|
||||
@ -464,7 +455,7 @@ async def delete_model(
|
||||
url_idx: Optional[int] = None,
|
||||
user=Depends(get_admin_user),
|
||||
):
|
||||
if url_idx == None:
|
||||
if url_idx is None:
|
||||
if form_data.name in app.state.MODELS:
|
||||
url_idx = app.state.MODELS[form_data.name]["urls"][0]
|
||||
else:
|
||||
@ -476,12 +467,12 @@ async def delete_model(
|
||||
url = app.state.config.OLLAMA_BASE_URLS[url_idx]
|
||||
log.info(f"url: {url}")
|
||||
|
||||
try:
|
||||
r = requests.request(
|
||||
method="DELETE",
|
||||
url=f"{url}/api/delete",
|
||||
data=form_data.model_dump_json(exclude_none=True).encode(),
|
||||
)
|
||||
try:
|
||||
r.raise_for_status()
|
||||
|
||||
log.debug(f"r.text: {r.text}")
|
||||
@ -495,7 +486,7 @@ async def delete_model(
|
||||
res = r.json()
|
||||
if "error" in res:
|
||||
error_detail = f"Ollama: {res['error']}"
|
||||
except:
|
||||
except Exception:
|
||||
error_detail = f"Ollama: {e}"
|
||||
|
||||
raise HTTPException(
|
||||
@ -516,12 +507,12 @@ async def show_model_info(form_data: ModelNameForm, user=Depends(get_verified_us
|
||||
url = app.state.config.OLLAMA_BASE_URLS[url_idx]
|
||||
log.info(f"url: {url}")
|
||||
|
||||
try:
|
||||
r = requests.request(
|
||||
method="POST",
|
||||
url=f"{url}/api/show",
|
||||
data=form_data.model_dump_json(exclude_none=True).encode(),
|
||||
)
|
||||
try:
|
||||
r.raise_for_status()
|
||||
|
||||
return r.json()
|
||||
@ -533,7 +524,7 @@ async def show_model_info(form_data: ModelNameForm, user=Depends(get_verified_us
|
||||
res = r.json()
|
||||
if "error" in res:
|
||||
error_detail = f"Ollama: {res['error']}"
|
||||
except:
|
||||
except Exception:
|
||||
error_detail = f"Ollama: {e}"
|
||||
|
||||
raise HTTPException(
|
||||
@ -556,7 +547,7 @@ async def generate_embeddings(
|
||||
url_idx: Optional[int] = None,
|
||||
user=Depends(get_verified_user),
|
||||
):
|
||||
if url_idx == None:
|
||||
if url_idx is None:
|
||||
model = form_data.model
|
||||
|
||||
if ":" not in model:
|
||||
@ -573,12 +564,12 @@ async def generate_embeddings(
|
||||
url = app.state.config.OLLAMA_BASE_URLS[url_idx]
|
||||
log.info(f"url: {url}")
|
||||
|
||||
try:
|
||||
r = requests.request(
|
||||
method="POST",
|
||||
url=f"{url}/api/embeddings",
|
||||
data=form_data.model_dump_json(exclude_none=True).encode(),
|
||||
)
|
||||
try:
|
||||
r.raise_for_status()
|
||||
|
||||
return r.json()
|
||||
@ -590,7 +581,7 @@ async def generate_embeddings(
|
||||
res = r.json()
|
||||
if "error" in res:
|
||||
error_detail = f"Ollama: {res['error']}"
|
||||
except:
|
||||
except Exception:
|
||||
error_detail = f"Ollama: {e}"
|
||||
|
||||
raise HTTPException(
|
||||
@ -603,10 +594,9 @@ def generate_ollama_embeddings(
|
||||
form_data: GenerateEmbeddingsForm,
|
||||
url_idx: Optional[int] = None,
|
||||
):
|
||||
|
||||
log.info(f"generate_ollama_embeddings {form_data}")
|
||||
|
||||
if url_idx == None:
|
||||
if url_idx is None:
|
||||
model = form_data.model
|
||||
|
||||
if ":" not in model:
|
||||
@ -623,12 +613,12 @@ def generate_ollama_embeddings(
|
||||
url = app.state.config.OLLAMA_BASE_URLS[url_idx]
|
||||
log.info(f"url: {url}")
|
||||
|
||||
try:
|
||||
r = requests.request(
|
||||
method="POST",
|
||||
url=f"{url}/api/embeddings",
|
||||
data=form_data.model_dump_json(exclude_none=True).encode(),
|
||||
)
|
||||
try:
|
||||
r.raise_for_status()
|
||||
|
||||
data = r.json()
|
||||
@ -638,7 +628,7 @@ def generate_ollama_embeddings(
|
||||
if "embedding" in data:
|
||||
return data["embedding"]
|
||||
else:
|
||||
raise "Something went wrong :/"
|
||||
raise Exception("Something went wrong :/")
|
||||
except Exception as e:
|
||||
log.exception(e)
|
||||
error_detail = "Open WebUI: Server Connection Error"
|
||||
@ -647,16 +637,16 @@ def generate_ollama_embeddings(
|
||||
res = r.json()
|
||||
if "error" in res:
|
||||
error_detail = f"Ollama: {res['error']}"
|
||||
except:
|
||||
except Exception:
|
||||
error_detail = f"Ollama: {e}"
|
||||
|
||||
raise error_detail
|
||||
raise Exception(error_detail)
|
||||
|
||||
|
||||
class GenerateCompletionForm(BaseModel):
|
||||
model: str
|
||||
prompt: str
|
||||
images: Optional[List[str]] = None
|
||||
images: Optional[list[str]] = None
|
||||
format: Optional[str] = None
|
||||
options: Optional[dict] = None
|
||||
system: Optional[str] = None
|
||||
@ -674,8 +664,7 @@ async def generate_completion(
|
||||
url_idx: Optional[int] = None,
|
||||
user=Depends(get_verified_user),
|
||||
):
|
||||
|
||||
if url_idx == None:
|
||||
if url_idx is None:
|
||||
model = form_data.model
|
||||
|
||||
if ":" not in model:
|
||||
@ -700,12 +689,12 @@ async def generate_completion(
|
||||
class ChatMessage(BaseModel):
|
||||
role: str
|
||||
content: str
|
||||
images: Optional[List[str]] = None
|
||||
images: Optional[list[str]] = None
|
||||
|
||||
|
||||
class GenerateChatCompletionForm(BaseModel):
|
||||
model: str
|
||||
messages: List[ChatMessage]
|
||||
messages: list[ChatMessage]
|
||||
format: Optional[str] = None
|
||||
options: Optional[dict] = None
|
||||
template: Optional[str] = None
|
||||
@@ -713,6 +702,18 @@ class GenerateChatCompletionForm(BaseModel):
    keep_alive: Optional[Union[int, str]] = None


+def get_ollama_url(url_idx: Optional[int], model: str):
+    if url_idx is None:
+        if model not in app.state.MODELS:
+            raise HTTPException(
+                status_code=400,
+                detail=ERROR_MESSAGES.MODEL_NOT_FOUND(model),
+            )
+        url_idx = random.choice(app.state.MODELS[model]["urls"])
+    url = app.state.config.OLLAMA_BASE_URLS[url_idx]
+    return url
+
+
@app.post("/api/chat")
@app.post("/api/chat/{url_idx}")
async def generate_chat_completion(
@@ -720,12 +721,7 @@ async def generate_chat_completion(
    url_idx: Optional[int] = None,
    user=Depends(get_verified_user),
):
-
-    log.debug(
-        "form_data.model_dump_json(exclude_none=True).encode(): {0} ".format(
-            form_data.model_dump_json(exclude_none=True).encode()
-        )
-    )
+    log.debug(f"{form_data.model_dump_json(exclude_none=True).encode()}=")

    payload = {
        **form_data.model_dump(exclude_none=True, exclude=["metadata"]),
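The hunk that follows deletes the long block of per-option copying (mirostat, num_ctx, temperature, stop, and so on) in favour of a single call to apply_model_params_to_body_ollama plus apply_model_system_prompt_to_body. The helper's real implementation lives in utils/misc.py and is not shown in this diff; a rough sketch of what such a mapping helper might do, purely to illustrate the refactor:

def apply_model_params_to_body_ollama(params: dict, options: dict) -> dict:
    """Copy known model parameters into the Ollama options dict without overriding explicit values (illustrative sketch)."""
    # Keys taken from the removed block; the real helper may differ.
    keys = [
        "mirostat", "mirostat_eta", "mirostat_tau", "num_ctx", "num_batch",
        "num_keep", "repeat_last_n", "temperature", "seed", "stop", "tfs_z",
        "top_k", "top_p", "min_p", "use_mmap", "use_mlock", "num_thread",
    ]
    for key in keys:
        value = params.get(key)
        if value is not None and options.get(key) is None:
            options[key] = value
    return options

Note that the removed code also renamed two parameters on the way in (frequency_penalty to repeat_penalty, max_tokens to num_predict), which a real helper would need to handle as well.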
@ -740,185 +736,21 @@ async def generate_chat_completion(
|
||||
if model_info.base_model_id:
|
||||
payload["model"] = model_info.base_model_id
|
||||
|
||||
model_info.params = model_info.params.model_dump()
|
||||
params = model_info.params.model_dump()
|
||||
|
||||
if model_info.params:
|
||||
if params:
|
||||
if payload.get("options") is None:
|
||||
payload["options"] = {}
|
||||
|
||||
if (
|
||||
model_info.params.get("mirostat", None)
|
||||
and payload["options"].get("mirostat") is None
|
||||
):
|
||||
payload["options"]["mirostat"] = model_info.params.get("mirostat", None)
|
||||
|
||||
if (
|
||||
model_info.params.get("mirostat_eta", None)
|
||||
and payload["options"].get("mirostat_eta") is None
|
||||
):
|
||||
payload["options"]["mirostat_eta"] = model_info.params.get(
|
||||
"mirostat_eta", None
|
||||
payload["options"] = apply_model_params_to_body_ollama(
|
||||
params, payload["options"]
|
||||
)
|
||||
payload = apply_model_system_prompt_to_body(params, payload, user)
|
||||
|
||||
if (
|
||||
model_info.params.get("mirostat_tau", None)
|
||||
and payload["options"].get("mirostat_tau") is None
|
||||
):
|
||||
payload["options"]["mirostat_tau"] = model_info.params.get(
|
||||
"mirostat_tau", None
|
||||
)
|
||||
|
||||
if (
|
||||
model_info.params.get("num_ctx", None)
|
||||
and payload["options"].get("num_ctx") is None
|
||||
):
|
||||
payload["options"]["num_ctx"] = model_info.params.get("num_ctx", None)
|
||||
|
||||
if (
|
||||
model_info.params.get("num_batch", None)
|
||||
and payload["options"].get("num_batch") is None
|
||||
):
|
||||
payload["options"]["num_batch"] = model_info.params.get(
|
||||
"num_batch", None
|
||||
)
|
||||
|
||||
if (
|
||||
model_info.params.get("num_keep", None)
|
||||
and payload["options"].get("num_keep") is None
|
||||
):
|
||||
payload["options"]["num_keep"] = model_info.params.get("num_keep", None)
|
||||
|
||||
if (
|
||||
model_info.params.get("repeat_last_n", None)
|
||||
and payload["options"].get("repeat_last_n") is None
|
||||
):
|
||||
payload["options"]["repeat_last_n"] = model_info.params.get(
|
||||
"repeat_last_n", None
|
||||
)
|
||||
|
||||
if (
|
||||
model_info.params.get("frequency_penalty", None)
|
||||
and payload["options"].get("frequency_penalty") is None
|
||||
):
|
||||
payload["options"]["repeat_penalty"] = model_info.params.get(
|
||||
"frequency_penalty", None
|
||||
)
|
||||
|
||||
if (
|
||||
model_info.params.get("temperature", None) is not None
|
||||
and payload["options"].get("temperature") is None
|
||||
):
|
||||
payload["options"]["temperature"] = model_info.params.get(
|
||||
"temperature", None
|
||||
)
|
||||
|
||||
if (
|
||||
model_info.params.get("seed", None) is not None
|
||||
and payload["options"].get("seed") is None
|
||||
):
|
||||
payload["options"]["seed"] = model_info.params.get("seed", None)
|
||||
|
||||
if (
|
||||
model_info.params.get("stop", None)
|
||||
and payload["options"].get("stop") is None
|
||||
):
|
||||
payload["options"]["stop"] = (
|
||||
[
|
||||
bytes(stop, "utf-8").decode("unicode_escape")
|
||||
for stop in model_info.params["stop"]
|
||||
]
|
||||
if model_info.params.get("stop", None)
|
||||
else None
|
||||
)
|
||||
|
||||
if (
|
||||
model_info.params.get("tfs_z", None)
|
||||
and payload["options"].get("tfs_z") is None
|
||||
):
|
||||
payload["options"]["tfs_z"] = model_info.params.get("tfs_z", None)
|
||||
|
||||
if (
|
||||
model_info.params.get("max_tokens", None)
|
||||
and payload["options"].get("max_tokens") is None
|
||||
):
|
||||
payload["options"]["num_predict"] = model_info.params.get(
|
||||
"max_tokens", None
|
||||
)
|
||||
|
||||
if (
|
||||
model_info.params.get("top_k", None)
|
||||
and payload["options"].get("top_k") is None
|
||||
):
|
||||
payload["options"]["top_k"] = model_info.params.get("top_k", None)
|
||||
|
||||
if (
|
||||
model_info.params.get("top_p", None)
|
||||
and payload["options"].get("top_p") is None
|
||||
):
|
||||
payload["options"]["top_p"] = model_info.params.get("top_p", None)
|
||||
|
||||
if (
|
||||
model_info.params.get("min_p", None)
|
||||
and payload["options"].get("min_p") is None
|
||||
):
|
||||
payload["options"]["min_p"] = model_info.params.get("min_p", None)
|
||||
|
||||
if (
|
||||
model_info.params.get("use_mmap", None)
|
||||
and payload["options"].get("use_mmap") is None
|
||||
):
|
||||
payload["options"]["use_mmap"] = model_info.params.get("use_mmap", None)
|
||||
|
||||
if (
|
||||
model_info.params.get("use_mlock", None)
|
||||
and payload["options"].get("use_mlock") is None
|
||||
):
|
||||
payload["options"]["use_mlock"] = model_info.params.get(
|
||||
"use_mlock", None
|
||||
)
|
||||
|
||||
if (
|
||||
model_info.params.get("num_thread", None)
|
||||
and payload["options"].get("num_thread") is None
|
||||
):
|
||||
payload["options"]["num_thread"] = model_info.params.get(
|
||||
"num_thread", None
|
||||
)
|
||||
|
||||
system = model_info.params.get("system", None)
|
||||
if system:
|
||||
system = prompt_template(
|
||||
system,
|
||||
**(
|
||||
{
|
||||
"user_name": user.name,
|
||||
"user_location": (
|
||||
user.info.get("location") if user.info else None
|
||||
),
|
||||
}
|
||||
if user
|
||||
else {}
|
||||
),
|
||||
)
|
||||
|
||||
if payload.get("messages"):
|
||||
payload["messages"] = add_or_update_system_message(
|
||||
system, payload["messages"]
|
||||
)
|
||||
|
||||
if url_idx == None:
|
||||
if ":" not in payload["model"]:
|
||||
payload["model"] = f"{payload['model']}:latest"
|
||||
|
||||
if payload["model"] in app.state.MODELS:
|
||||
url_idx = random.choice(app.state.MODELS[payload["model"]]["urls"])
|
||||
else:
|
||||
raise HTTPException(
|
||||
status_code=400,
|
||||
detail=ERROR_MESSAGES.MODEL_NOT_FOUND(form_data.model),
|
||||
)
|
||||
|
||||
url = app.state.config.OLLAMA_BASE_URLS[url_idx]
|
||||
url = get_ollama_url(url_idx, payload["model"])
|
||||
log.info(f"url: {url}")
|
||||
log.debug(payload)
|
||||
|
||||
@ -940,7 +772,7 @@ class OpenAIChatMessage(BaseModel):
|
||||
|
||||
class OpenAIChatCompletionForm(BaseModel):
|
||||
model: str
|
||||
messages: List[OpenAIChatMessage]
|
||||
messages: list[OpenAIChatMessage]
|
||||
|
||||
model_config = ConfigDict(extra="allow")
|
||||
|
||||
@ -952,83 +784,28 @@ async def generate_openai_chat_completion(
|
||||
url_idx: Optional[int] = None,
|
||||
user=Depends(get_verified_user),
|
||||
):
|
||||
form_data = OpenAIChatCompletionForm(**form_data)
|
||||
payload = {**form_data.model_dump(exclude_none=True, exclude=["metadata"])}
|
||||
|
||||
completion_form = OpenAIChatCompletionForm(**form_data)
|
||||
payload = {**completion_form.model_dump(exclude_none=True, exclude=["metadata"])}
|
||||
if "metadata" in payload:
|
||||
del payload["metadata"]
|
||||
|
||||
model_id = form_data.model
|
||||
model_id = completion_form.model
|
||||
model_info = Models.get_model_by_id(model_id)
|
||||
|
||||
if model_info:
|
||||
if model_info.base_model_id:
|
||||
payload["model"] = model_info.base_model_id
|
||||
|
||||
model_info.params = model_info.params.model_dump()
|
||||
params = model_info.params.model_dump()
|
||||
|
||||
if model_info.params:
|
||||
payload["temperature"] = model_info.params.get("temperature", None)
|
||||
payload["top_p"] = model_info.params.get("top_p", None)
|
||||
payload["max_tokens"] = model_info.params.get("max_tokens", None)
|
||||
payload["frequency_penalty"] = model_info.params.get(
|
||||
"frequency_penalty", None
|
||||
)
|
||||
payload["seed"] = model_info.params.get("seed", None)
|
||||
payload["stop"] = (
|
||||
[
|
||||
bytes(stop, "utf-8").decode("unicode_escape")
|
||||
for stop in model_info.params["stop"]
|
||||
]
|
||||
if model_info.params.get("stop", None)
|
||||
else None
|
||||
)
|
||||
if params:
|
||||
payload = apply_model_params_to_body_openai(params, payload)
|
||||
payload = apply_model_system_prompt_to_body(params, payload, user)
|
||||
|
||||
system = model_info.params.get("system", None)
|
||||
|
||||
if system:
|
||||
system = prompt_template(
|
||||
system,
|
||||
**(
|
||||
{
|
||||
"user_name": user.name,
|
||||
"user_location": (
|
||||
user.info.get("location") if user.info else None
|
||||
),
|
||||
}
|
||||
if user
|
||||
else {}
|
||||
),
|
||||
)
|
||||
# Check if the payload already has a system message
|
||||
# If not, add a system message to the payload
|
||||
if payload.get("messages"):
|
||||
for message in payload["messages"]:
|
||||
if message.get("role") == "system":
|
||||
message["content"] = system + message["content"]
|
||||
break
|
||||
else:
|
||||
payload["messages"].insert(
|
||||
0,
|
||||
{
|
||||
"role": "system",
|
||||
"content": system,
|
||||
},
|
||||
)
|
||||
|
||||
if url_idx == None:
|
||||
if ":" not in payload["model"]:
|
||||
payload["model"] = f"{payload['model']}:latest"
|
||||
|
||||
if payload["model"] in app.state.MODELS:
|
||||
url_idx = random.choice(app.state.MODELS[payload["model"]]["urls"])
|
||||
else:
|
||||
raise HTTPException(
|
||||
status_code=400,
|
||||
detail=ERROR_MESSAGES.MODEL_NOT_FOUND(form_data.model),
|
||||
)
|
||||
|
||||
url = app.state.config.OLLAMA_BASE_URLS[url_idx]
|
||||
url = get_ollama_url(url_idx, payload["model"])
|
||||
log.info(f"url: {url}")
|
||||
|
||||
return await post_streaming_url(
|
||||
@ -1044,7 +821,7 @@ async def get_openai_models(
|
||||
url_idx: Optional[int] = None,
|
||||
user=Depends(get_verified_user),
|
||||
):
|
||||
if url_idx == None:
|
||||
if url_idx is None:
|
||||
models = await get_all_models()
|
||||
|
||||
if app.state.config.ENABLE_MODEL_FILTER:
|
||||
@ -1099,7 +876,7 @@ async def get_openai_models(
|
||||
res = r.json()
|
||||
if "error" in res:
|
||||
error_detail = f"Ollama: {res['error']}"
|
||||
except:
|
||||
except Exception:
|
||||
error_detail = f"Ollama: {e}"
|
||||
|
||||
raise HTTPException(
|
||||
@ -1125,7 +902,6 @@ def parse_huggingface_url(hf_url):
|
||||
path_components = parsed_url.path.split("/")
|
||||
|
||||
# Extract the desired output
|
||||
user_repo = "/".join(path_components[1:3])
|
||||
model_file = path_components[-1]
|
||||
|
||||
return model_file
|
||||
@ -1190,7 +966,6 @@ async def download_model(
|
||||
url_idx: Optional[int] = None,
|
||||
user=Depends(get_admin_user),
|
||||
):
|
||||
|
||||
allowed_hosts = ["https://huggingface.co/", "https://github.com/"]
|
||||
|
||||
if not any(form_data.url.startswith(host) for host in allowed_hosts):
|
||||
@ -1199,7 +974,7 @@ async def download_model(
|
||||
detail="Invalid file_url. Only URLs from allowed hosts are permitted.",
|
||||
)
|
||||
|
||||
if url_idx == None:
|
||||
if url_idx is None:
|
||||
url_idx = 0
|
||||
url = app.state.config.OLLAMA_BASE_URLS[url_idx]
|
||||
|
||||
@ -1222,7 +997,7 @@ def upload_model(
|
||||
url_idx: Optional[int] = None,
|
||||
user=Depends(get_admin_user),
|
||||
):
|
||||
if url_idx == None:
|
||||
if url_idx is None:
|
||||
url_idx = 0
|
||||
ollama_url = app.state.config.OLLAMA_BASE_URLS[url_idx]
|
||||
|
||||
|
@@ -17,7 +17,10 @@ from utils.utils import (
    get_verified_user,
    get_admin_user,
)
-from utils.misc import apply_model_params_to_body, apply_model_system_prompt_to_body
+from utils.misc import (
+    apply_model_params_to_body_openai,
+    apply_model_system_prompt_to_body,
+)

from config import (
    SRC_LOG_LEVELS,
@@ -30,7 +33,7 @@ from config import (
    MODEL_FILTER_LIST,
    AppConfig,
)
-from typing import List, Optional, Literal, overload
+from typing import Optional, Literal, overload


import hashlib
@@ -86,11 +89,11 @@ async def update_config(form_data: OpenAIConfigForm, user=Depends(get_admin_user


class UrlsUpdateForm(BaseModel):
-    urls: List[str]
+    urls: list[str]


class KeysUpdateForm(BaseModel):
-    keys: List[str]
+    keys: list[str]


@app.get("/urls")
@@ -368,7 +371,7 @@ async def generate_chat_completion(
        payload["model"] = model_info.base_model_id

    params = model_info.params.model_dump()
-    payload = apply_model_params_to_body(params, payload)
+    payload = apply_model_params_to_body_openai(params, payload)
    payload = apply_model_system_prompt_to_body(params, payload, user)

    model = app.state.MODELS[payload.get("model")]
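Both the Ollama and OpenAI routers now delegate system-prompt handling to apply_model_system_prompt_to_body instead of repeating the prompt_template / insert-or-prepend logic visible in the removed lines above. A sketch of that consolidated behaviour, reconstructed from the removed code; the helper's actual signature and template handling in utils/misc.py may differ:

def apply_model_system_prompt_to_body(params: dict, payload: dict, user=None) -> dict:
    """Render the model's 'system' parameter and merge it into the first system message (sketch)."""
    system = params.get("system")
    if not system or not payload.get("messages"):
        return payload

    # The real code runs the template through prompt_template() with the user's
    # name and location; a plain placeholder replace stands in for that here.
    if user is not None:
        system = system.replace("{{USER_NAME}}", user.name)

    for message in payload["messages"]:
        if message.get("role") == "system":
            message["content"] = system + message["content"]
            break
    else:
        payload["messages"].insert(0, {"role": "system", "content": system})
    return payload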
@ -13,7 +13,7 @@ import os, shutil, logging, re
|
||||
from datetime import datetime
|
||||
|
||||
from pathlib import Path
|
||||
from typing import List, Union, Sequence, Iterator, Any
|
||||
from typing import Union, Sequence, Iterator, Any
|
||||
|
||||
from chromadb.utils.batch_utils import create_batches
|
||||
from langchain_core.documents import Document
|
||||
@ -376,7 +376,7 @@ async def update_reranking_config(
|
||||
try:
|
||||
app.state.config.RAG_RERANKING_MODEL = form_data.reranking_model
|
||||
|
||||
update_reranking_model(app.state.config.RAG_RERANKING_MODEL), True
|
||||
update_reranking_model(app.state.config.RAG_RERANKING_MODEL, True)
|
||||
|
||||
return {
|
||||
"status": True,
|
||||
@ -439,7 +439,7 @@ class ChunkParamUpdateForm(BaseModel):
|
||||
|
||||
|
||||
class YoutubeLoaderConfig(BaseModel):
|
||||
language: List[str]
|
||||
language: list[str]
|
||||
translation: Optional[str] = None
|
||||
|
||||
|
||||
@ -642,7 +642,7 @@ def query_doc_handler(
|
||||
|
||||
|
||||
class QueryCollectionsForm(BaseModel):
|
||||
collection_names: List[str]
|
||||
collection_names: list[str]
|
||||
query: str
|
||||
k: Optional[int] = None
|
||||
r: Optional[float] = None
|
||||
@ -1021,7 +1021,7 @@ class TikaLoader:
|
||||
self.file_path = file_path
|
||||
self.mime_type = mime_type
|
||||
|
||||
def load(self) -> List[Document]:
|
||||
def load(self) -> list[Document]:
|
||||
with open(self.file_path, "rb") as f:
|
||||
data = f.read()
|
||||
|
||||
@ -1185,7 +1185,7 @@ def store_doc(
|
||||
f.close()
|
||||
|
||||
f = open(file_path, "rb")
|
||||
if collection_name == None:
|
||||
if collection_name is None:
|
||||
collection_name = calculate_sha256(f)[:63]
|
||||
f.close()
|
||||
|
||||
@ -1238,7 +1238,7 @@ def process_doc(
|
||||
f = open(file_path, "rb")
|
||||
|
||||
collection_name = form_data.collection_name
|
||||
if collection_name == None:
|
||||
if collection_name is None:
|
||||
collection_name = calculate_sha256(f)[:63]
|
||||
f.close()
|
||||
|
||||
@ -1296,7 +1296,7 @@ def store_text(
|
||||
):
|
||||
|
||||
collection_name = form_data.collection_name
|
||||
if collection_name == None:
|
||||
if collection_name is None:
|
||||
collection_name = calculate_sha256_string(form_data.content)
|
||||
|
||||
result = store_text_in_vector_db(
|
||||
@ -1339,7 +1339,7 @@ def scan_docs_dir(user=Depends(get_admin_user)):
|
||||
sanitized_filename = sanitize_filename(filename)
|
||||
doc = Documents.get_doc_by_name(sanitized_filename)
|
||||
|
||||
if doc == None:
|
||||
if doc is None:
|
||||
doc = Documents.insert_new_doc(
|
||||
user.id,
|
||||
DocumentForm(
|
||||
|
@@ -1,5 +1,5 @@
import logging
-from typing import List, Optional
+from typing import Optional
import requests

from apps.rag.search.main import SearchResult, get_filtered_results
@@ -10,7 +10,7 @@ log.setLevel(SRC_LOG_LEVELS["RAG"])


def search_brave(
-    api_key: str, query: str, count: int, filter_list: Optional[List[str]] = None
+    api_key: str, query: str, count: int, filter_list: Optional[list[str]] = None
) -> list[SearchResult]:
    """Search using Brave's Search API and return the results as a list of SearchResult objects.
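The List[...] to list[...] substitutions that run through this and the following search modules (and the routers above) are the PEP 585 built-in generics available since Python 3.9: typing.List, typing.Dict and friends are deprecated aliases, and the built-in types can be subscripted directly in annotations. For example:

from typing import Optional  # Optional/Union still come from typing (or use "str | None" on 3.10+)

# Python 3.9+: built-in list can be subscripted directly, no typing.List needed.
def filter_names(names: list[str], prefix: str) -> list[str]:
    return [name for name in names if name.startswith(prefix)]

def first_match(names: list[str], prefix: str) -> Optional[str]:
    matches = filter_names(names, prefix)
    return matches[0] if matches else None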
@ -1,5 +1,5 @@
|
||||
import logging
|
||||
from typing import List, Optional
|
||||
from typing import Optional
|
||||
from apps.rag.search.main import SearchResult, get_filtered_results
|
||||
from duckduckgo_search import DDGS
|
||||
from config import SRC_LOG_LEVELS
|
||||
@ -9,7 +9,7 @@ log.setLevel(SRC_LOG_LEVELS["RAG"])
|
||||
|
||||
|
||||
def search_duckduckgo(
|
||||
query: str, count: int, filter_list: Optional[List[str]] = None
|
||||
query: str, count: int, filter_list: Optional[list[str]] = None
|
||||
) -> list[SearchResult]:
|
||||
"""
|
||||
Search using DuckDuckGo's Search API and return the results as a list of SearchResult objects.
|
||||
@ -18,7 +18,7 @@ def search_duckduckgo(
|
||||
count (int): The number of results to return
|
||||
|
||||
Returns:
|
||||
List[SearchResult]: A list of search results
|
||||
list[SearchResult]: A list of search results
|
||||
"""
|
||||
# Use the DDGS context manager to create a DDGS object
|
||||
with DDGS() as ddgs:
|
||||
|
@ -1,6 +1,6 @@
|
||||
import json
|
||||
import logging
|
||||
from typing import List, Optional
|
||||
from typing import Optional
|
||||
import requests
|
||||
|
||||
from apps.rag.search.main import SearchResult, get_filtered_results
|
||||
@ -15,7 +15,7 @@ def search_google_pse(
|
||||
search_engine_id: str,
|
||||
query: str,
|
||||
count: int,
|
||||
filter_list: Optional[List[str]] = None,
|
||||
filter_list: Optional[list[str]] = None,
|
||||
) -> list[SearchResult]:
|
||||
"""Search using Google's Programmable Search Engine API and return the results as a list of SearchResult objects.
|
||||
|
||||
|
@ -17,7 +17,7 @@ def search_jina(query: str, count: int) -> list[SearchResult]:
|
||||
count (int): The number of results to return
|
||||
|
||||
Returns:
|
||||
List[SearchResult]: A list of search results
|
||||
list[SearchResult]: A list of search results
|
||||
"""
|
||||
jina_search_endpoint = "https://s.jina.ai/"
|
||||
headers = {
|
||||
|
@@ -1,7 +1,7 @@
import logging
import requests

-from typing import List, Optional
+from typing import Optional

from apps.rag.search.main import SearchResult, get_filtered_results
from config import SRC_LOG_LEVELS
@@ -14,9 +14,9 @@ def search_searxng(
    query_url: str,
    query: str,
    count: int,
-    filter_list: Optional[List[str]] = None,
+    filter_list: Optional[list[str]] = None,
    **kwargs,
-) -> List[SearchResult]:
+) -> list[SearchResult]:
    """
    Search a SearXNG instance for a given query and return the results as a list of SearchResult objects.

@@ -31,10 +31,10 @@ def search_searxng(
        language (str): Language filter for the search results; e.g., "en-US". Defaults to an empty string.
        safesearch (int): Safe search filter for safer web results; 0 = off, 1 = moderate, 2 = strict. Defaults to 1 (moderate).
        time_range (str): Time range for filtering results by date; e.g., "2023-04-05..today" or "all-time". Defaults to ''.
-        categories: (Optional[List[str]]): Specific categories within which the search should be performed, defaulting to an empty string if not provided.
+        categories: (Optional[list[str]]): Specific categories within which the search should be performed, defaulting to an empty string if not provided.

    Returns:
-        List[SearchResult]: A list of SearchResults sorted by relevance score in descending order.
+        list[SearchResult]: A list of SearchResults sorted by relevance score in descending order.

    Raise:
        requests.exceptions.RequestException: If a request error occurs during the search process.
@ -1,6 +1,6 @@
|
||||
import json
|
||||
import logging
|
||||
from typing import List, Optional
|
||||
from typing import Optional
|
||||
import requests
|
||||
|
||||
from apps.rag.search.main import SearchResult, get_filtered_results
|
||||
@ -11,7 +11,7 @@ log.setLevel(SRC_LOG_LEVELS["RAG"])
|
||||
|
||||
|
||||
def search_serper(
|
||||
api_key: str, query: str, count: int, filter_list: Optional[List[str]] = None
|
||||
api_key: str, query: str, count: int, filter_list: Optional[list[str]] = None
|
||||
) -> list[SearchResult]:
|
||||
"""Search using serper.dev's API and return the results as a list of SearchResult objects.
|
||||
|
||||
|
@ -1,6 +1,6 @@
|
||||
import json
|
||||
import logging
|
||||
from typing import List, Optional
|
||||
from typing import Optional
|
||||
import requests
|
||||
from urllib.parse import urlencode
|
||||
|
||||
@ -19,7 +19,7 @@ def search_serply(
|
||||
limit: int = 10,
|
||||
device_type: str = "desktop",
|
||||
proxy_location: str = "US",
|
||||
filter_list: Optional[List[str]] = None,
|
||||
filter_list: Optional[list[str]] = None,
|
||||
) -> list[SearchResult]:
|
||||
"""Search using serper.dev's API and return the results as a list of SearchResult objects.
|
||||
|
||||
|
@ -1,6 +1,6 @@
|
||||
import json
|
||||
import logging
|
||||
from typing import List, Optional
|
||||
from typing import Optional
|
||||
import requests
|
||||
|
||||
from apps.rag.search.main import SearchResult, get_filtered_results
|
||||
@ -14,7 +14,7 @@ def search_serpstack(
|
||||
api_key: str,
|
||||
query: str,
|
||||
count: int,
|
||||
filter_list: Optional[List[str]] = None,
|
||||
filter_list: Optional[list[str]] = None,
|
||||
https_enabled: bool = True,
|
||||
) -> list[SearchResult]:
|
||||
"""Search using serpstack.com's and return the results as a list of SearchResult objects.
|
||||
|
@ -17,7 +17,7 @@ def search_tavily(api_key: str, query: str, count: int) -> list[SearchResult]:
|
||||
query (str): The query to search for
|
||||
|
||||
Returns:
|
||||
List[SearchResult]: A list of search results
|
||||
list[SearchResult]: A list of search results
|
||||
"""
|
||||
url = "https://api.tavily.com/search"
|
||||
data = {"query": query, "api_key": api_key}
|
||||
|
@ -2,7 +2,7 @@ import os
|
||||
import logging
|
||||
import requests
|
||||
|
||||
from typing import List, Union
|
||||
from typing import Union
|
||||
|
||||
from apps.ollama.main import (
|
||||
generate_ollama_embeddings,
|
||||
@ -142,7 +142,7 @@ def merge_and_sort_query_results(query_results, k, reverse=False):
|
||||
|
||||
|
||||
def query_collection(
|
||||
collection_names: List[str],
|
||||
collection_names: list[str],
|
||||
query: str,
|
||||
embedding_function,
|
||||
k: int,
|
||||
@ -157,13 +157,13 @@ def query_collection(
|
||||
embedding_function=embedding_function,
|
||||
)
|
||||
results.append(result)
|
||||
except:
|
||||
except Exception:
|
||||
pass
|
||||
return merge_and_sort_query_results(results, k=k)
|
||||
|
||||
|
||||
def query_collection_with_hybrid_search(
|
||||
collection_names: List[str],
|
||||
collection_names: list[str],
|
||||
query: str,
|
||||
embedding_function,
|
||||
k: int,
|
||||
@ -182,7 +182,7 @@ def query_collection_with_hybrid_search(
|
||||
r=r,
|
||||
)
|
||||
results.append(result)
|
||||
except:
|
||||
except Exception:
|
||||
pass
|
||||
return merge_and_sort_query_results(results, k=k, reverse=True)
|
||||
|
||||
@ -411,7 +411,7 @@ class ChromaRetriever(BaseRetriever):
|
||||
query: str,
|
||||
*,
|
||||
run_manager: CallbackManagerForRetrieverRun,
|
||||
) -> List[Document]:
|
||||
) -> list[Document]:
|
||||
query_embeddings = self.embedding_function(query)
|
||||
|
||||
results = self.collection.query(
|
||||
|
@@ -22,7 +22,7 @@ from apps.webui.utils import load_function_module_by_id
from utils.misc import (
    openai_chat_chunk_message_template,
    openai_chat_completion_message_template,
-    apply_model_params_to_body,
+    apply_model_params_to_body_openai,
    apply_model_system_prompt_to_body,
)

@@ -46,6 +46,7 @@ from config (
    AppConfig,
    OAUTH_USERNAME_CLAIM,
    OAUTH_PICTURE_CLAIM,
+    OAUTH_EMAIL_CLAIM,
)

from apps.socket.main import get_event_call, get_event_emitter
@@ -84,6 +85,7 @@ app.state.config.ENABLE_COMMUNITY_SHARING = ENABLE_COMMUNITY_SHARING

app.state.config.OAUTH_USERNAME_CLAIM = OAUTH_USERNAME_CLAIM
app.state.config.OAUTH_PICTURE_CLAIM = OAUTH_PICTURE_CLAIM
+app.state.config.OAUTH_EMAIL_CLAIM = OAUTH_EMAIL_CLAIM

app.state.MODELS = {}
app.state.TOOLS = {}
@@ -289,7 +291,7 @@ async def generate_function_chat_completion(form_data, user):
        form_data["model"] = model_info.base_model_id

    params = model_info.params.model_dump()
-    form_data = apply_model_params_to_body(params, form_data)
+    form_data = apply_model_params_to_body_openai(params, form_data)
    form_data = apply_model_system_prompt_to_body(params, form_data, user)

    pipe_id = get_pipe_id(form_data)
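The OAUTH_EMAIL_CLAIM wiring above matches the changelog entry: the claim used to pick the user's e-mail out of the OAuth userinfo payload becomes configurable instead of being hard-coded to "email". A minimal sketch of reading a configurable claim; the userinfo shape, the username default, and the helper itself are assumptions for illustration, not Open WebUI's actual OAuth callback:

import os

# Defaults mirror the new config names; "email" stays the fallback claim.
OAUTH_EMAIL_CLAIM = os.environ.get("OAUTH_EMAIL_CLAIM", "email")
OAUTH_USERNAME_CLAIM = os.environ.get("OAUTH_USERNAME_CLAIM", "name")

def extract_user_fields(userinfo: dict) -> dict:
    """Pick the configured claims out of an OAuth userinfo response (illustrative)."""
    return {
        "email": userinfo.get(OAUTH_EMAIL_CLAIM, "").lower(),
        "name": userinfo.get(OAUTH_USERNAME_CLAIM, ""),
    }

# Example: with OAUTH_EMAIL_CLAIM=upn,
# extract_user_fields({"upn": "jane@example.com", "name": "Jane"})
# returns {"email": "jane@example.com", "name": "Jane"}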
@ -140,7 +140,7 @@ class AuthsTable:
|
||||
return None
|
||||
else:
|
||||
return None
|
||||
except:
|
||||
except Exception:
|
||||
return None
|
||||
|
||||
def authenticate_user_by_api_key(self, api_key: str) -> Optional[UserModel]:
|
||||
@ -152,7 +152,7 @@ class AuthsTable:
|
||||
try:
|
||||
user = Users.get_user_by_api_key(api_key)
|
||||
return user if user else None
|
||||
except:
|
||||
except Exception:
|
||||
return False
|
||||
|
||||
def authenticate_user_by_trusted_header(self, email: str) -> Optional[UserModel]:
|
||||
@ -163,7 +163,7 @@ class AuthsTable:
|
||||
if auth:
|
||||
user = Users.get_user_by_id(auth.id)
|
||||
return user
|
||||
except:
|
||||
except Exception:
|
||||
return None
|
||||
|
||||
def update_user_password_by_id(self, id: str, new_password: str) -> bool:
|
||||
@ -174,7 +174,7 @@ class AuthsTable:
|
||||
)
|
||||
db.commit()
|
||||
return True if result == 1 else False
|
||||
except:
|
||||
except Exception:
|
||||
return False
|
||||
|
||||
def update_email_by_id(self, id: str, email: str) -> bool:
|
||||
@ -183,7 +183,7 @@ class AuthsTable:
|
||||
result = db.query(Auth).filter_by(id=id).update({"email": email})
|
||||
db.commit()
|
||||
return True if result == 1 else False
|
||||
except:
|
||||
except Exception:
|
||||
return False
|
||||
|
||||
def delete_auth_by_id(self, id: str) -> bool:
|
||||
@ -200,7 +200,7 @@ class AuthsTable:
|
||||
return True
|
||||
else:
|
||||
return False
|
||||
except:
|
||||
except Exception:
|
||||
return False
|
||||
|
||||
|
||||
|
@ -1,5 +1,5 @@
|
||||
from pydantic import BaseModel, ConfigDict
|
||||
from typing import List, Union, Optional
|
||||
from typing import Union, Optional
|
||||
|
||||
import json
|
||||
import uuid
|
||||
@ -164,7 +164,7 @@ class ChatTable:
|
||||
db.refresh(chat)
|
||||
|
||||
return self.get_chat_by_id(chat.share_id)
|
||||
except:
|
||||
except Exception:
|
||||
return None
|
||||
|
||||
def delete_shared_chat_by_chat_id(self, chat_id: str) -> bool:
|
||||
@ -175,7 +175,7 @@ class ChatTable:
|
||||
db.commit()
|
||||
|
||||
return True
|
||||
except:
|
||||
except Exception:
|
||||
return False
|
||||
|
||||
def update_chat_share_id_by_id(
|
||||
@ -189,7 +189,7 @@ class ChatTable:
|
||||
db.commit()
|
||||
db.refresh(chat)
|
||||
return ChatModel.model_validate(chat)
|
||||
except:
|
||||
except Exception:
|
||||
return None
|
||||
|
||||
def toggle_chat_archive_by_id(self, id: str) -> Optional[ChatModel]:
|
||||
@ -201,7 +201,7 @@ class ChatTable:
|
||||
db.commit()
|
||||
db.refresh(chat)
|
||||
return ChatModel.model_validate(chat)
|
||||
except:
|
||||
except Exception:
|
||||
return None
|
||||
|
||||
def archive_all_chats_by_user_id(self, user_id: str) -> bool:
|
||||
@ -210,12 +210,12 @@ class ChatTable:
|
||||
db.query(Chat).filter_by(user_id=user_id).update({"archived": True})
|
||||
db.commit()
|
||||
return True
|
||||
except:
|
||||
except Exception:
|
||||
return False
|
||||
|
||||
def get_archived_chat_list_by_user_id(
|
||||
self, user_id: str, skip: int = 0, limit: int = 50
|
||||
) -> List[ChatModel]:
|
||||
) -> list[ChatModel]:
|
||||
with get_db() as db:
|
||||
|
||||
all_chats = (
|
||||
@ -233,7 +233,7 @@ class ChatTable:
|
||||
include_archived: bool = False,
|
||||
skip: int = 0,
|
||||
limit: int = 50,
|
||||
) -> List[ChatModel]:
|
||||
) -> list[ChatModel]:
|
||||
with get_db() as db:
|
||||
query = db.query(Chat).filter_by(user_id=user_id)
|
||||
if not include_archived:
|
||||
@ -251,7 +251,7 @@ class ChatTable:
|
||||
include_archived: bool = False,
|
||||
skip: int = 0,
|
||||
limit: int = -1,
|
||||
) -> List[ChatTitleIdResponse]:
|
||||
) -> list[ChatTitleIdResponse]:
|
||||
with get_db() as db:
|
||||
query = db.query(Chat).filter_by(user_id=user_id)
|
||||
if not include_archived:
|
||||
@ -279,8 +279,8 @@ class ChatTable:
|
||||
]
|
||||
|
||||
def get_chat_list_by_chat_ids(
|
||||
self, chat_ids: List[str], skip: int = 0, limit: int = 50
|
||||
) -> List[ChatModel]:
|
||||
self, chat_ids: list[str], skip: int = 0, limit: int = 50
|
||||
) -> list[ChatModel]:
|
||||
with get_db() as db:
|
||||
all_chats = (
|
||||
db.query(Chat)
|
||||
@ -297,7 +297,7 @@ class ChatTable:
|
||||
|
||||
chat = db.get(Chat, id)
|
||||
return ChatModel.model_validate(chat)
|
||||
except:
|
||||
except Exception:
|
||||
return None
|
||||
|
||||
def get_chat_by_share_id(self, id: str) -> Optional[ChatModel]:
|
||||
@ -319,10 +319,10 @@ class ChatTable:
|
||||
|
||||
chat = db.query(Chat).filter_by(id=id, user_id=user_id).first()
|
||||
return ChatModel.model_validate(chat)
|
||||
except:
|
||||
except Exception:
|
||||
return None
|
||||
|
||||
def get_chats(self, skip: int = 0, limit: int = 50) -> List[ChatModel]:
|
||||
def get_chats(self, skip: int = 0, limit: int = 50) -> list[ChatModel]:
|
||||
with get_db() as db:
|
||||
|
||||
all_chats = (
|
||||
@ -332,7 +332,7 @@ class ChatTable:
|
||||
)
|
||||
return [ChatModel.model_validate(chat) for chat in all_chats]
|
||||
|
||||
def get_chats_by_user_id(self, user_id: str) -> List[ChatModel]:
|
||||
def get_chats_by_user_id(self, user_id: str) -> list[ChatModel]:
|
||||
with get_db() as db:
|
||||
|
||||
all_chats = (
|
||||
@ -342,7 +342,7 @@ class ChatTable:
|
||||
)
|
||||
return [ChatModel.model_validate(chat) for chat in all_chats]
|
||||
|
||||
def get_archived_chats_by_user_id(self, user_id: str) -> List[ChatModel]:
|
||||
def get_archived_chats_by_user_id(self, user_id: str) -> list[ChatModel]:
|
||||
with get_db() as db:
|
||||
|
||||
all_chats = (
|
||||
@ -360,7 +360,7 @@ class ChatTable:
|
||||
db.commit()
|
||||
|
||||
return True and self.delete_shared_chat_by_chat_id(id)
|
||||
except:
|
||||
except Exception:
|
||||
return False
|
||||
|
||||
def delete_chat_by_id_and_user_id(self, id: str, user_id: str) -> bool:
|
||||
@ -371,7 +371,7 @@ class ChatTable:
|
||||
db.commit()
|
||||
|
||||
return True and self.delete_shared_chat_by_chat_id(id)
|
||||
except:
|
||||
except Exception:
|
||||
return False
|
||||
|
||||
def delete_chats_by_user_id(self, user_id: str) -> bool:
|
||||
@ -385,7 +385,7 @@ class ChatTable:
|
||||
db.commit()
|
||||
|
||||
return True
|
||||
except:
|
||||
except Exception:
|
||||
return False
|
||||
|
||||
def delete_shared_chats_by_user_id(self, user_id: str) -> bool:
|
||||
@ -400,7 +400,7 @@ class ChatTable:
|
||||
db.commit()
|
||||
|
||||
return True
|
||||
except:
|
||||
except Exception:
|
||||
return False
|
||||
|
||||
|
||||
|
@ -1,5 +1,5 @@
|
||||
from pydantic import BaseModel, ConfigDict
|
||||
from typing import List, Optional
|
||||
from typing import Optional
|
||||
import time
|
||||
import logging
|
||||
|
||||
@ -93,7 +93,7 @@ class DocumentsTable:
|
||||
return DocumentModel.model_validate(result)
|
||||
else:
|
||||
return None
|
||||
except:
|
||||
except Exception:
|
||||
return None
|
||||
|
||||
def get_doc_by_name(self, name: str) -> Optional[DocumentModel]:
|
||||
@ -102,10 +102,10 @@ class DocumentsTable:
|
||||
|
||||
document = db.query(Document).filter_by(name=name).first()
|
||||
return DocumentModel.model_validate(document) if document else None
|
||||
except:
|
||||
except Exception:
|
||||
return None
|
||||
|
||||
def get_docs(self) -> List[DocumentModel]:
|
||||
def get_docs(self) -> list[DocumentModel]:
|
||||
with get_db() as db:
|
||||
|
||||
return [
|
||||
@ -160,7 +160,7 @@ class DocumentsTable:
|
||||
db.query(Document).filter_by(name=name).delete()
|
||||
db.commit()
|
||||
return True
|
||||
except:
|
||||
except Exception:
|
||||
return False
|
||||
|
||||
|
||||
|
@ -1,5 +1,5 @@
|
||||
from pydantic import BaseModel, ConfigDict
|
||||
from typing import List, Union, Optional
|
||||
from typing import Union, Optional
|
||||
import time
|
||||
import logging
|
||||
|
||||
@ -90,10 +90,10 @@ class FilesTable:
|
||||
try:
|
||||
file = db.get(File, id)
|
||||
return FileModel.model_validate(file)
|
||||
except:
|
||||
except Exception:
|
||||
return None
|
||||
|
||||
def get_files(self) -> List[FileModel]:
|
||||
def get_files(self) -> list[FileModel]:
|
||||
with get_db() as db:
|
||||
|
||||
return [FileModel.model_validate(file) for file in db.query(File).all()]
|
||||
@ -107,7 +107,7 @@ class FilesTable:
|
||||
db.commit()
|
||||
|
||||
return True
|
||||
except:
|
||||
except Exception:
|
||||
return False
|
||||
|
||||
def delete_all_files(self) -> bool:
|
||||
@ -119,7 +119,7 @@ class FilesTable:
|
||||
db.commit()
|
||||
|
||||
return True
|
||||
except:
|
||||
except Exception:
|
||||
return False
|
||||
|
||||
|
||||
|
@ -1,5 +1,5 @@
|
||||
from pydantic import BaseModel, ConfigDict
|
||||
from typing import List, Union, Optional
|
||||
from typing import Union, Optional
|
||||
import time
|
||||
import logging
|
||||
|
||||
@ -122,10 +122,10 @@ class FunctionsTable:
|
||||
|
||||
function = db.get(Function, id)
|
||||
return FunctionModel.model_validate(function)
|
||||
except:
|
||||
except Exception:
|
||||
return None
|
||||
|
||||
def get_functions(self, active_only=False) -> List[FunctionModel]:
|
||||
def get_functions(self, active_only=False) -> list[FunctionModel]:
|
||||
with get_db() as db:
|
||||
|
||||
if active_only:
|
||||
@ -141,7 +141,7 @@ class FunctionsTable:
|
||||
|
||||
def get_functions_by_type(
|
||||
self, type: str, active_only=False
|
||||
) -> List[FunctionModel]:
|
||||
) -> list[FunctionModel]:
|
||||
with get_db() as db:
|
||||
|
||||
if active_only:
|
||||
@ -157,7 +157,7 @@ class FunctionsTable:
|
||||
for function in db.query(Function).filter_by(type=type).all()
|
||||
]
|
||||
|
||||
def get_global_filter_functions(self) -> List[FunctionModel]:
|
||||
def get_global_filter_functions(self) -> list[FunctionModel]:
|
||||
with get_db() as db:
|
||||
|
||||
return [
|
||||
@ -167,7 +167,7 @@ class FunctionsTable:
|
||||
.all()
|
||||
]
|
||||
|
||||
def get_global_action_functions(self) -> List[FunctionModel]:
|
||||
def get_global_action_functions(self) -> list[FunctionModel]:
|
||||
with get_db() as db:
|
||||
return [
|
||||
FunctionModel.model_validate(function)
|
||||
@ -198,7 +198,7 @@ class FunctionsTable:
|
||||
db.commit()
|
||||
db.refresh(function)
|
||||
return self.get_function_by_id(id)
|
||||
except:
|
||||
except Exception:
|
||||
return None
|
||||
|
||||
def get_user_valves_by_id_and_user_id(
|
||||
@ -256,7 +256,7 @@ class FunctionsTable:
|
||||
)
|
||||
db.commit()
|
||||
return self.get_function_by_id(id)
|
||||
except:
|
||||
except Exception:
|
||||
return None
|
||||
|
||||
def deactivate_all_functions(self) -> Optional[bool]:
|
||||
@ -271,7 +271,7 @@ class FunctionsTable:
|
||||
)
|
||||
db.commit()
|
||||
return True
|
||||
except:
|
||||
except Exception:
|
||||
return None
|
||||
|
||||
def delete_function_by_id(self, id: str) -> bool:
|
||||
@ -281,7 +281,7 @@ class FunctionsTable:
|
||||
db.commit()
|
||||
|
||||
return True
|
||||
except:
|
||||
except Exception:
|
||||
return False
|
||||
|
||||
|
||||
|
@ -1,5 +1,5 @@
|
||||
from pydantic import BaseModel, ConfigDict
|
||||
from typing import List, Union, Optional
|
||||
from typing import Union, Optional
|
||||
|
||||
from sqlalchemy import Column, String, BigInteger, Text
|
||||
|
||||
@ -80,25 +80,25 @@ class MemoriesTable:
|
||||
)
|
||||
db.commit()
|
||||
return self.get_memory_by_id(id)
|
||||
except:
|
||||
except Exception:
|
||||
return None
|
||||
|
||||
def get_memories(self) -> List[MemoryModel]:
|
||||
def get_memories(self) -> list[MemoryModel]:
|
||||
with get_db() as db:
|
||||
|
||||
try:
|
||||
memories = db.query(Memory).all()
|
||||
return [MemoryModel.model_validate(memory) for memory in memories]
|
||||
except:
|
||||
except Exception:
|
||||
return None
|
||||
|
||||
def get_memories_by_user_id(self, user_id: str) -> List[MemoryModel]:
|
||||
def get_memories_by_user_id(self, user_id: str) -> list[MemoryModel]:
|
||||
with get_db() as db:
|
||||
|
||||
try:
|
||||
memories = db.query(Memory).filter_by(user_id=user_id).all()
|
||||
return [MemoryModel.model_validate(memory) for memory in memories]
|
||||
except:
|
||||
except Exception:
|
||||
return None
|
||||
|
||||
def get_memory_by_id(self, id: str) -> Optional[MemoryModel]:
|
||||
@ -107,7 +107,7 @@ class MemoriesTable:
|
||||
try:
|
||||
memory = db.get(Memory, id)
|
||||
return MemoryModel.model_validate(memory)
|
||||
except:
|
||||
except Exception:
|
||||
return None
|
||||
|
||||
def delete_memory_by_id(self, id: str) -> bool:
|
||||
@ -119,7 +119,7 @@ class MemoriesTable:
|
||||
|
||||
return True
|
||||
|
||||
except:
|
||||
except Exception:
|
||||
return False
|
||||
|
||||
def delete_memories_by_user_id(self, user_id: str) -> bool:
|
||||
@ -130,7 +130,7 @@ class MemoriesTable:
|
||||
db.commit()
|
||||
|
||||
return True
|
||||
except:
|
||||
except Exception:
|
||||
return False
|
||||
|
||||
def delete_memory_by_id_and_user_id(self, id: str, user_id: str) -> bool:
|
||||
@ -141,7 +141,7 @@ class MemoriesTable:
|
||||
db.commit()
|
||||
|
||||
return True
|
||||
except:
|
||||
except Exception:
|
||||
return False
|
||||
|
||||
|
||||
|
@ -137,7 +137,7 @@ class ModelsTable:
|
||||
print(e)
|
||||
return None
|
||||
|
||||
def get_all_models(self) -> List[ModelModel]:
|
||||
def get_all_models(self) -> list[ModelModel]:
|
||||
with get_db() as db:
|
||||
return [ModelModel.model_validate(model) for model in db.query(Model).all()]
|
||||
|
||||
@ -146,7 +146,7 @@ class ModelsTable:
|
||||
with get_db() as db:
|
||||
model = db.get(Model, id)
|
||||
return ModelModel.model_validate(model)
|
||||
except:
|
||||
except Exception:
|
||||
return None
|
||||
|
||||
def update_model_by_id(self, id: str, model: ModelForm) -> Optional[ModelModel]:
|
||||
@ -175,7 +175,7 @@ class ModelsTable:
|
||||
db.commit()
|
||||
|
||||
return True
|
||||
except:
|
||||
except Exception:
|
||||
return False
|
||||
|
||||
|
||||
|
@ -1,5 +1,5 @@
|
||||
from pydantic import BaseModel, ConfigDict
|
||||
from typing import List, Optional
|
||||
from typing import Optional
|
||||
import time
|
||||
|
||||
from sqlalchemy import String, Column, BigInteger, Text
|
||||
@ -79,10 +79,10 @@ class PromptsTable:
|
||||
|
||||
prompt = db.query(Prompt).filter_by(command=command).first()
|
||||
return PromptModel.model_validate(prompt)
|
||||
except:
|
||||
except Exception:
|
||||
return None
|
||||
|
||||
def get_prompts(self) -> List[PromptModel]:
|
||||
def get_prompts(self) -> list[PromptModel]:
|
||||
with get_db() as db:
|
||||
|
||||
return [
|
||||
@ -101,7 +101,7 @@ class PromptsTable:
|
||||
prompt.timestamp = int(time.time())
|
||||
db.commit()
|
||||
return PromptModel.model_validate(prompt)
|
||||
except:
|
||||
except Exception:
|
||||
return None
|
||||
|
||||
def delete_prompt_by_command(self, command: str) -> bool:
|
||||
@ -112,7 +112,7 @@ class PromptsTable:
|
||||
db.commit()
|
||||
|
||||
return True
|
||||
except:
|
||||
except Exception:
|
||||
return False
|
||||
|
||||
|
||||
|
@ -1,5 +1,5 @@
|
||||
from pydantic import BaseModel, ConfigDict
|
||||
from typing import List, Optional
|
||||
from typing import Optional
|
||||
|
||||
import json
|
||||
import uuid
|
||||
@ -69,11 +69,11 @@ class ChatIdTagForm(BaseModel):
|
||||
|
||||
|
||||
class TagChatIdsResponse(BaseModel):
|
||||
chat_ids: List[str]
|
||||
chat_ids: list[str]
|
||||
|
||||
|
||||
class ChatTagsResponse(BaseModel):
|
||||
tags: List[str]
|
||||
tags: list[str]
|
||||
|
||||
|
||||
class TagTable:
|
||||
@ -109,7 +109,7 @@ class TagTable:
|
||||
self, user_id: str, form_data: ChatIdTagForm
|
||||
) -> Optional[ChatIdTagModel]:
|
||||
tag = self.get_tag_by_name_and_user_id(form_data.tag_name, user_id)
|
||||
if tag == None:
|
||||
if tag is None:
|
||||
tag = self.insert_new_tag(form_data.tag_name, user_id)
|
||||
|
||||
id = str(uuid.uuid4())
|
||||
@ -132,10 +132,10 @@ class TagTable:
|
||||
return ChatIdTagModel.model_validate(result)
|
||||
else:
|
||||
return None
|
||||
except:
|
||||
except Exception:
|
||||
return None
|
||||
|
||||
def get_tags_by_user_id(self, user_id: str) -> List[TagModel]:
|
||||
def get_tags_by_user_id(self, user_id: str) -> list[TagModel]:
|
||||
with get_db() as db:
|
||||
tag_names = [
|
||||
chat_id_tag.tag_name
|
||||
@ -159,7 +159,7 @@ class TagTable:
|
||||
|
||||
def get_tags_by_chat_id_and_user_id(
|
||||
self, chat_id: str, user_id: str
|
||||
) -> List[TagModel]:
|
||||
) -> list[TagModel]:
|
||||
with get_db() as db:
|
||||
|
||||
tag_names = [
|
||||
@ -184,7 +184,7 @@ class TagTable:
|
||||
|
||||
def get_chat_ids_by_tag_name_and_user_id(
|
||||
self, tag_name: str, user_id: str
|
||||
) -> List[ChatIdTagModel]:
|
||||
) -> list[ChatIdTagModel]:
|
||||
with get_db() as db:
|
||||
|
||||
return [
|
||||
|
@ -1,5 +1,5 @@
|
||||
from pydantic import BaseModel, ConfigDict
|
||||
from typing import List, Optional
|
||||
from typing import Optional
|
||||
import time
|
||||
import logging
|
||||
from sqlalchemy import String, Column, BigInteger, Text
|
||||
@ -45,7 +45,7 @@ class ToolModel(BaseModel):
|
||||
user_id: str
|
||||
name: str
|
||||
content: str
|
||||
specs: List[dict]
|
||||
specs: list[dict]
|
||||
meta: ToolMeta
|
||||
updated_at: int # timestamp in epoch
|
||||
created_at: int # timestamp in epoch
|
||||
@ -81,7 +81,7 @@ class ToolValves(BaseModel):
|
||||
class ToolsTable:
|
||||
|
||||
def insert_new_tool(
|
||||
self, user_id: str, form_data: ToolForm, specs: List[dict]
|
||||
self, user_id: str, form_data: ToolForm, specs: list[dict]
|
||||
) -> Optional[ToolModel]:
|
||||
|
||||
with get_db() as db:
|
||||
@ -115,10 +115,10 @@ class ToolsTable:
|
||||
|
||||
tool = db.get(Tool, id)
|
||||
return ToolModel.model_validate(tool)
|
||||
except:
|
||||
except Exception:
|
||||
return None
|
||||
|
||||
def get_tools(self) -> List[ToolModel]:
|
||||
def get_tools(self) -> list[ToolModel]:
|
||||
with get_db() as db:
|
||||
return [ToolModel.model_validate(tool) for tool in db.query(Tool).all()]
|
||||
|
||||
@ -141,7 +141,7 @@ class ToolsTable:
|
||||
)
|
||||
db.commit()
|
||||
return self.get_tool_by_id(id)
|
||||
except:
|
||||
except Exception:
|
||||
return None
|
||||
|
||||
def get_user_valves_by_id_and_user_id(
|
||||
@ -196,7 +196,7 @@ class ToolsTable:
|
||||
tool = db.query(Tool).get(id)
|
||||
db.refresh(tool)
|
||||
return ToolModel.model_validate(tool)
|
||||
except:
|
||||
except Exception:
|
||||
return None
|
||||
|
||||
def delete_tool_by_id(self, id: str) -> bool:
|
||||
@ -206,7 +206,7 @@ class ToolsTable:
|
||||
db.commit()
|
||||
|
||||
return True
|
||||
except:
|
||||
except Exception:
|
||||
return False
|
||||
|
||||
|
||||
|
@ -1,5 +1,5 @@
|
||||
from pydantic import BaseModel, ConfigDict, parse_obj_as
|
||||
from typing import List, Union, Optional
|
||||
from typing import Union, Optional
|
||||
import time
|
||||
|
||||
from sqlalchemy import String, Column, BigInteger, Text
|
||||
@ -125,7 +125,7 @@ class UsersTable:
|
||||
|
||||
user = db.query(User).filter_by(api_key=api_key).first()
|
||||
return UserModel.model_validate(user)
|
||||
except:
|
||||
except Exception:
|
||||
return None
|
||||
|
||||
def get_user_by_email(self, email: str) -> Optional[UserModel]:
|
||||
@ -134,7 +134,7 @@ class UsersTable:
|
||||
|
||||
user = db.query(User).filter_by(email=email).first()
|
||||
return UserModel.model_validate(user)
|
||||
except:
|
||||
except Exception:
|
||||
return None
|
||||
|
||||
def get_user_by_oauth_sub(self, sub: str) -> Optional[UserModel]:
|
||||
@ -143,10 +143,10 @@ class UsersTable:
|
||||
|
||||
user = db.query(User).filter_by(oauth_sub=sub).first()
|
||||
return UserModel.model_validate(user)
|
||||
except:
|
||||
except Exception:
|
||||
return None
|
||||
|
||||
def get_users(self, skip: int = 0, limit: int = 50) -> List[UserModel]:
|
||||
def get_users(self, skip: int = 0, limit: int = 50) -> list[UserModel]:
|
||||
with get_db() as db:
|
||||
users = (
|
||||
db.query(User)
|
||||
@ -164,7 +164,7 @@ class UsersTable:
|
||||
with get_db() as db:
|
||||
user = db.query(User).order_by(User.created_at).first()
|
||||
return UserModel.model_validate(user)
|
||||
except:
|
||||
except Exception:
|
||||
return None
|
||||
|
||||
def update_user_role_by_id(self, id: str, role: str) -> Optional[UserModel]:
|
||||
@ -174,7 +174,7 @@ class UsersTable:
|
||||
db.commit()
|
||||
user = db.query(User).filter_by(id=id).first()
|
||||
return UserModel.model_validate(user)
|
||||
except:
|
||||
except Exception:
|
||||
return None
|
||||
|
||||
def update_user_profile_image_url_by_id(
|
||||
@ -189,7 +189,7 @@ class UsersTable:
|
||||
|
||||
user = db.query(User).filter_by(id=id).first()
|
||||
return UserModel.model_validate(user)
|
||||
except:
|
||||
except Exception:
|
||||
return None
|
||||
|
||||
def update_user_last_active_by_id(self, id: str) -> Optional[UserModel]:
|
||||
@ -203,7 +203,7 @@ class UsersTable:
|
||||
|
||||
user = db.query(User).filter_by(id=id).first()
|
||||
return UserModel.model_validate(user)
|
||||
except:
|
||||
except Exception:
|
||||
return None
|
||||
|
||||
def update_user_oauth_sub_by_id(
|
||||
@ -216,7 +216,7 @@ class UsersTable:
|
||||
|
||||
user = db.query(User).filter_by(id=id).first()
|
||||
return UserModel.model_validate(user)
|
||||
except:
|
||||
except Exception:
|
||||
return None
|
||||
|
||||
def update_user_by_id(self, id: str, updated: dict) -> Optional[UserModel]:
|
||||
@ -245,7 +245,7 @@ class UsersTable:
|
||||
return True
|
||||
else:
|
||||
return False
|
||||
except:
|
||||
except Exception:
|
||||
return False
|
||||
|
||||
def update_user_api_key_by_id(self, id: str, api_key: str) -> str:
|
||||
@ -254,7 +254,7 @@ class UsersTable:
|
||||
result = db.query(User).filter_by(id=id).update({"api_key": api_key})
|
||||
db.commit()
|
||||
return True if result == 1 else False
|
||||
except:
|
||||
except Exception:
|
||||
return False
|
||||
|
||||
def get_user_api_key_by_id(self, id: str) -> Optional[str]:
|
||||
|
@ -1,6 +1,6 @@
|
||||
from fastapi import Depends, Request, HTTPException, status
|
||||
from datetime import datetime, timedelta
|
||||
from typing import List, Union, Optional
|
||||
from typing import Union, Optional
|
||||
from utils.utils import get_verified_user, get_admin_user
|
||||
from fastapi import APIRouter
|
||||
from pydantic import BaseModel
|
||||
@ -40,8 +40,8 @@ router = APIRouter()
|
||||
############################
|
||||
|
||||
|
||||
@router.get("/", response_model=List[ChatTitleIdResponse])
|
||||
@router.get("/list", response_model=List[ChatTitleIdResponse])
|
||||
@router.get("/", response_model=list[ChatTitleIdResponse])
|
||||
@router.get("/list", response_model=list[ChatTitleIdResponse])
|
||||
async def get_session_user_chat_list(
|
||||
user=Depends(get_verified_user), page: Optional[int] = None
|
||||
):
|
||||
@ -80,7 +80,7 @@ async def delete_all_user_chats(request: Request, user=Depends(get_verified_user
|
||||
############################
|
||||
|
||||
|
||||
@router.get("/list/user/{user_id}", response_model=List[ChatTitleIdResponse])
|
||||
@router.get("/list/user/{user_id}", response_model=list[ChatTitleIdResponse])
|
||||
async def get_user_chat_list_by_user_id(
|
||||
user_id: str,
|
||||
user=Depends(get_admin_user),
|
||||
@ -119,7 +119,7 @@ async def create_new_chat(form_data: ChatForm, user=Depends(get_verified_user)):
|
||||
############################
|
||||
|
||||
|
||||
@router.get("/all", response_model=List[ChatResponse])
|
||||
@router.get("/all", response_model=list[ChatResponse])
|
||||
async def get_user_chats(user=Depends(get_verified_user)):
|
||||
return [
|
||||
ChatResponse(**{**chat.model_dump(), "chat": json.loads(chat.chat)})
|
||||
@ -132,7 +132,7 @@ async def get_user_chats(user=Depends(get_verified_user)):
|
||||
############################
|
||||
|
||||
|
||||
@router.get("/all/archived", response_model=List[ChatResponse])
|
||||
@router.get("/all/archived", response_model=list[ChatResponse])
|
||||
async def get_user_archived_chats(user=Depends(get_verified_user)):
|
||||
return [
|
||||
ChatResponse(**{**chat.model_dump(), "chat": json.loads(chat.chat)})
|
||||
@ -145,7 +145,7 @@ async def get_user_archived_chats(user=Depends(get_verified_user)):
|
||||
############################
|
||||
|
||||
|
||||
@router.get("/all/db", response_model=List[ChatResponse])
|
||||
@router.get("/all/db", response_model=list[ChatResponse])
|
||||
async def get_all_user_chats_in_db(user=Depends(get_admin_user)):
|
||||
if not ENABLE_ADMIN_EXPORT:
|
||||
raise HTTPException(
|
||||
@ -163,7 +163,7 @@ async def get_all_user_chats_in_db(user=Depends(get_admin_user)):
|
||||
############################
|
||||
|
||||
|
||||
@router.get("/archived", response_model=List[ChatTitleIdResponse])
|
||||
@router.get("/archived", response_model=list[ChatTitleIdResponse])
|
||||
async def get_archived_session_user_chat_list(
|
||||
user=Depends(get_verified_user), skip: int = 0, limit: int = 50
|
||||
):
|
||||
@ -216,7 +216,7 @@ class TagNameForm(BaseModel):
|
||||
limit: Optional[int] = 50
|
||||
|
||||
|
||||
@router.post("/tags", response_model=List[ChatTitleIdResponse])
|
||||
@router.post("/tags", response_model=list[ChatTitleIdResponse])
|
||||
async def get_user_chat_list_by_tag_name(
|
||||
form_data: TagNameForm, user=Depends(get_verified_user)
|
||||
):
|
||||
@ -241,7 +241,7 @@ async def get_user_chat_list_by_tag_name(
|
||||
############################
|
||||
|
||||
|
||||
@router.get("/tags/all", response_model=List[TagModel])
|
||||
@router.get("/tags/all", response_model=list[TagModel])
|
||||
async def get_all_tags(user=Depends(get_verified_user)):
|
||||
try:
|
||||
tags = Tags.get_tags_by_user_id(user.id)
|
||||
@ -417,7 +417,7 @@ async def delete_shared_chat_by_id(id: str, user=Depends(get_verified_user)):
|
||||
############################
|
||||
|
||||
|
||||
@router.get("/{id}/tags", response_model=List[TagModel])
|
||||
@router.get("/{id}/tags", response_model=list[TagModel])
|
||||
async def get_chat_tags_by_id(id: str, user=Depends(get_verified_user)):
|
||||
tags = Tags.get_tags_by_chat_id_and_user_id(id, user.id)
|
||||
|
||||
|
@ -1,7 +1,7 @@
|
||||
from fastapi import Response, Request
|
||||
from fastapi import Depends, FastAPI, HTTPException, status
|
||||
from datetime import datetime, timedelta
|
||||
from typing import List, Union
|
||||
from typing import Union
|
||||
|
||||
from fastapi import APIRouter
|
||||
from pydantic import BaseModel
|
||||
@ -29,12 +29,12 @@ class SetDefaultModelsForm(BaseModel):
|
||||
|
||||
|
||||
class PromptSuggestion(BaseModel):
|
||||
title: List[str]
|
||||
title: list[str]
|
||||
content: str
|
||||
|
||||
|
||||
class SetDefaultSuggestionsForm(BaseModel):
|
||||
suggestions: List[PromptSuggestion]
|
||||
suggestions: list[PromptSuggestion]
|
||||
|
||||
|
||||
############################
|
||||
@ -50,7 +50,7 @@ async def set_global_default_models(
|
||||
return request.app.state.config.DEFAULT_MODELS
|
||||
|
||||
|
||||
@router.post("/default/suggestions", response_model=List[PromptSuggestion])
|
||||
@router.post("/default/suggestions", response_model=list[PromptSuggestion])
|
||||
async def set_global_default_suggestions(
|
||||
request: Request,
|
||||
form_data: SetDefaultSuggestionsForm,
|
||||
@ -67,10 +67,10 @@ async def set_global_default_suggestions(
|
||||
|
||||
|
||||
class SetBannersForm(BaseModel):
|
||||
banners: List[BannerModel]
|
||||
banners: list[BannerModel]
|
||||
|
||||
|
||||
@router.post("/banners", response_model=List[BannerModel])
|
||||
@router.post("/banners", response_model=list[BannerModel])
|
||||
async def set_banners(
|
||||
request: Request,
|
||||
form_data: SetBannersForm,
|
||||
@ -81,7 +81,7 @@ async def set_banners(
|
||||
return request.app.state.config.BANNERS
|
||||
|
||||
|
||||
@router.get("/banners", response_model=List[BannerModel])
|
||||
@router.get("/banners", response_model=list[BannerModel])
|
||||
async def get_banners(
|
||||
request: Request,
|
||||
user=Depends(get_verified_user),
|
||||
|
@ -1,6 +1,6 @@
|
||||
from fastapi import Depends, FastAPI, HTTPException, status
|
||||
from datetime import datetime, timedelta
|
||||
from typing import List, Union, Optional
|
||||
from typing import Union, Optional
|
||||
|
||||
from fastapi import APIRouter
|
||||
from pydantic import BaseModel
|
||||
@ -24,7 +24,7 @@ router = APIRouter()
|
||||
############################
|
||||
|
||||
|
||||
@router.get("/", response_model=List[DocumentResponse])
|
||||
@router.get("/", response_model=list[DocumentResponse])
|
||||
async def get_documents(user=Depends(get_verified_user)):
|
||||
docs = [
|
||||
DocumentResponse(
|
||||
@ -46,7 +46,7 @@ async def get_documents(user=Depends(get_verified_user)):
|
||||
@router.post("/create", response_model=Optional[DocumentResponse])
|
||||
async def create_new_doc(form_data: DocumentForm, user=Depends(get_admin_user)):
|
||||
doc = Documents.get_doc_by_name(form_data.name)
|
||||
if doc == None:
|
||||
if doc is None:
|
||||
doc = Documents.insert_new_doc(user.id, form_data)
|
||||
|
||||
if doc:
|
||||
@ -102,7 +102,7 @@ class TagItem(BaseModel):
|
||||
|
||||
class TagDocumentForm(BaseModel):
|
||||
name: str
|
||||
tags: List[dict]
|
||||
tags: list[dict]
|
||||
|
||||
|
||||
@router.post("/doc/tags", response_model=Optional[DocumentResponse])
|
||||
|
@ -11,7 +11,7 @@ from fastapi import (
|
||||
|
||||
|
||||
from datetime import datetime, timedelta
|
||||
from typing import List, Union, Optional
|
||||
from typing import Union, Optional
|
||||
from pathlib import Path
|
||||
|
||||
from fastapi import APIRouter
|
||||
@ -104,7 +104,7 @@ def upload_file(file: UploadFile = File(...), user=Depends(get_verified_user)):
|
||||
############################
|
||||
|
||||
|
||||
@router.get("/", response_model=List[FileModel])
|
||||
@router.get("/", response_model=list[FileModel])
|
||||
async def list_files(user=Depends(get_verified_user)):
|
||||
files = Files.get_files()
|
||||
return files
|
||||
|
@ -1,6 +1,6 @@
|
||||
from fastapi import Depends, FastAPI, HTTPException, status, Request
|
||||
from datetime import datetime, timedelta
|
||||
from typing import List, Union, Optional
|
||||
from typing import Union, Optional
|
||||
|
||||
from fastapi import APIRouter
|
||||
from pydantic import BaseModel
|
||||
@ -30,7 +30,7 @@ router = APIRouter()
|
||||
############################
|
||||
|
||||
|
||||
@router.get("/", response_model=List[FunctionResponse])
|
||||
@router.get("/", response_model=list[FunctionResponse])
|
||||
async def get_functions(user=Depends(get_verified_user)):
|
||||
return Functions.get_functions()
|
||||
|
||||
@ -40,7 +40,7 @@ async def get_functions(user=Depends(get_verified_user)):
|
||||
############################
|
||||
|
||||
|
||||
@router.get("/export", response_model=List[FunctionModel])
|
||||
@router.get("/export", response_model=list[FunctionModel])
|
||||
async def get_functions(user=Depends(get_admin_user)):
|
||||
return Functions.get_functions()
|
||||
|
||||
@ -63,7 +63,7 @@ async def create_new_function(
|
||||
form_data.id = form_data.id.lower()
|
||||
|
||||
function = Functions.get_function_by_id(form_data.id)
|
||||
if function == None:
|
||||
if function is None:
|
||||
function_path = os.path.join(FUNCTIONS_DIR, f"{form_data.id}.py")
|
||||
try:
|
||||
with open(function_path, "w") as function_file:
|
||||
@ -235,7 +235,7 @@ async def delete_function_by_id(
|
||||
function_path = os.path.join(FUNCTIONS_DIR, f"{id}.py")
|
||||
try:
|
||||
os.remove(function_path)
|
||||
except:
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
return result
|
||||
|
@ -1,7 +1,7 @@
|
||||
from fastapi import Response, Request
|
||||
from fastapi import Depends, FastAPI, HTTPException, status
|
||||
from datetime import datetime, timedelta
|
||||
from typing import List, Union, Optional
|
||||
from typing import Union, Optional
|
||||
|
||||
from fastapi import APIRouter
|
||||
from pydantic import BaseModel
|
||||
@ -30,7 +30,7 @@ async def get_embeddings(request: Request):
|
||||
############################
|
||||
|
||||
|
||||
@router.get("/", response_model=List[MemoryModel])
|
||||
@router.get("/", response_model=list[MemoryModel])
|
||||
async def get_memories(user=Depends(get_verified_user)):
|
||||
return Memories.get_memories_by_user_id(user.id)
|
||||
|
||||
|
@ -1,6 +1,6 @@
|
||||
from fastapi import Depends, FastAPI, HTTPException, status, Request
|
||||
from datetime import datetime, timedelta
|
||||
from typing import List, Union, Optional
|
||||
from typing import Union, Optional
|
||||
|
||||
from fastapi import APIRouter
|
||||
from pydantic import BaseModel
|
||||
@ -18,7 +18,7 @@ router = APIRouter()
|
||||
###########################
|
||||
|
||||
|
||||
@router.get("/", response_model=List[ModelResponse])
|
||||
@router.get("/", response_model=list[ModelResponse])
|
||||
async def get_models(user=Depends(get_verified_user)):
|
||||
return Models.get_all_models()
|
||||
|
||||
|
@ -1,6 +1,6 @@
|
||||
from fastapi import Depends, FastAPI, HTTPException, status
|
||||
from datetime import datetime, timedelta
|
||||
from typing import List, Union, Optional
|
||||
from typing import Union, Optional
|
||||
|
||||
from fastapi import APIRouter
|
||||
from pydantic import BaseModel
|
||||
@ -18,7 +18,7 @@ router = APIRouter()
|
||||
############################
|
||||
|
||||
|
||||
@router.get("/", response_model=List[PromptModel])
|
||||
@router.get("/", response_model=list[PromptModel])
|
||||
async def get_prompts(user=Depends(get_verified_user)):
|
||||
return Prompts.get_prompts()
|
||||
|
||||
@ -31,7 +31,7 @@ async def get_prompts(user=Depends(get_verified_user)):
|
||||
@router.post("/create", response_model=Optional[PromptModel])
|
||||
async def create_new_prompt(form_data: PromptForm, user=Depends(get_admin_user)):
|
||||
prompt = Prompts.get_prompt_by_command(form_data.command)
|
||||
if prompt == None:
|
||||
if prompt is None:
|
||||
prompt = Prompts.insert_new_prompt(user.id, form_data)
|
||||
|
||||
if prompt:
|
||||
|
@ -1,5 +1,5 @@
|
||||
from fastapi import Depends, HTTPException, status, Request
|
||||
from typing import List, Optional
|
||||
from typing import Optional
|
||||
|
||||
from fastapi import APIRouter
|
||||
|
||||
@ -27,7 +27,7 @@ router = APIRouter()
|
||||
############################
|
||||
|
||||
|
||||
@router.get("/", response_model=List[ToolResponse])
|
||||
@router.get("/", response_model=list[ToolResponse])
|
||||
async def get_toolkits(user=Depends(get_verified_user)):
|
||||
toolkits = [toolkit for toolkit in Tools.get_tools()]
|
||||
return toolkits
|
||||
@ -38,7 +38,7 @@ async def get_toolkits(user=Depends(get_verified_user)):
|
||||
############################
|
||||
|
||||
|
||||
@router.get("/export", response_model=List[ToolModel])
|
||||
@router.get("/export", response_model=list[ToolModel])
|
||||
async def get_toolkits(user=Depends(get_admin_user)):
|
||||
toolkits = [toolkit for toolkit in Tools.get_tools()]
|
||||
return toolkits
|
||||
|
@ -1,7 +1,7 @@
|
||||
from fastapi import Response, Request
|
||||
from fastapi import Depends, FastAPI, HTTPException, status
|
||||
from datetime import datetime, timedelta
|
||||
from typing import List, Union, Optional
|
||||
from typing import Union, Optional
|
||||
|
||||
from fastapi import APIRouter
|
||||
from pydantic import BaseModel
|
||||
@ -39,7 +39,7 @@ router = APIRouter()
|
||||
############################
|
||||
|
||||
|
||||
@router.get("/", response_model=List[UserModel])
|
||||
@router.get("/", response_model=list[UserModel])
|
||||
async def get_users(skip: int = 0, limit: int = 50, user=Depends(get_admin_user)):
|
||||
return Users.get_users(skip, limit)
|
||||
|
||||
|
@@ -17,7 +17,7 @@ from utils.misc import calculate_sha256, get_gravatar_url

from config import OLLAMA_BASE_URLS, DATA_DIR, UPLOAD_DIR, ENABLE_ADMIN_EXPORT
from constants import ERROR_MESSAGES
-from typing import List

router = APIRouter()

@@ -57,7 +57,7 @@ async def get_html_from_markdown(

class ChatForm(BaseModel):
    title: str
-    messages: List[dict]
+    messages: list[dict]


@router.post("/pdf")
@@ -1,6 +1,8 @@
from importlib import util
import os
import re
+import sys
+import subprocess

from config import TOOLS_DIR, FUNCTIONS_DIR

@@ -52,6 +54,7 @@ def load_toolkit_module_by_id(toolkit_id):
    frontmatter = extract_frontmatter(toolkit_path)

    try:
+        install_frontmatter_requirements(frontmatter.get("requirements", ""))
        spec.loader.exec_module(module)
        print(f"Loaded module: {module.__name__}")
        if hasattr(module, "Tools"):
@@ -73,6 +76,7 @@ def load_function_module_by_id(function_id):
    frontmatter = extract_frontmatter(function_path)

    try:
+        install_frontmatter_requirements(frontmatter.get("requirements", ""))
        spec.loader.exec_module(module)
        print(f"Loaded module: {module.__name__}")
        if hasattr(module, "Pipe"):
@@ -88,3 +92,13 @@ def load_function_module_by_id(function_id):
        # Move the file to the error folder
        os.rename(function_path, f"{function_path}.error")
        raise e


+def install_frontmatter_requirements(requirements):
+    if requirements:
+        req_list = [req.strip() for req in requirements.split(",")]
+        for req in req_list:
+            print(f"Installing requirement: {req}")
+            subprocess.check_call([sys.executable, "-m", "pip", "install", req])
+    else:
+        print("No requirements found in frontmatter.")
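With this change, a Tool or Function file can declare extra Python packages in its frontmatter, and the loader installs them with pip before executing the module. A rough sketch of what that looks like from a tool author's point of view; the frontmatter layout and the naive parsing below are illustrative assumptions, and only the comma-splitting and the pip invocation mirror the helper above:

```python
# Hedged sketch: declaring and resolving frontmatter "requirements".
# extract_frontmatter() in Open WebUI is more involved; this is a stand-in.
import sys

EXAMPLE_TOOL_SOURCE = '''"""
title: Example Tool
requirements: requests, beautifulsoup4
"""
'''


def parse_requirements(frontmatter_value: str) -> list[str]:
    # "requests, beautifulsoup4" -> ["requests", "beautifulsoup4"]
    return [req.strip() for req in frontmatter_value.split(",") if req.strip()]


# Naive extraction of the requirements line, for illustration only.
line = next(l for l in EXAMPLE_TOOL_SOURCE.splitlines() if l.startswith("requirements:"))
reqs = parse_requirements(line.removeprefix("requirements:"))

for req in reqs:
    # The real loader runs:
    # subprocess.check_call([sys.executable, "-m", "pip", "install", req])
    print(f"Would run: {sys.executable} -m pip install {req}")
```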
@@ -104,7 +104,7 @@ ENV = os.environ.get("ENV", "dev")

try:
    PACKAGE_DATA = json.loads((BASE_DIR / "package.json").read_text())
-except:
+except Exception:
    try:
        PACKAGE_DATA = {"version": importlib.metadata.version("open-webui")}
    except importlib.metadata.PackageNotFoundError:
@@ -137,7 +137,7 @@ try:
    with open(str(changelog_path.absolute()), "r", encoding="utf8") as file:
        changelog_content = file.read()

-except:
+except Exception:
    changelog_content = (pkgutil.get_data("open_webui", "CHANGELOG.md") or b"").decode()


@@ -202,12 +202,12 @@ if RESET_CONFIG_ON_START:
        os.remove(f"{DATA_DIR}/config.json")
        with open(f"{DATA_DIR}/config.json", "w") as f:
            f.write("{}")
-    except:
+    except Exception:
        pass

try:
    CONFIG_DATA = json.loads((DATA_DIR / "config.json").read_text())
-except:
+except Exception:
    CONFIG_DATA = {}
@@ -433,6 +433,12 @@ OAUTH_PICTURE_CLAIM = PersistentConfig(
    os.environ.get("OAUTH_PICTURE_CLAIM", "picture"),
)

+OAUTH_EMAIL_CLAIM = PersistentConfig(
+    "OAUTH_EMAIL_CLAIM",
+    "oauth.oidc.email_claim",
+    os.environ.get("OAUTH_EMAIL_CLAIM", "email"),
+)
+

def load_oauth_providers():
    OAUTH_PROVIDERS.clear()
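The new `OAUTH_EMAIL_CLAIM` setting defaults to `email` and can be overridden for identity providers that expose the address under a different claim. A minimal sketch of the environment-variable side only; `PersistentConfig` is an Open WebUI wrapper, so a plain `os.environ.get` stands in for it here:

```python
# Hedged sketch: default/override behaviour of OAUTH_EMAIL_CLAIM.
import os

# e.g. export OAUTH_EMAIL_CLAIM="preferred_username" before starting the server
email_claim = os.environ.get("OAUTH_EMAIL_CLAIM", "email")
print(f"OAuth email will be read from the '{email_claim}' claim")
```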
@@ -641,7 +647,7 @@ if AIOHTTP_CLIENT_TIMEOUT == "":
else:
    try:
        AIOHTTP_CLIENT_TIMEOUT = int(AIOHTTP_CLIENT_TIMEOUT)
-    except:
+    except Exception:
        AIOHTTP_CLIENT_TIMEOUT = 300


@@ -721,7 +727,7 @@ try:
    OPENAI_API_KEY = OPENAI_API_KEYS.value[
        OPENAI_API_BASE_URLS.value.index("https://api.openai.com/v1")
    ]
-except:
+except Exception:
    pass

OPENAI_API_BASE_URL = "https://api.openai.com/v1"
@@ -1037,7 +1043,7 @@ RAG_EMBEDDING_MODEL = PersistentConfig(
    "rag.embedding_model",
    os.environ.get("RAG_EMBEDDING_MODEL", "sentence-transformers/all-MiniLM-L6-v2"),
)
-log.info(f"Embedding model set: {RAG_EMBEDDING_MODEL.value}"),
+log.info(f"Embedding model set: {RAG_EMBEDDING_MODEL.value}")

RAG_EMBEDDING_MODEL_AUTO_UPDATE = (
    os.environ.get("RAG_EMBEDDING_MODEL_AUTO_UPDATE", "").lower() == "true"
@@ -1059,7 +1065,7 @@ RAG_RERANKING_MODEL = PersistentConfig(
    os.environ.get("RAG_RERANKING_MODEL", ""),
)
if RAG_RERANKING_MODEL.value != "":
-    log.info(f"Reranking model set: {RAG_RERANKING_MODEL.value}"),
+    log.info(f"Reranking model set: {RAG_RERANKING_MODEL.value}")

RAG_RERANKING_MODEL_AUTO_UPDATE = (
    os.environ.get("RAG_RERANKING_MODEL_AUTO_UPDATE", "").lower() == "true"
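The two `log.info(...)` fixes above only drop a stray trailing comma. The comma is harmless at runtime, but it turns the statement into a discarded one-element tuple, which linters flag and readers easily misparse. A tiny self-contained illustration:

```python
# Illustration of the trailing-comma fix; the log message is arbitrary.
import logging

logging.basicConfig(level=logging.INFO)
log = logging.getLogger(__name__)

value = log.info("embedding model set"),   # old form: value == (None,)
print(type(value), value)                  # -> <class 'tuple'> (None,)

log.info("embedding model set")            # fixed form: a plain statement
```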
@@ -51,7 +51,7 @@ from apps.webui.internal.db import Session


from pydantic import BaseModel
-from typing import List, Optional
+from typing import Optional

from apps.webui.models.auths import Auths
from apps.webui.models.models import Models
@@ -1883,7 +1883,7 @@ async def get_pipeline_valves(
            res = r.json()
            if "detail" in res:
                detail = res["detail"]
-        except:
+        except Exception:
            pass

        raise HTTPException(
@@ -2027,7 +2027,7 @@ async def get_model_filter_config(user=Depends(get_admin_user)):

class ModelFilterConfigForm(BaseModel):
    enabled: bool
-    models: List[str]
+    models: list[str]


@app.post("/api/config/model/filter")
@@ -2158,7 +2158,8 @@ async def oauth_callback(provider: str, request: Request, response: Response):
        log.warning(f"OAuth callback failed, sub is missing: {user_data}")
        raise HTTPException(400, detail=ERROR_MESSAGES.INVALID_CRED)
    provider_sub = f"{provider}@{sub}"
-    email = user_data.get("email", "").lower()
+    email_claim = webui_app.state.config.OAUTH_EMAIL_CLAIM
+    email = user_data.get(email_claim, "").lower()
    # We currently mandate that email addresses are provided
    if not email:
        log.warning(f"OAuth callback failed, email is missing: {user_data}")
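In the OAuth callback the email is now read from the configured claim instead of a hard-coded `"email"` key. A hedged sketch of the lookup, with a made-up token payload and claim name; only the `.get(email_claim, "").lower()` pattern comes from the diff above:

```python
# Hypothetical OAuth userinfo payload whose token has no "email" claim.
user_data = {
    "sub": "123456",
    "preferred_username": "Jane.Doe@example.com",
}

email_claim = "preferred_username"  # e.g. supplied via OAUTH_EMAIL_CLAIM
email = user_data.get(email_claim, "").lower()

if not email:
    raise ValueError("OAuth callback failed, email is missing")
print(email)  # -> jane.doe@example.com
```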
@ -11,7 +11,7 @@ python-jose==3.3.0
|
||||
passlib[bcrypt]==1.7.4
|
||||
|
||||
requests==2.32.3
|
||||
aiohttp==3.9.5
|
||||
aiohttp==3.10.2
|
||||
|
||||
sqlalchemy==2.0.31
|
||||
alembic==1.13.2
|
||||
@ -34,12 +34,12 @@ anthropic
|
||||
google-generativeai==0.7.2
|
||||
tiktoken
|
||||
|
||||
langchain==0.2.11
|
||||
langchain==0.2.12
|
||||
langchain-community==0.2.10
|
||||
langchain-chroma==0.1.2
|
||||
|
||||
fake-useragent==1.5.1
|
||||
chromadb==0.5.4
|
||||
chromadb==0.5.5
|
||||
sentence-transformers==3.0.1
|
||||
pypdf==4.3.1
|
||||
docx2txt==0.8
|
||||
@ -62,11 +62,11 @@ rank-bm25==0.2.2
|
||||
|
||||
faster-whisper==1.0.2
|
||||
|
||||
PyJWT[crypto]==2.8.0
|
||||
PyJWT[crypto]==2.9.0
|
||||
authlib==1.3.1
|
||||
|
||||
black==24.8.0
|
||||
langfuse==2.39.2
|
||||
langfuse==2.43.3
|
||||
youtube-transcript-api==0.6.2
|
||||
pytube==15.0.0
|
||||
|
||||
@ -76,5 +76,5 @@ duckduckgo-search~=6.2.1
|
||||
|
||||
## Tests
|
||||
docker~=7.1.0
|
||||
pytest~=8.2.2
|
||||
pytest~=8.3.2
|
||||
pytest-docker~=3.1.1
|
||||
|
@ -30,7 +30,6 @@ if [[ "${USE_CUDA_DOCKER,,}" == "true" ]]; then
|
||||
export LD_LIBRARY_PATH="$LD_LIBRARY_PATH:/usr/local/lib/python3.11/site-packages/torch/lib:/usr/local/lib/python3.11/site-packages/nvidia/cudnn/lib"
|
||||
fi
|
||||
|
||||
|
||||
# Check if SPACE_ID is set, if so, configure for space
|
||||
if [ -n "$SPACE_ID" ]; then
|
||||
echo "Configuring for HuggingFace Space deployment"
|
||||
|
@@ -2,14 +2,14 @@ from pathlib import Path
import hashlib
import re
from datetime import timedelta
-from typing import Optional, List, Tuple
+from typing import Optional, Callable
import uuid
import time

from utils.task import prompt_template


-def get_last_user_message_item(messages: List[dict]) -> Optional[dict]:
+def get_last_user_message_item(messages: list[dict]) -> Optional[dict]:
    for message in reversed(messages):
        if message["role"] == "user":
            return message
@@ -26,7 +26,7 @@ def get_content_from_message(message: dict) -> Optional[str]:
    return None


-def get_last_user_message(messages: List[dict]) -> Optional[str]:
+def get_last_user_message(messages: list[dict]) -> Optional[str]:
    message = get_last_user_message_item(messages)
    if message is None:
        return None
@@ -34,31 +34,31 @@ def get_last_user_message(messages: List[dict]) -> Optional[str]:
    return get_content_from_message(message)


-def get_last_assistant_message(messages: List[dict]) -> Optional[str]:
+def get_last_assistant_message(messages: list[dict]) -> Optional[str]:
    for message in reversed(messages):
        if message["role"] == "assistant":
            return get_content_from_message(message)
    return None


-def get_system_message(messages: List[dict]) -> Optional[dict]:
+def get_system_message(messages: list[dict]) -> Optional[dict]:
    for message in messages:
        if message["role"] == "system":
            return message
    return None


-def remove_system_message(messages: List[dict]) -> List[dict]:
+def remove_system_message(messages: list[dict]) -> list[dict]:
    return [message for message in messages if message["role"] != "system"]


-def pop_system_message(messages: List[dict]) -> Tuple[Optional[dict], List[dict]]:
+def pop_system_message(messages: list[dict]) -> tuple[Optional[dict], list[dict]]:
    return get_system_message(messages), remove_system_message(messages)


def prepend_to_first_user_message_content(
-    content: str, messages: List[dict]
-) -> List[dict]:
+    content: str, messages: list[dict]
+) -> list[dict]:
    for message in messages:
        if message["role"] == "user":
            if isinstance(message["content"], list):
@@ -71,7 +71,7 @@ def prepend_to_first_user_message_content(
    return messages


-def add_or_update_system_message(content: str, messages: List[dict]):
+def add_or_update_system_message(content: str, messages: list[dict]):
    """
    Adds a new system message at the beginning of the messages list
    or updates the existing system message at the beginning.
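These helpers are pure functions over OpenAI-style message lists, so they are easy to exercise in isolation. A small usage sketch; it assumes the helpers are imported from `utils.misc`, and the chat history plus the values in the comments are illustrative:

```python
# Hand-made chat history for demonstration only.
messages = [
    {"role": "system", "content": "You are a concise assistant."},
    {"role": "user", "content": "Hi there"},
    {"role": "assistant", "content": "Hello!"},
    {"role": "user", "content": "Summarise our chat"},
]

# Split off the system message without mutating the original list.
system_message, rest = pop_system_message(messages)
print(system_message["content"])        # -> "You are a concise assistant."
print([m["role"] for m in rest])        # -> ['user', 'assistant', 'user']

# Most recent user turn, as plain text.
print(get_last_user_message(messages))  # -> "Summarise our chat"
```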
@@ -135,10 +135,21 @@ def apply_model_system_prompt_to_body(params: dict, form_data: dict, user) -> di


# inplace function: form_data is modified
-def apply_model_params_to_body(params: dict, form_data: dict) -> dict:
+def apply_model_params_to_body(
+    params: dict, form_data: dict, mappings: dict[str, Callable]
+) -> dict:
    if not params:
        return form_data

+    for key, cast_func in mappings.items():
+        if (value := params.get(key)) is not None:
+            form_data[key] = cast_func(value)
+
+    return form_data
+
+
+# inplace function: form_data is modified
+def apply_model_params_to_body_openai(params: dict, form_data: dict) -> dict:
    mappings = {
        "temperature": float,
        "top_p": int,
@@ -147,10 +158,40 @@ def apply_model_params_to_body(params: dict, form_data: dict) -> dict:
        "seed": lambda x: x,
        "stop": lambda x: [bytes(s, "utf-8").decode("unicode_escape") for s in x],
    }
+    return apply_model_params_to_body(params, form_data, mappings)

-    for key, cast_func in mappings.items():
-        if (value := params.get(key)) is not None:
-            form_data[key] = cast_func(value)

+def apply_model_params_to_body_ollama(params: dict, form_data: dict) -> dict:
+    opts = [
+        "temperature",
+        "top_p",
+        "seed",
+        "mirostat",
+        "mirostat_eta",
+        "mirostat_tau",
+        "num_ctx",
+        "num_batch",
+        "num_keep",
+        "repeat_last_n",
+        "tfs_z",
+        "top_k",
+        "min_p",
+        "use_mmap",
+        "use_mlock",
+        "num_thread",
+        "num_gpu",
+    ]
+    mappings = {i: lambda x: x for i in opts}
+    form_data = apply_model_params_to_body(params, form_data, mappings)
+
+    name_differences = {
+        "max_tokens": "num_predict",
+        "frequency_penalty": "repeat_penalty",
+    }
+
+    for key, value in name_differences.items():
+        if (param := params.get(key, None)) is not None:
+            form_data[value] = param
+
+    return form_data
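The refactor above funnels both the OpenAI and Ollama paths through one generic `apply_model_params_to_body(params, form_data, mappings)` helper; only the mapping table and a few key renames differ. A hedged usage sketch, assuming the functions are imported from `utils.misc`; the parameter values are illustrative and only keys visible in the hunk are used:

```python
# Illustrative per-model parameters as they might arrive from the UI.
params = {"temperature": "0.2", "top_p": 1, "seed": 42, "max_tokens": 128}

# OpenAI-style request body: known keys are cast via the mappings table.
openai_body = apply_model_params_to_body_openai(params, {"model": "my-model"})
print(openai_body["temperature"])          # -> 0.2 (cast to float)

# Ollama-style options: same generic helper plus renames such as
# max_tokens -> num_predict.
ollama_options = apply_model_params_to_body_ollama(params, {})
print(ollama_options.get("num_predict"))   # -> 128
```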
@@ -1,5 +1,5 @@
import inspect
-from typing import get_type_hints, List, Dict, Any
+from typing import get_type_hints


def doc_to_dict(docstring):
@@ -16,7 +16,7 @@ def doc_to_dict(docstring):
    return ret_dict


-def get_tools_specs(tools) -> List[dict]:
+def get_tools_specs(tools) -> list[dict]:
    function_list = [
        {"name": func, "function": getattr(tools, func)}
        for func in dir(tools)
@ -38,9 +38,10 @@ describe('Settings', () => {
|
||||
// User's message should be visible
|
||||
cy.get('.chat-user').should('exist');
|
||||
// Wait for the response
|
||||
cy.get('.chat-assistant', { timeout: 120_000 }) // .chat-assistant is created after the first token is received
|
||||
.find('div[aria-label="Generation Info"]', { timeout: 120_000 }) // Generation Info is created after the stop token is received
|
||||
.should('exist');
|
||||
// .chat-assistant is created after the first token is received
|
||||
cy.get('.chat-assistant', { timeout: 10_000 }).should('exist');
|
||||
// Generation Info is created after the stop token is received
|
||||
cy.get('div[aria-label="Generation Info"]', { timeout: 120_000 }).should('exist');
|
||||
});
|
||||
|
||||
it('user can share chat', () => {
|
||||
@ -57,21 +58,24 @@ describe('Settings', () => {
|
||||
// User's message should be visible
|
||||
cy.get('.chat-user').should('exist');
|
||||
// Wait for the response
|
||||
cy.get('.chat-assistant', { timeout: 120_000 }) // .chat-assistant is created after the first token is received
|
||||
.find('div[aria-label="Generation Info"]', { timeout: 120_000 }) // Generation Info is created after the stop token is received
|
||||
.should('exist');
|
||||
// .chat-assistant is created after the first token is received
|
||||
cy.get('.chat-assistant', { timeout: 10_000 }).should('exist');
|
||||
// Generation Info is created after the stop token is received
|
||||
cy.get('div[aria-label="Generation Info"]', { timeout: 120_000 }).should('exist');
|
||||
// spy on requests
|
||||
const spy = cy.spy();
|
||||
cy.intercept('GET', '/api/v1/chats/*', spy);
|
||||
cy.intercept('POST', '/api/v1/chats/**/share', spy);
|
||||
// Open context menu
|
||||
cy.get('#chat-context-menu-button').click();
|
||||
// Click share button
|
||||
cy.get('#chat-share-button').click();
|
||||
// Check if the share dialog is visible
|
||||
cy.get('#copy-and-share-chat-button').should('exist');
|
||||
cy.wrap({}, { timeout: 5000 }).should(() => {
|
||||
// Check if the request was made twice (once for to replace chat object and once more due to change event)
|
||||
expect(spy).to.be.callCount(2);
|
||||
// Click the copy button
|
||||
cy.get('#copy-and-share-chat-button').click();
|
||||
cy.wrap({}, { timeout: 5_000 }).should(() => {
|
||||
// Check if the share request was made
|
||||
expect(spy).to.be.callCount(1);
|
||||
});
|
||||
});
|
||||
|
||||
@ -89,9 +93,10 @@ describe('Settings', () => {
|
||||
// User's message should be visible
|
||||
cy.get('.chat-user').should('exist');
|
||||
// Wait for the response
|
||||
cy.get('.chat-assistant', { timeout: 120_000 }) // .chat-assistant is created after the first token is received
|
||||
.find('div[aria-label="Generation Info"]', { timeout: 120_000 }) // Generation Info is created after the stop token is received
|
||||
.should('exist');
|
||||
// .chat-assistant is created after the first token is received
|
||||
cy.get('.chat-assistant', { timeout: 10_000 }).should('exist');
|
||||
// Generation Info is created after the stop token is received
|
||||
cy.get('div[aria-label="Generation Info"]', { timeout: 120_000 }).should('exist');
|
||||
// Click on the generate image button
|
||||
cy.get('[aria-label="Generate Image"]').click();
|
||||
// Wait for image to be visible
|
||||
|
@ -22,7 +22,6 @@ Noticed something off? Have an idea? Check our [Issues tab](https://github.com/o
|
||||
> [!IMPORTANT]
|
||||
>
|
||||
> - **Template Compliance:** Please be aware that failure to follow the provided issue template, or not providing the requested information at all, will likely result in your issue being closed without further consideration. This approach is critical for maintaining the manageability and integrity of issue tracking.
|
||||
>
|
||||
> - **Detail is Key:** To ensure your issue is understood and can be effectively addressed, it's imperative to include comprehensive details. Descriptions should be clear, including steps to reproduce, expected outcomes, and actual results. Lack of sufficient detail may hinder our ability to resolve your issue.
|
||||
|
||||
### 🧭 Scope of Support
|
||||
|
982  package-lock.json (generated): file diff suppressed because it is too large
38   package.json
@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "open-webui",
|
||||
"version": "0.3.12",
|
||||
"version": "0.3.13",
|
||||
"private": true,
|
||||
"scripts": {
|
||||
"dev": "npm run pyodide:fetch && vite dev --host",
|
||||
@ -20,30 +20,31 @@
|
||||
"pyodide:fetch": "node scripts/prepare-pyodide.js"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@sveltejs/adapter-auto": "^2.0.0",
|
||||
"@sveltejs/adapter-static": "^2.0.3",
|
||||
"@sveltejs/kit": "^1.30.0",
|
||||
"@tailwindcss/typography": "^0.5.10",
|
||||
"@sveltejs/adapter-auto": "3.2.2",
|
||||
"@sveltejs/adapter-static": "^3.0.2",
|
||||
"@sveltejs/kit": "^2.5.20",
|
||||
"@sveltejs/vite-plugin-svelte": "^3.1.1",
|
||||
"@tailwindcss/typography": "^0.5.13",
|
||||
"@types/bun": "latest",
|
||||
"@typescript-eslint/eslint-plugin": "^6.17.0",
|
||||
"@typescript-eslint/parser": "^6.17.0",
|
||||
"autoprefixer": "^10.4.16",
|
||||
"cypress": "^13.8.1",
|
||||
"eslint": "^8.56.0",
|
||||
"eslint-config-prettier": "^8.5.0",
|
||||
"eslint-plugin-cypress": "^3.0.2",
|
||||
"eslint-plugin-svelte": "^2.30.0",
|
||||
"i18next-parser": "^8.13.0",
|
||||
"eslint-config-prettier": "^9.1.0",
|
||||
"eslint-plugin-cypress": "^3.4.0",
|
||||
"eslint-plugin-svelte": "^2.43.0",
|
||||
"i18next-parser": "^9.0.1",
|
||||
"postcss": "^8.4.31",
|
||||
"prettier": "^2.8.0",
|
||||
"prettier-plugin-svelte": "^2.10.1",
|
||||
"svelte": "^4.0.5",
|
||||
"svelte-check": "^3.4.3",
|
||||
"prettier": "^3.3.3",
|
||||
"prettier-plugin-svelte": "^3.2.6",
|
||||
"svelte": "^4.2.18",
|
||||
"svelte-check": "^3.8.5",
|
||||
"svelte-confetti": "^1.3.2",
|
||||
"tailwindcss": "^3.3.3",
|
||||
"tslib": "^2.4.1",
|
||||
"typescript": "^5.0.0",
|
||||
"vite": "^4.4.2",
|
||||
"typescript": "^5.5.4",
|
||||
"vite": "^5.3.5",
|
||||
"vitest": "^1.6.0"
|
||||
},
|
||||
"type": "module",
|
||||
@ -52,7 +53,7 @@
|
||||
"@codemirror/lang-python": "^6.1.6",
|
||||
"@codemirror/theme-one-dark": "^6.1.2",
|
||||
"@pyscript/core": "^0.4.32",
|
||||
"@sveltejs/adapter-node": "^1.3.1",
|
||||
"@sveltejs/adapter-node": "^2.0.0",
|
||||
"async": "^3.2.5",
|
||||
"bits-ui": "^0.19.7",
|
||||
"codemirror": "^6.0.1",
|
||||
@ -69,6 +70,7 @@
|
||||
"js-sha256": "^0.10.1",
|
||||
"katex": "^0.16.9",
|
||||
"marked": "^9.1.0",
|
||||
"marked-katex-extension": "^5.1.1",
|
||||
"mermaid": "^10.9.1",
|
||||
"pyodide": "^0.26.1",
|
||||
"socket.io-client": "^4.2.0",
|
||||
@ -77,5 +79,9 @@
|
||||
"tippy.js": "^6.3.7",
|
||||
"turndown": "^7.2.0",
|
||||
"uuid": "^9.0.1"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=18.13.0 <=21.x.x",
|
||||
"npm": ">=6.0.0"
|
||||
}
|
||||
}
|
||||
|
@ -1,6 +1,6 @@
|
||||
[project]
|
||||
name = "open-webui"
|
||||
description = "Open WebUI (Formerly Ollama WebUI)"
|
||||
description = "Open WebUI"
|
||||
authors = [
|
||||
{ name = "Timothy Jaeryang Baek", email = "tim@openwebui.com" }
|
||||
]
|
||||
@ -19,7 +19,7 @@ dependencies = [
|
||||
"passlib[bcrypt]==1.7.4",
|
||||
|
||||
"requests==2.32.3",
|
||||
"aiohttp==3.9.5",
|
||||
"aiohttp==3.10.2",
|
||||
|
||||
"sqlalchemy==2.0.31",
|
||||
"alembic==1.13.2",
|
||||
@ -41,12 +41,12 @@ dependencies = [
|
||||
"google-generativeai==0.7.2",
|
||||
"tiktoken",
|
||||
|
||||
"langchain==0.2.11",
|
||||
"langchain==0.2.12",
|
||||
"langchain-community==0.2.10",
|
||||
"langchain-chroma==0.1.2",
|
||||
|
||||
"fake-useragent==1.5.1",
|
||||
"chromadb==0.5.4",
|
||||
"chromadb==0.5.5",
|
||||
"sentence-transformers==3.0.1",
|
||||
"pypdf==4.3.1",
|
||||
"docx2txt==0.8",
|
||||
@ -69,11 +69,11 @@ dependencies = [
|
||||
|
||||
"faster-whisper==1.0.2",
|
||||
|
||||
"PyJWT[crypto]==2.8.0",
|
||||
"PyJWT[crypto]==2.9.0",
|
||||
"authlib==1.3.1",
|
||||
|
||||
"black==24.8.0",
|
||||
"langfuse==2.39.2",
|
||||
"langfuse==2.43.3",
|
||||
"youtube-transcript-api==0.6.2",
|
||||
"pytube==15.0.0",
|
||||
|
||||
|
@ -10,7 +10,9 @@
|
||||
# universal: false
|
||||
|
||||
-e file:.
|
||||
aiohttp==3.9.5
|
||||
aiohappyeyeballs==2.3.5
|
||||
# via aiohttp
|
||||
aiohttp==3.10.2
|
||||
# via langchain
|
||||
# via langchain-community
|
||||
# via open-webui
|
||||
@ -84,9 +86,9 @@ chardet==5.2.0
|
||||
charset-normalizer==3.3.2
|
||||
# via requests
|
||||
# via unstructured-client
|
||||
chroma-hnswlib==0.7.5
|
||||
chroma-hnswlib==0.7.6
|
||||
# via chromadb
|
||||
chromadb==0.5.4
|
||||
chromadb==0.5.5
|
||||
# via langchain-chroma
|
||||
# via open-webui
|
||||
click==8.1.7
|
||||
@ -269,7 +271,7 @@ jsonpointer==2.4
|
||||
# via jsonpatch
|
||||
kubernetes==29.0.0
|
||||
# via chromadb
|
||||
langchain==0.2.11
|
||||
langchain==0.2.12
|
||||
# via langchain-community
|
||||
# via open-webui
|
||||
langchain-chroma==0.1.2
|
||||
@ -285,7 +287,7 @@ langchain-text-splitters==0.2.0
|
||||
# via langchain
|
||||
langdetect==1.0.9
|
||||
# via unstructured
|
||||
langfuse==2.39.2
|
||||
langfuse==2.43.3
|
||||
# via open-webui
|
||||
langsmith==0.1.96
|
||||
# via langchain
|
||||
@ -491,7 +493,7 @@ pydub==0.25.1
|
||||
# via open-webui
|
||||
pygments==2.18.0
|
||||
# via rich
|
||||
pyjwt==2.8.0
|
||||
pyjwt==2.9.0
|
||||
# via open-webui
|
||||
pymongo==4.8.0
|
||||
# via open-webui
|
||||
|
@ -10,7 +10,9 @@
|
||||
# universal: false
|
||||
|
||||
-e file:.
|
||||
aiohttp==3.9.5
|
||||
aiohappyeyeballs==2.3.5
|
||||
# via aiohttp
|
||||
aiohttp==3.10.2
|
||||
# via langchain
|
||||
# via langchain-community
|
||||
# via open-webui
|
||||
@ -84,9 +86,9 @@ chardet==5.2.0
|
||||
charset-normalizer==3.3.2
|
||||
# via requests
|
||||
# via unstructured-client
|
||||
chroma-hnswlib==0.7.5
|
||||
chroma-hnswlib==0.7.6
|
||||
# via chromadb
|
||||
chromadb==0.5.4
|
||||
chromadb==0.5.5
|
||||
# via langchain-chroma
|
||||
# via open-webui
|
||||
click==8.1.7
|
||||
@ -269,7 +271,7 @@ jsonpointer==2.4
|
||||
# via jsonpatch
|
||||
kubernetes==29.0.0
|
||||
# via chromadb
|
||||
langchain==0.2.11
|
||||
langchain==0.2.12
|
||||
# via langchain-community
|
||||
# via open-webui
|
||||
langchain-chroma==0.1.2
|
||||
@ -285,7 +287,7 @@ langchain-text-splitters==0.2.0
|
||||
# via langchain
|
||||
langdetect==1.0.9
|
||||
# via unstructured
|
||||
langfuse==2.39.2
|
||||
langfuse==2.43.3
|
||||
# via open-webui
|
||||
langsmith==0.1.96
|
||||
# via langchain
|
||||
@ -491,7 +493,7 @@ pydub==0.25.1
|
||||
# via open-webui
|
||||
pygments==2.18.0
|
||||
# via rich
|
||||
pyjwt==2.8.0
|
||||
pyjwt==2.9.0
|
||||
# via open-webui
|
||||
pymongo==4.8.0
|
||||
# via open-webui
|
||||
|
@@ -1,4 +1,4 @@
-<!DOCTYPE html>
+<!doctype html>
<html lang="en">
	<head>
		<meta charset="utf-8" />
@@ -69,6 +69,7 @@ type ChatCompletedForm = {
	model: string;
	messages: string[];
	chat_id: string;
+	session_id: string;
};

export const chatCompleted = async (token: string, body: ChatCompletedForm) => {
@ -1,5 +1,4 @@
|
||||
import { OLLAMA_API_BASE_URL } from '$lib/constants';
|
||||
import { titleGenerationTemplate } from '$lib/utils';
|
||||
|
||||
export const getOllamaConfig = async (token: string = '') => {
|
||||
let error = null;
|
||||
@ -203,55 +202,6 @@ export const getOllamaModels = async (token: string = '') => {
|
||||
});
|
||||
};
|
||||
|
||||
// TODO: migrate to backend
|
||||
export const generateTitle = async (
|
||||
token: string = '',
|
||||
template: string,
|
||||
model: string,
|
||||
prompt: string
|
||||
) => {
|
||||
let error = null;
|
||||
|
||||
template = titleGenerationTemplate(template, prompt);
|
||||
|
||||
console.log(template);
|
||||
|
||||
const res = await fetch(`${OLLAMA_API_BASE_URL}/api/generate`, {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
Accept: 'application/json',
|
||||
'Content-Type': 'application/json',
|
||||
Authorization: `Bearer ${token}`
|
||||
},
|
||||
body: JSON.stringify({
|
||||
model: model,
|
||||
prompt: template,
|
||||
stream: false,
|
||||
options: {
|
||||
// Restrict the number of tokens generated to 50
|
||||
num_predict: 50
|
||||
}
|
||||
})
|
||||
})
|
||||
.then(async (res) => {
|
||||
if (!res.ok) throw await res.json();
|
||||
return res.json();
|
||||
})
|
||||
.catch((err) => {
|
||||
console.log(err);
|
||||
if ('detail' in err) {
|
||||
error = err.detail;
|
||||
}
|
||||
return null;
|
||||
});
|
||||
|
||||
if (error) {
|
||||
throw error;
|
||||
}
|
||||
|
||||
return res?.response.replace(/["']/g, '') ?? 'New Chat';
|
||||
};
|
||||
|
||||
export const generatePrompt = async (token: string = '', model: string, conversation: string) => {
|
||||
let error = null;
|
||||
|
||||
|
@ -1,6 +1,4 @@
|
||||
import { OPENAI_API_BASE_URL } from '$lib/constants';
|
||||
import { titleGenerationTemplate } from '$lib/utils';
|
||||
import { type Model, models, settings } from '$lib/stores';
|
||||
|
||||
export const getOpenAIConfig = async (token: string = '') => {
|
||||
let error = null;
|
||||
@ -260,7 +258,7 @@ export const getOpenAIModelsDirect = async (
|
||||
throw error;
|
||||
}
|
||||
|
||||
const models = Array.isArray(res) ? res : res?.data ?? null;
|
||||
const models = Array.isArray(res) ? res : (res?.data ?? null);
|
||||
|
||||
return models
|
||||
.map((model) => ({ id: model.id, name: model.name ?? model.id, external: true }))
|
||||
@ -330,126 +328,3 @@ export const synthesizeOpenAISpeech = async (
|
||||
|
||||
return res;
|
||||
};
|
||||
|
||||
export const generateTitle = async (
|
||||
token: string = '',
|
||||
template: string,
|
||||
model: string,
|
||||
prompt: string,
|
||||
chat_id?: string,
|
||||
url: string = OPENAI_API_BASE_URL
|
||||
) => {
|
||||
let error = null;
|
||||
|
||||
template = titleGenerationTemplate(template, prompt);
|
||||
|
||||
console.log(template);
|
||||
|
||||
const res = await fetch(`${url}/chat/completions`, {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
Accept: 'application/json',
|
||||
'Content-Type': 'application/json',
|
||||
Authorization: `Bearer ${token}`
|
||||
},
|
||||
body: JSON.stringify({
|
||||
model: model,
|
||||
messages: [
|
||||
{
|
||||
role: 'user',
|
||||
content: template
|
||||
}
|
||||
],
|
||||
stream: false,
|
||||
// Restricting the max tokens to 50 to avoid long titles
|
||||
max_tokens: 50,
|
||||
...(chat_id && { chat_id: chat_id }),
|
||||
title: true
|
||||
})
|
||||
})
|
||||
.then(async (res) => {
|
||||
if (!res.ok) throw await res.json();
|
||||
return res.json();
|
||||
})
|
||||
.catch((err) => {
|
||||
console.log(err);
|
||||
if ('detail' in err) {
|
||||
error = err.detail;
|
||||
}
|
||||
return null;
|
||||
});
|
||||
|
||||
if (error) {
|
||||
throw error;
|
||||
}
|
||||
|
||||
return res?.choices[0]?.message?.content.replace(/["']/g, '') ?? 'New Chat';
|
||||
};
|
||||
|
||||
export const generateSearchQuery = async (
|
||||
token: string = '',
|
||||
model: string,
|
||||
previousMessages: string[],
|
||||
prompt: string,
|
||||
url: string = OPENAI_API_BASE_URL
|
||||
): Promise<string | undefined> => {
|
||||
let error = null;
|
||||
|
||||
// TODO: Allow users to specify the prompt
|
||||
// Get the current date in the format "January 20, 2024"
|
||||
const currentDate = new Intl.DateTimeFormat('en-US', {
|
||||
year: 'numeric',
|
||||
month: 'long',
|
||||
day: '2-digit'
|
||||
}).format(new Date());
|
||||
|
||||
const res = await fetch(`${url}/chat/completions`, {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
Accept: 'application/json',
|
||||
'Content-Type': 'application/json',
|
||||
Authorization: `Bearer ${token}`
|
||||
},
|
||||
body: JSON.stringify({
|
||||
model: model,
|
||||
// Few shot prompting
|
||||
messages: [
|
||||
{
|
||||
role: 'assistant',
|
||||
content: `You are tasked with generating web search queries. Give me an appropriate query to answer my question for google search. Answer with only the query. Today is ${currentDate}.`
|
||||
},
|
||||
{
|
||||
role: 'user',
|
||||
content: prompt
|
||||
}
|
||||
// {
|
||||
// role: 'user',
|
||||
// content:
|
||||
// (previousMessages.length > 0
|
||||
// ? `Previous Questions:\n${previousMessages.join('\n')}\n\n`
|
||||
// : '') + `Current Question: ${prompt}`
|
||||
// }
|
||||
],
|
||||
stream: false,
|
||||
// Restricting the max tokens to 30 to avoid long search queries
|
||||
max_tokens: 30
|
||||
})
|
||||
})
|
||||
.then(async (res) => {
|
||||
if (!res.ok) throw await res.json();
|
||||
return res.json();
|
||||
})
|
||||
.catch((err) => {
|
||||
console.log(err);
|
||||
if ('detail' in err) {
|
||||
error = err.detail;
|
||||
}
|
||||
return undefined;
|
||||
});
|
||||
|
||||
if (error) {
|
||||
throw error;
|
||||
}
|
||||
|
||||
return res?.choices[0]?.message?.content.replace(/["']/g, '') ?? undefined;
|
||||
};
|
||||
|
@ -739,7 +739,7 @@
|
||||
? await getAndUpdateUserLocation(localStorage.token)
|
||||
: undefined
|
||||
)}${
|
||||
responseMessage?.userContext ?? null
|
||||
(responseMessage?.userContext ?? null)
|
||||
? `\n\nUser Context:\n${responseMessage?.userContext ?? ''}`
|
||||
: ''
|
||||
}`
|
||||
@ -811,7 +811,7 @@
|
||||
options: {
|
||||
...(params ?? $settings.params ?? {}),
|
||||
stop:
|
||||
params?.stop ?? $settings?.params?.stop ?? undefined
|
||||
(params?.stop ?? $settings?.params?.stop ?? undefined)
|
||||
? (params?.stop.split(',').map((token) => token.trim()) ?? $settings.params.stop).map(
|
||||
(str) => decodeURIComponent(JSON.parse('"' + str.replace(/\"/g, '\\"') + '"'))
|
||||
)
|
||||
@@ -877,6 +877,10 @@
					} else {
						responseMessage.content += data.message.content;

+						if (navigator.vibrate && ($settings?.hapticFeedback ?? false)) {
+							navigator.vibrate(5);
+						}
+
						const sentences = extractSentencesForAudio(responseMessage.content);
						sentences.pop();
@ -1056,7 +1060,7 @@
|
||||
stream: true,
|
||||
model: model.id,
|
||||
stream_options:
|
||||
model.info?.meta?.capabilities?.usage ?? false
|
||||
(model.info?.meta?.capabilities?.usage ?? false)
|
||||
? {
|
||||
include_usage: true
|
||||
}
|
||||
@ -1072,7 +1076,7 @@
|
||||
? await getAndUpdateUserLocation(localStorage.token)
|
||||
: undefined
|
||||
)}${
|
||||
responseMessage?.userContext ?? null
|
||||
(responseMessage?.userContext ?? null)
|
||||
? `\n\nUser Context:\n${responseMessage?.userContext ?? ''}`
|
||||
: ''
|
||||
}`
|
||||
@ -1092,7 +1096,7 @@
|
||||
text:
|
||||
arr.length - 1 !== idx
|
||||
? message.content
|
||||
: message?.raContent ?? message.content
|
||||
: (message?.raContent ?? message.content)
|
||||
},
|
||||
...message.files
|
||||
.filter((file) => file.type === 'image')
|
||||
@ -1108,12 +1112,12 @@
|
||||
content:
|
||||
arr.length - 1 !== idx
|
||||
? message.content
|
||||
: message?.raContent ?? message.content
|
||||
: (message?.raContent ?? message.content)
|
||||
})
|
||||
})),
|
||||
seed: params?.seed ?? $settings?.params?.seed ?? undefined,
|
||||
stop:
|
||||
params?.stop ?? $settings?.params?.stop ?? undefined
|
||||
(params?.stop ?? $settings?.params?.stop ?? undefined)
|
||||
? (params?.stop.split(',').map((token) => token.trim()) ?? $settings.params.stop).map(
|
||||
(str) => decodeURIComponent(JSON.parse('"' + str.replace(/\"/g, '\\"') + '"'))
|
||||
)
|
||||
@@ -1177,6 +1181,10 @@
					} else {
						responseMessage.content += value;

+						if (navigator.vibrate && ($settings?.hapticFeedback ?? false)) {
+							navigator.vibrate(5);
+						}
+
						const sentences = extractSentencesForAudio(responseMessage.content);
						sentences.pop();
|
@ -9,6 +9,8 @@
import FileItem from '$lib/components/common/FileItem.svelte';
import Collapsible from '$lib/components/common/Collapsible.svelte';

import { user } from '$lib/stores';

export let models = [];

export let chatFiles = [];
@ -78,7 +80,7 @@
<Collapsible title={$i18n.t('Advanced Params')} open={true}>
<div class="text-sm mt-1.5" slot="content">
<div>
<AdvancedParams bind:params />
<AdvancedParams admin={$user?.role === 'admin'} bind:params />
</div>
</div>
</Collapsible>

@ -305,7 +305,7 @@
{#each messages as message, messageIdx}
<div class=" w-full {messageIdx === messages.length - 1 ? ' pb-12' : ''}">
<div
class="flex flex-col justify-between px-5 mb-3 {$settings?.widescreenMode ?? null
class="flex flex-col justify-between px-5 mb-3 {($settings?.widescreenMode ?? null)
? 'max-w-full'
: 'max-w-5xl'} mx-auto rounded-lg group"
>
@ -317,10 +317,10 @@
{message}
isFirstMessage={messageIdx === 0}
siblings={message.parentId !== null
? history.messages[message.parentId]?.childrenIds ?? []
: Object.values(history.messages)
? (history.messages[message.parentId]?.childrenIds ?? [])
: (Object.values(history.messages)
.filter((message) => message.parentId === null)
.map((message) => message.id) ?? []}
.map((message) => message.id) ?? [])}
{confirmEditMessage}
{showPreviousMessage}
{showNextMessage}

@ -1,17 +1,25 @@
<script lang="ts">
import Spinner from '$lib/components/common/Spinner.svelte';
import { copyToClipboard } from '$lib/utils';
import hljs from 'highlight.js';
import 'highlight.js/styles/github-dark.min.css';
import { loadPyodide } from 'pyodide';
import { onMount, tick } from 'svelte';
import mermaid from 'mermaid';

import { getContext, getAllContexts, onMount } from 'svelte';
import { copyToClipboard } from '$lib/utils';

import 'highlight.js/styles/github-dark.min.css';

import PyodideWorker from '$lib/workers/pyodide.worker?worker';

const i18n = getContext('i18n');

export let id = '';

export let token;
export let lang = '';
export let code = '';

let mermaidHtml = null;

let highlightedCode = null;
let executing = false;

@ -204,7 +212,18 @@ __builtins__.input = input`);
};

let debounceTimeout;

$: if (code) {
if (lang === 'mermaid' && (token?.raw ?? '').endsWith('```')) {
(async () => {
try {
const { svg } = await mermaid.render(`mermaid-${id}`, code);
mermaidHtml = svg;
} catch (error) {
console.error('Error:', error);
}
})();
} else {
// Function to perform the code highlighting
const highlightCode = () => {
highlightedCode = hljs.highlightAuto(code, hljs.getLanguage(lang)?.aliases).value || code;
@ -212,13 +231,33 @@ __builtins__.input = input`);

// Clear the previous timeout if it exists
clearTimeout(debounceTimeout);

// Set a new timeout to debounce the code highlighting
debounceTimeout = setTimeout(highlightCode, 10);
}
}

onMount(async () => {
await mermaid.initialize({ startOnLoad: true });

if (lang === 'mermaid' && (token?.raw ?? '').endsWith('```')) {
try {
const { svg } = await mermaid.render(`mermaid-${id}`, code);
mermaidHtml = svg;
} catch (error) {
console.error('Error:', error);
}
}
});
</script>

<div class="my-2" dir="ltr">
{#if lang === 'mermaid'}
{#if mermaidHtml}
{@html mermaidHtml}
{:else}
<pre class=" mermaid-{id}">{code}</pre>
{/if}
{:else}
<div
class="flex justify-between bg-[#202123] text-white text-xs px-4 pt-1 pb-0.5 rounded-t-lg overflow-x-auto"
>
@ -233,12 +272,12 @@ __builtins__.input = input`);
class="copy-code-button bg-none border-none p-1"
on:click={() => {
executePython(code);
}}>Run</button
}}>{$i18n.t('Run')}</button
>
{/if}
{/if}
<button class="copy-code-button bg-none border-none p-1" on:click={copyCode}
>{copied ? 'Copied' : 'Copy Code'}</button
>{copied ? $i18n.t('Copied') : $i18n.t('Copy Code')}</button
>
</div>
</div>
@ -270,4 +309,5 @@ __builtins__.input = input`);
<div class="text-sm">{stdout || stderr || result}</div>
</div>
{/if}
{/if}
</div>

9
src/lib/components/chat/Messages/KatexRenderer.svelte
Normal file
@ -0,0 +1,9 @@
<script lang="ts">
import katex from 'katex';
import 'katex/contrib/mhchem';

export let content: string;
export let displayMode: boolean = false;
</script>

{@html katex.renderToString(content, { displayMode, throwOnError: false })}

@ -1,8 +1,11 @@
<script lang="ts">
import type { Token } from 'marked';
import { unescapeHtml } from '$lib/utils';
import { revertSanitizedResponseContent, unescapeHtml } from '$lib/utils';
import { onMount } from 'svelte';
import Image from '$lib/components/common/Image.svelte';

import KatexRenderer from './KatexRenderer.svelte';

export let id: string;
export let tokens: Token[];
</script>
@ -25,14 +28,18 @@
<svelte:self id={`${id}-em`} tokens={token.tokens} />
</em>
{:else if token.type === 'codespan'}
<code class="codespan">{unescapeHtml(token.text.replaceAll('&amp;', '&'))}</code>
<code class="codespan">{revertSanitizedResponseContent(token.raw)}</code>
{:else if token.type === 'br'}
<br />
{:else if token.type === 'del'}
<del>
<svelte:self id={`${id}-del`} tokens={token.tokens} />
</del>
{:else if token.type === 'inlineKatex'}
{#if token.text}
<KatexRenderer content={revertSanitizedResponseContent(token.text)} displayMode={false} />
{/if}
{:else if token.type === 'text'}
{unescapeHtml(token.text)}
{token.raw}
{/if}
{/each}

@ -1,137 +1,132 @@
<script lang="ts">
import { marked } from 'marked';
import { onMount } from 'svelte';
import type { Token } from 'marked';
import { revertSanitizedResponseContent, unescapeHtml } from '$lib/utils';

import { onMount } from 'svelte';

import Image from '$lib/components/common/Image.svelte';
import CodeBlock from '$lib/components/chat/Messages/CodeBlock.svelte';

import MarkdownInlineTokens from '$lib/components/chat/Messages/MarkdownInlineTokens.svelte';
import KatexRenderer from './KatexRenderer.svelte';

export let id: string;
export let tokens: Token[];
export let top = true;

let containerElement;

const headerComponent = (depth: number) => {
return 'h' + depth;
};

const renderer = new marked.Renderer();
// For code blocks with simple backticks
renderer.codespan = (code) => {
return `<code class="codespan">${code.replaceAll('&amp;', '&')}</code>`;
};

let codes = [];
renderer.code = (code, lang) => {
codes.push({
code: code,
lang: lang
});
codes = codes;
const codeId = `${id}-${codes.length}`;

const interval = setInterval(() => {
const codeElement = document.getElementById(`code-${codeId}`);
if (codeElement) {
clearInterval(interval);
// If the code is already loaded, don't load it again
if (codeElement.innerHTML) {
return;
}

new CodeBlock({
target: codeElement,
props: {
id: `${id}-${codes.length}`,
lang: lang,
code: revertSanitizedResponseContent(code)
},
hydrate: true,
$$inline: true
});
}
}, 10);

return `<div id="code-${id}-${codes.length}"></div>`;
};

let images = [];
renderer.image = (href, title, text) => {
images.push({
href: href,
title: title,
text: text
});
images = images;

const imageId = `${id}-${images.length}`;
const interval = setInterval(() => {
const imageElement = document.getElementById(`image-${imageId}`);
if (imageElement) {
clearInterval(interval);

// If the image is already loaded, don't load it again
if (imageElement.innerHTML) {
return;
}

console.log('image', href, text);
new Image({
target: imageElement,
props: {
src: href,
alt: text
},
$$inline: true
});
}
}, 10);

return `<div id="image-${id}-${images.length}"></div>`;
};

// Open all links in a new tab/window (from https://github.com/markedjs/marked/issues/655#issuecomment-383226346)
const origLinkRenderer = renderer.link;
renderer.link = (href, title, text) => {
const html = origLinkRenderer.call(renderer, href, title, text);
return html.replace(/^<a /, '<a target="_blank" rel="nofollow" ');
};

const { extensions, ...defaults } = marked.getDefaults() as marked.MarkedOptions & {
// eslint-disable-next-line @typescript-eslint/no-explicit-any
extensions: any;
};

$: if (tokens) {
images = [];
codes = [];
}
</script>

<div bind:this={containerElement} class="flex flex-col">
{#each tokens as token, tokenIdx (`${id}-${tokenIdx}`)}
{#if token.type === 'code'}
{#if token.lang === 'mermaid'}
<pre class="mermaid">{revertSanitizedResponseContent(token.text)}</pre>
{:else}
<!-- {JSON.stringify(tokens)} -->
{#each tokens as token, tokenIdx}
{#if token.type === 'hr'}
<hr />
{:else if token.type === 'heading'}
<svelte:element this={headerComponent(token.depth)}>
<MarkdownInlineTokens id={`${id}-${tokenIdx}-h`} tokens={token.tokens} />
</svelte:element>
{:else if token.type === 'code'}
<CodeBlock
id={`${id}-${tokenIdx}`}
{token}
lang={token?.lang ?? ''}
code={revertSanitizedResponseContent(token?.text ?? '')}
/>
{/if}
{:else}
{@html marked.parse(token.raw, {
...defaults,
gfm: true,
breaks: true,
renderer
})}
{/if}
{:else if token.type === 'table'}
<table>
<thead>
<tr>
{#each token.header as header, headerIdx}
<th style={token.align[headerIdx] ? '' : `text-align: ${token.align[headerIdx]}`}>
<MarkdownInlineTokens
id={`${id}-${tokenIdx}-header-${headerIdx}`}
tokens={header.tokens}
/>
</th>
{/each}
</div>
</tr>
</thead>
<tbody>
{#each token.rows as row, rowIdx}
<tr>
{#each row ?? [] as cell, cellIdx}
<td style={token.align[cellIdx] ? '' : `text-align: ${token.align[cellIdx]}`}>
<MarkdownInlineTokens
id={`${id}-${tokenIdx}-row-${rowIdx}-${cellIdx}`}
tokens={cell.tokens}
/>
</td>
{/each}
</tr>
{/each}
</tbody>
</table>
{:else if token.type === 'blockquote'}
<blockquote>
<svelte:self id={`${id}-${tokenIdx}`} tokens={token.tokens} />
</blockquote>
{:else if token.type === 'list'}
{#if token.ordered}
<ol start={token.start || 1}>
{#each token.items as item, itemIdx}
<li>
<svelte:self
id={`${id}-${tokenIdx}-${itemIdx}`}
tokens={item.tokens}
top={token.loose}
/>
</li>
{/each}
</ol>
{:else}
<ul>
{#each token.items as item, itemIdx}
<li>
<svelte:self
id={`${id}-${tokenIdx}-${itemIdx}`}
tokens={item.tokens}
top={token.loose}
/>
</li>
{/each}
</ul>
{/if}
{:else if token.type === 'html'}
{@html token.text}
{:else if token.type === 'paragraph'}
<p>
<MarkdownInlineTokens id={`${id}-${tokenIdx}-p`} tokens={token.tokens ?? []} />
</p>
{:else if token.type === 'text'}
{#if top}
<p>
{#if token.tokens}
<MarkdownInlineTokens id={`${id}-${tokenIdx}-t`} tokens={token.tokens} />
{:else}
{unescapeHtml(token.text)}
{/if}
</p>
{:else if token.tokens}
<MarkdownInlineTokens id={`${id}-${tokenIdx}-p`} tokens={token.tokens ?? []} />
{:else}
{unescapeHtml(token.text)}
{/if}
{:else if token.type === 'inlineKatex'}
{#if token.text}
<KatexRenderer
content={revertSanitizedResponseContent(token.text)}
displayMode={token?.displayMode ?? false}
/>
{/if}
{:else if token.type === 'blockKatex'}
{#if token.text}
<KatexRenderer
content={revertSanitizedResponseContent(token.text)}
displayMode={token?.displayMode ?? false}
/>
{/if}
{:else if token.type === 'space'}
{''}
{:else}
{console.log('Unknown token', token)}
{/if}
{/each}

@ -2,10 +2,6 @@
import { toast } from 'svelte-sonner';
import dayjs from 'dayjs';
import { marked } from 'marked';
import tippy from 'tippy.js';
import auto_render from 'katex/dist/contrib/auto-render.mjs';
import 'katex/dist/katex.min.css';
import mermaid from 'mermaid';

import { fade } from 'svelte/transition';
import { createEventDispatcher } from 'svelte';
@ -79,104 +75,24 @@

let tokens;

import 'katex/dist/katex.min.css';

import markedKatex from '$lib/utils/katex-extension';

const options = {
throwOnError: false
};

marked.use(markedKatex(options));

$: (async () => {
if (message?.content) {
tokens = marked.lexer(
replaceTokens(sanitizeResponseContent(message?.content), model?.name, $user?.name)
);
// console.log(message?.content, tokens);
}
})();

$: if (message) {
renderStyling();
}

const renderStyling = async () => {
await tick();

if (tooltipInstance) {
tooltipInstance[0]?.destroy();
}

renderLatex();

if (message.info) {
let tooltipContent = '';
if (message.info.openai) {
tooltipContent = `prompt_tokens: ${message.info.prompt_tokens ?? 'N/A'}<br/>
completion_tokens: ${message.info.completion_tokens ?? 'N/A'}<br/>
total_tokens: ${message.info.total_tokens ?? 'N/A'}`;
} else {
tooltipContent = `response_token/s: ${
`${
Math.round(
((message.info.eval_count ?? 0) / (message.info.eval_duration / 1000000000)) * 100
) / 100
} tokens` ?? 'N/A'
}<br/>
prompt_token/s: ${
Math.round(
((message.info.prompt_eval_count ?? 0) /
(message.info.prompt_eval_duration / 1000000000)) *
100
) / 100 ?? 'N/A'
} tokens<br/>
total_duration: ${
Math.round(((message.info.total_duration ?? 0) / 1000000) * 100) / 100 ??
'N/A'
}ms<br/>
load_duration: ${
Math.round(((message.info.load_duration ?? 0) / 1000000) * 100) / 100 ?? 'N/A'
}ms<br/>
prompt_eval_count: ${message.info.prompt_eval_count ?? 'N/A'}<br/>
prompt_eval_duration: ${
Math.round(((message.info.prompt_eval_duration ?? 0) / 1000000) * 100) /
100 ?? 'N/A'
}ms<br/>
eval_count: ${message.info.eval_count ?? 'N/A'}<br/>
eval_duration: ${
Math.round(((message.info.eval_duration ?? 0) / 1000000) * 100) / 100 ?? 'N/A'
}ms<br/>
approximate_total: ${approximateToHumanReadable(message.info.total_duration)}`;
}
tooltipInstance = tippy(`#info-${message.id}`, {
content: `<span class="text-xs" id="tooltip-${message.id}">${tooltipContent}</span>`,
allowHTML: true,
theme: 'dark',
arrow: false,
offset: [0, 4]
});
}
};

const renderLatex = () => {
let chatMessageElements = document
.getElementById(`message-${message.id}`)
?.getElementsByClassName('chat-assistant');

if (chatMessageElements) {
for (const element of chatMessageElements) {
auto_render(element, {
// customised options
// • auto-render specific keys, e.g.:
delimiters: [
{ left: '$$', right: '$$', display: false },
{ left: '$ ', right: ' $', display: false },
{ left: '\\pu{', right: '}', display: false },
{ left: '\\ce{', right: '}', display: false },
{ left: '\\(', right: '\\)', display: false },
{ left: '( ', right: ' )', display: false },
{ left: '\\[', right: '\\]', display: false },
{ left: '[ ', right: ' ]', display: false }
],
// • rendering keys, e.g.:
throwOnError: false
});
}
}
};

const playAudio = (idx) => {
return new Promise((res) => {
speakingIdx = idx;
@ -242,7 +158,7 @@
const res = await synthesizeOpenAISpeech(
localStorage.token,
$settings?.audio?.tts?.defaultVoice === $config.audio.tts.voice
? $settings?.audio?.tts?.voice ?? $config?.audio?.tts?.voice
? ($settings?.audio?.tts?.voice ?? $config?.audio?.tts?.voice)
: $config?.audio?.tts?.voice,
sentence
).catch((error) => {
@ -330,14 +246,12 @@
editedContent = '';

await tick();
renderStyling();
};

const cancelEditMessage = async () => {
edit = false;
editedContent = '';
await tick();
renderStyling();
};

const generateImage = async (message) => {
@ -362,21 +276,11 @@
$: if (!edit) {
(async () => {
await tick();
renderStyling();

await mermaid.run({
querySelector: '.mermaid'
});
})();
}

onMount(async () => {
await tick();
renderStyling();

await mermaid.run({
querySelector: '.mermaid'
});
});
</script>

@ -420,7 +324,7 @@
{/if}

<div
class="prose chat-{message.role} w-full max-w-full dark:prose-invert prose-p:my-0 prose-img:my-1 prose-headings:my-1 prose-pre:my-0 prose-table:my-0 prose-blockquote:my-0 prose-ul:-my-2 prose-ol:-my-2 prose-li:-my-3 whitespace-pre-line"
class="prose chat-{message.role} w-full max-w-full dark:prose-invert prose-p:my-0 prose-img:my-1 prose-headings:my-1 prose-pre:my-0 prose-table:my-0 prose-blockquote:my-0 prose-ul:-my-0 prose-ol:-my-0 prose-li:-my-0 whitespace-pre-line"
>
<div>
{#if (message?.statusHistory ?? [...(message?.status ? [message?.status] : [])]).length > 0}
@ -841,6 +745,45 @@
{/if}

{#if message.info}
<Tooltip
content={message.info.openai
? `prompt_tokens: ${message.info.prompt_tokens ?? 'N/A'}<br/>
completion_tokens: ${message.info.completion_tokens ?? 'N/A'}<br/>
total_tokens: ${message.info.total_tokens ?? 'N/A'}`
: `response_token/s: ${
`${
Math.round(
((message.info.eval_count ?? 0) /
(message.info.eval_duration / 1000000000)) *
100
) / 100
} tokens` ?? 'N/A'
}<br/>
prompt_token/s: ${
Math.round(
((message.info.prompt_eval_count ?? 0) /
(message.info.prompt_eval_duration / 1000000000)) *
100
) / 100 ?? 'N/A'
} tokens<br/>
total_duration: ${
Math.round(((message.info.total_duration ?? 0) / 1000000) * 100) / 100 ?? 'N/A'
}ms<br/>
load_duration: ${
Math.round(((message.info.load_duration ?? 0) / 1000000) * 100) / 100 ?? 'N/A'
}ms<br/>
prompt_eval_count: ${message.info.prompt_eval_count ?? 'N/A'}<br/>
prompt_eval_duration: ${
Math.round(((message.info.prompt_eval_duration ?? 0) / 1000000) * 100) / 100 ??
'N/A'
}ms<br/>
eval_count: ${message.info.eval_count ?? 'N/A'}<br/>
eval_duration: ${
Math.round(((message.info.eval_duration ?? 0) / 1000000) * 100) / 100 ?? 'N/A'
}ms<br/>
approximate_total: ${approximateToHumanReadable(message.info.total_duration)}`}
placement="top"
>
<Tooltip content={$i18n.t('Generation Info')} placement="bottom">
<button
class=" {isLastMessage
@ -867,6 +810,7 @@
</svg>
</button>
</Tooltip>
</Tooltip>
{/if}

{#if !readOnly}

@ -62,8 +62,8 @@
{#if !($settings?.chatBubble ?? true)}
<ProfileImage
src={message.user
? $models.find((m) => m.id === message.user)?.info?.meta?.profile_image_url ?? '/user.png'
: user?.profile_image_url ?? '/user.png'}
? ($models.find((m) => m.id === message.user)?.info?.meta?.profile_image_url ?? '/user.png')
: (user?.profile_image_url ?? '/user.png')}
/>
{/if}
<div class="w-full overflow-hidden pl-1">
@ -96,7 +96,7 @@
{#if message.files}
<div class="mt-2.5 mb-1 w-full flex flex-col justify-end overflow-x-auto gap-1 flex-wrap">
{#each message.files as file}
<div class={$settings?.chatBubble ?? true ? 'self-end' : ''}>
<div class={($settings?.chatBubble ?? true) ? 'self-end' : ''}>
{#if file.type === 'image'}
<img src={file.url} alt="input" class=" max-h-96 rounded-lg" draggable="false" />
{:else}
@ -162,9 +162,9 @@
</div>
{:else}
<div class="w-full">
<div class="flex {$settings?.chatBubble ?? true ? 'justify-end' : ''} mb-2">
<div class="flex {($settings?.chatBubble ?? true) ? 'justify-end' : ''} mb-2">
<div
class="rounded-3xl {$settings?.chatBubble ?? true
class="rounded-3xl {($settings?.chatBubble ?? true)
? `max-w-[90%] px-5 py-2 bg-gray-50 dark:bg-gray-850 ${
message.files ? 'rounded-tr-lg' : ''
}`
@ -175,7 +175,7 @@
</div>

<div
class=" flex {$settings?.chatBubble ?? true
class=" flex {($settings?.chatBubble ?? true)
? 'justify-end'
: ''} text-gray-600 dark:text-gray-500"
>

@ -29,6 +29,7 @@
use_mmap: null,
use_mlock: null,
num_thread: null,
num_gpu: null,
template: null
};

@ -864,6 +865,52 @@
{/if}
</div>

<div class=" py-0.5 w-full justify-between">
<div class="flex w-full justify-between">
<div class=" self-center text-xs font-medium">{$i18n.t('num_gpu (Ollama)')}</div>

<button
class="p-1 px-3 text-xs flex rounded transition flex-shrink-0 outline-none"
type="button"
on:click={() => {
params.num_gpu = (params?.num_gpu ?? null) === null ? 0 : null;
}}
>
{#if (params?.num_gpu ?? null) === null}
<span class="ml-2 self-center">{$i18n.t('Default')}</span>
{:else}
<span class="ml-2 self-center">{$i18n.t('Custom')}</span>
{/if}
</button>
</div>

{#if (params?.num_gpu ?? null) !== null}
<div class="flex mt-0.5 space-x-2">
<div class=" flex-1">
<input
id="steps-range"
type="range"
min="0"
max="256"
step="1"
bind:value={params.num_gpu}
class="w-full h-2 rounded-lg appearance-none cursor-pointer dark:bg-gray-700"
/>
</div>
<div class="">
<input
bind:value={params.num_gpu}
type="number"
class=" bg-transparent text-center w-14"
min="0"
max="256"
step="1"
/>
</div>
</div>
{/if}
</div>

<!-- <div class=" py-0.5 w-full justify-between">
<div class="flex w-full justify-between">
<div class=" self-center text-xs font-medium">{$i18n.t('Template')}</div>

@ -34,6 +34,7 @@

let showEmojiInCall = false;
let voiceInterruption = false;
let hapticFeedback = false;

const toggleSplitLargeChunks = async () => {
splitLargeChunks = !splitLargeChunks;
@ -70,6 +71,11 @@
saveSettings({ voiceInterruption: voiceInterruption });
};

const toggleHapticFeedback = async () => {
hapticFeedback = !hapticFeedback;
saveSettings({ hapticFeedback: hapticFeedback });
};

const toggleUserLocation = async () => {
userLocation = !userLocation;

@ -151,6 +157,8 @@
chatDirection = $settings.chatDirection ?? 'LTR';
userLocation = $settings.userLocation ?? false;

hapticFeedback = $settings.hapticFeedback ?? false;

defaultModelId = $settings?.models?.at(0) ?? '';
if ($config?.default_models) {
defaultModelId = $config.default_models.split(',')[0];
@ -438,6 +446,26 @@
</div>
</div>

<div>
<div class=" py-0.5 flex w-full justify-between">
<div class=" self-center text-xs">{$i18n.t('Haptic Feedback')}</div>

<button
class="p-1 px-3 text-xs flex rounded transition"
on:click={() => {
toggleHapticFeedback();
}}
type="button"
>
{#if hapticFeedback === true}
<span class="ml-2 self-center">{$i18n.t('On')}</span>
{:else}
<span class="ml-2 self-center">{$i18n.t('Off')}</span>
{/if}
</button>
</div>
</div>

<div class=" my-1.5 text-sm font-medium">{$i18n.t('Voice')}</div>

<div>

@ -27,7 +27,7 @@
on:click={() => {
valves[property] =
(valves[property] ?? null) === null
? valvesSpec.properties[property]?.default ?? ''
? (valvesSpec.properties[property]?.default ?? '')
: null;

dispatch('change');

@ -292,7 +292,7 @@
>
<div class=" self-start w-8 pt-0.5">
<div
class=" rounded-full bg-stone-700 {model?.info?.meta?.hidden ?? false
class=" rounded-full bg-stone-700 {(model?.info?.meta?.hidden ?? false)
? 'brightness-90 dark:brightness-50'
: ''} "
>
@ -305,7 +305,7 @@
</div>

<div
class=" flex-1 self-center {model?.info?.meta?.hidden ?? false ? 'text-gray-500' : ''}"
class=" flex-1 self-center {(model?.info?.meta?.hidden ?? false) ? 'text-gray-500' : ''}"
>
<div class=" font-semibold line-clamp-1">{model.name}</div>
<div class=" text-xs overflow-hidden text-ellipsis line-clamp-1">

@ -134,8 +134,10 @@
|
||||
"Continue Response": "متابعة الرد",
|
||||
"Continue with {{provider}}": "",
|
||||
"Controls": "",
|
||||
"Copied": "",
|
||||
"Copied shared chat URL to clipboard!": "تم نسخ عنوان URL للدردشة المشتركة إلى الحافظة",
|
||||
"Copy": "نسخ",
|
||||
"Copy Code": "",
|
||||
"Copy last code block": "انسخ كتلة التعليمات البرمجية الأخيرة",
|
||||
"Copy last response": "انسخ الرد الأخير",
|
||||
"Copy Link": "أنسخ الرابط",
|
||||
@ -312,6 +314,7 @@
|
||||
"Google PSE API Key": "مفتاح واجهة برمجة تطبيقات PSE من Google",
|
||||
"Google PSE Engine Id": "معرف محرك PSE من Google",
|
||||
"h:mm a": "الساعة:الدقائق صباحا/مساء",
|
||||
"Haptic Feedback": "",
|
||||
"has no conversations.": "ليس لديه محادثات.",
|
||||
"Hello, {{name}}": " {{name}} مرحبا",
|
||||
"Help": "مساعدة",
|
||||
@ -416,6 +419,7 @@
|
||||
"Note: If you set a minimum score, the search will only return documents with a score greater than or equal to the minimum score.": "ملاحظة: إذا قمت بتعيين الحد الأدنى من النقاط، فلن يؤدي البحث إلا إلى إرجاع المستندات التي لها نقاط أكبر من أو تساوي الحد الأدنى من النقاط.",
|
||||
"Notifications": "إشعارات",
|
||||
"November": "نوفمبر",
|
||||
"num_gpu (Ollama)": "",
|
||||
"num_thread (Ollama)": "num_thread (أولاما)",
|
||||
"OAuth ID": "",
|
||||
"October": "اكتوبر",
|
||||
@ -499,6 +503,7 @@
|
||||
"Rosé Pine": "Rosé Pine",
|
||||
"Rosé Pine Dawn": "Rosé Pine Dawn",
|
||||
"RTL": "من اليمين إلى اليسار",
|
||||
"Run": "",
|
||||
"Run Llama 2, Code Llama, and other models. Customize and create your own.": "",
|
||||
"Running": "",
|
||||
"Save": "حفظ",
|
||||
|
@ -134,8 +134,10 @@
|
||||
"Continue Response": "Продължи отговора",
|
||||
"Continue with {{provider}}": "",
|
||||
"Controls": "",
|
||||
"Copied": "",
|
||||
"Copied shared chat URL to clipboard!": "Копирана е връзката за чат!",
|
||||
"Copy": "Копирай",
|
||||
"Copy Code": "",
|
||||
"Copy last code block": "Копиране на последен код блок",
|
||||
"Copy last response": "Копиране на последен отговор",
|
||||
"Copy Link": "Копиране на връзка",
|
||||
@ -312,6 +314,7 @@
|
||||
"Google PSE API Key": "Google PSE API ключ",
|
||||
"Google PSE Engine Id": "Идентификатор на двигателя на Google PSE",
|
||||
"h:mm a": "h:mm a",
|
||||
"Haptic Feedback": "",
|
||||
"has no conversations.": "няма разговори.",
|
||||
"Hello, {{name}}": "Здравей, {{name}}",
|
||||
"Help": "Помощ",
|
||||
@ -416,6 +419,7 @@
|
||||
"Note: If you set a minimum score, the search will only return documents with a score greater than or equal to the minimum score.": "Забележка: Ако зададете минимален резултат, търсенето ще върне само документи с резултат, по-голям или равен на минималния резултат.",
|
||||
"Notifications": "Десктоп Известия",
|
||||
"November": "Ноември",
|
||||
"num_gpu (Ollama)": "",
|
||||
"num_thread (Ollama)": "num_thread (Ollama)",
|
||||
"OAuth ID": "",
|
||||
"October": "Октомври",
|
||||
@ -499,6 +503,7 @@
|
||||
"Rosé Pine": "Rosé Pine",
|
||||
"Rosé Pine Dawn": "Rosé Pine Dawn",
|
||||
"RTL": "RTL",
|
||||
"Run": "",
|
||||
"Run Llama 2, Code Llama, and other models. Customize and create your own.": "",
|
||||
"Running": "",
|
||||
"Save": "Запис",
|
||||
|
@ -134,8 +134,10 @@
|
||||
"Continue Response": "যাচাই করুন",
|
||||
"Continue with {{provider}}": "",
|
||||
"Controls": "",
|
||||
"Copied": "",
|
||||
"Copied shared chat URL to clipboard!": "শেয়ারকৃত কথা-ব্যবহারের URL ক্লিপবোর্ডে কপি করা হয়েছে!",
|
||||
"Copy": "অনুলিপি",
|
||||
"Copy Code": "",
|
||||
"Copy last code block": "সর্বশেষ কোড ব্লক কপি করুন",
|
||||
"Copy last response": "সর্বশেষ রেসপন্স কপি করুন",
|
||||
"Copy Link": "লিংক কপি করুন",
|
||||
@ -312,6 +314,7 @@
|
||||
"Google PSE API Key": "গুগল পিএসই এপিআই কী",
|
||||
"Google PSE Engine Id": "গুগল পিএসই ইঞ্জিন আইডি",
|
||||
"h:mm a": "h:mm a",
|
||||
"Haptic Feedback": "",
|
||||
"has no conversations.": "কোন কনভার্সেশন আছে না।",
|
||||
"Hello, {{name}}": "হ্যালো, {{name}}",
|
||||
"Help": "সহায়তা",
|
||||
@ -416,6 +419,7 @@
|
||||
"Note: If you set a minimum score, the search will only return documents with a score greater than or equal to the minimum score.": "দ্রষ্টব্য: আপনি যদি ন্যূনতম স্কোর সেট করেন তবে অনুসন্ধানটি কেবলমাত্র ন্যূনতম স্কোরের চেয়ে বেশি বা সমান স্কোর সহ নথিগুলি ফেরত দেবে।",
|
||||
"Notifications": "নোটিফিকেশনসমূহ",
|
||||
"November": "নভেম্বর",
|
||||
"num_gpu (Ollama)": "",
|
||||
"num_thread (Ollama)": "num_thread (ওলামা)",
|
||||
"OAuth ID": "",
|
||||
"October": "অক্টোবর",
|
||||
@ -499,6 +503,7 @@
|
||||
"Rosé Pine": "রোজ পাইন",
|
||||
"Rosé Pine Dawn": "ভোরের রোজ পাইন",
|
||||
"RTL": "RTL",
|
||||
"Run": "",
|
||||
"Run Llama 2, Code Llama, and other models. Customize and create your own.": "",
|
||||
"Running": "",
|
||||
"Save": "সংরক্ষণ",
|
||||
|
@ -134,8 +134,10 @@
|
||||
"Continue Response": "Continuar la resposta",
|
||||
"Continue with {{provider}}": "Continuar amb {{provider}}",
|
||||
"Controls": "Controls",
|
||||
"Copied": "Copiat",
|
||||
"Copied shared chat URL to clipboard!": "S'ha copiat l'URL compartida al porta-retalls!",
|
||||
"Copy": "Copiar",
|
||||
"Copy Code": "Copiar el codi",
|
||||
"Copy last code block": "Copiar l'últim bloc de codi",
|
||||
"Copy last response": "Copiar l'última resposta",
|
||||
"Copy Link": "Copiar l'enllaç",
|
||||
@ -312,6 +314,7 @@
|
||||
"Google PSE API Key": "Clau API PSE de Google",
|
||||
"Google PSE Engine Id": "Identificador del motor PSE de Google",
|
||||
"h:mm a": "h:mm a",
|
||||
"Haptic Feedback": "",
|
||||
"has no conversations.": "no té converses.",
|
||||
"Hello, {{name}}": "Hola, {{name}}",
|
||||
"Help": "Ajuda",
|
||||
@ -416,6 +419,7 @@
|
||||
"Note: If you set a minimum score, the search will only return documents with a score greater than or equal to the minimum score.": "Nota: Si s'estableix una puntuació mínima, la cerca només retornarà documents amb una puntuació major o igual a la puntuació mínima.",
|
||||
"Notifications": "Notificacions",
|
||||
"November": "Novembre",
|
||||
"num_gpu (Ollama)": "",
|
||||
"num_thread (Ollama)": "num_thread (Ollama)",
|
||||
"OAuth ID": "ID OAuth",
|
||||
"October": "Octubre",
|
||||
@ -499,6 +503,7 @@
|
||||
"Rosé Pine": "Rosé Pine",
|
||||
"Rosé Pine Dawn": "Albada Rosé Pine",
|
||||
"RTL": "RTL",
|
||||
"Run": "Executar",
|
||||
"Run Llama 2, Code Llama, and other models. Customize and create your own.": "Executa Llama 2, Code Llama, i altres models. Personalitza i crea els teus propis models.",
|
||||
"Running": "S'està executant",
|
||||
"Save": "Desar",
|
||||
@ -509,7 +514,7 @@
|
||||
"Scan": "Escanejar",
|
||||
"Scan complete!": "Escaneigr completat!",
|
||||
"Scan for documents from {{path}}": "Escanejar documents des de {{path}}",
|
||||
"Scroll to bottom when switching between branches": "",
|
||||
"Scroll to bottom when switching between branches": "Desplaçar a la part inferior quan es canviï de branca",
|
||||
"Search": "Cercar",
|
||||
"Search a model": "Cercar un model",
|
||||
"Search Chats": "Cercar xats",
|
||||
@ -624,7 +629,7 @@
|
||||
"To access the WebUI, please reach out to the administrator. Admins can manage user statuses from the Admin Panel.": "Per accedir a la WebUI, poseu-vos en contacte amb l'administrador. Els administradors poden gestionar els estats dels usuaris des del tauler d'administració.",
|
||||
"To add documents here, upload them to the \"Documents\" workspace first.": "Per afegir documents aquí, puja-ls primer a l'espai de treball \"Documents\".",
|
||||
"to chat input.": "a l'entrada del xat.",
|
||||
"To select actions here, add them to the \"Functions\" workspace first.": "Per seleccionar accions aquí, afegeix-los primer a l'espai de treball \"Funcions\".",
|
||||
"To select actions here, add them to the \"Functions\" workspace first.": "Per seleccionar accions aquí, afegeix-les primer a l'espai de treball \"Funcions\".",
|
||||
"To select filters here, add them to the \"Functions\" workspace first.": "Per seleccionar filtres aquí, afegeix-los primer a l'espai de treball \"Funcions\".",
|
||||
"To select toolkits here, add them to the \"Tools\" workspace first.": "Per seleccionar kits d'eines aquí, afegeix-los primer a l'espai de treball \"Eines\".",
|
||||
"Today": "Avui",
|
||||
|
@ -134,8 +134,10 @@
|
||||
"Continue Response": "",
|
||||
"Continue with {{provider}}": "",
|
||||
"Controls": "",
|
||||
"Copied": "",
|
||||
"Copied shared chat URL to clipboard!": "",
|
||||
"Copy": "",
|
||||
"Copy Code": "",
|
||||
"Copy last code block": "Kopyaha ang katapusang bloke sa code",
|
||||
"Copy last response": "Kopyaha ang kataposang tubag",
|
||||
"Copy Link": "",
|
||||
@ -312,6 +314,7 @@
|
||||
"Google PSE API Key": "",
|
||||
"Google PSE Engine Id": "",
|
||||
"h:mm a": "",
|
||||
"Haptic Feedback": "",
|
||||
"has no conversations.": "",
|
||||
"Hello, {{name}}": "Maayong buntag, {{name}}",
|
||||
"Help": "",
|
||||
@ -416,6 +419,7 @@
|
||||
"Note: If you set a minimum score, the search will only return documents with a score greater than or equal to the minimum score.": "",
|
||||
"Notifications": "Mga pahibalo sa desktop",
|
||||
"November": "",
|
||||
"num_gpu (Ollama)": "",
|
||||
"num_thread (Ollama)": "",
|
||||
"OAuth ID": "",
|
||||
"October": "",
|
||||
@ -499,6 +503,7 @@
|
||||
"Rosé Pine": "Rosé Pine",
|
||||
"Rosé Pine Dawn": "Aube Pine Rosé",
|
||||
"RTL": "",
|
||||
"Run": "",
|
||||
"Run Llama 2, Code Llama, and other models. Customize and create your own.": "",
|
||||
"Running": "",
|
||||
"Save": "Tipigi",
|
||||
|
@ -134,8 +134,10 @@
|
||||
"Continue Response": "Antwort fortsetzen",
|
||||
"Continue with {{provider}}": "Mit {{provider}} fortfahren",
|
||||
"Controls": "",
|
||||
"Copied": "",
|
||||
"Copied shared chat URL to clipboard!": "Freigabelink in die Zwischenablage kopiert!",
|
||||
"Copy": "Kopieren",
|
||||
"Copy Code": "",
|
||||
"Copy last code block": "Letzten Codeblock kopieren",
|
||||
"Copy last response": "Letzte Antwort kopieren",
|
||||
"Copy Link": "Link kopieren",
|
||||
@ -312,6 +314,7 @@
|
||||
"Google PSE API Key": "Google PSE-API-Schlüssel",
|
||||
"Google PSE Engine Id": "Google PSE-Engine-ID",
|
||||
"h:mm a": "h:mm a",
|
||||
"Haptic Feedback": "",
|
||||
"has no conversations.": "hat keine Unterhaltungen.",
|
||||
"Hello, {{name}}": "Hallo, {{name}}",
|
||||
"Help": "Hilfe",
|
||||
@ -416,6 +419,7 @@
|
||||
"Note: If you set a minimum score, the search will only return documents with a score greater than or equal to the minimum score.": "Hinweis: Wenn Sie eine Mindestpunktzahl festlegen, werden in der Suche nur Dokumente mit einer Punktzahl größer oder gleich der Mindestpunktzahl zurückgegeben.",
|
||||
"Notifications": "Benachrichtigungen",
|
||||
"November": "November",
|
||||
"num_gpu (Ollama)": "",
|
||||
"num_thread (Ollama)": "num_thread (Ollama)",
|
||||
"OAuth ID": "OAuth-ID",
|
||||
"October": "Oktober",
|
||||
@ -499,6 +503,7 @@
|
||||
"Rosé Pine": "Rosé Pine",
|
||||
"Rosé Pine Dawn": "Rosé Pine Dawn",
|
||||
"RTL": "RTL",
|
||||
"Run": "",
|
||||
"Run Llama 2, Code Llama, and other models. Customize and create your own.": "",
|
||||
"Running": "Läuft",
|
||||
"Save": "Speichern",
|
||||
|
@ -134,8 +134,10 @@
|
||||
"Continue Response": "",
|
||||
"Continue with {{provider}}": "",
|
||||
"Controls": "",
|
||||
"Copied": "",
|
||||
"Copied shared chat URL to clipboard!": "",
|
||||
"Copy": "",
|
||||
"Copy Code": "",
|
||||
"Copy last code block": "Copy last code block",
|
||||
"Copy last response": "Copy last response",
|
||||
"Copy Link": "",
|
||||
@ -312,6 +314,7 @@
|
||||
"Google PSE API Key": "",
|
||||
"Google PSE Engine Id": "",
|
||||
"h:mm a": "",
|
||||
"Haptic Feedback": "",
|
||||
"has no conversations.": "",
|
||||
"Hello, {{name}}": "Much helo, {{name}}",
|
||||
"Help": "",
|
||||
@ -416,6 +419,7 @@
|
||||
"Note: If you set a minimum score, the search will only return documents with a score greater than or equal to the minimum score.": "",
|
||||
"Notifications": "Notifications",
|
||||
"November": "",
|
||||
"num_gpu (Ollama)": "",
|
||||
"num_thread (Ollama)": "",
|
||||
"OAuth ID": "",
|
||||
"October": "",
|
||||
@ -499,6 +503,7 @@
|
||||
"Rosé Pine": "Rosé Pine",
|
||||
"Rosé Pine Dawn": "Rosé Pine Dawn",
|
||||
"RTL": "",
|
||||
"Run": "",
|
||||
"Run Llama 2, Code Llama, and other models. Customize and create your own.": "",
|
||||
"Running": "",
|
||||
"Save": "Save much wow",
|
||||
|
@ -134,8 +134,10 @@
|
||||
"Continue Response": "",
|
||||
"Continue with {{provider}}": "",
|
||||
"Controls": "",
|
||||
"Copied": "",
|
||||
"Copied shared chat URL to clipboard!": "",
|
||||
"Copy": "",
|
||||
"Copy Code": "",
|
||||
"Copy last code block": "",
|
||||
"Copy last response": "",
|
||||
"Copy Link": "",
|
||||
@ -312,6 +314,7 @@
|
||||
"Google PSE API Key": "",
|
||||
"Google PSE Engine Id": "",
|
||||
"h:mm a": "",
|
||||
"Haptic Feedback": "",
|
||||
"has no conversations.": "",
|
||||
"Hello, {{name}}": "",
|
||||
"Help": "",
|
||||
@ -416,6 +419,7 @@
|
||||
"Note: If you set a minimum score, the search will only return documents with a score greater than or equal to the minimum score.": "",
|
||||
"Notifications": "",
|
||||
"November": "",
|
||||
"num_gpu (Ollama)": "",
|
||||
"num_thread (Ollama)": "",
|
||||
"OAuth ID": "",
|
||||
"October": "",
|
||||
@ -499,6 +503,7 @@
|
||||
"Rosé Pine": "",
|
||||
"Rosé Pine Dawn": "",
|
||||
"RTL": "",
|
||||
"Run": "",
|
||||
"Run Llama 2, Code Llama, and other models. Customize and create your own.": "",
|
||||
"Running": "",
|
||||
"Save": "",
|
||||
|
@ -134,8 +134,10 @@
|
||||
"Continue Response": "",
|
||||
"Continue with {{provider}}": "",
|
||||
"Controls": "",
|
||||
"Copied": "",
|
||||
"Copied shared chat URL to clipboard!": "",
|
||||
"Copy": "",
|
||||
"Copy Code": "",
|
||||
"Copy last code block": "",
|
||||
"Copy last response": "",
|
||||
"Copy Link": "",
|
||||
@ -312,6 +314,7 @@
|
||||
"Google PSE API Key": "",
|
||||
"Google PSE Engine Id": "",
|
||||
"h:mm a": "",
|
||||
"Haptic Feedback": "",
|
||||
"has no conversations.": "",
|
||||
"Hello, {{name}}": "",
|
||||
"Help": "",
|
||||
@ -416,6 +419,7 @@
|
||||
"Note: If you set a minimum score, the search will only return documents with a score greater than or equal to the minimum score.": "",
|
||||
"Notifications": "",
|
||||
"November": "",
|
||||
"num_gpu (Ollama)": "",
|
||||
"num_thread (Ollama)": "",
|
||||
"OAuth ID": "",
|
||||
"October": "",
|
||||
@ -499,6 +503,7 @@
|
||||
"Rosé Pine": "",
|
||||
"Rosé Pine Dawn": "",
|
||||
"RTL": "",
|
||||
"Run": "",
|
||||
"Run Llama 2, Code Llama, and other models. Customize and create your own.": "",
|
||||
"Running": "",
|
||||
"Save": "",
|
||||
|
@ -134,8 +134,10 @@
|
||||
"Continue Response": "Continuar Respuesta",
|
||||
"Continue with {{provider}}": "Continuar con {{provider}}",
|
||||
"Controls": "",
|
||||
"Copied": "",
|
||||
"Copied shared chat URL to clipboard!": "¡URL de chat compartido copiado al portapapeles!",
|
||||
"Copy": "Copiar",
|
||||
"Copy Code": "",
|
||||
"Copy last code block": "Copia el último bloque de código",
|
||||
"Copy last response": "Copia la última respuesta",
|
||||
"Copy Link": "Copiar enlace",
|
||||
@ -312,6 +314,7 @@
|
||||
"Google PSE API Key": "Clave API de Google PSE",
|
||||
"Google PSE Engine Id": "ID del motor PSE de Google",
|
||||
"h:mm a": "h:mm a",
|
||||
"Haptic Feedback": "",
|
||||
"has no conversations.": "no tiene conversaciones.",
|
||||
"Hello, {{name}}": "Hola, {{name}}",
|
||||
"Help": "Ayuda",
|
||||
@ -416,6 +419,7 @@
|
||||
"Note: If you set a minimum score, the search will only return documents with a score greater than or equal to the minimum score.": "Nota: Si estableces una puntuación mínima, la búsqueda sólo devolverá documentos con una puntuación mayor o igual a la puntuación mínima.",
|
||||
"Notifications": "Notificaciones",
|
||||
"November": "Noviembre",
|
||||
"num_gpu (Ollama)": "",
|
||||
"num_thread (Ollama)": "num_thread (Ollama)",
|
||||
"OAuth ID": "OAuth ID",
|
||||
"October": "Octubre",
|
||||
@ -499,6 +503,7 @@
|
||||
"Rosé Pine": "Rosé Pine",
|
||||
"Rosé Pine Dawn": "Rosé Pine Dawn",
|
||||
"RTL": "RTL",
|
||||
"Run": "",
|
||||
"Run Llama 2, Code Llama, and other models. Customize and create your own.": "",
|
||||
"Running": "Ejecutando",
|
||||
"Save": "Guardar",
|
||||
|
@ -134,8 +134,10 @@
|
||||
"Continue Response": "ادامه پاسخ",
|
||||
"Continue with {{provider}}": "",
|
||||
"Controls": "",
|
||||
"Copied": "",
|
||||
"Copied shared chat URL to clipboard!": "URL چت به کلیپ بورد کپی شد!",
|
||||
"Copy": "کپی",
|
||||
"Copy Code": "",
|
||||
"Copy last code block": "کپی آخرین بلوک کد",
|
||||
"Copy last response": "کپی آخرین پاسخ",
|
||||
"Copy Link": "کپی لینک",
|
||||
@ -312,6 +314,7 @@
|
||||
"Google PSE API Key": "گوگل PSE API کلید",
|
||||
"Google PSE Engine Id": "شناسه موتور PSE گوگل",
|
||||
"h:mm a": "h:mm a",
|
||||
"Haptic Feedback": "",
|
||||
"has no conversations.": "ندارد.",
|
||||
"Hello, {{name}}": "سلام، {{name}}",
|
||||
"Help": "کمک",
|
||||
@ -416,6 +419,7 @@
|
||||
"Note: If you set a minimum score, the search will only return documents with a score greater than or equal to the minimum score.": "توجه: اگر حداقل نمره را تعیین کنید، جستجو تنها اسنادی را با نمره بیشتر یا برابر با حداقل نمره باز می گرداند.",
|
||||
"Notifications": "اعلان",
|
||||
"November": "نوامبر",
|
||||
"num_gpu (Ollama)": "",
|
||||
"num_thread (Ollama)": "num_thread (اولاما)",
|
||||
"OAuth ID": "",
|
||||
"October": "اکتبر",
|
||||
@ -499,6 +503,7 @@
|
||||
"Rosé Pine": "Rosé Pine",
|
||||
"Rosé Pine Dawn": "Rosé Pine Dawn",
|
||||
"RTL": "RTL",
|
||||
"Run": "",
|
||||
"Run Llama 2, Code Llama, and other models. Customize and create your own.": "",
|
||||
"Running": "",
|
||||
"Save": "ذخیره",
|
||||
|
@ -134,8 +134,10 @@
|
||||
"Continue Response": "Jatka vastausta",
|
||||
"Continue with {{provider}}": "",
|
||||
"Controls": "",
|
||||
"Copied": "",
|
||||
"Copied shared chat URL to clipboard!": "Jaettu keskustelulinkki kopioitu leikepöydälle!",
|
||||
"Copy": "Kopioi",
|
||||
"Copy Code": "",
|
||||
"Copy last code block": "Kopioi viimeisin koodilohko",
|
||||
"Copy last response": "Kopioi viimeisin vastaus",
|
||||
"Copy Link": "Kopioi linkki",
|
||||
@ -312,6 +314,7 @@
|
||||
"Google PSE API Key": "Google PSE API -avain",
|
||||
"Google PSE Engine Id": "Google PSE -moduulin tunnus",
|
||||
"h:mm a": "h:mm a",
|
||||
"Haptic Feedback": "",
|
||||
"has no conversations.": "ei ole keskusteluja.",
|
||||
"Hello, {{name}}": "Terve, {{name}}",
|
||||
"Help": "Apua",
|
||||
@ -416,6 +419,7 @@
|
||||
"Note: If you set a minimum score, the search will only return documents with a score greater than or equal to the minimum score.": "Huom: Jos asetat vähimmäispisteet, haku palauttaa vain asiakirjat, joiden pisteet ovat suurempia tai yhtä suuria kuin vähimmäispistemäärä.",
|
||||
"Notifications": "Ilmoitukset",
|
||||
"November": "marraskuu",
|
||||
"num_gpu (Ollama)": "",
|
||||
"num_thread (Ollama)": "num_thread (Ollama)",
|
||||
"OAuth ID": "",
|
||||
"October": "lokakuu",
|
||||
@ -499,6 +503,7 @@
|
||||
"Rosé Pine": "Rosee-mänty",
|
||||
"Rosé Pine Dawn": "Aamuinen Rosee-mänty",
|
||||
"RTL": "RTL",
|
||||
"Run": "",
|
||||
"Run Llama 2, Code Llama, and other models. Customize and create your own.": "",
|
||||
"Running": "",
|
||||
"Save": "Tallenna",
|
||||
|
@ -134,8 +134,10 @@
|
||||
"Continue Response": "Continuer la réponse",
|
||||
"Continue with {{provider}}": "Continuer avec {{provider}}",
|
||||
"Controls": "",
|
||||
"Copied": "",
|
||||
"Copied shared chat URL to clipboard!": "URL du chat copiée dans le presse-papiers\u00a0!",
|
||||
"Copy": "Copie",
|
||||
"Copy Code": "",
|
||||
"Copy last code block": "Copier le dernier bloc de code",
|
||||
"Copy last response": "Copier la dernière réponse",
|
||||
"Copy Link": "Copier le lien",
|
||||
@ -312,6 +314,7 @@
|
||||
"Google PSE API Key": "Clé API Google PSE",
|
||||
"Google PSE Engine Id": "ID du moteur de recherche personnalisé de Google",
|
||||
"h:mm a": "h:mm a",
|
||||
"Haptic Feedback": "",
|
||||
"has no conversations.": "n'a aucune conversation.",
|
||||
"Hello, {{name}}": "Bonjour, {{name}}.",
|
||||
"Help": "Aide",
|
||||
@ -416,6 +419,7 @@
|
||||
"Note: If you set a minimum score, the search will only return documents with a score greater than or equal to the minimum score.": "Note : Si vous définissez un score minimum, seuls les documents ayant un score supérieur ou égal à ce score minimum seront retournés par la recherche.",
|
||||
"Notifications": "Notifications",
|
||||
"November": "Novembre",
|
||||
"num_gpu (Ollama)": "",
|
||||
"num_thread (Ollama)": "num_thread (Ollama)",
|
||||
"OAuth ID": "ID OAuth",
|
||||
"October": "Octobre",
|
||||
@ -499,6 +503,7 @@
|
||||
"Rosé Pine": "Pin rosé",
|
||||
"Rosé Pine Dawn": "Aube de Pin Rosé",
|
||||
"RTL": "RTL",
|
||||
"Run": "",
|
||||
"Run Llama 2, Code Llama, and other models. Customize and create your own.": "",
|
||||
"Running": "Courir",
|
||||
"Save": "Enregistrer",
|
||||
|
@ -134,8 +134,10 @@
|
||||
"Continue Response": "Continuer la réponse",
|
||||
"Continue with {{provider}}": "Continuer avec {{provider}}",
|
||||
"Controls": "",
|
||||
"Copied": "",
|
||||
"Copied shared chat URL to clipboard!": "URL du chat copiée dans le presse-papiers\u00a0!",
|
||||
"Copy": "Copie",
|
||||
"Copy Code": "",
|
||||
"Copy last code block": "Copier le dernier bloc de code",
|
||||
"Copy last response": "Copier la dernière réponse",
|
||||
"Copy Link": "Copier le lien",
|
||||
@ -312,6 +314,7 @@
|
||||
"Google PSE API Key": "Clé API Google PSE",
|
||||
"Google PSE Engine Id": "ID du moteur de recherche personnalisé de Google",
|
||||
"h:mm a": "h:mm a",
|
||||
"Haptic Feedback": "",
|
||||
"has no conversations.": "n'a aucune conversation.",
|
||||
"Hello, {{name}}": "Bonjour, {{name}}.",
|
||||
"Help": "Aide",
|
||||
@ -416,6 +419,7 @@
|
||||
"Note: If you set a minimum score, the search will only return documents with a score greater than or equal to the minimum score.": "Note : Si vous définissez un score minimum, seuls les documents ayant un score supérieur ou égal à ce score minimum seront retournés par la recherche.",
|
||||
"Notifications": "Notifications",
|
||||
"November": "Novembre",
|
||||
"num_gpu (Ollama)": "",
|
||||
"num_thread (Ollama)": "num_thread (Ollama)",
|
||||
"OAuth ID": "ID OAuth",
|
||||
"October": "Octobre",
|
||||
@ -499,6 +503,7 @@
|
||||
"Rosé Pine": "Pin rosé",
|
||||
"Rosé Pine Dawn": "Aube de Pin Rosé",
|
||||
"RTL": "RTL",
|
||||
"Run": "",
|
||||
"Run Llama 2, Code Llama, and other models. Customize and create your own.": "",
|
||||
"Running": "Courir",
|
||||
"Save": "Enregistrer",
|
||||
|
@ -134,8 +134,10 @@
|
||||
"Continue Response": "המשך תגובה",
|
||||
"Continue with {{provider}}": "",
|
||||
"Controls": "",
|
||||
"Copied": "",
|
||||
"Copied shared chat URL to clipboard!": "העתקת כתובת URL של צ'אט משותף ללוח!",
|
||||
"Copy": "העתק",
|
||||
"Copy Code": "",
|
||||
"Copy last code block": "העתק את בלוק הקוד האחרון",
|
||||
"Copy last response": "העתק את התגובה האחרונה",
|
||||
"Copy Link": "העתק קישור",
|
||||
@ -312,6 +314,7 @@
|
||||
"Google PSE API Key": "מפתח API של Google PSE",
|
||||
"Google PSE Engine Id": "מזהה מנוע PSE של Google",
|
||||
"h:mm a": "h:mm a",
|
||||
"Haptic Feedback": "",
|
||||
"has no conversations.": "אין שיחות.",
|
||||
"Hello, {{name}}": "שלום, {{name}}",
|
||||
"Help": "עזרה",
|
||||
@ -416,6 +419,7 @@
|
||||
"Note: If you set a minimum score, the search will only return documents with a score greater than or equal to the minimum score.": "הערה: אם תקבע ציון מינימלי, החיפוש יחזיר רק מסמכים עם ציון שגבוה או שווה לציון המינימלי.",
|
||||
"Notifications": "התראות",
|
||||
"November": "נובמבר",
|
||||
"num_gpu (Ollama)": "",
|
||||
"num_thread (Ollama)": "num_thread (Ollama)",
|
||||
"OAuth ID": "",
|
||||
"October": "אוקטובר",
|
||||
@ -499,6 +503,7 @@
|
||||
"Rosé Pine": "Rosé Pine",
|
||||
"Rosé Pine Dawn": "Rosé Pine Dawn",
|
||||
"RTL": "RTL",
|
||||
"Run": "",
|
||||
"Run Llama 2, Code Llama, and other models. Customize and create your own.": "",
|
||||
"Running": "",
|
||||
"Save": "שמור",
|
||||
|
@ -134,8 +134,10 @@
|
||||
"Continue Response": "प्रतिक्रिया जारी रखें",
|
||||
"Continue with {{provider}}": "",
|
||||
"Controls": "",
|
||||
"Copied": "",
|
||||
"Copied shared chat URL to clipboard!": "साझा चैट URL को क्लिपबोर्ड पर कॉपी किया गया!",
|
||||
"Copy": "कॉपी",
|
||||
"Copy Code": "",
|
||||
"Copy last code block": "अंतिम कोड ब्लॉक कॉपी करें",
|
||||
"Copy last response": "अंतिम प्रतिक्रिया कॉपी करें",
|
||||
"Copy Link": "लिंक को कॉपी करें",
|
||||
@ -312,6 +314,7 @@
|
||||
"Google PSE API Key": "Google PSE API कुंजी",
|
||||
"Google PSE Engine Id": "Google PSE इंजन आईडी",
|
||||
"h:mm a": "h:mm a",
|
||||
"Haptic Feedback": "",
|
||||
"has no conversations.": "कोई बातचीत नहीं है",
|
||||
"Hello, {{name}}": "नमस्ते, {{name}}",
|
||||
"Help": "मदद",
|
||||
@ -416,6 +419,7 @@
|
||||
"Note: If you set a minimum score, the search will only return documents with a score greater than or equal to the minimum score.": "ध्यान दें: यदि आप न्यूनतम स्कोर निर्धारित करते हैं, तो खोज केवल न्यूनतम स्कोर से अधिक या उसके बराबर स्कोर वाले दस्तावेज़ वापस लाएगी।",
|
||||
"Notifications": "सूचनाएं",
|
||||
"November": "नवंबर",
|
||||
"num_gpu (Ollama)": "",
|
||||
"num_thread (Ollama)": "num_thread (ओलामा)",
|
||||
"OAuth ID": "",
|
||||
"October": "अक्टूबर",
|
||||
@ -499,6 +503,7 @@
|
||||
"Rosé Pine": "रोसे पिन",
|
||||
"Rosé Pine Dawn": "रोसे पिन डेन",
|
||||
"RTL": "RTL",
|
||||
"Run": "",
|
||||
"Run Llama 2, Code Llama, and other models. Customize and create your own.": "",
|
||||
"Running": "",
|
||||
"Save": "सहेजें",
|
||||
|
@ -134,8 +134,10 @@
|
||||
"Continue Response": "Nastavi odgovor",
|
||||
"Continue with {{provider}}": "",
|
||||
"Controls": "",
|
||||
"Copied": "",
|
||||
"Copied shared chat URL to clipboard!": "URL dijeljenog razgovora kopiran u međuspremnik!",
|
||||
"Copy": "Kopiraj",
|
||||
"Copy Code": "",
|
||||
"Copy last code block": "Kopiraj zadnji blok koda",
|
||||
"Copy last response": "Kopiraj zadnji odgovor",
|
||||
"Copy Link": "Kopiraj vezu",
|
||||
@ -312,6 +314,7 @@
|
||||
"Google PSE API Key": "Google PSE API ključ",
|
||||
"Google PSE Engine Id": "ID Google PSE modula",
|
||||
"h:mm a": "h:mm a",
|
||||
"Haptic Feedback": "",
|
||||
"has no conversations.": "nema razgovora.",
|
||||
"Hello, {{name}}": "Bok, {{name}}",
|
||||
"Help": "Pomoć",
|
||||
@ -416,6 +419,7 @@
|
||||
"Note: If you set a minimum score, the search will only return documents with a score greater than or equal to the minimum score.": "Napomena: Ako postavite minimalnu ocjenu, pretraga će vratiti samo dokumente s ocjenom većom ili jednakom minimalnoj ocjeni.",
|
||||
"Notifications": "Obavijesti",
|
||||
"November": "Studeni",
|
||||
"num_gpu (Ollama)": "",
|
||||
"num_thread (Ollama)": "num_thread (Ollama)",
|
||||
"OAuth ID": "",
|
||||
"October": "Listopad",
|
||||
@ -499,6 +503,7 @@
|
||||
"Rosé Pine": "Rosé Pine",
|
||||
"Rosé Pine Dawn": "Rosé Pine Dawn",
|
||||
"RTL": "RTL",
|
||||
"Run": "",
|
||||
"Run Llama 2, Code Llama, and other models. Customize and create your own.": "",
|
||||
"Running": "Pokrenuto",
|
||||
"Save": "Spremi",
|
||||
|
@ -134,8 +134,10 @@
|
||||
"Continue Response": "Lanjutkan Tanggapan",
|
||||
"Continue with {{provider}}": "Lanjutkan dengan {{penyedia}}",
|
||||
"Controls": "",
|
||||
"Copied": "",
|
||||
"Copied shared chat URL to clipboard!": "Menyalin URL obrolan bersama ke papan klip!",
|
||||
"Copy": "Menyalin",
|
||||
"Copy Code": "",
|
||||
"Copy last code block": "Salin blok kode terakhir",
|
||||
"Copy last response": "Salin tanggapan terakhir",
|
||||
"Copy Link": "Salin Tautan",
|
||||
@ -312,6 +314,7 @@
|
||||
"Google PSE API Key": "Kunci API Google PSE",
|
||||
"Google PSE Engine Id": "Id Mesin Google PSE",
|
||||
"h:mm a": "h:mm a",
|
||||
"Haptic Feedback": "",
|
||||
"has no conversations.": "tidak memiliki percakapan.",
|
||||
"Hello, {{name}}": "Halo, {{name}}",
|
||||
"Help": "Bantuan",
|
||||
@ -416,6 +419,7 @@
|
||||
"Note: If you set a minimum score, the search will only return documents with a score greater than or equal to the minimum score.": "Catatan: Jika Anda menetapkan skor minimum, pencarian hanya akan mengembalikan dokumen dengan skor yang lebih besar atau sama dengan skor minimum.",
|
||||
"Notifications": "Pemberitahuan",
|
||||
"November": "November",
|
||||
"num_gpu (Ollama)": "",
|
||||
"num_thread (Ollama)": "num_thread (Ollama)",
|
||||
"OAuth ID": "ID OAuth",
|
||||
"October": "Oktober",
|
||||
@ -499,6 +503,7 @@
|
||||
"Rosé Pine": "Pinus Rosé",
|
||||
"Rosé Pine Dawn": "Rosé Pine Fajar",
|
||||
"RTL": "RTL",
|
||||
"Run": "",
|
||||
"Run Llama 2, Code Llama, and other models. Customize and create your own.": "",
|
||||
"Running": "Berjalan",
|
||||
"Save": "Simpan",
|
||||
|
@ -134,8 +134,10 @@
|
||||
"Continue Response": "Continua risposta",
|
||||
"Continue with {{provider}}": "",
|
||||
"Controls": "",
|
||||
"Copied": "",
|
||||
"Copied shared chat URL to clipboard!": "URL della chat condivisa copiato negli appunti!",
|
||||
"Copy": "Copia",
|
||||
"Copy Code": "",
|
||||
"Copy last code block": "Copia ultimo blocco di codice",
|
||||
"Copy last response": "Copia ultima risposta",
|
||||
"Copy Link": "Copia link",
|
||||
@ -312,6 +314,7 @@
|
||||
"Google PSE API Key": "Chiave API PSE di Google",
|
||||
"Google PSE Engine Id": "ID motore PSE di Google",
|
||||
"h:mm a": "h:mm a",
|
||||
"Haptic Feedback": "",
|
||||
"has no conversations.": "non ha conversazioni.",
|
||||
"Hello, {{name}}": "Ciao, {{name}}",
|
||||
"Help": "Aiuto",
|
||||
@ -416,6 +419,7 @@
|
||||
"Note: If you set a minimum score, the search will only return documents with a score greater than or equal to the minimum score.": "Nota: se imposti un punteggio minimo, la ricerca restituirà solo i documenti con un punteggio maggiore o uguale al punteggio minimo.",
|
||||
"Notifications": "Notifiche desktop",
|
||||
"November": "Novembre",
|
||||
"num_gpu (Ollama)": "",
|
||||
"num_thread (Ollama)": "num_thread (Ollama)",
|
||||
"OAuth ID": "",
|
||||
"October": "Ottobre",
|
||||
@ -499,6 +503,7 @@
|
||||
"Rosé Pine": "Rosé Pine",
|
||||
"Rosé Pine Dawn": "Rosé Pine Dawn",
|
||||
"RTL": "RTL",
|
||||
"Run": "",
|
||||
"Run Llama 2, Code Llama, and other models. Customize and create your own.": "",
|
||||
"Running": "",
|
||||
"Save": "Salva",
|
||||
|
@ -134,8 +134,10 @@
|
||||
"Continue Response": "続きの応答",
|
||||
"Continue with {{provider}}": "",
|
||||
"Controls": "",
|
||||
"Copied": "",
|
||||
"Copied shared chat URL to clipboard!": "共有チャットURLをクリップボードにコピーしました!",
|
||||
"Copy": "コピー",
|
||||
"Copy Code": "",
|
||||
"Copy last code block": "最後のコードブロックをコピー",
|
||||
"Copy last response": "最後の応答をコピー",
|
||||
"Copy Link": "リンクをコピー",
|
||||
@ -312,6 +314,7 @@
|
||||
"Google PSE API Key": "Google PSE APIキー",
|
||||
"Google PSE Engine Id": "Google PSE エンジン ID",
|
||||
"h:mm a": "h:mm a",
|
||||
"Haptic Feedback": "",
|
||||
"has no conversations.": "対話はありません。",
|
||||
"Hello, {{name}}": "こんにちは、{{name}} さん",
|
||||
"Help": "ヘルプ",
|
||||
@ -416,6 +419,7 @@
|
||||
"Note: If you set a minimum score, the search will only return documents with a score greater than or equal to the minimum score.": "注意:最小スコアを設定した場合、検索は最小スコア以上のスコアを持つドキュメントのみを返します。",
|
||||
"Notifications": "デスクトップ通知",
|
||||
"November": "11月",
|
||||
"num_gpu (Ollama)": "",
|
||||
"num_thread (Ollama)": "num_thread(オラマ)",
|
||||
"OAuth ID": "",
|
||||
"October": "10月",
|
||||
@ -499,6 +503,7 @@
|
||||
"Rosé Pine": "Rosé Pine",
|
||||
"Rosé Pine Dawn": "Rosé Pine Dawn",
|
||||
"RTL": "RTL",
|
||||
"Run": "",
|
||||
"Run Llama 2, Code Llama, and other models. Customize and create your own.": "",
|
||||
"Running": "",
|
||||
"Save": "保存",
|
||||
|
@ -134,8 +134,10 @@
|
||||
"Continue Response": "პასუხის გაგრძელება",
|
||||
"Continue with {{provider}}": "",
|
||||
"Controls": "",
|
||||
"Copied": "",
|
||||
"Copied shared chat URL to clipboard!": "ყავს ჩათის URL-ი კლიპბორდში!",
|
||||
"Copy": "კოპირება",
|
||||
"Copy Code": "",
|
||||
"Copy last code block": "ბოლო ბლოკის კოპირება",
|
||||
"Copy last response": "ბოლო პასუხის კოპირება",
|
||||
"Copy Link": "კოპირება",
|
||||
@ -312,6 +314,7 @@
|
||||
"Google PSE API Key": "Google PSE API გასაღები",
|
||||
"Google PSE Engine Id": "Google PSE ძრავის Id",
|
||||
"h:mm a": "h:mm a",
|
||||
"Haptic Feedback": "",
|
||||
"has no conversations.": "არა უფლება ჩაწერა",
|
||||
"Hello, {{name}}": "გამარჯობა, {{name}}",
|
||||
"Help": "დახმარება",
|
||||
@ -416,6 +419,7 @@
|
||||
"Note: If you set a minimum score, the search will only return documents with a score greater than or equal to the minimum score.": "შენიშვნა: თუ თქვენ დააყენებთ მინიმალურ ქულას, ძებნა დააბრუნებს მხოლოდ დოკუმენტებს მინიმალური ქულის მეტი ან ტოლი ქულით.",
|
||||
"Notifications": "შეტყობინება",
|
||||
"November": "ნოემბერი",
|
||||
"num_gpu (Ollama)": "",
|
||||
"num_thread (Ollama)": "num_thread (ოლამა)",
|
||||
"OAuth ID": "",
|
||||
"October": "ოქტომბერი",
|
||||
@ -499,6 +503,7 @@
|
||||
"Rosé Pine": "ვარდისფერი ფიჭვის ხე",
|
||||
"Rosé Pine Dawn": "ვარდისფერი ფიჭვის გარიჟრაჟი",
|
||||
"RTL": "RTL",
|
||||
"Run": "",
|
||||
"Run Llama 2, Code Llama, and other models. Customize and create your own.": "",
|
||||
"Running": "",
|
||||
"Save": "შენახვა",
|
||||
|
@ -134,8 +134,10 @@
|
||||
"Continue Response": "대화 계속",
|
||||
"Continue with {{provider}}": "",
|
||||
"Controls": "",
|
||||
"Copied": "",
|
||||
"Copied shared chat URL to clipboard!": "공유 채팅 URL이 클립보드에 복사되었습니다!",
|
||||
"Copy": "복사",
|
||||
"Copy Code": "",
|
||||
"Copy last code block": "마지막 코드 블록 복사",
|
||||
"Copy last response": "마지막 응답 복사",
|
||||
"Copy Link": "링크 복사",
|
||||
@ -312,6 +314,7 @@
|
||||
"Google PSE API Key": "Google PSE API 키",
|
||||
"Google PSE Engine Id": "Google PSE 엔진 ID",
|
||||
"h:mm a": "h:mm a",
|
||||
"Haptic Feedback": "",
|
||||
"has no conversations.": "대화가 없습니다.",
|
||||
"Hello, {{name}}": "안녕하세요, {{name}}",
|
||||
"Help": "도움말",
|
||||
@ -416,6 +419,7 @@
|
||||
"Note: If you set a minimum score, the search will only return documents with a score greater than or equal to the minimum score.": "참고: 최소 점수를 설정하면, 검색 결과로 최소 점수 이상의 점수를 가진 문서만 반환합니다.",
|
||||
"Notifications": "알림",
|
||||
"November": "11월",
|
||||
"num_gpu (Ollama)": "",
|
||||
"num_thread (Ollama)": "num_thread (올라마)",
|
||||
"OAuth ID": "",
|
||||
"October": "10월",
|
||||
@ -499,6 +503,7 @@
|
||||
"Rosé Pine": "Rosé Pine",
|
||||
"Rosé Pine Dawn": "Rosé Pine Dawn",
|
||||
"RTL": "RTL",
|
||||
"Run": "",
|
||||
"Run Llama 2, Code Llama, and other models. Customize and create your own.": "",
|
||||
"Running": "실행 중",
|
||||
"Save": "저장",
|
||||
|
@ -1,52 +1,52 @@
|
||||
{
|
||||
"'s', 'm', 'h', 'd', 'w' or '-1' for no expiration.": "'s', 'm', 'h', 'd', 'w' arba '-1' kad neišteitų iš galiojimo.",
|
||||
"(Beta)": "(Beta)",
|
||||
"(e.g. `sh webui.sh --api --api-auth username_password`)": "",
|
||||
"(e.g. `sh webui.sh --api --api-auth username_password`)": "(pvz. `sh webui.sh --api --api-auth username_password`)",
|
||||
"(e.g. `sh webui.sh --api`)": "(pvz. `sh webui.sh --api`)",
|
||||
"(latest)": "(naujausias)",
|
||||
"{{ models }}": "",
|
||||
"{{ owner }}: You cannot delete a base model": "",
|
||||
"{{ models }}": "{{ models }}",
|
||||
"{{ owner }}: You cannot delete a base model": "{{ owner }}: Negalite ištrinti bazinio modelio",
|
||||
"{{modelName}} is thinking...": "{{modelName}} mąsto...",
|
||||
"{{user}}'s Chats": "{{user}} susirašinėjimai",
|
||||
"{{webUIName}} Backend Required": "{{webUIName}} būtinas serveris",
|
||||
"A task model is used when performing tasks such as generating titles for chats and web search queries": "",
|
||||
"A task model is used when performing tasks such as generating titles for chats and web search queries": "Užduočių modelis naudojamas pokalbių pavadinimų ir paieškos užklausų generavimui.",
|
||||
"a user": "naudotojas",
|
||||
"About": "Apie",
|
||||
"Account": "Paskyra",
|
||||
"Account Activation Pending": "",
|
||||
"Account Activation Pending": "Laukiama paskyros patvirtinimo",
|
||||
"Accurate information": "Tiksli informacija",
|
||||
"Actions": "",
|
||||
"Active Users": "",
|
||||
"Add": "",
|
||||
"Add a model id": "",
|
||||
"Add a short description about what this model does": "",
|
||||
"Actions": "Veiksmai",
|
||||
"Active Users": "Aktyvūs naudotojai",
|
||||
"Add": "Pridėti",
|
||||
"Add a model id": "Pridėti modelio ID",
|
||||
"Add a short description about what this model does": "Pridėti trumpą modelio aprašymą",
|
||||
"Add a short title for this prompt": "Pridėti trumpą šios užklausos pavadinimą",
|
||||
"Add a tag": "Pridėti žymą",
|
||||
"Add custom prompt": "Pridėti užklausos šabloną",
|
||||
"Add Docs": "Pridėti dokumentų",
|
||||
"Add Files": "Pridėti failus",
|
||||
"Add Memory": "",
|
||||
"Add Memory": "Pridėti atminį",
|
||||
"Add message": "Pridėti žinutę",
|
||||
"Add Model": "Pridėti modelį",
|
||||
"Add Tag": "",
|
||||
"Add Tag": "Pridėti žymą",
|
||||
"Add Tags": "Pridėti žymas",
|
||||
"Add User": "Pridėti naudotoją",
|
||||
"Adjusting these settings will apply changes universally to all users.": "Šių nustatymų pakeitimas bus pritakytas visiems naudotojams.",
|
||||
"admin": "Administratorius",
|
||||
"Admin": "",
|
||||
"Admin": "Administratorius",
|
||||
"Admin Panel": "Administratorių panelė",
|
||||
"Admin Settings": "Administratorių nustatymai",
|
||||
"Admins have access to all tools at all times; users need tools assigned per model in the workspace.": "",
|
||||
"Advanced Parameters": "Gilieji nustatymai",
|
||||
"Advanced Params": "",
|
||||
"Admins have access to all tools at all times; users need tools assigned per model in the workspace.": "Administratoriai visada turi visus įrankius. Naudotojai turi tuėti prieigą prie dokumentų per modelių nuostatas",
|
||||
"Advanced Parameters": "Pažengę nustatymai",
|
||||
"Advanced Params": "Pažengę nustatymai",
|
||||
"all": "visi",
|
||||
"All Documents": "Visi dokumentai",
|
||||
"All Users": "Visi naudotojai",
|
||||
"Allow": "Leisti",
|
||||
"Allow Chat Deletion": "Leisti pokalbių ištrynimą",
|
||||
"Allow non-local voices": "",
|
||||
"Allow User Location": "",
|
||||
"Allow Voice Interruption in Call": "",
|
||||
"Allow non-local voices": "Leisti nelokalius balsus",
|
||||
"Allow User Location": "Leisti naudotojo vietos matymą",
|
||||
"Allow Voice Interruption in Call": "Leisti pertraukimą skambučio metu",
|
||||
"alphanumeric characters and hyphens": "skaičiai, raidės ir brūkšneliai",
|
||||
"Already have an account?": "Ar jau turite paskyrą?",
|
||||
"an assistant": "assistentas",
|
||||
@ -58,40 +58,40 @@
|
||||
"API keys": "API raktai",
|
||||
"April": "Balandis",
|
||||
"Archive": "Archyvai",
|
||||
"Archive All Chats": "",
|
||||
"Archive All Chats": "Archyvuoti visus pokalbius",
|
||||
"Archived Chats": "Archyvuoti pokalbiai",
|
||||
"are allowed - Activate this command by typing": "leistina - aktyvuokite komandą rašydami",
|
||||
"Are you sure?": "Are esate tikri?",
|
||||
"Attach file": "Pridėti failą",
|
||||
"Attention to detail": "Dėmesys detalėms",
|
||||
"Audio": "Audio įrašas",
|
||||
"Audio settings updated successfully": "",
|
||||
"Audio settings updated successfully": "Audio nustatymai sėkmingai išsaugoti",
|
||||
"August": "Rugpjūtis",
|
||||
"Auto-playback response": "Automatinis atsakymo skaitymas",
|
||||
"AUTOMATIC1111 Api Auth String": "",
|
||||
"AUTOMATIC1111 Api Auth String": "AUTOMATIC1111 Api Auth String",
|
||||
"AUTOMATIC1111 Base URL": "AUTOMATIC1111 bazės nuoroda",
|
||||
"AUTOMATIC1111 Base URL is required.": "AUTOMATIC1111 bazės nuoroda reikalinga.",
|
||||
"available!": "prieinama!",
|
||||
"Back": "Atgal",
|
||||
"Bad Response": "Neteisingas atsakymas",
|
||||
"Banners": "",
|
||||
"Base Model (From)": "",
|
||||
"Batch Size (num_batch)": "",
|
||||
"Banners": "Baneriai",
|
||||
"Base Model (From)": "Bazinis modelis",
|
||||
"Batch Size (num_batch)": "Batch dydis",
|
||||
"before": "prieš",
|
||||
"Being lazy": "Būvimas tingiu",
|
||||
"Brave Search API Key": "",
|
||||
"Brave Search API Key": "Brave Search API raktas",
|
||||
"Bypass SSL verification for Websites": "Išvengti SSL patikros puslapiams",
|
||||
"Call": "",
|
||||
"Call feature is not supported when using Web STT engine": "",
|
||||
"Camera": "",
|
||||
"Call": "Skambinti",
|
||||
"Call feature is not supported when using Web STT engine": "Skambučio funkcionalumas neleidžiamas naudojant Web STT variklį",
|
||||
"Camera": "Kamera",
|
||||
"Cancel": "Atšaukti",
|
||||
"Capabilities": "",
|
||||
"Capabilities": "Gebėjimai",
|
||||
"Change Password": "Keisti slaptažodį",
|
||||
"Chat": "Pokalbis",
|
||||
"Chat Background Image": "",
|
||||
"Chat Bubble UI": "",
|
||||
"Chat Controls": "",
|
||||
"Chat direction": "",
|
||||
"Chat Background Image": "Pokalbio galinė užsklanda",
|
||||
"Chat Bubble UI": "Pokalbio burbulo sąsaja",
|
||||
"Chat Controls": "Pokalbio valdymas",
|
||||
"Chat direction": "Pokalbio linkmė",
|
||||
"Chat History": "Pokalbių istorija",
|
||||
"Chat History is off for this browser.": "Šioje naršyklėje pokalbių istorija išjungta.",
|
||||
"Chats": "Pokalbiai",
|
||||
@ -103,108 +103,110 @@
|
||||
"Chunk Params": "Blokų nustatymai",
|
||||
"Chunk Size": "Blokų dydis",
|
||||
"Citation": "Citata",
|
||||
"Clear memory": "",
|
||||
"Clear memory": "Ištrinti atmintį",
|
||||
"Click here for help.": "Paspauskite čia dėl pagalbos.",
|
||||
"Click here to": "Paspauskite čia, kad:",
|
||||
"Click here to download user import template file.": "",
|
||||
"Click here to download user import template file.": "Pasauskite čia norėdami sukurti naudotojo įkėlimo šablono rinkmeną",
|
||||
"Click here to select": "Spauskite čia norėdami pasirinkti",
|
||||
"Click here to select a csv file.": "Spauskite čia tam, kad pasirinkti csv failą",
|
||||
"Click here to select a py file.": "",
|
||||
"Click here to select a py file.": "Spauskite čia norėdami pasirinkti py failą",
|
||||
"Click here to select documents.": "Spauskite čia norėdami pasirinkti dokumentus.",
|
||||
"click here.": "paspauskite čia.",
|
||||
"Click on the user role button to change a user's role.": "Paspauskite ant naudotojo rolės mygtuko tam, kad pakeisti naudotojo rolę.",
|
||||
"Clipboard write permission denied. Please check your browser settings to grant the necessary access.": "",
|
||||
"Clone": "",
|
||||
"Clipboard write permission denied. Please check your browser settings to grant the necessary access.": "Iškarpinės naudojimas neleidžiamas naršyklės.",
|
||||
"Clone": "Klonuoti",
|
||||
"Close": "Uždaryti",
|
||||
"Code formatted successfully": "",
|
||||
"Code formatted successfully": "Kodas suformatuotas sėkmingai",
|
||||
"Collection": "Kolekcija",
|
||||
"ComfyUI": "ComfyUI",
|
||||
"ComfyUI Base URL": "ComfyUI bazės nuoroda",
|
||||
"ComfyUI Base URL is required.": "ComfyUI bazės nuoroda privaloma",
|
||||
"Command": "Command",
|
||||
"Concurrent Requests": "",
|
||||
"Confirm": "",
|
||||
"Concurrent Requests": "Kelios užklausos vienu metu",
|
||||
"Confirm": "Patvrtinti",
|
||||
"Confirm Password": "Patvirtinkite slaptažodį",
|
||||
"Confirm your action": "",
|
||||
"Confirm your action": "Patvirtinkite veiksmą",
|
||||
"Connections": "Ryšiai",
|
||||
"Contact Admin for WebUI Access": "",
|
||||
"Contact Admin for WebUI Access": "Susisiekite su administratoriumi dėl prieigos",
|
||||
"Content": "Turinys",
|
||||
"Content Extraction": "",
|
||||
"Content Extraction": "Turinio ištraukimas",
|
||||
"Context Length": "Konteksto ilgis",
|
||||
"Continue Response": "Tęsti atsakymą",
|
||||
"Continue with {{provider}}": "",
|
||||
"Controls": "",
|
||||
"Continue with {{provider}}": "Tęstti su {{tiekėju}}",
|
||||
"Controls": "Valdymas",
|
||||
"Copied": "Nukopijuota",
|
||||
"Copied shared chat URL to clipboard!": "Nukopijavote pokalbio nuorodą",
|
||||
"Copy": "Kopijuoti",
|
||||
"Copy Code": "Kopijuoti kodą",
|
||||
"Copy last code block": "Kopijuoti paskutinį kodo bloką",
|
||||
"Copy last response": "Kopijuoti paskutinį atsakymą",
|
||||
"Copy Link": "Kopijuoti nuorodą",
|
||||
"Copying to clipboard was successful!": "La copie dans le presse-papiers a réussi !",
|
||||
"Create a model": "",
|
||||
"Create a model": "Sukurti modelį",
|
||||
"Create Account": "Créer un compte",
|
||||
"Create new key": "Sukurti naują raktą",
|
||||
"Create new secret key": "Sukurti naują slaptą raktą",
|
||||
"Created at": "Sukurta",
|
||||
"Created At": "Sukurta",
|
||||
"Created by": "",
|
||||
"CSV Import": "",
|
||||
"Created by": "Sukurta",
|
||||
"CSV Import": "CSV importavimas",
|
||||
"Current Model": "Dabartinis modelis",
|
||||
"Current Password": "Esamas slaptažodis",
|
||||
"Custom": "Personalizuota",
|
||||
"Customize models for a specific purpose": "",
|
||||
"Customize models for a specific purpose": "Pritaikykite modelius specifiniams tikslams",
|
||||
"Dark": "Tamsus",
|
||||
"Dashboard": "",
|
||||
"Dashboard": "Valdymo panelė",
|
||||
"Database": "Duomenų bazė",
|
||||
"December": "Gruodis",
|
||||
"Default": "Numatytasis",
|
||||
"Default (Automatic1111)": "Numatytasis (Automatic1111)",
|
||||
"Default (SentenceTransformers)": "Numatytasis (SentenceTransformers)",
|
||||
"Default Model": "",
|
||||
"Default Model": "Numatytasis modelis",
|
||||
"Default model updated": "Numatytasis modelis atnaujintas",
|
||||
"Default Prompt Suggestions": "Numatytieji užklausų pasiūlymai",
|
||||
"Default User Role": "Numatytoji naudotojo rolė",
|
||||
"delete": "ištrinti",
|
||||
"Delete": "ištrinti",
|
||||
"Delete a model": "Ištrinti modėlį",
|
||||
"Delete All Chats": "",
|
||||
"Delete All Chats": "Ištrinti visus pokalbius",
|
||||
"Delete chat": "Išrinti pokalbį",
|
||||
"Delete Chat": "Ištrinti pokalbį",
|
||||
"Delete chat?": "",
|
||||
"Delete Doc": "",
|
||||
"Delete function?": "",
|
||||
"Delete prompt?": "",
|
||||
"Delete chat?": "Ištrinti pokalbį?",
|
||||
"Delete Doc": "Ištrinti dokumentą",
|
||||
"Delete function?": "Ištrinti funkciją",
|
||||
"Delete prompt?": "Ištrinti užklausą?",
|
||||
"delete this link": "Ištrinti nuorodą",
|
||||
"Delete tool?": "",
|
||||
"Delete tool?": "Ištrinti įrankį?",
|
||||
"Delete User": "Ištrinti naudotoją",
|
||||
"Deleted {{deleteModelTag}}": "{{deleteModelTag}} ištrinta",
|
||||
"Deleted {{name}}": "",
|
||||
"Deleted {{name}}": "Ištrinta {{name}}",
|
||||
"Description": "Aprašymas",
|
||||
"Didn't fully follow instructions": "Pilnai nesekė instrukcijų",
|
||||
"Disabled": "",
|
||||
"Discover a function": "",
|
||||
"Discover a model": "",
|
||||
"Disabled": "Išjungta",
|
||||
"Discover a function": "Atrasti funkciją",
|
||||
"Discover a model": "Atrasti modelį",
|
||||
"Discover a prompt": "Atrasti užklausas",
|
||||
"Discover a tool": "",
|
||||
"Discover, download, and explore custom functions": "",
|
||||
"Discover a tool": "Atrasti įrankį",
|
||||
"Discover, download, and explore custom functions": "Atrasti, atsisiųsti arba rasti naujas funkcijas",
|
||||
"Discover, download, and explore custom prompts": "Atrasti ir parsisiųsti užklausas",
|
||||
"Discover, download, and explore custom tools": "",
|
||||
"Discover, download, and explore custom tools": "Atrasti, atsisiųsti arba rasti naujų įrankių",
|
||||
"Discover, download, and explore model presets": "Atrasti ir parsisiųsti modelių konfigūracija",
|
||||
"Dismissible": "",
|
||||
"Display Emoji in Call": "",
|
||||
"Dismissible": "Atemtama",
|
||||
"Display Emoji in Call": "Rodyti emoji pokalbiuose",
|
||||
"Display the username instead of You in the Chat": "Rodyti naudotojo vardą vietoje žodžio Jūs pokalbyje",
|
||||
"Do not install functions from sources you do not fully trust.": "",
|
||||
"Do not install tools from sources you do not fully trust.": "",
|
||||
"Do not install functions from sources you do not fully trust.": "Neinstaliuokite funkcijų iš nepatikimų šaltinių",
|
||||
"Do not install tools from sources you do not fully trust.": "Neinstaliuokite įrankių iš nepatikimų šaltinių",
|
||||
"Document": "Dokumentas",
|
||||
"Document Settings": "Dokumento nuostatos",
|
||||
"Documentation": "",
|
||||
"Documentation": "Dokumentacija",
|
||||
"Documents": "Dokumentai",
|
||||
"does not make any external connections, and your data stays securely on your locally hosted server.": "neturi jokių išorinių ryšių ir duomenys lieka serveryje.",
|
||||
"Don't Allow": "Neleisti",
|
||||
"Don't have an account?": "Neturite paskyros?",
|
||||
"don't install random functions from sources you don't trust.": "",
|
||||
"don't install random tools from sources you don't trust.": "",
|
||||
"don't install random functions from sources you don't trust.": "neinstaliuokite funkcijų iš nepatikimų šaltinių",
|
||||
"don't install random tools from sources you don't trust.": "neinstaliuokite įrankių iš nepatikimų šaltinių",
|
||||
"Don't like the style": "Nepatinka stilius",
|
||||
"Done": "",
|
||||
"Done": "Atlikta",
|
||||
"Download": "Parsisiųsti",
|
||||
"Download canceled": "Parsisiuntimas atšauktas",
|
||||
"Download Database": "Parsisiųsti duomenų bazę",
|
||||
@ -212,188 +214,189 @@
|
||||
"e.g. '30s','10m'. Valid time units are 's', 'm', 'h'.": "pvz. '30s', '10m'. Laiko vienetai yra 's', 'm', 'h'.",
|
||||
"Edit": "Redaguoti",
|
||||
"Edit Doc": "Redaguoti dokumentą",
|
||||
"Edit Memory": "",
|
||||
"Edit Memory": "Koreguoti atminį",
|
||||
"Edit User": "Redaguoti naudotoją",
|
||||
"ElevenLabs": "",
|
||||
"ElevenLabs": "ElevenLabs",
|
||||
"Email": "El. paštas",
|
||||
"Embedding Batch Size": "",
|
||||
"Embedding Batch Size": "Embedding dydis",
|
||||
"Embedding Model": "Embedding modelis",
|
||||
"Embedding Model Engine": "Embedding modelio variklis",
|
||||
"Embedding model set to \"{{embedding_model}}\"": "Embedding modelis nustatytas kaip\"{{embedding_model}}\"",
|
||||
"Enable Chat History": "Aktyvuoti pokalbių istoriją",
|
||||
"Enable Community Sharing": "",
|
||||
"Enable Community Sharing": "Leisti dalinimąsi su bendruomene",
|
||||
"Enable New Sign Ups": "Aktyvuoti naujas registracijas",
|
||||
"Enable Web Search": "",
|
||||
"Enabled": "",
|
||||
"Engine": "",
|
||||
"Enable Web Search": "Leisti paiešką internete",
|
||||
"Enabled": "Leisti",
|
||||
"Engine": "Variklis",
|
||||
"Ensure your CSV file includes 4 columns in this order: Name, Email, Password, Role.": "Įsitikinkite, kad CSV failas turi 4 kolonas šiuo eiliškumu: Name, Email, Password, Role.",
|
||||
"Enter {{role}} message here": "Įveskite {{role}} žinutę čia",
|
||||
"Enter a detail about yourself for your LLMs to recall": "",
|
||||
"Enter api auth string (e.g. username:password)": "",
|
||||
"Enter Brave Search API Key": "",
|
||||
"Enter a detail about yourself for your LLMs to recall": "Įveskite informaciją apie save jūsų modelio atminčiai",
|
||||
"Enter api auth string (e.g. username:password)": "Įveskite API autentifikacijos kodą (pvz. username:password)",
|
||||
"Enter Brave Search API Key": "Įveskite Bravo Search API raktą",
|
||||
"Enter Chunk Overlap": "Įveskite blokų persidengimą",
|
||||
"Enter Chunk Size": "Įveskite blokų dydį",
|
||||
"Enter Github Raw URL": "",
|
||||
"Enter Google PSE API Key": "",
|
||||
"Enter Google PSE Engine Id": "",
|
||||
"Enter Github Raw URL": "Įveskite GitHub Raw nuorodą",
|
||||
"Enter Google PSE API Key": "Įveskite Google PSE API raktą",
|
||||
"Enter Google PSE Engine Id": "Įveskite Google PSE variklio ID",
|
||||
"Enter Image Size (e.g. 512x512)": "Įveskite paveiksliuko dydį (pvz. 512x512)",
|
||||
"Enter language codes": "Įveskite kalbos kodus",
|
||||
"Enter model tag (e.g. {{modelTag}})": "Įveskite modelio žymą (pvz. {{modelTag}})",
|
||||
"Enter Number of Steps (e.g. 50)": "Įveskite žingsnių kiekį (pvz. 50)",
|
||||
"Enter Score": "Įveskite rezultatą",
|
||||
"Enter Searxng Query URL": "",
|
||||
"Enter Serper API Key": "",
|
||||
"Enter Serply API Key": "",
|
||||
"Enter Serpstack API Key": "",
|
||||
"Enter Searxng Query URL": "Įveskite Searxng Query nuorodą",
|
||||
"Enter Serper API Key": "Įveskite Serper API raktą",
|
||||
"Enter Serply API Key": "Įveskite Serply API raktą",
|
||||
"Enter Serpstack API Key": "Įveskite Serpstack API raktą",
|
||||
"Enter stop sequence": "Įveskite pabaigos sekvenciją",
|
||||
"Enter system prompt": "",
|
||||
"Enter Tavily API Key": "",
|
||||
"Enter Tika Server URL": "",
|
||||
"Enter system prompt": "Įveskite sistemos užklausą",
|
||||
"Enter Tavily API Key": "Įveskite Tavily API raktą",
|
||||
"Enter Tika Server URL": "Įveskite Tika serverio nuorodą",
|
||||
"Enter Top K": "Įveskite Top K",
|
||||
"Enter URL (e.g. http://127.0.0.1:7860/)": "Įveskite nuorodą (pvz. http://127.0.0.1:7860/)",
|
||||
"Enter URL (e.g. http://localhost:11434)": "Įveskite nuorododą (pvz. http://localhost:11434",
|
||||
"Enter Your Email": "Įveskite el. pašto adresą",
|
||||
"Enter Your Full Name": "Įveskite vardą bei pavardę",
|
||||
"Enter your message": "",
|
||||
"Enter your message": "Įveskite žinutę",
|
||||
"Enter Your Password": "Įveskite slaptažodį",
|
||||
"Enter Your Role": "Įveskite savo rolę",
|
||||
"Error": "",
|
||||
"Error": "Klaida",
|
||||
"Experimental": "Eksperimentinis",
|
||||
"Export": "",
|
||||
"Export": "Eksportuoti",
|
||||
"Export All Chats (All Users)": "Eksportuoti visų naudotojų visus pokalbius",
|
||||
"Export chat (.json)": "",
|
||||
"Export chat (.json)": "Eksportuoti pokalbį (.json)",
|
||||
"Export Chats": "Eksportuoti pokalbius",
|
||||
"Export Documents Mapping": "Eksportuoti dokumentų žemėlapį",
|
||||
"Export Functions": "",
|
||||
"Export LiteLLM config.yaml": "",
|
||||
"Export Models": "",
|
||||
"Export Functions": "Eksportuoti funkcijas",
|
||||
"Export LiteLLM config.yaml": "Eksportuoti LiteLLM config.yaml",
|
||||
"Export Models": "Eksportuoti modelius",
|
||||
"Export Prompts": "Eksportuoti užklausas",
|
||||
"Export Tools": "",
|
||||
"External Models": "",
|
||||
"Export Tools": "Eksportuoti įrankius",
|
||||
"External Models": "Išoriniai modeliai",
|
||||
"Failed to create API Key.": "Nepavyko sukurti API rakto",
|
||||
"Failed to read clipboard contents": "Nepavyko perskaityti kopijuoklės",
|
||||
"Failed to update settings": "",
|
||||
"Failed to update settings": "Nepavyko atnaujinti nustatymų",
|
||||
"February": "Vasaris",
|
||||
"Feel free to add specific details": "Galite pridėti specifinių detalių",
|
||||
"File": "",
|
||||
"File Mode": "Dokumentų rėžimas",
|
||||
"File": "Rinkmena",
|
||||
"File Mode": "Rinkmenų rėžimas",
|
||||
"File not found.": "Failas nerastas.",
|
||||
"Files": "",
|
||||
"Filter is now globally disabled": "",
|
||||
"Filter is now globally enabled": "",
|
||||
"Filters": "",
|
||||
"Files": "Rinkmenos",
|
||||
"Filter is now globally disabled": "Filtrai nėra leidžiami globaliai",
|
||||
"Filter is now globally enabled": "Filtrai globaliai leidžiami",
|
||||
"Filters": "Filtrai",
|
||||
"Fingerprint spoofing detected: Unable to use initials as avatar. Defaulting to default profile image.": "Nepavyko nsutatyti profilio nuotraukos",
|
||||
"Fluidly stream large external response chunks": "Sklandžiai transliuoti ilgus atsakymus",
|
||||
"Focus chat input": "Fokusuoti žinutės įvestį",
|
||||
"Followed instructions perfectly": "Tobulai sekė instrukcijas",
|
||||
"Form": "",
|
||||
"Form": "Forma",
|
||||
"Format your variables using square brackets like this:": "Formatuokite kintamuosius su kvadratiniais skliausteliais:",
|
||||
"Frequency Penalty": "",
|
||||
"Function created successfully": "",
|
||||
"Function deleted successfully": "",
|
||||
"Function Description (e.g. A filter to remove profanity from text)": "",
|
||||
"Function ID (e.g. my_filter)": "",
|
||||
"Function is now globally disabled": "",
|
||||
"Function is now globally enabled": "",
|
||||
"Function Name (e.g. My Filter)": "",
|
||||
"Function updated successfully": "",
|
||||
"Functions": "",
|
||||
"Functions allow arbitrary code execution": "",
|
||||
"Functions allow arbitrary code execution.": "",
|
||||
"Functions imported successfully": "",
|
||||
"Frequency Penalty": "Dažnumo bauda",
|
||||
"Function created successfully": "Funkcija sukurta sėkmingai",
|
||||
"Function deleted successfully": "Funkcija ištrinta sėkmingai",
|
||||
"Function Description (e.g. A filter to remove profanity from text)": "Funkcijos aprašymas (pvz. filtras keiksmažodžių išėmimui)",
|
||||
"Function ID (e.g. my_filter)": "Funkcijos ID",
|
||||
"Function is now globally disabled": "Funkcijos šiuo metu neleidžiamos",
|
||||
"Function is now globally enabled": "Funkcijos leidžiamos",
|
||||
"Function Name (e.g. My Filter)": "Funkcijos pavadinimas",
|
||||
"Function updated successfully": "Funkcija atnaujinta sėkmingai",
|
||||
"Functions": "Funkcijos",
|
||||
"Functions allow arbitrary code execution": "Funkcijos leidžia nekontroliuojamo kodo vykdymą",
|
||||
"Functions allow arbitrary code execution.": "Funkcijos leidžia nekontroliuojamo kodo vykdymą",
|
||||
"Functions imported successfully": "Funkcijos importuotos sėkmingai",
|
||||
"General": "Bendri",
|
||||
"General Settings": "Bendri nustatymai",
|
||||
"Generate Image": "",
|
||||
"Generating search query": "",
|
||||
"Generate Image": "Generuoti paveikslėlį",
|
||||
"Generating search query": "Generuoti paieškos užklausą",
|
||||
"Generation Info": "Generavimo informacija",
|
||||
"Get up and running with": "",
|
||||
"Global": "",
|
||||
"Get up and running with": "Pradėti dirbti su",
|
||||
"Global": "Globalu",
|
||||
"Good Response": "Geras atsakymas",
|
||||
"Google PSE API Key": "",
|
||||
"Google PSE Engine Id": "",
|
||||
"h:mm a": "",
|
||||
"Google PSE API Key": "Google PSE API raktas",
|
||||
"Google PSE Engine Id": "Google PSE variklio ID",
|
||||
"h:mm a": "valanda:mėnesis:metai",
|
||||
"Haptic Feedback": "",
|
||||
"has no conversations.": "neturi pokalbių",
|
||||
"Hello, {{name}}": "Sveiki, {{name}}",
|
||||
"Help": "Pagalba",
|
||||
"Hide": "Paslėpti",
|
||||
"Hide Model": "",
|
||||
"Hide Model": "Paslėpti modelį",
|
||||
"How can I help you today?": "Kuo galėčiau Jums padėti ?",
|
||||
"Hybrid Search": "Hibridinė paieška",
|
||||
"I acknowledge that I have read and I understand the implications of my action. I am aware of the risks associated with executing arbitrary code and I have verified the trustworthiness of the source.": "",
|
||||
"I acknowledge that I have read and I understand the implications of my action. I am aware of the risks associated with executing arbitrary code and I have verified the trustworthiness of the source.": "Suprantu veiksmų ir kodo vykdymo rizikas.",
|
||||
"Image Generation (Experimental)": "Vaizdų generavimas (eksperimentinis)",
|
||||
"Image Generation Engine": "Vaizdų generavimo variklis",
|
||||
"Image Settings": "Vaizdų nustatymai",
|
||||
"Images": "Vaizdai",
|
||||
"Import Chats": "Importuoti pokalbius",
|
||||
"Import Documents Mapping": "Importuoti dokumentų žemėlapį",
|
||||
"Import Functions": "",
|
||||
"Import Models": "",
|
||||
"Import Functions": "Importuoti funkcijas",
|
||||
"Import Models": "Importuoti modelius",
|
||||
"Import Prompts": "Importuoti užklausas",
|
||||
"Import Tools": "",
|
||||
"Include `--api-auth` flag when running stable-diffusion-webui": "",
|
||||
"Import Tools": "Importuoti įrankius",
|
||||
"Include `--api-auth` flag when running stable-diffusion-webui": "Įtraukti `--api-auth` flag when running stable-diffusion-webui",
|
||||
"Include `--api` flag when running stable-diffusion-webui": "Pridėti `--api` kai vykdomas stable-diffusion-webui",
|
||||
"Info": "",
|
||||
"Info": "Informacija",
|
||||
"Input commands": "Įvesties komandos",
|
||||
"Install from Github URL": "",
|
||||
"Instant Auto-Send After Voice Transcription": "",
|
||||
"Install from Github URL": "Instaliuoti Github nuorodą",
|
||||
"Instant Auto-Send After Voice Transcription": "Siųsti iškart po balso transkripcijos",
|
||||
"Interface": "Sąsaja",
|
||||
"Invalid Tag": "Neteisinga žyma",
|
||||
"January": "Sausis",
|
||||
"join our Discord for help.": "prisijunkite prie mūsų Discord.",
|
||||
"JSON": "JSON",
|
||||
"JSON Preview": "",
|
||||
"JSON Preview": "JSON peržiūra",
|
||||
"July": "liepa",
|
||||
"June": "birželis",
|
||||
"JWT Expiration": "JWT išėjimas iš galiojimo",
|
||||
"JWT Token": "JWT žetonas",
|
||||
"Keep Alive": "Išlaikyti aktyviu",
|
||||
"Keyboard shortcuts": "Klaviatūros trumpiniai",
|
||||
"Knowledge": "",
|
||||
"Knowledge": "Žinios",
|
||||
"Language": "Kalba",
|
||||
"large language models, locally.": "",
|
||||
"large language models, locally.": "dideli kalbos modeliai, lokaliai",
|
||||
"Last Active": "Paskutinį kartą aktyvus",
|
||||
"Last Modified": "",
|
||||
"Last Modified": "Paskutinis pakeitimas",
|
||||
"Light": "Šviesus",
|
||||
"Listening...": "",
|
||||
"Listening...": "Klausoma...",
|
||||
"LLMs can make mistakes. Verify important information.": "Dideli kalbos modeliai gali klysti. Patikrinkite atsakymų teisingumą.",
|
||||
"Local Models": "",
|
||||
"LTR": "",
|
||||
"Local Models": "Lokalūs modeliai",
|
||||
"LTR": "LTR",
|
||||
"Made by OpenWebUI Community": "Sukurta OpenWebUI bendruomenės",
|
||||
"Make sure to enclose them with": "Užtikrinktie, kad įtraukiate viduje:",
|
||||
"Manage": "",
|
||||
"Manage": "Tvarkyti",
|
||||
"Manage Models": "Tvarkyti modelius",
|
||||
"Manage Ollama Models": "Tvarkyti Ollama modelius",
|
||||
"Manage Pipelines": "",
|
||||
"Manage Pipelines": "Tvarkyti procesus",
|
||||
"March": "Kovas",
|
||||
"Max Tokens (num_predict)": "",
|
||||
"Max Tokens (num_predict)": "Maksimalus žetonų kiekis (num_predict)",
|
||||
"Maximum of 3 models can be downloaded simultaneously. Please try again later.": "Daugiausiai trys modeliai gali būti parsisiunčiami vienu metu.",
|
||||
"May": "gegužė",
|
||||
"Memories accessible by LLMs will be shown here.": "",
|
||||
"Memory": "",
|
||||
"Memory added successfully": "",
|
||||
"Memory cleared successfully": "",
|
||||
"Memory deleted successfully": "",
|
||||
"Memory updated successfully": "",
|
||||
"Messages you send after creating your link won't be shared. Users with the URL will be able to view the shared chat.": "",
|
||||
"Min P": "",
|
||||
"Memories accessible by LLMs will be shown here.": "Atminitis prieinama kalbos modelio bus rodoma čia.",
|
||||
"Memory": "Atmintis",
|
||||
"Memory added successfully": "Atmintis pridėta sėkmingai",
|
||||
"Memory cleared successfully": "Atmintis ištrinta sėkmingai",
|
||||
"Memory deleted successfully": "Atmintis ištrinta sėkmingai",
|
||||
"Memory updated successfully": "Atmintis atnaujinta sėkmingai",
|
||||
"Messages you send after creating your link won't be shared. Users with the URL will be able to view the shared chat.": "Žinutės, kurias siunčiate po nuorodos sukūrimo nebus matomos nuorodos turėtojams. Naudotojai su nuoroda matys žinutes iki nuorodos sukūrimo.",
|
||||
"Min P": "Mažiausias p",
|
||||
"Minimum Score": "Minimalus rezultatas",
|
||||
"Mirostat": "Mirostat",
|
||||
"Mirostat Eta": "Mirostat Eta",
|
||||
"Mirostat Tau": "Mirostat Tau",
|
||||
"MMMM DD, YYYY": "MMMM DD, YYYY",
|
||||
"MMMM DD, YYYY HH:mm": "MMMM DD, YYYY HH:mm",
|
||||
"MMMM DD, YYYY hh:mm:ss A": "",
|
||||
"MMMM DD, YYYY hh:mm:ss A": "MMMM DD, YYYY hh:mm:ss A",
|
||||
"Model '{{modelName}}' has been successfully downloaded.": "'{{modelName}}' modelis sėkmingai atsisiųstas.",
|
||||
"Model '{{modelTag}}' is already in queue for downloading.": "Modelis '{{modelTag}}' jau atsisiuntimų eilėje.",
|
||||
"Model {{modelId}} not found": "Modelis {{modelId}} nerastas",
|
||||
"Model {{modelName}} is not vision capable": "",
|
||||
"Model {{name}} is now {{status}}": "",
|
||||
"Model created successfully!": "",
|
||||
"Model {{modelName}} is not vision capable": "Modelis {{modelName}} neturi vaizdo gebėjimų",
|
||||
"Model {{name}} is now {{status}}": "Modelis {{name}} dabar {{status}}",
|
||||
"Model created successfully!": "Modelis sukurtas sėkmingai",
|
||||
"Model filesystem path detected. Model shortname is required for update, cannot continue.": "Modelio failų sistemos kelias aptiktas. Reikalingas trumpas modelio pavadinimas atnaujinimui.",
|
||||
"Model ID": "",
|
||||
"Model ID": "Modelio ID",
|
||||
"Model not selected": "Modelis nepasirinktas",
|
||||
"Model Params": "",
|
||||
"Model updated successfully": "",
|
||||
"Model Params": "Modelio parametrai",
|
||||
"Model updated successfully": "Modelis atnaujintas sėkmingai",
|
||||
"Model Whitelisting": "Modeliu baltasis sąrašas",
|
||||
"Model(s) Whitelisted": "Modelis baltąjame sąraše",
|
||||
"Modelfile Content": "Modelio failo turinys",
|
||||
@ -401,42 +404,43 @@
|
||||
"More": "Daugiau",
|
||||
"Name": "Pavadinimas",
|
||||
"Name Tag": "Žymos pavadinimas",
|
||||
"Name your model": "",
|
||||
"Name your model": "Pavadinkite savo modelį",
|
||||
"New Chat": "Naujas pokalbis",
|
||||
"New Password": "Naujas slaptažodis",
|
||||
"No content to speak": "",
|
||||
"No documents found": "",
|
||||
"No file selected": "",
|
||||
"No content to speak": "Nėra turinio kalbėjimui",
|
||||
"No documents found": "Dokumentų nerasta",
|
||||
"No file selected": "Nėra pasirinktų dokumentų",
|
||||
"No results found": "Rezultatų nerasta",
|
||||
"No search query generated": "",
|
||||
"No search query generated": "Paieškos užklausa nesugeneruota",
|
||||
"No source available": "Šaltinių nerasta",
|
||||
"No valves to update": "",
|
||||
"None": "",
|
||||
"No valves to update": "Nėra atnaujinamų įeičių",
|
||||
"None": "Nėra",
|
||||
"Not factually correct": "Faktiškai netikslu",
|
||||
"Note: If you set a minimum score, the search will only return documents with a score greater than or equal to the minimum score.": "Jei turite minimalų įvertį, paieška gražins tik tą informaciją, kuri viršyje šį įvertį",
|
||||
"Notifications": "Pranešimai",
|
||||
"November": "lapkritis",
|
||||
"num_thread (Ollama)": "",
|
||||
"OAuth ID": "",
|
||||
"num_gpu (Ollama)": "",
|
||||
"num_thread (Ollama)": "num_thread (Ollama)",
|
||||
"OAuth ID": "OAuth ID",
|
||||
"October": "spalis",
|
||||
"Off": "Išjungta",
|
||||
"Okay, Let's Go!": "Gerai, važiuojam!",
|
||||
"OLED Dark": "OLED tamsus",
|
||||
"Ollama": "Ollama",
|
||||
"Ollama API": "",
|
||||
"Ollama API disabled": "",
|
||||
"Ollama API is disabled": "",
|
||||
"Ollama API": "Ollama API",
|
||||
"Ollama API disabled": "Ollama API išjungtas",
|
||||
"Ollama API is disabled": "Ollama API išjungtas",
|
||||
"Ollama Version": "Ollama versija",
|
||||
"On": "Aktyvuota",
|
||||
"Only": "Tiktais",
|
||||
"Only alphanumeric characters and hyphens are allowed in the command string.": "Leistinos tik raidės, skaičiai ir brūkšneliai.",
|
||||
"Oops! Hold tight! Your files are still in the processing oven. We're cooking them up to perfection. Please be patient and we'll let you know once they're ready.": "Jūsų failai vis dar tvarkomi.",
|
||||
"Oops! Looks like the URL is invalid. Please double-check and try again.": "Oops! Looks like the URL is invalid. Please double-check and try again.",
|
||||
"Oops! There was an error in the previous response. Please try again or contact admin.": "",
|
||||
"Oops! Looks like the URL is invalid. Please double-check and try again.": "Regis nuoroda nevalidi. Prašau patikrtinkite ir pabandykite iš naujo.",
|
||||
"Oops! There was an error in the previous response. Please try again or contact admin.": "Įvyko klaida. Pabandykite iš naujo arba susisiekite su administratoriumi.",
|
||||
"Oops! You're using an unsupported method (frontend only). Please serve the WebUI from the backend.": "Naudojate nepalaikomą (front-end) web ui rėžimą. Prašau serviruokite WebUI iš back-end",
|
||||
"Open AI (Dall-E)": "Open AI (Dall-E)",
|
||||
"Open new chat": "Atverti naują pokalbį",
|
||||
"Open WebUI version (v{{OPEN_WEBUI_VERSION}}) is lower than required version (v{{REQUIRED_VERSION}})": "",
|
||||
"Open WebUI version (v{{OPEN_WEBUI_VERSION}}) is lower than required version (v{{REQUIRED_VERSION}})": "Tortue Chat versija per sena. Reikalinga (v{{REQUIRED_VERSION}}) versija.",
|
||||
"OpenAI": "OpenAI",
|
||||
"OpenAI API": "OpenAI API",
|
||||
"OpenAI API Config": "Open AI API nustatymai",
|
||||
@ -448,20 +452,20 @@
|
||||
"PDF document (.pdf)": "PDF dokumentas (.pdf)",
|
||||
"PDF Extract Images (OCR)": "PDF paveikslėlių skaitymas (OCR)",
|
||||
"pending": "laukiama",
|
||||
"Permission denied when accessing media devices": "",
|
||||
"Permission denied when accessing microphone": "",
|
||||
"Permission denied when accessing media devices": "Leidimas atmestas bandant prisijungti prie medijos įrenginių",
|
||||
"Permission denied when accessing microphone": "Mikrofono leidimas atmestas",
|
||||
"Permission denied when accessing microphone: {{error}}": "Leidimas naudoti mikrofoną atmestas: {{error}}",
|
||||
"Personalization": "",
|
||||
"Pin": "",
|
||||
"Pinned": "",
|
||||
"Pipeline deleted successfully": "",
|
||||
"Pipeline downloaded successfully": "",
|
||||
"Pipelines": "",
|
||||
"Pipelines Not Detected": "",
|
||||
"Pipelines Valves": "",
|
||||
"Personalization": "Personalizacija",
|
||||
"Pin": "Smeigtukas",
|
||||
"Pinned": "Įsmeigta",
|
||||
"Pipeline deleted successfully": "Procesas ištrintas sėkmingai",
|
||||
"Pipeline downloaded successfully": "Procesas atsisiųstas sėkmingai",
|
||||
"Pipelines": "Procesai",
|
||||
"Pipelines Not Detected": "Procesai neaptikti",
|
||||
"Pipelines Valves": "Procesų įeitys",
|
||||
"Plain text (.txt)": "Grynas tekstas (.txt)",
|
||||
"Playground": "Eksperimentavimo erdvė",
|
||||
"Please carefully review the following warnings:": "",
|
||||
"Please carefully review the following warnings:": "Peržiūrėkite šiuos perspėjimus:",
|
||||
"Positive attitude": "Pozityvus elgesys",
|
||||
"Previous 30 days": "Paskutinės 30 dienų",
|
||||
"Previous 7 days": "Paskutinės 7 dienos",
|
||||
@ -478,7 +482,7 @@
|
||||
"Read Aloud": "Skaityti garsiai",
|
||||
"Record voice": "Įrašyti balsą",
|
||||
"Redirecting you to OpenWebUI Community": "Perkeliam Jus į OpenWebUI bendruomenę",
|
||||
"Refer to yourself as \"User\" (e.g., \"User is learning Spanish\")": "",
|
||||
"Refer to yourself as \"User\" (e.g., \"User is learning Spanish\")": "Vadinkite save Naudotoju (pvz. Naudotojas mokosi prancūzų kalbos)",
|
||||
"Refused when it shouldn't have": "Atmesta kai neturėtų būti atmesta",
|
||||
"Regenerate": "Generuoti iš naujo",
|
||||
"Release Notes": "Naujovės",
|
||||
@ -490,66 +494,67 @@
|
||||
"Reranking Model": "Reranking modelis",
|
||||
"Reranking model disabled": "Reranking modelis neleidžiamas",
|
||||
"Reranking model set to \"{{reranking_model}}\"": "Nustatytas rereanking modelis: \"{{reranking_model}}\"",
|
||||
"Reset": "",
|
||||
"Reset Upload Directory": "",
|
||||
"Reset": "Atkurti",
|
||||
"Reset Upload Directory": "Atkurti įkėlimų direktoiją",
|
||||
"Reset Vector Storage": "Reinicializuoti vektorių atmintį",
|
||||
"Response AutoCopy to Clipboard": "Automatiškai nukopijuoti atsakymą",
|
||||
"Response notifications cannot be activated as the website permissions have been denied. Please visit your browser settings to grant the necessary access.": "",
|
||||
"Response notifications cannot be activated as the website permissions have been denied. Please visit your browser settings to grant the necessary access.": "Naršyklė neleidžia siųsti pranešimų",
|
||||
"Role": "Rolė",
|
||||
"Rosé Pine": "Rosé Pine",
|
||||
"Rosé Pine Dawn": "Rosé Pine Dawn",
|
||||
"RTL": "",
|
||||
"Run Llama 2, Code Llama, and other models. Customize and create your own.": "",
|
||||
"Running": "",
|
||||
"RTL": "RTL",
|
||||
"Run": "",
|
||||
"Run Llama 2, Code Llama, and other models. Customize and create your own.": "Naudokite egzistuojančius modelius ir sukurkite saviškius.",
|
||||
"Running": "Veikia",
|
||||
"Save": "Išsaugoti",
|
||||
"Save & Create": "Išsaugoti ir sukurti",
|
||||
"Save & Update": "Išsaugoti ir atnaujinti",
|
||||
"Save Tag": "",
|
||||
"Save Tag": "Išsaugoti žymą",
|
||||
"Saving chat logs directly to your browser's storage is no longer supported. Please take a moment to download and delete your chat logs by clicking the button below. Don't worry, you can easily re-import your chat logs to the backend through": "Pokalbių saugojimas naršyklėje nebegalimas.",
|
||||
"Scan": "Skenuoti",
|
||||
"Scan complete!": "Skenavimas baigtas!",
|
||||
"Scan for documents from {{path}}": "Skenuoti dokumentus iš {{path}}",
|
||||
"Scroll to bottom when switching between branches": "",
|
||||
"Scroll to bottom when switching between branches": "Slikite link apačios norėdami pakeisti šakas",
|
||||
"Search": "Ieškoti",
|
||||
"Search a model": "Ieškoti modelio",
|
||||
"Search Chats": "",
|
||||
"Search Chats": "Ieškoti pokalbiuose",
|
||||
"Search Documents": "Ieškoti dokumentų",
|
||||
"Search Functions": "",
|
||||
"Search Models": "",
|
||||
"Search Functions": "Ieškoti funkcijų",
|
||||
"Search Models": "Ieškoti modelių",
|
||||
"Search Prompts": "Ieškoti užklausų",
|
||||
"Search Query Generation Prompt": "",
|
||||
"Search Query Generation Prompt Length Threshold": "",
|
||||
"Search Result Count": "",
|
||||
"Search Tools": "",
|
||||
"Searched {{count}} sites_one": "",
|
||||
"Searched {{count}} sites_few": "",
|
||||
"Searched {{count}} sites_many": "",
|
||||
"Searched {{count}} sites_other": "",
|
||||
"Searching \"{{searchQuery}}\"": "",
|
||||
"Searxng Query URL": "",
|
||||
"Search Query Generation Prompt": "Paieškos užklausos generavimo formuluotė",
|
||||
"Search Query Generation Prompt Length Threshold": "Paieškos užklausos generavimo formuluotės ilgio riba",
|
||||
"Search Result Count": "Paieškos rezultatų skaičius",
|
||||
"Search Tools": "Paieškos įrankiai",
|
||||
"Searched {{count}} sites_one": "Ieškota {{count}} sites_one",
|
||||
"Searched {{count}} sites_few": "Ieškota {{count}} sites_few",
|
||||
"Searched {{count}} sites_many": "Ieškota {{count}} sites_many",
|
||||
"Searched {{count}} sites_other": "Ieškota {{count}} sites_other",
|
||||
"Searching \"{{searchQuery}}\"": "Ieškoma \"{{searchQuery}}\"",
|
||||
"Searxng Query URL": "Searxng užklausos URL",
|
||||
"See readme.md for instructions": "Žiūrėti readme.md papildomoms instrukcijoms",
|
||||
"See what's new": "Žiūrėti naujoves",
|
||||
"Seed": "Sėkla",
|
||||
"Select a base model": "",
|
||||
"Select a engine": "",
|
||||
"Select a function": "",
|
||||
"Select a base model": "Pasirinkite bazinį modelį",
|
||||
"Select a engine": "Pasirinkite variklį",
|
||||
"Select a function": "Pasirinkite funkciją",
|
||||
"Select a mode": "Pasirinkti režimą",
|
||||
"Select a model": "Pasirinkti modelį",
|
||||
"Select a pipeline": "",
|
||||
"Select a pipeline url": "",
|
||||
"Select a tool": "",
|
||||
"Select a pipeline": "Pasirinkite procesą",
|
||||
"Select a pipeline url": "Pasirinkite proceso nuorodą",
|
||||
"Select a tool": "Pasirinkite įrankį",
|
||||
"Select an Ollama instance": "Pasirinkti Ollama instanciją",
|
||||
"Select Documents": "",
|
||||
"Select Documents": "Pasirinkite dokumentus",
|
||||
"Select model": "Pasirinkti modelį",
|
||||
"Select only one model to call": "",
|
||||
"Selected model(s) do not support image inputs": "",
|
||||
"Send": "",
|
||||
"Select only one model to call": "Pasirinkite vieną modelį",
|
||||
"Selected model(s) do not support image inputs": "Pasirinkti modeliai nepalaiko vaizdinių užklausų",
|
||||
"Send": "Siųsti",
|
||||
"Send a Message": "Siųsti žinutę",
|
||||
"Send message": "Siųsti žinutę",
|
||||
"September": "rugsėjis",
|
||||
"Serper API Key": "",
|
||||
"Serply API Key": "",
|
||||
"Serpstack API Key": "",
|
||||
"Serper API Key": "Serper API raktas",
|
||||
"Serply API Key": "Serply API raktas",
|
||||
"Serpstack API Key": "Serpstach API raktas",
|
||||
"Server connection verified": "Serverio sujungimas patvirtintas",
|
||||
"Set as default": "Nustatyti numatytąjį",
|
||||
"Set Default Model": "Nustatyti numatytąjį modelį",
|
||||
@ -557,20 +562,20 @@
|
||||
"Set Image Size": "Nustatyti paveikslėlių dydį",
|
||||
"Set reranking model (e.g. {{model}})": "Nustatyti reranking modelį",
|
||||
"Set Steps": "Numatyti etapus",
|
||||
"Set Task Model": "",
|
||||
"Set Task Model": "Numatyti užduočių modelį",
|
||||
"Set Voice": "Numatyti balsą",
|
||||
"Settings": "Nustatymai",
|
||||
"Settings saved successfully!": "Parametrai sėkmingai išsaugoti!",
|
||||
"Settings updated successfully": "",
|
||||
"Settings updated successfully": "Nustatymai atnaujinti sėkmingai",
|
||||
"Share": "Dalintis",
|
||||
"Share Chat": "Dalintis pokalbiu",
|
||||
"Share to OpenWebUI Community": "Dalintis su OpenWebUI bendruomene",
|
||||
"short-summary": "trumpinys",
|
||||
"Show": "Rodyti",
|
||||
"Show Admin Details in Account Pending Overlay": "",
|
||||
"Show Model": "",
|
||||
"Show Admin Details in Account Pending Overlay": "Rodyti administratoriaus duomenis laukiant paskyros patvirtinimo",
|
||||
"Show Model": "Rodyti modelį",
|
||||
"Show shortcuts": "Rodyti trumpinius",
|
||||
"Show your support!": "",
|
||||
"Show your support!": "Palaikykite",
|
||||
"Showcased creativity": "Kūrybingų užklausų paroda",
|
||||
"Sign in": "Prisijungti",
|
||||
"Sign Out": "Atsijungti",
|
||||
@ -580,20 +585,20 @@
|
||||
"Speech recognition error: {{error}}": "Balso atpažinimo problema: {{error}}",
|
||||
"Speech-to-Text Engine": "Balso atpažinimo modelis",
|
||||
"Stop Sequence": "Baigt sekvenciją",
|
||||
"STT Model": "",
|
||||
"STT Model": "STT modelis",
|
||||
"STT Settings": "STT nustatymai",
|
||||
"Submit": "Pateikti",
|
||||
"Subtitle (e.g. about the Roman Empire)": "Subtitras",
|
||||
"Success": "Sėkmingai",
|
||||
"Successfully updated.": "Sėkmingai atnaujinta.",
|
||||
"Suggested": "Siūloma",
|
||||
"Support": "",
|
||||
"Support this plugin:": "",
|
||||
"Support": "Palaikyti",
|
||||
"Support this plugin:": "Palaikykite šitą modulį",
|
||||
"System": "Sistema",
|
||||
"System Prompt": "Sistemos užklausa",
|
||||
"Tags": "Žymos",
|
||||
"Tap to interrupt": "",
|
||||
"Tavily API Key": "",
|
||||
"Tap to interrupt": "Paspauskite norėdami pertraukti",
|
||||
"Tavily API Key": "Tavily API raktas",
|
||||
"Tell us more:": "Papasakokite daugiau",
|
||||
"Temperature": "Temperatūra",
|
||||
"Template": "Modelis",
|
||||
@ -601,18 +606,18 @@
|
||||
"Text-to-Speech Engine": "Balso sintezės modelis",
|
||||
"Tfs Z": "Tfs Z",
|
||||
"Thanks for your feedback!": "Ačiū už atsiliepimus",
|
||||
"The developers behind this plugin are passionate volunteers from the community. If you find this plugin helpful, please consider contributing to its development.": "",
|
||||
"The developers behind this plugin are passionate volunteers from the community. If you find this plugin helpful, please consider contributing to its development.": "Šis modulis kuriamas savanorių. Palaikykite jų darbus finansiškai arba prisidėdami kodu.",
|
||||
"The score should be a value between 0.0 (0%) and 1.0 (100%).": "Rezultatas turėtų būti tarp 0.0 (0%) ir 1.0 (100%)",
|
||||
"Theme": "Tema",
|
||||
"Thinking...": "",
|
||||
"This action cannot be undone. Do you wish to continue?": "",
|
||||
"Thinking...": "Mąsto...",
|
||||
"This action cannot be undone. Do you wish to continue?": "Šis veiksmas negali būti atšauktas. Ar norite tęsti?",
|
||||
"This ensures that your valuable conversations are securely saved to your backend database. Thank you!": "Tai užtikrina, kad Jūsų pokalbiai saugiai saugojami duomenų bazėje. Ačiū!",
|
||||
"This is an experimental feature, it may not function as expected and is subject to change at any time.": "",
|
||||
"This is an experimental feature, it may not function as expected and is subject to change at any time.": "Tai eksperimentinė funkcija ir gali veikti nevisada.",
|
||||
"This setting does not sync across browsers or devices.": "Šis parametras nesisinchronizuoja su skirtingomis naršyklėmis ir įrankiais.",
|
||||
"This will delete": "",
|
||||
"This will delete": "Tai ištrins",
|
||||
"Thorough explanation": "Platus paaiškinimas",
|
||||
"Tika": "",
|
||||
"Tika Server URL required.": "",
|
||||
"Tika": "Tika",
|
||||
"Tika Server URL required.": "Reiklainga Tika serverio nuorodą",
|
||||
"Tip: Update multiple variable slots consecutively by pressing the tab key in the chat input after each replacement.": "Jei norite pakeisti keletą kintamųjų vieną po kitos, spauskite Tab",
|
||||
"Title": "Pavadinimas",
|
||||
"Title (e.g. Tell me a fun fact)": "Pavadinimas",
|
||||
@ -622,96 +627,96 @@
|
||||
"to": "➡️",
|
||||
"To access the available model names for downloading,": "Tam, kad prieiti prie galimų parsisiųsti modelių",
|
||||
"To access the GGUF models available for downloading,": "Tam, kad prieiti prie galimų parsisiųsti GGUF,",
|
||||
"To access the WebUI, please reach out to the administrator. Admins can manage user statuses from the Admin Panel.": "",
|
||||
"To add documents here, upload them to the \"Documents\" workspace first.": "",
|
||||
"To access the WebUI, please reach out to the administrator. Admins can manage user statuses from the Admin Panel.": "Norėdami prieiti prie programos, susisiekite su administratoriumi, kuris Jus patvirtins.",
|
||||
"To add documents here, upload them to the \"Documents\" workspace first.": "Norėdami čia pridėti dokumentus, pridėkite juos Dokumentų erdvėje nuostatose.",
|
||||
"to chat input.": "į pokalbio įvestį",
|
||||
"To select actions here, add them to the \"Functions\" workspace first.": "",
|
||||
"To select filters here, add them to the \"Functions\" workspace first.": "",
|
||||
"To select toolkits here, add them to the \"Tools\" workspace first.": "",
|
||||
"To select actions here, add them to the \"Functions\" workspace first.": "Norėdami pasirinkti veiksmus, pirmiausia pridėkite juos funkcijų nuostatuose",
|
||||
"To select filters here, add them to the \"Functions\" workspace first.": "Norėdami pasirinkti filtrus, pirmiausia pridėkite juos funkcijų nuostatuose",
|
||||
"To select toolkits here, add them to the \"Tools\" workspace first.": "Norėdami pasirinkti įrankius, pirmiausia pridėkite juos prie įrankių nuostatuose",
|
||||
"Today": "Šiandien",
|
||||
"Toggle settings": "Atverti/užverti parametrus",
|
||||
"Toggle sidebar": "Atverti/užverti šoninį meniu",
|
||||
"Tokens To Keep On Context Refresh (num_keep)": "",
|
||||
"Tool created successfully": "",
|
||||
"Tool deleted successfully": "",
|
||||
"Tool imported successfully": "",
|
||||
"Tool updated successfully": "",
|
||||
"Toolkit Description (e.g. A toolkit for performing various operations)": "",
|
||||
"Toolkit ID (e.g. my_toolkit)": "",
|
||||
"Toolkit Name (e.g. My ToolKit)": "",
|
||||
"Tools": "",
|
||||
"Tools are a function calling system with arbitrary code execution": "",
|
||||
"Tools have a function calling system that allows arbitrary code execution": "",
|
||||
"Tools have a function calling system that allows arbitrary code execution.": "",
|
||||
"Tokens To Keep On Context Refresh (num_keep)": "Žetonų kiekis konteksto atnaujinimui (num_keep)",
|
||||
"Tool created successfully": "Įrankis sukurtas sėkmingai",
|
||||
"Tool deleted successfully": "Įrankis ištrintas sėkmingai",
|
||||
"Tool imported successfully": "Įrankis importuotas sėkmingai",
|
||||
"Tool updated successfully": "Įrankis atnaujintas sėkmingai",
|
||||
"Toolkit Description (e.g. A toolkit for performing various operations)": "Įrankių aprašymas",
|
||||
"Toolkit ID (e.g. my_toolkit)": "Įrakinių ID",
|
||||
"Toolkit Name (e.g. My ToolKit)": "Įrankių pavadinimas",
|
||||
"Tools": "Įrankiai",
|
||||
"Tools are a function calling system with arbitrary code execution": "Įrankiai gali naudoti funkcijas ir vykdyti kodą",
|
||||
"Tools have a function calling system that allows arbitrary code execution": "Įrankiai gali naudoti funkcijas ir leisti vykdyti kodą",
|
||||
"Tools have a function calling system that allows arbitrary code execution.": "Įrankiai gali naudoti funkcijas ir leisti vykdyti kodą",
|
||||
"Top K": "Top K",
|
||||
"Top P": "Top P",
|
||||
"Trouble accessing Ollama?": "Problemos prieinant prie Ollama?",
|
||||
"TTS Model": "",
|
||||
"TTS Model": "TTS modelis",
|
||||
"TTS Settings": "TTS parametrai",
|
||||
"TTS Voice": "",
|
||||
"Type": "",
|
||||
"TTS Voice": "TTS balsas",
|
||||
"Type": "Tipas",
|
||||
"Type Hugging Face Resolve (Download) URL": "Įveskite Hugging Face Resolve nuorodą",
|
||||
"Uh-oh! There was an issue connecting to {{provider}}.": "O ne! Prisijungiant prie {{provider}} kilo problema.",
|
||||
"UI": "",
|
||||
"Unknown file type '{{file_type}}'. Proceeding with the file upload anyway.": "",
|
||||
"Unpin": "",
|
||||
"Update": "",
|
||||
"UI": "sąsaja",
|
||||
"Unknown file type '{{file_type}}'. Proceeding with the file upload anyway.": "Nežinomas rinkmenos tipas. Bandoma vistiek įkelti rinkmeną.",
|
||||
"Unpin": "Atsemigti",
|
||||
"Update": "Atnaujinti",
|
||||
"Update and Copy Link": "Atnaujinti ir kopijuoti nuorodą",
|
||||
"Update password": "Atnaujinti slaptažodį",
|
||||
"Updated at": "",
|
||||
"Upload": "",
|
||||
"Updated at": "Atnaujinta",
|
||||
"Upload": "Atnaujinti",
|
||||
"Upload a GGUF model": "Parsisiųsti GGUF modelį",
|
||||
"Upload Files": "",
|
||||
"Upload Pipeline": "",
|
||||
"Upload Files": "Atnaujinti rinkmenas",
|
||||
"Upload Pipeline": "Atnaujinti procesą",
|
||||
"Upload Progress": "Įkėlimo progresas",
|
||||
"URL Mode": "URL režimas",
|
||||
"Use '#' in the prompt input to load and select your documents.": "Naudokite '#' norėdami naudoti dokumentą.",
|
||||
"Use Gravatar": "Naudoti Gravatar",
|
||||
"Use Initials": "Naudotojo inicialai",
|
||||
"use_mlock (Ollama)": "",
|
||||
"use_mmap (Ollama)": "",
|
||||
"use_mlock (Ollama)": "use_mlock (Ollama)",
|
||||
"use_mmap (Ollama)": "use_mmap (Ollama)",
|
||||
"user": "naudotojas",
|
||||
"User location successfully retrieved.": "",
|
||||
"User location successfully retrieved.": "Naudotojo vieta sėkmingai gauta",
|
||||
"User Permissions": "Naudotojo leidimai",
|
||||
"Users": "Naudotojai",
|
||||
"Utilize": "Naudoti",
|
||||
"Valid time units:": "Teisingūs laiko vienetai :",
|
||||
"Valves": "",
|
||||
"Valves updated": "",
|
||||
"Valves updated successfully": "",
|
||||
"Valves": "Įeitys",
|
||||
"Valves updated": "Įeitys atnaujintos",
|
||||
"Valves updated successfully": "Įeitys atnaujintos sėkmingai",
|
||||
"variable": "kintamasis",
|
||||
"variable to have them replaced with clipboard content.": "kintamoji pakeičiama kopijuoklės turiniu.",
|
||||
"Version": "Versija",
|
||||
"Voice": "",
|
||||
"Warning": "",
|
||||
"Warning:": "",
|
||||
"Voice": "Balsas",
|
||||
"Warning": "Perspėjimas",
|
||||
"Warning:": "Perspėjimas",
|
||||
"Warning: If you update or change your embedding model, you will need to re-import all documents.": "Jei pakeisite embedding modelį, turėsite reimportuoti visus dokumentus",
|
||||
"Web": "Web",
|
||||
"Web API": "",
|
||||
"Web API": "Web API",
|
||||
"Web Loader Settings": "Web krovimo nustatymai",
|
||||
"Web Params": "Web nustatymai",
|
||||
"Web Search": "",
|
||||
"Web Search Engine": "",
|
||||
"Web Search": "Web paieška",
|
||||
"Web Search Engine": "Web paieškos variklis",
|
||||
"Webhook URL": "Webhook nuoroda",
|
||||
"WebUI Settings": "WebUI parametrai",
|
||||
"WebUI will make requests to": "WebUI vykdys užklausas",
|
||||
"What’s New in": "Kas naujo",
|
||||
"When history is turned off, new chats on this browser won't appear in your history on any of your devices.": "Kai istorija išjungta, pokalbiai neatsiras jūsų istorijoje.",
|
||||
"Whisper (Local)": "",
|
||||
"Widescreen Mode": "",
|
||||
"Workspace": "",
|
||||
"Whisper (Local)": "Whisper (lokalus)",
|
||||
"Widescreen Mode": "Plataus ekrano rėžimas",
|
||||
"Workspace": "Nuostatos",
|
||||
"Write a prompt suggestion (e.g. Who are you?)": "Parašykite užklausą",
|
||||
"Write a summary in 50 words that summarizes [topic or keyword].": "Parašyk santrumpą trumpesnę nei 50 žodžių šiam tekstui: [tekstas]",
|
||||
"Yesterday": "Vakar",
|
||||
"You": "Jūs",
|
||||
"You can personalize your interactions with LLMs by adding memories through the 'Manage' button below, making them more helpful and tailored to you.": "",
|
||||
"You cannot clone a base model": "",
|
||||
"You can personalize your interactions with LLMs by adding memories through the 'Manage' button below, making them more helpful and tailored to you.": "Galite pagerinti modelių darbą suteikdami jiems atminties funkcionalumą.",
|
||||
"You cannot clone a base model": "Negalite klonuoti bazinio modelio",
|
||||
"You have no archived conversations.": "Jūs neturite archyvuotų pokalbių",
|
||||
"You have shared this chat": "Pasidalinote šiuo pokalbiu",
|
||||
"You're a helpful assistant.": "Esi asistentas.",
|
||||
"You're now logged in.": "Esate prisijungę.",
|
||||
"Your account status is currently pending activation.": "",
|
||||
"Your entire contribution will go directly to the plugin developer; Open WebUI does not take any percentage. However, the chosen funding platform might have its own fees.": "",
|
||||
"Your account status is currently pending activation.": "Jūsų paskyra laukia administratoriaus patvirtinimo.",
|
||||
"Your entire contribution will go directly to the plugin developer; Open WebUI does not take any percentage. However, the chosen funding platform might have its own fees.": "Jūsų finansinis prisidėjimas tiesiogiai keliaus modulio kūrėjui.",
|
||||
"Youtube": "Youtube",
|
||||
"Youtube Loader Settings": "Youtube krovimo nustatymai"
|
||||
}
|
||||
|
@ -134,8 +134,10 @@
"Continue Response": "Teruskan Respons",
"Continue with {{provider}}": "Teruskan dengan {{provider}}",
"Controls": "Kawalan",
"Copied": "",
"Copied shared chat URL to clipboard!": "Menyalin URL sembang kongsi ke papan klip",
"Copy": "Salin",
"Copy Code": "",
"Copy last code block": "Salin Blok Kod Terakhir",
"Copy last response": "Salin Respons Terakhir",
"Copy Link": "Salin Pautan",
@ -312,6 +314,7 @@
"Google PSE API Key": "Kunci API Google PSE",
"Google PSE Engine Id": "ID Enjin Google PSE",
"h:mm a": "h:mm a",
"Haptic Feedback": "",
"has no conversations.": "tidak mempunyai perbualan.",
"Hello, {{name}}": "Hello, {{name}}",
"Help": "Bantuan",
@ -416,6 +419,7 @@
"Note: If you set a minimum score, the search will only return documents with a score greater than or equal to the minimum score.": "Nota: Jika anda menetapkan skor minimum, carian hanya akan mengembalikan dokumen dengan skor lebih besar daripada atau sama dengan skor minimum.",
"Notifications": "Pemberitahuan",
"November": "November",
"num_gpu (Ollama)": "",
"num_thread (Ollama)": "num_thread (Ollama)",
"OAuth ID": "ID OAuth",
"October": "Oktober",
@ -499,6 +503,7 @@
"Rosé Pine": "Rosé Pine",
"Rosé Pine Dawn": "Rosé Pine Dawn",
"RTL": "RTL",
"Run": "",
"Run Llama 2, Code Llama, and other models. Customize and create your own.": "Jalankan Llama 2, Code Llama dan model lain. Sesuaikan dan buat sendiri.",
"Running": "Sedang dijalankan",
"Save": "Simpan",
@ -134,8 +134,10 @@
"Continue Response": "Fortsett svar",
"Continue with {{provider}}": "Fortsett med {{provider}}",
"Controls": "Kontroller",
"Copied": "",
"Copied shared chat URL to clipboard!": "Kopiert delt chat-URL til utklippstavlen!",
"Copy": "Kopier",
"Copy Code": "",
"Copy last code block": "Kopier siste kodeblokk",
"Copy last response": "Kopier siste svar",
"Copy Link": "Kopier lenke",
@ -312,6 +314,7 @@
"Google PSE API Key": "Google PSE API-nøkkel",
"Google PSE Engine Id": "Google PSE Motor-ID",
"h:mm a": "t:mm a",
"Haptic Feedback": "",
"has no conversations.": "har ingen samtaler.",
"Hello, {{name}}": "Hei, {{name}}",
"Help": "Hjelp",
@ -416,6 +419,7 @@
"Note: If you set a minimum score, the search will only return documents with a score greater than or equal to the minimum score.": "Merk: Hvis du setter en minimums poengsum, vil søket kun returnere dokumenter med en poengsum som er større enn eller lik minimums poengsummen.",
"Notifications": "Varsler",
"November": "november",
"num_gpu (Ollama)": "",
"num_thread (Ollama)": "num_thread (Ollama)",
"OAuth ID": "OAuth-ID",
"October": "oktober",
@ -499,6 +503,7 @@
"Rosé Pine": "Rosé Pine",
"Rosé Pine Dawn": "Rosé Pine Dawn",
"RTL": "RTL",
"Run": "",
"Run Llama 2, Code Llama, and other models. Customize and create your own.": "Kjør Llama 2, Code Llama og andre modeller. Tilpass og lag egne versjoner.",
"Running": "Kjører",
"Save": "Lagre",
@ -134,8 +134,10 @@
"Continue Response": "Doorgaan met Antwoord",
"Continue with {{provider}}": "",
"Controls": "",
"Copied": "",
"Copied shared chat URL to clipboard!": "URL van gedeelde gesprekspagina gekopieerd naar klembord!",
"Copy": "Kopieer",
"Copy Code": "",
"Copy last code block": "Kopieer laatste code blok",
"Copy last response": "Kopieer laatste antwoord",
"Copy Link": "Kopieer Link",
@ -312,6 +314,7 @@
"Google PSE API Key": "Google PSE API-sleutel",
"Google PSE Engine Id": "Google PSE-engine-ID",
"h:mm a": "h:mm a",
"Haptic Feedback": "",
"has no conversations.": "heeft geen gesprekken.",
"Hello, {{name}}": "Hallo, {{name}}",
"Help": "Help",
@ -416,6 +419,7 @@
"Note: If you set a minimum score, the search will only return documents with a score greater than or equal to the minimum score.": "Opmerking: Als u een minimumscore instelt, levert de zoekopdracht alleen documenten op met een score groter dan of gelijk aan de minimumscore.",
"Notifications": "Desktop Notificaties",
"November": "November",
"num_gpu (Ollama)": "",
"num_thread (Ollama)": "num_thread (Ollama)",
"OAuth ID": "",
"October": "Oktober",
@ -499,6 +503,7 @@
"Rosé Pine": "Rosé Pine",
"Rosé Pine Dawn": "Rosé Pine Dawn",
"RTL": "RTL",
"Run": "",
"Run Llama 2, Code Llama, and other models. Customize and create your own.": "",
"Running": "",
"Save": "Opslaan",
@ -134,8 +134,10 @@
"Continue Response": "ਜਵਾਬ ਜਾਰੀ ਰੱਖੋ",
"Continue with {{provider}}": "",
"Controls": "",
"Copied": "",
"Copied shared chat URL to clipboard!": "ਸਾਂਝੇ ਕੀਤੇ ਗੱਲਬਾਤ URL ਨੂੰ ਕਲਿੱਪਬੋਰਡ 'ਤੇ ਕਾਪੀ ਕਰ ਦਿੱਤਾ!",
"Copy": "ਕਾਪੀ ਕਰੋ",
"Copy Code": "",
"Copy last code block": "ਆਖਰੀ ਕੋਡ ਬਲਾਕ ਨੂੰ ਕਾਪੀ ਕਰੋ",
"Copy last response": "ਆਖਰੀ ਜਵਾਬ ਨੂੰ ਕਾਪੀ ਕਰੋ",
"Copy Link": "ਲਿੰਕ ਕਾਪੀ ਕਰੋ",
@ -312,6 +314,7 @@
"Google PSE API Key": "Google PSE API ਕੁੰਜੀ",
"Google PSE Engine Id": "ਗੂਗਲ PSE ਇੰਜਣ ID",
"h:mm a": "ਹ:ਮਿੰਟ ਪੂਃ",
"Haptic Feedback": "",
"has no conversations.": "ਕੋਈ ਗੱਲਬਾਤ ਨਹੀਂ ਹੈ।",
"Hello, {{name}}": "ਸਤ ਸ੍ਰੀ ਅਕਾਲ, {{name}}",
"Help": "ਮਦਦ",
@ -416,6 +419,7 @@
"Note: If you set a minimum score, the search will only return documents with a score greater than or equal to the minimum score.": "ਨੋਟ: ਜੇ ਤੁਸੀਂ ਘੱਟੋ-ਘੱਟ ਸਕੋਰ ਸੈੱਟ ਕਰਦੇ ਹੋ, ਤਾਂ ਖੋਜ ਸਿਰਫ਼ ਉਹੀ ਡਾਕੂਮੈਂਟ ਵਾਪਸ ਕਰੇਗੀ ਜਿਨ੍ਹਾਂ ਦਾ ਸਕੋਰ ਘੱਟੋ-ਘੱਟ ਸਕੋਰ ਦੇ ਬਰਾਬਰ ਜਾਂ ਵੱਧ ਹੋਵੇ।",
"Notifications": "ਸੂਚਨਾਵਾਂ",
"November": "ਨਵੰਬਰ",
"num_gpu (Ollama)": "",
"num_thread (Ollama)": "num_thread (ਓਲਾਮਾ)",
"OAuth ID": "",
"October": "ਅਕਤੂਬਰ",
@ -499,6 +503,7 @@
"Rosé Pine": "ਰੋਜ਼ ਪਾਈਨ",
"Rosé Pine Dawn": "ਰੋਜ਼ ਪਾਈਨ ਡਾਨ",
"RTL": "RTL",
"Run": "",
"Run Llama 2, Code Llama, and other models. Customize and create your own.": "",
"Running": "",
"Save": "ਸੰਭਾਲੋ",
@ -134,8 +134,10 @@
"Continue Response": "Kontynuuj odpowiedź",
"Continue with {{provider}}": "",
"Controls": "",
"Copied": "",
"Copied shared chat URL to clipboard!": "Skopiowano URL czatu do schowka!",
"Copy": "Kopiuj",
"Copy Code": "",
"Copy last code block": "Skopiuj ostatni blok kodu",
"Copy last response": "Skopiuj ostatnią odpowiedź",
"Copy Link": "Kopiuj link",
@ -312,6 +314,7 @@
"Google PSE API Key": "Klucz API Google PSE",
"Google PSE Engine Id": "Identyfikator silnika Google PSE",
"h:mm a": "h:mm a",
"Haptic Feedback": "",
"has no conversations.": "nie ma rozmów.",
"Hello, {{name}}": "Witaj, {{name}}",
"Help": "Pomoc",
@ -416,6 +419,7 @@
"Note: If you set a minimum score, the search will only return documents with a score greater than or equal to the minimum score.": "Uwaga: Jeśli ustawisz minimalny wynik, szukanie zwróci jedynie dokumenty z wynikiem większym lub równym minimalnemu.",
"Notifications": "Powiadomienia",
"November": "Listopad",
"num_gpu (Ollama)": "",
"num_thread (Ollama)": "num_thread (Ollama)",
"OAuth ID": "",
"October": "Październik",
@ -499,6 +503,7 @@
"Rosé Pine": "Rosé Pine",
"Rosé Pine Dawn": "Rosé Pine Dawn",
"RTL": "RLT",
"Run": "",
"Run Llama 2, Code Llama, and other models. Customize and create your own.": "",
"Running": "",
"Save": "Zapisz",
Some files were not shown because too many files have changed in this diff.