Merge branch 'dev' into support-py-for-run-code

arkohut 2024-05-28 01:05:15 +08:00 committed by GitHub
commit 5166e92f90
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
169 changed files with 12170 additions and 10150 deletions


@@ -10,8 +10,4 @@ OPENAI_API_KEY=''
# DO NOT TRACK
SCARF_NO_ANALYTICS=true
DO_NOT_TRACK=true
ANONYMIZED_TELEMETRY=false
# Use locally bundled version of the LiteLLM cost map json
# to avoid repetitive startup connections
LITELLM_LOCAL_MODEL_COST_MAP="True"
ANONYMIZED_TELEMETRY=false


@@ -11,7 +11,7 @@ jobs:
steps:
- name: Checkout repository
uses: actions/checkout@v2
uses: actions/checkout@v4
- name: Check for changes in package.json
run: |
@@ -36,7 +36,7 @@ jobs:
echo "::set-output name=content::$CHANGELOG_ESCAPED"
- name: Create GitHub release
uses: actions/github-script@v5
uses: actions/github-script@v7
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
script: |
@@ -51,7 +51,7 @@ jobs:
console.log(`Created release ${release.data.html_url}`)
- name: Upload package to GitHub release
uses: actions/upload-artifact@v3
uses: actions/upload-artifact@v4
with:
name: package
path: .


@@ -0,0 +1,59 @@
name: Deploy to HuggingFace Spaces
on:
push:
branches:
- dev
- main
workflow_dispatch:
jobs:
check-secret:
runs-on: ubuntu-latest
outputs:
token-set: ${{ steps.check-key.outputs.defined }}
steps:
- id: check-key
env:
HF_TOKEN: ${{ secrets.HF_TOKEN }}
if: "${{ env.HF_TOKEN != '' }}"
run: echo "defined=true" >> $GITHUB_OUTPUT
deploy:
runs-on: ubuntu-latest
needs: [check-secret]
if: needs.check-secret.outputs.token-set == 'true'
env:
HF_TOKEN: ${{ secrets.HF_TOKEN }}
steps:
- name: Checkout repository
uses: actions/checkout@v4
- name: Remove git history
run: rm -rf .git
- name: Prepend YAML front matter to README.md
run: |
echo "---" > temp_readme.md
echo "title: Open WebUI" >> temp_readme.md
echo "emoji: 🐳" >> temp_readme.md
echo "colorFrom: purple" >> temp_readme.md
echo "colorTo: gray" >> temp_readme.md
echo "sdk: docker" >> temp_readme.md
echo "app_port: 8080" >> temp_readme.md
echo "---" >> temp_readme.md
cat README.md >> temp_readme.md
mv temp_readme.md README.md
- name: Configure git
run: |
git config --global user.email "41898282+github-actions[bot]@users.noreply.github.com"
git config --global user.name "github-actions[bot]"
- name: Set up Git and push to Space
run: |
git init --initial-branch=main
git lfs track "*.ttf"
rm demo.gif
git add .
git commit -m "GitHub deploy: ${{ github.sha }}"
git push --force https://open-webui:${HF_TOKEN}@huggingface.co/spaces/open-webui/open-webui main


@@ -84,6 +84,8 @@ jobs:
outputs: type=image,name=${{ env.FULL_IMAGE_NAME }},push-by-digest=true,name-canonical=true,push=true
cache-from: type=registry,ref=${{ steps.cache-meta.outputs.tags }}
cache-to: type=registry,ref=${{ steps.cache-meta.outputs.tags }},mode=max
build-args: |
BUILD_HASH=${{ github.sha }}
- name: Export digest
run: |
@@ -170,7 +172,9 @@ jobs:
outputs: type=image,name=${{ env.FULL_IMAGE_NAME }},push-by-digest=true,name-canonical=true,push=true
cache-from: type=registry,ref=${{ steps.cache-meta.outputs.tags }}
cache-to: type=registry,ref=${{ steps.cache-meta.outputs.tags }},mode=max
build-args: USE_CUDA=true
build-args: |
BUILD_HASH=${{ github.sha }}
USE_CUDA=true
- name: Export digest
run: |
@@ -257,7 +261,9 @@ jobs:
outputs: type=image,name=${{ env.FULL_IMAGE_NAME }},push-by-digest=true,name-canonical=true,push=true
cache-from: type=registry,ref=${{ steps.cache-meta.outputs.tags }}
cache-to: type=registry,ref=${{ steps.cache-meta.outputs.tags }},mode=max
build-args: USE_OLLAMA=true
build-args: |
BUILD_HASH=${{ github.sha }}
USE_OLLAMA=true
- name: Export digest
run: |


@@ -23,7 +23,7 @@ jobs:
- uses: actions/checkout@v4
- name: Set up Python
uses: actions/setup-python@v2
uses: actions/setup-python@v4
with:
python-version: ${{ matrix.python-version }}


@@ -19,7 +19,7 @@ jobs:
uses: actions/checkout@v4
- name: Setup Node.js
uses: actions/setup-node@v3
uses: actions/setup-node@v4
with:
node-version: '20' # Or specify any other version you want to use


@@ -20,7 +20,11 @@ jobs:
- name: Build and run Compose Stack
run: |
docker compose --file docker-compose.yaml --file docker-compose.api.yaml up --detach --build
docker compose \
--file docker-compose.yaml \
--file docker-compose.api.yaml \
--file docker-compose.a1111-test.yaml \
up --detach --build
- name: Wait for Ollama to be up
timeout-minutes: 5
@@ -95,7 +99,7 @@ jobs:
uses: actions/checkout@v4
- name: Set up Python
uses: actions/setup-python@v2
uses: actions/setup-python@v5
with:
python-version: ${{ matrix.python-version }}

.github/workflows/release-pypi.yml

@@ -0,0 +1,32 @@
name: Release to PyPI
on:
push:
branches:
- main # or whatever branch you want to use
- dev
jobs:
release:
runs-on: ubuntu-latest
environment:
name: pypi
url: https://pypi.org/p/open-webui
permissions:
id-token: write
steps:
- name: Checkout repository
uses: actions/checkout@v4
- uses: actions/setup-node@v4
with:
node-version: 18
- uses: actions/setup-python@v5
with:
python-version: 3.11
- name: Build
run: |
python -m pip install --upgrade pip
pip install build
python -m build .
- name: Publish package distributions to PyPI
uses: pypa/gh-action-pypi-publish@release/v1


@@ -11,12 +11,14 @@ ARG USE_CUDA_VER=cu121
# IMPORTANT: If you change the embedding model (sentence-transformers/all-MiniLM-L6-v2), you can't use RAG Chat with documents that were embedded with the previous model; you need to re-embed them.
ARG USE_EMBEDDING_MODEL=sentence-transformers/all-MiniLM-L6-v2
ARG USE_RERANKING_MODEL=""
ARG BUILD_HASH=dev-build
# Override at your own risk - non-root configurations are untested
ARG UID=0
ARG GID=0
######## WebUI frontend ########
FROM --platform=$BUILDPLATFORM node:21-alpine3.19 as build
ARG BUILD_HASH
WORKDIR /app
@@ -24,6 +26,7 @@ COPY package.json package-lock.json ./
RUN npm ci
COPY . .
ENV APP_BUILD_HASH=${BUILD_HASH}
RUN npm run build
######## WebUI backend ########
@@ -35,6 +38,7 @@ ARG USE_OLLAMA
ARG USE_CUDA_VER
ARG USE_EMBEDDING_MODEL
ARG USE_RERANKING_MODEL
ARG BUILD_HASH
ARG UID
ARG GID
@@ -59,11 +63,6 @@ ENV OPENAI_API_KEY="" \
DO_NOT_TRACK=true \
ANONYMIZED_TELEMETRY=false
# Use locally bundled version of the LiteLLM cost map json
# to avoid repetitive startup connections
ENV LITELLM_LOCAL_MODEL_COST_MAP="True"
#### Other models #########################################################
## whisper TTS model settings ##
ENV WHISPER_MODEL="base" \
@@ -83,10 +82,10 @@ WORKDIR /app/backend
ENV HOME /root
# Create user and group if not root
RUN if [ $UID -ne 0 ]; then \
if [ $GID -ne 0 ]; then \
addgroup --gid $GID app; \
fi; \
adduser --uid $UID --gid $GID --home $HOME --disabled-password --no-create-home app; \
if [ $GID -ne 0 ]; then \
addgroup --gid $GID app; \
fi; \
adduser --uid $UID --gid $GID --home $HOME --disabled-password --no-create-home app; \
fi
RUN mkdir -p $HOME/.cache/chroma
@@ -132,7 +131,8 @@ RUN pip3 install uv && \
uv pip install --system -r requirements.txt --no-cache-dir && \
python -c "import os; from sentence_transformers import SentenceTransformer; SentenceTransformer(os.environ['RAG_EMBEDDING_MODEL'], device='cpu')" && \
python -c "import os; from faster_whisper import WhisperModel; WhisperModel(os.environ['WHISPER_MODEL'], device='cpu', compute_type='int8', download_root=os.environ['WHISPER_MODEL_DIR'])"; \
fi
fi; \
chown -R $UID:$GID /app/backend/data/
@@ -154,4 +154,6 @@ HEALTHCHECK CMD curl --silent --fail http://localhost:8080/health | jq -e '.stat
USER $UID:$GID
ENV WEBUI_BUILD_VERSION=${BUILD_HASH}
CMD [ "bash", "start.sh"]


@@ -1,379 +0,0 @@
import sys
from contextlib import asynccontextmanager
from fastapi import FastAPI, Depends, HTTPException
from fastapi.routing import APIRoute
from fastapi.middleware.cors import CORSMiddleware
import logging
from fastapi import FastAPI, Request, Depends, status, Response
from fastapi.responses import JSONResponse
from starlette.middleware.base import BaseHTTPMiddleware, RequestResponseEndpoint
from starlette.responses import StreamingResponse
import json
import time
import requests
from pydantic import BaseModel, ConfigDict
from typing import Optional, List
from utils.utils import get_verified_user, get_current_user, get_admin_user
from config import SRC_LOG_LEVELS, ENV
from constants import MESSAGES
import os
log = logging.getLogger(__name__)
log.setLevel(SRC_LOG_LEVELS["LITELLM"])
from config import (
ENABLE_LITELLM,
ENABLE_MODEL_FILTER,
MODEL_FILTER_LIST,
DATA_DIR,
LITELLM_PROXY_PORT,
LITELLM_PROXY_HOST,
)
import warnings
warnings.simplefilter("ignore")
from litellm.utils import get_llm_provider
import asyncio
import subprocess
import yaml
@asynccontextmanager
async def lifespan(app: FastAPI):
log.info("startup_event")
# TODO: Check config.yaml file and create one
asyncio.create_task(start_litellm_background())
yield
app = FastAPI(lifespan=lifespan)
origins = ["*"]
app.add_middleware(
CORSMiddleware,
allow_origins=origins,
allow_credentials=True,
allow_methods=["*"],
allow_headers=["*"],
)
LITELLM_CONFIG_DIR = f"{DATA_DIR}/litellm/config.yaml"
with open(LITELLM_CONFIG_DIR, "r") as file:
litellm_config = yaml.safe_load(file)
app.state.ENABLE_MODEL_FILTER = ENABLE_MODEL_FILTER.value
app.state.MODEL_FILTER_LIST = MODEL_FILTER_LIST.value
app.state.ENABLE = ENABLE_LITELLM
app.state.CONFIG = litellm_config
# Global variable to store the subprocess reference
background_process = None
CONFLICT_ENV_VARS = [
# Uvicorn uses PORT, so LiteLLM might use it as well
"PORT",
# LiteLLM uses DATABASE_URL for Prisma connections
"DATABASE_URL",
]
async def run_background_process(command):
global background_process
log.info("run_background_process")
try:
# Log the command to be executed
log.info(f"Executing command: {command}")
# Filter environment variables known to conflict with litellm
env = {k: v for k, v in os.environ.items() if k not in CONFLICT_ENV_VARS}
# Execute the command and create a subprocess
process = await asyncio.create_subprocess_exec(
*command, stdout=subprocess.PIPE, stderr=subprocess.PIPE, env=env
)
background_process = process
log.info("Subprocess started successfully.")
# Capture STDERR for debugging purposes
stderr_output = await process.stderr.read()
stderr_text = stderr_output.decode().strip()
if stderr_text:
log.info(f"Subprocess STDERR: {stderr_text}")
# log.info output line by line
async for line in process.stdout:
log.info(line.decode().strip())
# Wait for the process to finish
returncode = await process.wait()
log.info(f"Subprocess exited with return code {returncode}")
except Exception as e:
log.error(f"Failed to start subprocess: {e}")
raise # Optionally re-raise the exception if you want it to propagate
async def start_litellm_background():
log.info("start_litellm_background")
# Command to run in the background
command = [
"litellm",
"--port",
str(LITELLM_PROXY_PORT),
"--host",
LITELLM_PROXY_HOST,
"--telemetry",
"False",
"--config",
LITELLM_CONFIG_DIR,
]
await run_background_process(command)
async def shutdown_litellm_background():
log.info("shutdown_litellm_background")
global background_process
if background_process:
background_process.terminate()
await background_process.wait() # Ensure the process has terminated
log.info("Subprocess terminated")
background_process = None
@app.get("/")
async def get_status():
return {"status": True}
async def restart_litellm():
"""
Endpoint to restart the litellm background service.
"""
log.info("Requested restart of litellm service.")
try:
# Shut down the existing process if it is running
await shutdown_litellm_background()
log.info("litellm service shutdown complete.")
# Restart the background service
asyncio.create_task(start_litellm_background())
log.info("litellm service restart complete.")
return {
"status": "success",
"message": "litellm service restarted successfully.",
}
except Exception as e:
log.info(f"Error restarting litellm service: {e}")
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail=str(e)
)
@app.get("/restart")
async def restart_litellm_handler(user=Depends(get_admin_user)):
return await restart_litellm()
@app.get("/config")
async def get_config(user=Depends(get_admin_user)):
return app.state.CONFIG
class LiteLLMConfigForm(BaseModel):
general_settings: Optional[dict] = None
litellm_settings: Optional[dict] = None
model_list: Optional[List[dict]] = None
router_settings: Optional[dict] = None
model_config = ConfigDict(protected_namespaces=())
@app.post("/config/update")
async def update_config(form_data: LiteLLMConfigForm, user=Depends(get_admin_user)):
app.state.CONFIG = form_data.model_dump(exclude_none=True)
with open(LITELLM_CONFIG_DIR, "w") as file:
yaml.dump(app.state.CONFIG, file)
await restart_litellm()
return app.state.CONFIG
@app.get("/models")
@app.get("/v1/models")
async def get_models(user=Depends(get_current_user)):
if app.state.ENABLE:
while not background_process:
await asyncio.sleep(0.1)
url = f"http://localhost:{LITELLM_PROXY_PORT}/v1"
r = None
try:
r = requests.request(method="GET", url=f"{url}/models")
r.raise_for_status()
data = r.json()
if app.state.ENABLE_MODEL_FILTER:
if user and user.role == "user":
data["data"] = list(
filter(
lambda model: model["id"] in app.state.MODEL_FILTER_LIST,
data["data"],
)
)
return data
except Exception as e:
log.exception(e)
error_detail = "Open WebUI: Server Connection Error"
if r is not None:
try:
res = r.json()
if "error" in res:
error_detail = f"External: {res['error']}"
except:
error_detail = f"External: {e}"
return {
"data": [
{
"id": model["model_name"],
"object": "model",
"created": int(time.time()),
"owned_by": "openai",
}
for model in app.state.CONFIG["model_list"]
],
"object": "list",
}
else:
return {
"data": [],
"object": "list",
}
@app.get("/model/info")
async def get_model_list(user=Depends(get_admin_user)):
return {"data": app.state.CONFIG["model_list"]}
class AddLiteLLMModelForm(BaseModel):
model_name: str
litellm_params: dict
model_config = ConfigDict(protected_namespaces=())
@app.post("/model/new")
async def add_model_to_config(
form_data: AddLiteLLMModelForm, user=Depends(get_admin_user)
):
try:
get_llm_provider(model=form_data.model_name)
app.state.CONFIG["model_list"].append(form_data.model_dump())
with open(LITELLM_CONFIG_DIR, "w") as file:
yaml.dump(app.state.CONFIG, file)
await restart_litellm()
return {"message": MESSAGES.MODEL_ADDED(form_data.model_name)}
except Exception as e:
print(e)
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail=str(e)
)
class DeleteLiteLLMModelForm(BaseModel):
id: str
@app.post("/model/delete")
async def delete_model_from_config(
form_data: DeleteLiteLLMModelForm, user=Depends(get_admin_user)
):
app.state.CONFIG["model_list"] = [
model
for model in app.state.CONFIG["model_list"]
if model["model_name"] != form_data.id
]
with open(LITELLM_CONFIG_DIR, "w") as file:
yaml.dump(app.state.CONFIG, file)
await restart_litellm()
return {"message": MESSAGES.MODEL_DELETED(form_data.id)}
@app.api_route("/{path:path}", methods=["GET", "POST", "PUT", "DELETE"])
async def proxy(path: str, request: Request, user=Depends(get_verified_user)):
body = await request.body()
url = f"http://localhost:{LITELLM_PROXY_PORT}"
target_url = f"{url}/{path}"
headers = {}
# headers["Authorization"] = f"Bearer {key}"
headers["Content-Type"] = "application/json"
r = None
try:
r = requests.request(
method=request.method,
url=target_url,
data=body,
headers=headers,
stream=True,
)
r.raise_for_status()
# Check if response is SSE
if "text/event-stream" in r.headers.get("Content-Type", ""):
return StreamingResponse(
r.iter_content(chunk_size=8192),
status_code=r.status_code,
headers=dict(r.headers),
)
else:
response_data = r.json()
return response_data
except Exception as e:
log.exception(e)
error_detail = "Open WebUI: Server Connection Error"
if r is not None:
try:
res = r.json()
if "error" in res:
error_detail = f"External: {res['error']['message'] if 'message' in res['error'] else res['error']}"
except:
error_detail = f"External: {e}"
raise HTTPException(
status_code=r.status_code if r else 500, detail=error_detail
)


@@ -29,8 +29,8 @@ import time
from urllib.parse import urlparse
from typing import Optional, List, Union
from apps.web.models.users import Users
from apps.webui.models.models import Models
from apps.webui.models.users import Users
from constants import ERROR_MESSAGES
from utils.utils import (
decode_token,
@@ -39,10 +39,13 @@ from utils.utils import (
get_admin_user,
)
from utils.models import get_model_id_from_custom_model_id
from config import (
SRC_LOG_LEVELS,
OLLAMA_BASE_URLS,
ENABLE_OLLAMA_API,
ENABLE_MODEL_FILTER,
MODEL_FILTER_LIST,
UPLOAD_DIR,
@@ -67,6 +70,7 @@ app.state.config = AppConfig()
app.state.config.ENABLE_MODEL_FILTER = ENABLE_MODEL_FILTER
app.state.config.MODEL_FILTER_LIST = MODEL_FILTER_LIST
app.state.config.ENABLE_OLLAMA_API = ENABLE_OLLAMA_API
app.state.config.OLLAMA_BASE_URLS = OLLAMA_BASE_URLS
app.state.MODELS = {}
@@ -96,6 +100,21 @@ async def get_status():
return {"status": True}
@app.get("/config")
async def get_config(user=Depends(get_admin_user)):
return {"ENABLE_OLLAMA_API": app.state.config.ENABLE_OLLAMA_API}
class OllamaConfigForm(BaseModel):
enable_ollama_api: Optional[bool] = None
@app.post("/config/update")
async def update_config(form_data: OllamaConfigForm, user=Depends(get_admin_user)):
app.state.config.ENABLE_OLLAMA_API = form_data.enable_ollama_api
return {"ENABLE_OLLAMA_API": app.state.config.ENABLE_OLLAMA_API}
@app.get("/urls")
async def get_ollama_api_urls(user=Depends(get_admin_user)):
return {"OLLAMA_BASE_URLS": app.state.config.OLLAMA_BASE_URLS}
@@ -156,14 +175,23 @@ def merge_models_lists(model_lists):
async def get_all_models():
log.info("get_all_models()")
tasks = [fetch_url(f"{url}/api/tags") for url in app.state.config.OLLAMA_BASE_URLS]
responses = await asyncio.gather(*tasks)
models = {
"models": merge_models_lists(
map(lambda response: response["models"] if response else None, responses)
)
}
if app.state.config.ENABLE_OLLAMA_API:
tasks = [
fetch_url(f"{url}/api/tags") for url in app.state.config.OLLAMA_BASE_URLS
]
responses = await asyncio.gather(*tasks)
models = {
"models": merge_models_lists(
map(
lambda response: response["models"] if response else None, responses
)
)
}
else:
models = {"models": []}
app.state.MODELS = {model["model"]: model for model in models["models"]}
@@ -278,6 +306,9 @@ async def pull_model(
r = None
# Admin should be able to pull models from any source
payload = {**form_data.model_dump(exclude_none=True), "insecure": True}
def get_request():
nonlocal url
nonlocal r
@@ -305,7 +336,7 @@ async def pull_model(
r = requests.request(
method="POST",
url=f"{url}/api/pull",
data=form_data.model_dump_json(exclude_none=True).encode(),
data=json.dumps(payload),
stream=True,
)
@@ -848,14 +879,93 @@ async def generate_chat_completion(
user=Depends(get_verified_user),
):
log.debug(
"form_data.model_dump_json(exclude_none=True).encode(): {0} ".format(
form_data.model_dump_json(exclude_none=True).encode()
)
)
payload = {
**form_data.model_dump(exclude_none=True),
}
model_id = form_data.model
model_info = Models.get_model_by_id(model_id)
if model_info:
print(model_info)
if model_info.base_model_id:
payload["model"] = model_info.base_model_id
model_info.params = model_info.params.model_dump()
if model_info.params:
payload["options"] = {}
payload["options"]["mirostat"] = model_info.params.get("mirostat", None)
payload["options"]["mirostat_eta"] = model_info.params.get(
"mirostat_eta", None
)
payload["options"]["mirostat_tau"] = model_info.params.get(
"mirostat_tau", None
)
payload["options"]["num_ctx"] = model_info.params.get("num_ctx", None)
payload["options"]["repeat_last_n"] = model_info.params.get(
"repeat_last_n", None
)
payload["options"]["repeat_penalty"] = model_info.params.get(
"frequency_penalty", None
)
payload["options"]["temperature"] = model_info.params.get(
"temperature", None
)
payload["options"]["seed"] = model_info.params.get("seed", None)
payload["options"]["stop"] = (
[
bytes(stop, "utf-8").decode("unicode_escape")
for stop in model_info.params["stop"]
]
if model_info.params.get("stop", None)
else None
)
payload["options"]["tfs_z"] = model_info.params.get("tfs_z", None)
payload["options"]["num_predict"] = model_info.params.get(
"max_tokens", None
)
payload["options"]["top_k"] = model_info.params.get("top_k", None)
payload["options"]["top_p"] = model_info.params.get("top_p", None)
if model_info.params.get("system", None):
# Check if the payload already has a system message
# If not, add a system message to the payload
if payload.get("messages"):
for message in payload["messages"]:
if message.get("role") == "system":
message["content"] = (
model_info.params.get("system", None) + message["content"]
)
break
else:
payload["messages"].insert(
0,
{
"role": "system",
"content": model_info.params.get("system", None),
},
)
if url_idx == None:
model = form_data.model
if ":" not in payload["model"]:
payload["model"] = f"{payload['model']}:latest"
if ":" not in model:
model = f"{model}:latest"
if model in app.state.MODELS:
url_idx = random.choice(app.state.MODELS[model]["urls"])
if payload["model"] in app.state.MODELS:
url_idx = random.choice(app.state.MODELS[payload["model"]]["urls"])
else:
raise HTTPException(
status_code=400,
@@ -865,16 +975,12 @@ async def generate_chat_completion(
url = app.state.config.OLLAMA_BASE_URLS[url_idx]
log.info(f"url: {url}")
print(payload)
r = None
log.debug(
"form_data.model_dump_json(exclude_none=True).encode(): {0} ".format(
form_data.model_dump_json(exclude_none=True).encode()
)
)
def get_request():
nonlocal form_data
nonlocal payload
nonlocal r
request_id = str(uuid.uuid4())
@@ -883,7 +989,7 @@ async def generate_chat_completion(
def stream_content():
try:
if form_data.stream:
if payload.get("stream", None):
yield json.dumps({"id": request_id, "done": False}) + "\n"
for chunk in r.iter_content(chunk_size=8192):
@@ -901,7 +1007,7 @@ async def generate_chat_completion(
r = requests.request(
method="POST",
url=f"{url}/api/chat",
data=form_data.model_dump_json(exclude_none=True).encode(),
data=json.dumps(payload),
stream=True,
)
@@ -957,14 +1063,62 @@ async def generate_openai_chat_completion(
user=Depends(get_verified_user),
):
payload = {
**form_data.model_dump(exclude_none=True),
}
model_id = form_data.model
model_info = Models.get_model_by_id(model_id)
if model_info:
print(model_info)
if model_info.base_model_id:
payload["model"] = model_info.base_model_id
model_info.params = model_info.params.model_dump()
if model_info.params:
payload["temperature"] = model_info.params.get("temperature", None)
payload["top_p"] = model_info.params.get("top_p", None)
payload["max_tokens"] = model_info.params.get("max_tokens", None)
payload["frequency_penalty"] = model_info.params.get(
"frequency_penalty", None
)
payload["seed"] = model_info.params.get("seed", None)
payload["stop"] = (
[
bytes(stop, "utf-8").decode("unicode_escape")
for stop in model_info.params["stop"]
]
if model_info.params.get("stop", None)
else None
)
if model_info.params.get("system", None):
# Check if the payload already has a system message
# If not, add a system message to the payload
if payload.get("messages"):
for message in payload["messages"]:
if message.get("role") == "system":
message["content"] = (
model_info.params.get("system", None) + message["content"]
)
break
else:
payload["messages"].insert(
0,
{
"role": "system",
"content": model_info.params.get("system", None),
},
)
if url_idx == None:
model = form_data.model
if ":" not in payload["model"]:
payload["model"] = f"{payload['model']}:latest"
if ":" not in model:
model = f"{model}:latest"
if model in app.state.MODELS:
url_idx = random.choice(app.state.MODELS[model]["urls"])
if payload["model"] in app.state.MODELS:
url_idx = random.choice(app.state.MODELS[payload["model"]]["urls"])
else:
raise HTTPException(
status_code=400,
@@ -977,7 +1131,7 @@ async def generate_openai_chat_completion(
r = None
def get_request():
nonlocal form_data
nonlocal payload
nonlocal r
request_id = str(uuid.uuid4())
@@ -986,7 +1140,7 @@ async def generate_openai_chat_completion(
def stream_content():
try:
if form_data.stream:
if payload.get("stream"):
yield json.dumps(
{"request_id": request_id, "done": False}
) + "\n"
@@ -1006,7 +1160,7 @@ async def generate_openai_chat_completion(
r = requests.request(
method="POST",
url=f"{url}/v1/chat/completions",
data=form_data.model_dump_json(exclude_none=True).encode(),
data=json.dumps(payload),
stream=True,
)
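
Both chat endpoints in this file now follow the same pattern: start from the request body, overlay the stored model's params, then send `json.dumps(payload)` instead of the raw form data. A condensed, self-contained sketch of that merge (the helper is illustrative, not part of the commit; field mappings are taken from the code above):

```python
import json

def apply_model_overrides(payload: dict, params: dict) -> dict:
    """Illustrative helper: overlay stored per-model params onto a request payload."""
    payload["options"] = {
        "mirostat": params.get("mirostat"),
        "temperature": params.get("temperature"),
        "repeat_penalty": params.get("frequency_penalty"),  # Ollama's name differs
        "num_predict": params.get("max_tokens"),            # Ollama's name differs
    }
    system = params.get("system")
    if system and payload.get("messages"):
        # Prepend the stored system prompt to an existing system message,
        # or insert one if the request has none (mirrors the code above).
        for message in payload["messages"]:
            if message.get("role") == "system":
                message["content"] = system + message["content"]
                break
        else:
            payload["messages"].insert(0, {"role": "system", "content": system})
    return payload

body = apply_model_overrides(
    {"model": "llama3", "messages": [{"role": "user", "content": "hi"}]},
    {"temperature": 0.2, "system": "Be brief. "},
)
print(json.dumps(body, indent=2))
```
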


@@ -10,8 +10,8 @@ import logging
from pydantic import BaseModel
from apps.web.models.users import Users
from apps.webui.models.models import Models
from apps.webui.models.users import Users
from constants import ERROR_MESSAGES
from utils.utils import (
decode_token,
@@ -53,7 +53,6 @@ app.state.config = AppConfig()
app.state.config.ENABLE_MODEL_FILTER = ENABLE_MODEL_FILTER
app.state.config.MODEL_FILTER_LIST = MODEL_FILTER_LIST
app.state.config.ENABLE_OPENAI_API = ENABLE_OPENAI_API
app.state.config.OPENAI_API_BASE_URLS = OPENAI_API_BASE_URLS
app.state.config.OPENAI_API_KEYS = OPENAI_API_KEYS
@@ -199,14 +198,20 @@ async def fetch_url(url, key):
def merge_models_lists(model_lists):
log.info(f"merge_models_lists {model_lists}")
log.debug(f"merge_models_lists {model_lists}")
merged_list = []
for idx, models in enumerate(model_lists):
if models is not None and "error" not in models:
merged_list.extend(
[
{**model, "urlIdx": idx}
{
**model,
"name": model.get("name", model["id"]),
"owned_by": "openai",
"openai": model,
"urlIdx": idx,
}
for model in models
if "api.openai.com"
not in app.state.config.OPENAI_API_BASE_URLS[idx]
@@ -232,7 +237,7 @@ async def get_all_models():
]
responses = await asyncio.gather(*tasks)
log.info(f"get_all_models:responses() {responses}")
log.debug(f"get_all_models:responses() {responses}")
models = {
"data": merge_models_lists(
@@ -249,10 +254,10 @@ async def get_all_models():
)
}
log.info(f"models: {models}")
log.debug(f"models: {models}")
app.state.MODELS = {model["id"]: model for model in models["data"]}
return models
return models
@app.get("/models")
@@ -310,31 +315,93 @@ async def proxy(path: str, request: Request, user=Depends(get_verified_user)):
body = await request.body()
# TODO: Remove below after gpt-4-vision fix from OpenAI
# Try to decode the request body from bytes to a UTF-8 string (adding max_tokens is required to work around gpt-4-vision)
payload = None
try:
body = body.decode("utf-8")
body = json.loads(body)
if "chat/completions" in path:
body = body.decode("utf-8")
body = json.loads(body)
idx = app.state.MODELS[body.get("model")]["urlIdx"]
payload = {**body}
# Check if the model is "gpt-4-vision-preview" and set "max_tokens" to 4000
# This is a workaround until OpenAI fixes the issue with this model
if body.get("model") == "gpt-4-vision-preview":
if "max_tokens" not in body:
body["max_tokens"] = 4000
log.debug("Modified body_dict:", body)
model_id = body.get("model")
model_info = Models.get_model_by_id(model_id)
# Fix for ChatGPT calls failing because the num_ctx key is in body
if "num_ctx" in body:
# If 'num_ctx' is in the dictionary, delete it
# Leaving it there generates an error with the
# OpenAI API (Feb 2024)
del body["num_ctx"]
if model_info:
print(model_info)
if model_info.base_model_id:
payload["model"] = model_info.base_model_id
model_info.params = model_info.params.model_dump()
if model_info.params:
payload["temperature"] = model_info.params.get("temperature", None)
payload["top_p"] = model_info.params.get("top_p", None)
payload["max_tokens"] = model_info.params.get("max_tokens", None)
payload["frequency_penalty"] = model_info.params.get(
"frequency_penalty", None
)
payload["seed"] = model_info.params.get("seed", None)
payload["stop"] = (
[
bytes(stop, "utf-8").decode("unicode_escape")
for stop in model_info.params["stop"]
]
if model_info.params.get("stop", None)
else None
)
if model_info.params.get("system", None):
# Check if the payload already has a system message
# If not, add a system message to the payload
if payload.get("messages"):
for message in payload["messages"]:
if message.get("role") == "system":
message["content"] = (
model_info.params.get("system", None)
+ message["content"]
)
break
else:
payload["messages"].insert(
0,
{
"role": "system",
"content": model_info.params.get("system", None),
},
)
else:
pass
print(app.state.MODELS)
model = app.state.MODELS[payload.get("model")]
idx = model["urlIdx"]
if "pipeline" in model and model.get("pipeline"):
payload["user"] = {"name": user.name, "id": user.id}
payload["title"] = (
True
if payload["stream"] == False and payload["max_tokens"] == 50
else False
)
# Check if the model is "gpt-4-vision-preview" and set "max_tokens" to 4000
# This is a workaround until OpenAI fixes the issue with this model
if payload.get("model") == "gpt-4-vision-preview":
if "max_tokens" not in payload:
payload["max_tokens"] = 4000
log.debug("Modified payload:", payload)
# Convert the modified body back to JSON
payload = json.dumps(payload)
# Convert the modified body back to JSON
body = json.dumps(body)
except json.JSONDecodeError as e:
log.error("Error loading request body into a dictionary:", e)
print(payload)
url = app.state.config.OPENAI_API_BASE_URLS[idx]
key = app.state.config.OPENAI_API_KEYS[idx]
@@ -353,7 +420,7 @@ async def proxy(path: str, request: Request, user=Depends(get_verified_user)):
r = requests.request(
method=request.method,
url=target_url,
data=body,
data=payload if payload else body,
headers=headers,
stream=True,
)


@@ -46,7 +46,7 @@ import json
import sentence_transformers
from apps.web.models.documents import (
from apps.webui.models.documents import (
Documents,
DocumentForm,
DocumentResponse,


@@ -1,136 +0,0 @@
from pydantic import BaseModel
from peewee import *
from playhouse.shortcuts import model_to_dict
from typing import List, Union, Optional
import time
from utils.utils import decode_token
from utils.misc import get_gravatar_url
from apps.web.internal.db import DB
import json
####################
# Modelfile DB Schema
####################
class Modelfile(Model):
tag_name = CharField(unique=True)
user_id = CharField()
modelfile = TextField()
timestamp = BigIntegerField()
class Meta:
database = DB
class ModelfileModel(BaseModel):
tag_name: str
user_id: str
modelfile: str
timestamp: int # timestamp in epoch
####################
# Forms
####################
class ModelfileForm(BaseModel):
modelfile: dict
class ModelfileTagNameForm(BaseModel):
tag_name: str
class ModelfileUpdateForm(ModelfileForm, ModelfileTagNameForm):
pass
class ModelfileResponse(BaseModel):
tag_name: str
user_id: str
modelfile: dict
timestamp: int # timestamp in epoch
class ModelfilesTable:
def __init__(self, db):
self.db = db
self.db.create_tables([Modelfile])
def insert_new_modelfile(
self, user_id: str, form_data: ModelfileForm
) -> Optional[ModelfileModel]:
if "tagName" in form_data.modelfile:
modelfile = ModelfileModel(
**{
"user_id": user_id,
"tag_name": form_data.modelfile["tagName"],
"modelfile": json.dumps(form_data.modelfile),
"timestamp": int(time.time()),
}
)
try:
result = Modelfile.create(**modelfile.model_dump())
if result:
return modelfile
else:
return None
except:
return None
else:
return None
def get_modelfile_by_tag_name(self, tag_name: str) -> Optional[ModelfileModel]:
try:
modelfile = Modelfile.get(Modelfile.tag_name == tag_name)
return ModelfileModel(**model_to_dict(modelfile))
except:
return None
def get_modelfiles(self, skip: int = 0, limit: int = 50) -> List[ModelfileResponse]:
return [
ModelfileResponse(
**{
**model_to_dict(modelfile),
"modelfile": json.loads(modelfile.modelfile),
}
)
for modelfile in Modelfile.select()
# .limit(limit).offset(skip)
]
def update_modelfile_by_tag_name(
self, tag_name: str, modelfile: dict
) -> Optional[ModelfileModel]:
try:
query = Modelfile.update(
modelfile=json.dumps(modelfile),
timestamp=int(time.time()),
).where(Modelfile.tag_name == tag_name)
query.execute()
modelfile = Modelfile.get(Modelfile.tag_name == tag_name)
return ModelfileModel(**model_to_dict(modelfile))
except:
return None
def delete_modelfile_by_tag_name(self, tag_name: str) -> bool:
try:
query = Modelfile.delete().where((Modelfile.tag_name == tag_name))
query.execute() # Remove the rows, return number of rows removed.
return True
except:
return False
Modelfiles = ModelfilesTable(DB)


@@ -1,124 +0,0 @@
from fastapi import Depends, FastAPI, HTTPException, status
from datetime import datetime, timedelta
from typing import List, Union, Optional
from fastapi import APIRouter
from pydantic import BaseModel
import json
from apps.web.models.modelfiles import (
Modelfiles,
ModelfileForm,
ModelfileTagNameForm,
ModelfileUpdateForm,
ModelfileResponse,
)
from utils.utils import get_current_user, get_admin_user
from constants import ERROR_MESSAGES
router = APIRouter()
############################
# GetModelfiles
############################
@router.get("/", response_model=List[ModelfileResponse])
async def get_modelfiles(
skip: int = 0, limit: int = 50, user=Depends(get_current_user)
):
return Modelfiles.get_modelfiles(skip, limit)
############################
# CreateNewModelfile
############################
@router.post("/create", response_model=Optional[ModelfileResponse])
async def create_new_modelfile(form_data: ModelfileForm, user=Depends(get_admin_user)):
modelfile = Modelfiles.insert_new_modelfile(user.id, form_data)
if modelfile:
return ModelfileResponse(
**{
**modelfile.model_dump(),
"modelfile": json.loads(modelfile.modelfile),
}
)
else:
raise HTTPException(
status_code=status.HTTP_401_UNAUTHORIZED,
detail=ERROR_MESSAGES.DEFAULT(),
)
############################
# GetModelfileByTagName
############################
@router.post("/", response_model=Optional[ModelfileResponse])
async def get_modelfile_by_tag_name(
form_data: ModelfileTagNameForm, user=Depends(get_current_user)
):
modelfile = Modelfiles.get_modelfile_by_tag_name(form_data.tag_name)
if modelfile:
return ModelfileResponse(
**{
**modelfile.model_dump(),
"modelfile": json.loads(modelfile.modelfile),
}
)
else:
raise HTTPException(
status_code=status.HTTP_401_UNAUTHORIZED,
detail=ERROR_MESSAGES.NOT_FOUND,
)
############################
# UpdateModelfileByTagName
############################
@router.post("/update", response_model=Optional[ModelfileResponse])
async def update_modelfile_by_tag_name(
form_data: ModelfileUpdateForm, user=Depends(get_admin_user)
):
modelfile = Modelfiles.get_modelfile_by_tag_name(form_data.tag_name)
if modelfile:
updated_modelfile = {
**json.loads(modelfile.modelfile),
**form_data.modelfile,
}
modelfile = Modelfiles.update_modelfile_by_tag_name(
form_data.tag_name, updated_modelfile
)
return ModelfileResponse(
**{
**modelfile.model_dump(),
"modelfile": json.loads(modelfile.modelfile),
}
)
else:
raise HTTPException(
status_code=status.HTTP_401_UNAUTHORIZED,
detail=ERROR_MESSAGES.ACCESS_PROHIBITED,
)
############################
# DeleteModelfileByTagName
############################
@router.delete("/delete", response_model=bool)
async def delete_modelfile_by_tag_name(
form_data: ModelfileTagNameForm, user=Depends(get_admin_user)
):
result = Modelfiles.delete_modelfile_by_tag_name(form_data.tag_name)
return result

View File

@@ -1,13 +1,25 @@
import json
from peewee import *
from peewee_migrate import Router
from playhouse.db_url import connect
from config import SRC_LOG_LEVELS, DATA_DIR, DATABASE_URL
from config import SRC_LOG_LEVELS, DATA_DIR, DATABASE_URL, BACKEND_DIR
import os
import logging
log = logging.getLogger(__name__)
log.setLevel(SRC_LOG_LEVELS["DB"])
class JSONField(TextField):
def db_value(self, value):
return json.dumps(value)
def python_value(self, value):
if value is not None:
return json.loads(value)
# Check if the file exists
if os.path.exists(f"{DATA_DIR}/ollama.db"):
# Rename the file
@@ -18,6 +30,10 @@ else:
DB = connect(DATABASE_URL)
log.info(f"Connected to a {DB.__class__.__name__} database.")
router = Router(DB, migrate_dir="apps/web/internal/migrations", logger=log)
router = Router(
DB,
migrate_dir=BACKEND_DIR / "apps" / "webui" / "internal" / "migrations",
logger=log,
)
router.run()
DB.connect(reuse_if_open=True)
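
The new `JSONField` transparently serializes dicts to text on write and decodes them on read. A minimal round-trip sketch, with a hypothetical table for illustration:

```python
import json
from peewee import SqliteDatabase, Model, TextField

db = SqliteDatabase(":memory:")

class JSONField(TextField):
    """Same idea as the field above: JSON-encode on write, decode on read."""
    def db_value(self, value):
        return json.dumps(value)

    def python_value(self, value):
        if value is not None:
            return json.loads(value)

class Example(Model):  # hypothetical table, for illustration only
    data = JSONField(null=True)

    class Meta:
        database = db

db.create_tables([Example])
row = Example.create(data={"ui": {"theme": "dark"}})
print(Example.get_by_id(row.id).data["ui"]["theme"])  # -> dark
```
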


@@ -0,0 +1,61 @@
"""Peewee migrations -- 009_add_models.py.
Some examples (model - class or model name)::
> Model = migrator.orm['table_name'] # Return model in current state by name
> Model = migrator.ModelClass # Return model in current state by name
> migrator.sql(sql) # Run custom SQL
> migrator.run(func, *args, **kwargs) # Run python function with the given args
> migrator.create_model(Model) # Create a model (could be used as decorator)
> migrator.remove_model(model, cascade=True) # Remove a model
> migrator.add_fields(model, **fields) # Add fields to a model
> migrator.change_fields(model, **fields) # Change fields
> migrator.remove_fields(model, *field_names, cascade=True)
> migrator.rename_field(model, old_field_name, new_field_name)
> migrator.rename_table(model, new_table_name)
> migrator.add_index(model, *col_names, unique=False)
> migrator.add_not_null(model, *field_names)
> migrator.add_default(model, field_name, default)
> migrator.add_constraint(model, name, sql)
> migrator.drop_index(model, *col_names)
> migrator.drop_not_null(model, *field_names)
> migrator.drop_constraints(model, *constraints)
"""
from contextlib import suppress
import peewee as pw
from peewee_migrate import Migrator
with suppress(ImportError):
import playhouse.postgres_ext as pw_pext
def migrate(migrator: Migrator, database: pw.Database, *, fake=False):
"""Write your migrations here."""
@migrator.create_model
class Model(pw.Model):
id = pw.TextField(unique=True)
user_id = pw.TextField()
base_model_id = pw.TextField(null=True)
name = pw.TextField()
meta = pw.TextField()
params = pw.TextField()
created_at = pw.BigIntegerField(null=False)
updated_at = pw.BigIntegerField(null=False)
class Meta:
table_name = "model"
def rollback(migrator: Migrator, database: pw.Database, *, fake=False):
"""Write your rollback migrations here."""
migrator.remove_model("model")


@@ -0,0 +1,130 @@
"""Peewee migrations -- 009_add_models.py.
Some examples (model - class or model name)::
> Model = migrator.orm['table_name'] # Return model in current state by name
> Model = migrator.ModelClass # Return model in current state by name
> migrator.sql(sql) # Run custom SQL
> migrator.run(func, *args, **kwargs) # Run python function with the given args
> migrator.create_model(Model) # Create a model (could be used as decorator)
> migrator.remove_model(model, cascade=True) # Remove a model
> migrator.add_fields(model, **fields) # Add fields to a model
> migrator.change_fields(model, **fields) # Change fields
> migrator.remove_fields(model, *field_names, cascade=True)
> migrator.rename_field(model, old_field_name, new_field_name)
> migrator.rename_table(model, new_table_name)
> migrator.add_index(model, *col_names, unique=False)
> migrator.add_not_null(model, *field_names)
> migrator.add_default(model, field_name, default)
> migrator.add_constraint(model, name, sql)
> migrator.drop_index(model, *col_names)
> migrator.drop_not_null(model, *field_names)
> migrator.drop_constraints(model, *constraints)
"""
from contextlib import suppress
import peewee as pw
from peewee_migrate import Migrator
import json
from utils.misc import parse_ollama_modelfile
with suppress(ImportError):
import playhouse.postgres_ext as pw_pext
def migrate(migrator: Migrator, database: pw.Database, *, fake=False):
"""Write your migrations here."""
# Fetch data from 'modelfile' table and insert into 'model' table
migrate_modelfile_to_model(migrator, database)
# Drop the 'modelfile' table
migrator.remove_model("modelfile")
def migrate_modelfile_to_model(migrator: Migrator, database: pw.Database):
ModelFile = migrator.orm["modelfile"]
Model = migrator.orm["model"]
modelfiles = ModelFile.select()
for modelfile in modelfiles:
# Extract and transform data in Python
modelfile.modelfile = json.loads(modelfile.modelfile)
meta = json.dumps(
{
"description": modelfile.modelfile.get("desc"),
"profile_image_url": modelfile.modelfile.get("imageUrl"),
"ollama": {"modelfile": modelfile.modelfile.get("content")},
"suggestion_prompts": modelfile.modelfile.get("suggestionPrompts"),
"categories": modelfile.modelfile.get("categories"),
"user": {**modelfile.modelfile.get("user", {}), "community": True},
}
)
info = parse_ollama_modelfile(modelfile.modelfile.get("content"))
# Insert the processed data into the 'model' table
Model.create(
id=f"ollama-{modelfile.tag_name}",
user_id=modelfile.user_id,
base_model_id=info.get("base_model_id"),
name=modelfile.modelfile.get("title"),
meta=meta,
params=json.dumps(info.get("params", {})),
created_at=modelfile.timestamp,
updated_at=modelfile.timestamp,
)
def rollback(migrator: Migrator, database: pw.Database, *, fake=False):
"""Write your rollback migrations here."""
recreate_modelfile_table(migrator, database)
move_data_back_to_modelfile(migrator, database)
migrator.remove_model("model")
def recreate_modelfile_table(migrator: Migrator, database: pw.Database):
query = """
CREATE TABLE IF NOT EXISTS modelfile (
user_id TEXT,
tag_name TEXT,
modelfile JSON,
timestamp BIGINT
)
"""
migrator.sql(query)
def move_data_back_to_modelfile(migrator: Migrator, database: pw.Database):
Model = migrator.orm["model"]
Modelfile = migrator.orm["modelfile"]
models = Model.select()
for model in models:
# Extract and transform data in Python
meta = json.loads(model.meta)
modelfile_data = {
"title": model.name,
"desc": meta.get("description"),
"imageUrl": meta.get("profile_image_url"),
"content": meta.get("ollama", {}).get("modelfile"),
"suggestionPrompts": meta.get("suggestion_prompts"),
"categories": meta.get("categories"),
"user": {k: v for k, v in meta.get("user", {}).items() if k != "community"},
}
# Insert the processed data back into the 'modelfile' table
Modelfile.create(
user_id=model.user_id,
tag_name=model.id,
modelfile=modelfile_data,
timestamp=model.created_at,
)
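
To make the forward transform concrete, here is what a single hypothetical `modelfile` row becomes under `migrate_modelfile_to_model` above; this is pure data reshaping, no database required. The sample values are invented, and the `params` side comes from `parse_ollama_modelfile`, whose output is not shown in this diff:

```python
import json

# Hypothetical source row, shaped like the old 'modelfile' JSON blob.
modelfile = {
    "tagName": "mario:latest",
    "title": "Mario",
    "desc": "A plumber",
    "imageUrl": "/favicon.png",
    "content": "FROM llama2\nSYSTEM You are Mario.",
    "suggestionPrompts": [],
    "categories": ["fun"],
    "user": {"name": "arkohut"},
}

# Mirrors the meta construction in the migration above.
meta = json.dumps({
    "description": modelfile.get("desc"),
    "profile_image_url": modelfile.get("imageUrl"),
    "ollama": {"modelfile": modelfile.get("content")},
    "suggestion_prompts": modelfile.get("suggestionPrompts"),
    "categories": modelfile.get("categories"),
    "user": {**modelfile.get("user", {}), "community": True},
})

print(f"id=ollama-{modelfile['tagName']}")
print(meta)
```
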


@@ -0,0 +1,48 @@
"""Peewee migrations -- 002_add_local_sharing.py.
Some examples (model - class or model name)::
> Model = migrator.orm['table_name'] # Return model in current state by name
> Model = migrator.ModelClass # Return model in current state by name
> migrator.sql(sql) # Run custom SQL
> migrator.run(func, *args, **kwargs) # Run python function with the given args
> migrator.create_model(Model) # Create a model (could be used as decorator)
> migrator.remove_model(model, cascade=True) # Remove a model
> migrator.add_fields(model, **fields) # Add fields to a model
> migrator.change_fields(model, **fields) # Change fields
> migrator.remove_fields(model, *field_names, cascade=True)
> migrator.rename_field(model, old_field_name, new_field_name)
> migrator.rename_table(model, new_table_name)
> migrator.add_index(model, *col_names, unique=False)
> migrator.add_not_null(model, *field_names)
> migrator.add_default(model, field_name, default)
> migrator.add_constraint(model, name, sql)
> migrator.drop_index(model, *col_names)
> migrator.drop_not_null(model, *field_names)
> migrator.drop_constraints(model, *constraints)
"""
from contextlib import suppress
import peewee as pw
from peewee_migrate import Migrator
with suppress(ImportError):
import playhouse.postgres_ext as pw_pext
def migrate(migrator: Migrator, database: pw.Database, *, fake=False):
"""Write your migrations here."""
# Adding fields settings to the 'user' table
migrator.add_fields("user", settings=pw.TextField(null=True))
def rollback(migrator: Migrator, database: pw.Database, *, fake=False):
"""Write your rollback migrations here."""
# Remove the settings field
migrator.remove_fields("user", "settings")


@@ -14,7 +14,7 @@ You will need to create a migration file to ensure that existing databases are u
2. Make your changes to the models.
3. From the `backend` directory, run the following command:
```bash
pw_migrate create --auto --auto-source apps.web.models --database sqlite:///${SQLITE_DB} --directory apps/web/internal/migrations ${MIGRATION_NAME}
pw_migrate create --auto --auto-source apps.webui.models --database sqlite:///${SQLITE_DB} --directory apps/web/internal/migrations ${MIGRATION_NAME}
```
- `$SQLITE_DB` should be the path to the database file.
- `$MIGRATION_NAME` should be a descriptive name for the migration.


@@ -1,19 +1,19 @@
from fastapi import FastAPI, Depends
from fastapi.routing import APIRoute
from fastapi.middleware.cors import CORSMiddleware
from apps.web.routers import (
from apps.webui.routers import (
auths,
users,
chats,
documents,
modelfiles,
models,
prompts,
configs,
memories,
utils,
)
from config import (
WEBUI_VERSION,
WEBUI_BUILD_HASH,
WEBUI_AUTH,
DEFAULT_MODELS,
DEFAULT_PROMPT_SUGGESTIONS,
@@ -23,7 +23,9 @@ from config import (
WEBHOOK_URL,
WEBUI_AUTH_TRUSTED_EMAIL_HEADER,
JWT_EXPIRES_IN,
WEBUI_BANNERS,
AppConfig,
ENABLE_COMMUNITY_SHARING,
)
app = FastAPI()
@@ -40,6 +42,11 @@ app.state.config.DEFAULT_PROMPT_SUGGESTIONS = DEFAULT_PROMPT_SUGGESTIONS
app.state.config.DEFAULT_USER_ROLE = DEFAULT_USER_ROLE
app.state.config.USER_PERMISSIONS = USER_PERMISSIONS
app.state.config.WEBHOOK_URL = WEBHOOK_URL
app.state.config.BANNERS = WEBUI_BANNERS
app.state.config.ENABLE_COMMUNITY_SHARING = ENABLE_COMMUNITY_SHARING
app.state.MODELS = {}
app.state.AUTH_TRUSTED_EMAIL_HEADER = WEBUI_AUTH_TRUSTED_EMAIL_HEADER
@@ -56,11 +63,10 @@ app.include_router(users.router, prefix="/users", tags=["users"])
app.include_router(chats.router, prefix="/chats", tags=["chats"])
app.include_router(documents.router, prefix="/documents", tags=["documents"])
app.include_router(modelfiles.router, prefix="/modelfiles", tags=["modelfiles"])
app.include_router(models.router, prefix="/models", tags=["models"])
app.include_router(prompts.router, prefix="/prompts", tags=["prompts"])
app.include_router(memories.router, prefix="/memories", tags=["memories"])
app.include_router(configs.router, prefix="/configs", tags=["configs"])
app.include_router(utils.router, prefix="/utils", tags=["utils"])


@@ -5,10 +5,10 @@ import uuid
import logging
from peewee import *
from apps.web.models.users import UserModel, Users
from apps.webui.models.users import UserModel, Users
from utils.utils import verify_password
from apps.web.internal.db import DB
from apps.webui.internal.db import DB
from config import SRC_LOG_LEVELS


@@ -7,7 +7,7 @@ import json
import uuid
import time
from apps.web.internal.db import DB
from apps.webui.internal.db import DB
####################
# Chat DB Schema
@@ -191,6 +191,20 @@ class ChatTable:
except:
return None
def archive_all_chats_by_user_id(self, user_id: str) -> bool:
try:
chats = self.get_chats_by_user_id(user_id)
for chat in chats:
query = Chat.update(
archived=True,
).where(Chat.id == chat.id)
query.execute()
return True
except:
return False
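
Note that `archive_all_chats_by_user_id` issues one UPDATE per chat; peewee can express the same operation as a single bulk update. A sketch of that alternative (not part of this commit):

```python
def archive_all_chats_by_user_id(self, user_id: str) -> bool:
    # Single-query equivalent of the per-chat loop above (sketch only).
    try:
        Chat.update(archived=True).where(Chat.user_id == user_id).execute()
        return True
    except Exception:
        return False
```
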
def get_archived_chat_list_by_user_id(
self, user_id: str, skip: int = 0, limit: int = 50
) -> List[ChatModel]:
@@ -205,17 +219,31 @@ class ChatTable:
]
def get_chat_list_by_user_id(
self, user_id: str, skip: int = 0, limit: int = 50
self,
user_id: str,
include_archived: bool = False,
skip: int = 0,
limit: int = 50,
) -> List[ChatModel]:
return [
ChatModel(**model_to_dict(chat))
for chat in Chat.select()
.where(Chat.archived == False)
.where(Chat.user_id == user_id)
.order_by(Chat.updated_at.desc())
# .limit(limit)
# .offset(skip)
]
if include_archived:
return [
ChatModel(**model_to_dict(chat))
for chat in Chat.select()
.where(Chat.user_id == user_id)
.order_by(Chat.updated_at.desc())
# .limit(limit)
# .offset(skip)
]
else:
return [
ChatModel(**model_to_dict(chat))
for chat in Chat.select()
.where(Chat.archived == False)
.where(Chat.user_id == user_id)
.order_by(Chat.updated_at.desc())
# .limit(limit)
# .offset(skip)
]
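
The two branches above differ only by the `archived` filter, so the same behavior can be sketched by building the query incrementally (again, not part of this commit):

```python
# Sketch: one query, with the archived filter applied conditionally.
query = Chat.select().where(Chat.user_id == user_id)
if not include_archived:
    query = query.where(Chat.archived == False)
return [
    ChatModel(**model_to_dict(chat))
    for chat in query.order_by(Chat.updated_at.desc())
]
```
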
def get_chat_list_by_chat_ids(
self, chat_ids: List[str], skip: int = 0, limit: int = 50


@@ -8,7 +8,7 @@ import logging
from utils.utils import decode_token
from utils.misc import get_gravatar_url
from apps.web.internal.db import DB
from apps.webui.internal.db import DB
import json


@@ -3,8 +3,8 @@ from peewee import *
from playhouse.shortcuts import model_to_dict
from typing import List, Union, Optional
from apps.web.internal.db import DB
from apps.web.models.chats import Chats
from apps.webui.internal.db import DB
from apps.webui.models.chats import Chats
import time
import uuid


@@ -0,0 +1,179 @@
import json
import logging
from typing import Optional
import peewee as pw
from peewee import *
from playhouse.shortcuts import model_to_dict
from pydantic import BaseModel, ConfigDict
from apps.webui.internal.db import DB, JSONField
from typing import List, Union, Optional
from config import SRC_LOG_LEVELS
import time
log = logging.getLogger(__name__)
log.setLevel(SRC_LOG_LEVELS["MODELS"])
####################
# Models DB Schema
####################
# ModelParams is a model for the data stored in the params field of the Model table
class ModelParams(BaseModel):
model_config = ConfigDict(extra="allow")
pass
# ModelMeta is a model for the data stored in the meta field of the Model table
class ModelMeta(BaseModel):
profile_image_url: Optional[str] = "/favicon.png"
description: Optional[str] = None
"""
User-facing description of the model.
"""
capabilities: Optional[dict] = None
model_config = ConfigDict(extra="allow")
pass
class Model(pw.Model):
id = pw.TextField(unique=True)
"""
The model's id as used in the API. If set to an existing model, it will override the model.
"""
user_id = pw.TextField()
base_model_id = pw.TextField(null=True)
"""
An optional pointer to the actual model that should be used when proxying requests.
"""
name = pw.TextField()
"""
The human-readable display name of the model.
"""
params = JSONField()
"""
Holds a JSON encoded blob of parameters, see `ModelParams`.
"""
meta = JSONField()
"""
Holds a JSON encoded blob of metadata, see `ModelMeta`.
"""
updated_at = BigIntegerField()
created_at = BigIntegerField()
class Meta:
database = DB
class ModelModel(BaseModel):
id: str
user_id: str
base_model_id: Optional[str] = None
name: str
params: ModelParams
meta: ModelMeta
updated_at: int # timestamp in epoch
created_at: int # timestamp in epoch
####################
# Forms
####################
class ModelResponse(BaseModel):
id: str
name: str
meta: ModelMeta
updated_at: int # timestamp in epoch
created_at: int # timestamp in epoch
class ModelForm(BaseModel):
id: str
base_model_id: Optional[str] = None
name: str
meta: ModelMeta
params: ModelParams
class ModelsTable:
def __init__(
self,
db: pw.SqliteDatabase | pw.PostgresqlDatabase,
):
self.db = db
self.db.create_tables([Model])
def insert_new_model(
self, form_data: ModelForm, user_id: str
) -> Optional[ModelModel]:
model = ModelModel(
**{
**form_data.model_dump(),
"user_id": user_id,
"created_at": int(time.time()),
"updated_at": int(time.time()),
}
)
try:
result = Model.create(**model.model_dump())
if result:
return model
else:
return None
except Exception as e:
print(e)
return None
def get_all_models(self) -> List[ModelModel]:
return [ModelModel(**model_to_dict(model)) for model in Model.select()]
def get_model_by_id(self, id: str) -> Optional[ModelModel]:
try:
model = Model.get(Model.id == id)
return ModelModel(**model_to_dict(model))
except:
return None
def update_model_by_id(self, id: str, model: ModelForm) -> Optional[ModelModel]:
try:
# update only the fields that are present in the model
query = Model.update(**model.model_dump()).where(Model.id == id)
query.execute()
model = Model.get(Model.id == id)
return ModelModel(**model_to_dict(model))
except Exception as e:
print(e)
return None
def delete_model_by_id(self, id: str) -> bool:
try:
query = Model.delete().where(Model.id == id)
query.execute()
return True
except:
return False
Models = ModelsTable(DB)
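
A usage sketch for the new table, using the forms defined above; the ids and parameter values are placeholders:

```python
form = ModelForm(
    id="my-custom-model",           # placeholder id
    base_model_id="llama3:latest",  # placeholder base model
    name="My Custom Model",
    meta=ModelMeta(description="Example entry"),
    params=ModelParams(temperature=0.2),  # extra="allow" permits arbitrary keys
)

created = Models.insert_new_model(form, user_id="user-123")
if created:
    fetched = Models.get_model_by_id("my-custom-model")
    print(fetched.name, fetched.params.model_dump())
```
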


@@ -7,7 +7,7 @@ import time
from utils.utils import decode_token
from utils.misc import get_gravatar_url
from apps.web.internal.db import DB
from apps.webui.internal.db import DB
import json


@@ -8,7 +8,7 @@ import uuid
import time
import logging
from apps.web.internal.db import DB
from apps.webui.internal.db import DB
from config import SRC_LOG_LEVELS


@@ -1,12 +1,12 @@
from pydantic import BaseModel
from pydantic import BaseModel, ConfigDict
from peewee import *
from playhouse.shortcuts import model_to_dict
from typing import List, Union, Optional
import time
from utils.misc import get_gravatar_url
from apps.web.internal.db import DB
from apps.web.models.chats import Chats
from apps.webui.internal.db import DB, JSONField
from apps.webui.models.chats import Chats
####################
# User DB Schema
@@ -25,11 +25,18 @@ class User(Model):
created_at = BigIntegerField()
api_key = CharField(null=True, unique=True)
settings = JSONField(null=True)
class Meta:
database = DB
class UserSettings(BaseModel):
ui: Optional[dict] = {}
model_config = ConfigDict(extra="allow")
pass
class UserModel(BaseModel):
id: str
name: str
@@ -42,6 +49,7 @@ class UserModel(BaseModel):
created_at: int # timestamp in epoch
api_key: Optional[str] = None
settings: Optional[UserSettings] = None
####################


@@ -10,7 +10,7 @@ import uuid
import csv
from apps.web.models.auths import (
from apps.webui.models.auths import (
SigninForm,
SignupForm,
AddUserForm,
@@ -21,7 +21,7 @@ from apps.web.models.auths import (
Auths,
ApiKey,
)
from apps.web.models.users import Users
from apps.webui.models.users import Users
from utils.utils import (
get_password_hash,


@@ -7,8 +7,8 @@ from pydantic import BaseModel
import json
import logging
from apps.web.models.users import Users
from apps.web.models.chats import (
from apps.webui.models.users import Users
from apps.webui.models.chats import (
ChatModel,
ChatResponse,
ChatTitleForm,
@@ -18,7 +18,7 @@ from apps.web.models.chats import (
)
from apps.web.models.tags import (
from apps.webui.models.tags import (
TagModel,
ChatIdTagModel,
ChatIdTagForm,
@@ -78,43 +78,25 @@ async def delete_all_user_chats(request: Request, user=Depends(get_current_user)
async def get_user_chat_list_by_user_id(
user_id: str, user=Depends(get_admin_user), skip: int = 0, limit: int = 50
):
return Chats.get_chat_list_by_user_id(user_id, skip, limit)
return Chats.get_chat_list_by_user_id(
user_id, include_archived=True, skip=skip, limit=limit
)
############################
# GetArchivedChats
# CreateNewChat
############################
@router.get("/archived", response_model=List[ChatTitleIdResponse])
async def get_archived_session_user_chat_list(
user=Depends(get_current_user), skip: int = 0, limit: int = 50
):
return Chats.get_archived_chat_list_by_user_id(user.id, skip, limit)
############################
# GetSharedChatById
############################
@router.get("/share/{share_id}", response_model=Optional[ChatResponse])
async def get_shared_chat_by_id(share_id: str, user=Depends(get_current_user)):
if user.role == "pending":
raise HTTPException(
status_code=status.HTTP_401_UNAUTHORIZED, detail=ERROR_MESSAGES.NOT_FOUND
)
if user.role == "user":
chat = Chats.get_chat_by_share_id(share_id)
elif user.role == "admin":
chat = Chats.get_chat_by_id(share_id)
if chat:
@router.post("/new", response_model=Optional[ChatResponse])
async def create_new_chat(form_data: ChatForm, user=Depends(get_current_user)):
try:
chat = Chats.insert_new_chat(user.id, form_data)
return ChatResponse(**{**chat.model_dump(), "chat": json.loads(chat.chat)})
else:
except Exception as e:
log.exception(e)
raise HTTPException(
status_code=status.HTTP_401_UNAUTHORIZED, detail=ERROR_MESSAGES.NOT_FOUND
status_code=status.HTTP_400_BAD_REQUEST, detail=ERROR_MESSAGES.DEFAULT()
)
@@ -150,19 +132,49 @@ async def get_all_user_chats_in_db(user=Depends(get_admin_user)):
############################
# CreateNewChat
# GetArchivedChats
############################
@router.post("/new", response_model=Optional[ChatResponse])
async def create_new_chat(form_data: ChatForm, user=Depends(get_current_user)):
try:
chat = Chats.insert_new_chat(user.id, form_data)
return ChatResponse(**{**chat.model_dump(), "chat": json.loads(chat.chat)})
except Exception as e:
log.exception(e)
@router.get("/archived", response_model=List[ChatTitleIdResponse])
async def get_archived_session_user_chat_list(
user=Depends(get_current_user), skip: int = 0, limit: int = 50
):
return Chats.get_archived_chat_list_by_user_id(user.id, skip, limit)
############################
# ArchiveAllChats
############################
@router.post("/archive/all", response_model=List[ChatTitleIdResponse])
async def archive_all_chats(user=Depends(get_current_user)):
return Chats.archive_all_chats_by_user_id(user.id)
############################
# GetSharedChatById
############################
@router.get("/share/{share_id}", response_model=Optional[ChatResponse])
async def get_shared_chat_by_id(share_id: str, user=Depends(get_current_user)):
if user.role == "pending":
raise HTTPException(
status_code=status.HTTP_400_BAD_REQUEST, detail=ERROR_MESSAGES.DEFAULT()
status_code=status.HTTP_401_UNAUTHORIZED, detail=ERROR_MESSAGES.NOT_FOUND
)
if user.role == "user":
chat = Chats.get_chat_by_share_id(share_id)
elif user.role == "admin":
chat = Chats.get_chat_by_id(share_id)
if chat:
return ChatResponse(**{**chat.model_dump(), "chat": json.loads(chat.chat)})
else:
raise HTTPException(
status_code=status.HTTP_401_UNAUTHORIZED, detail=ERROR_MESSAGES.NOT_FOUND
)
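Taken together, the reordered hunks above add an archive-all endpoint next to the existing archived-list route. A minimal client sketch, assuming the router is mounted under /api/v1/chats (the mount prefix is not shown in this hunk) and using a placeholder bearer token:

import requests

BASE = "http://localhost:8080/api/v1/chats"  # assumed mount point
headers = {"Authorization": "Bearer <token>"}  # hypothetical session token

# Archive every chat owned by the session user, then page through the archive
archived = requests.post(f"{BASE}/archive/all", headers=headers).json()
page = requests.get(f"{BASE}/archived", params={"skip": 0, "limit": 50}, headers=headers).json()
print([chat["title"] for chat in page])  # each entry is a ChatTitleIdResponse, so a title field is assumed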

View File

@ -8,7 +8,9 @@ from pydantic import BaseModel
import time
import uuid
from apps.web.models.users import Users
from config import BannerModel
from apps.webui.models.users import Users
from utils.utils import (
get_password_hash,
@ -57,3 +59,31 @@ async def set_global_default_suggestions(
data = form_data.model_dump()
request.app.state.config.DEFAULT_PROMPT_SUGGESTIONS = data["suggestions"]
return request.app.state.config.DEFAULT_PROMPT_SUGGESTIONS
############################
# SetBanners
############################
class SetBannersForm(BaseModel):
banners: List[BannerModel]
@router.post("/banners", response_model=List[BannerModel])
async def set_banners(
request: Request,
form_data: SetBannersForm,
user=Depends(get_admin_user),
):
data = form_data.model_dump()
request.app.state.config.BANNERS = data["banners"]
return request.app.state.config.BANNERS
@router.get("/banners", response_model=List[BannerModel])
async def get_banners(
request: Request,
user=Depends(get_current_user),
):
return request.app.state.config.BANNERS
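The new banner routes round-trip a list of BannerModel objects (the model itself is defined in config.py later in this diff). A sketch of a valid payload, assuming the router is mounted under /api/v1/configs; the prefix is not visible in this hunk:

import requests, time, uuid

banner = {
    "id": str(uuid.uuid4()),
    "type": "info",                # free-form type string per BannerModel
    "title": "Maintenance",        # optional field
    "content": "Scheduled downtime tonight at 22:00 UTC.",
    "dismissible": True,
    "timestamp": int(time.time()),
}
requests.post(
    "http://localhost:8080/api/v1/configs/banners",     # assumed path
    json={"banners": [banner]},
    headers={"Authorization": "Bearer <admin-token>"},  # set_banners requires get_admin_user
)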

View File

@ -6,7 +6,7 @@ from fastapi import APIRouter
from pydantic import BaseModel
import json
from apps.web.models.documents import (
from apps.webui.models.documents import (
Documents,
DocumentForm,
DocumentUpdateForm,

View File

@ -7,7 +7,7 @@ from fastapi import APIRouter
from pydantic import BaseModel
import logging
from apps.web.models.memories import Memories, MemoryModel
from apps.webui.models.memories import Memories, MemoryModel
from utils.utils import get_verified_user
from constants import ERROR_MESSAGES

View File

@ -0,0 +1,108 @@
from fastapi import Depends, FastAPI, HTTPException, status, Request
from datetime import datetime, timedelta
from typing import List, Union, Optional
from fastapi import APIRouter
from pydantic import BaseModel
import json
from apps.webui.models.models import Models, ModelModel, ModelForm, ModelResponse
from utils.utils import get_verified_user, get_admin_user
from constants import ERROR_MESSAGES
router = APIRouter()
###########################
# getModels
###########################
@router.get("/", response_model=List[ModelResponse])
async def get_models(user=Depends(get_verified_user)):
return Models.get_all_models()
############################
# AddNewModel
############################
@router.post("/add", response_model=Optional[ModelModel])
async def add_new_model(
request: Request, form_data: ModelForm, user=Depends(get_admin_user)
):
if form_data.id in request.app.state.MODELS:
raise HTTPException(
status_code=status.HTTP_401_UNAUTHORIZED,
detail=ERROR_MESSAGES.MODEL_ID_TAKEN,
)
else:
model = Models.insert_new_model(form_data, user.id)
if model:
return model
else:
raise HTTPException(
status_code=status.HTTP_401_UNAUTHORIZED,
detail=ERROR_MESSAGES.DEFAULT(),
)
############################
# GetModelById
############################
@router.get("/", response_model=Optional[ModelModel])
async def get_model_by_id(id: str, user=Depends(get_verified_user)):
model = Models.get_model_by_id(id)
if model:
return model
else:
raise HTTPException(
status_code=status.HTTP_401_UNAUTHORIZED,
detail=ERROR_MESSAGES.NOT_FOUND,
)
############################
# UpdateModelById
############################
@router.post("/update", response_model=Optional[ModelModel])
async def update_model_by_id(
request: Request, id: str, form_data: ModelForm, user=Depends(get_admin_user)
):
model = Models.get_model_by_id(id)
if model:
model = Models.update_model_by_id(id, form_data)
return model
else:
if form_data.id in request.app.state.MODELS:
model = Models.insert_new_model(form_data, user.id)
print(model)
if model:
return model
else:
raise HTTPException(
status_code=status.HTTP_401_UNAUTHORIZED,
detail=ERROR_MESSAGES.DEFAULT(),
)
else:
raise HTTPException(
status_code=status.HTTP_401_UNAUTHORIZED,
detail=ERROR_MESSAGES.DEFAULT(),
)
############################
# DeleteModelById
############################
@router.delete("/delete", response_model=bool)
async def delete_model_by_id(id: str, user=Depends(get_admin_user)):
result = Models.delete_model_by_id(id)
return result
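One detail worth illustrating: in this new router, get_model_by_id and delete_model_by_id take the model id as a query parameter rather than a path segment (and GET "/" is declared twice, once for the list and once for the single-model lookup). A hedged client sketch; the mount prefix and the ModelForm fields are assumptions inferred from usage elsewhere in this diff:

import requests

BASE = "http://localhost:8080/api/v1/models"  # assumed mount point
auth = {"Authorization": "Bearer <admin-token>"}  # hypothetical token

form = {"id": "my-preset", "name": "My Preset", "base_model_id": "llama3", "params": {}}
requests.post(f"{BASE}/add", json=form, headers=auth)  # 401 + MODEL_ID_TAKEN if the id already exists

# id is passed as a query parameter, not a path segment
model = requests.get(BASE, params={"id": "my-preset"}, headers=auth).json()
requests.delete(f"{BASE}/delete", params={"id": "my-preset"}, headers=auth)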

View File

@ -6,7 +6,7 @@ from fastapi import APIRouter
from pydantic import BaseModel
import json
from apps.web.models.prompts import Prompts, PromptForm, PromptModel
from apps.webui.models.prompts import Prompts, PromptForm, PromptModel
from utils.utils import get_current_user, get_admin_user
from constants import ERROR_MESSAGES

View File

@ -9,9 +9,15 @@ import time
import uuid
import logging
from apps.web.models.users import UserModel, UserUpdateForm, UserRoleUpdateForm, Users
from apps.web.models.auths import Auths
from apps.web.models.chats import Chats
from apps.webui.models.users import (
UserModel,
UserUpdateForm,
UserRoleUpdateForm,
UserSettings,
Users,
)
from apps.webui.models.auths import Auths
from apps.webui.models.chats import Chats
from utils.utils import get_verified_user, get_password_hash, get_admin_user
from constants import ERROR_MESSAGES
@ -68,6 +74,42 @@ async def update_user_role(form_data: UserRoleUpdateForm, user=Depends(get_admin
)
############################
# GetUserSettingsBySessionUser
############################
@router.get("/user/settings", response_model=Optional[UserSettings])
async def get_user_settings_by_session_user(user=Depends(get_verified_user)):
user = Users.get_user_by_id(user.id)
if user:
return user.settings
else:
raise HTTPException(
status_code=status.HTTP_400_BAD_REQUEST,
detail=ERROR_MESSAGES.USER_NOT_FOUND,
)
############################
# UpdateUserSettingsBySessionUser
############################
@router.post("/user/settings/update", response_model=UserSettings)
async def update_user_settings_by_session_user(
form_data: UserSettings, user=Depends(get_verified_user)
):
user = Users.update_user_by_id(user.id, {"settings": form_data.model_dump()})
if user:
return user.settings
else:
raise HTTPException(
status_code=status.HTTP_400_BAD_REQUEST,
detail=ERROR_MESSAGES.USER_NOT_FOUND,
)
############################
# GetUserById
############################
@ -81,6 +123,8 @@ class UserResponse(BaseModel):
@router.get("/{user_id}", response_model=UserResponse)
async def get_user_by_id(user_id: str, user=Depends(get_verified_user)):
# Check if user_id is a shared chat
# If it is, get the user_id from the chat
if user_id.startswith("shared-"):
chat_id = user_id.replace("shared-", "")
chat = Chats.get_chat_by_id(chat_id)
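The shared- prefix handling above lets the frontend ask for the author of a shared chat through a synthetic user id. A short sketch of the convention, with the base URL assumed:

import requests

# "shared-<chat_id>" resolves to the owner of that shared chat
resp = requests.get(
    "http://localhost:8080/api/v1/users/shared-1234",  # assumed mount; 1234 is an illustrative chat id
    headers={"Authorization": "Bearer <token>"},       # hypothetical token
)
print(resp.json())  # UserResponse for the chat's author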

View File

@ -8,7 +8,7 @@ from pydantic import BaseModel
from fpdf import FPDF
import markdown
from apps.web.internal.db import DB
from apps.webui.internal.db import DB
from utils.utils import get_admin_user
from utils.misc import calculate_sha256, get_gravatar_url

View File

@ -1,11 +1,15 @@
import os
import sys
import logging
import importlib.metadata
import pkgutil
import chromadb
from chromadb import Settings
from base64 import b64encode
from bs4 import BeautifulSoup
from typing import TypeVar, Generic, Union
from pydantic import BaseModel
from typing import Optional
from pathlib import Path
import json
@ -22,10 +26,15 @@ from constants import ERROR_MESSAGES
# Load .env file
####################################
BACKEND_DIR = Path(__file__).parent # the path containing this file
BASE_DIR = BACKEND_DIR.parent # the path containing the backend/
print(BASE_DIR)
try:
from dotenv import load_dotenv, find_dotenv
load_dotenv(find_dotenv("../.env"))
load_dotenv(find_dotenv(str(BASE_DIR / ".env")))
except ImportError:
print("dotenv not installed, skipping...")
@ -51,7 +60,6 @@ log_sources = [
"CONFIG",
"DB",
"IMAGES",
"LITELLM",
"MAIN",
"MODELS",
"OLLAMA",
@ -87,10 +95,12 @@ WEBUI_FAVICON_URL = "https://openwebui.com/favicon.png"
ENV = os.environ.get("ENV", "dev")
try:
with open(f"../package.json", "r") as f:
PACKAGE_DATA = json.load(f)
PACKAGE_DATA = json.loads((BASE_DIR / "package.json").read_text())
except:
PACKAGE_DATA = {"version": "0.0.0"}
try:
PACKAGE_DATA = {"version": importlib.metadata.version("open-webui")}
except importlib.metadata.PackageNotFoundError:
PACKAGE_DATA = {"version": "0.0.0"}
VERSION = PACKAGE_DATA["version"]
@ -115,10 +125,13 @@ def parse_section(section):
try:
with open("../CHANGELOG.md", "r") as file:
changelog_path = BASE_DIR / "CHANGELOG.md"
with open(str(changelog_path.absolute()), "r", encoding="utf8") as file:
changelog_content = file.read()
except:
changelog_content = ""
changelog_content = (pkgutil.get_data("open_webui", "CHANGELOG.md") or b"").decode()
# Convert markdown content to HTML
html_content = markdown.markdown(changelog_content)
@ -155,21 +168,20 @@ CHANGELOG = changelog_json
####################################
# WEBUI_VERSION
# WEBUI_BUILD_HASH
####################################
WEBUI_VERSION = os.environ.get("WEBUI_VERSION", "v1.0.0-alpha.100")
WEBUI_BUILD_HASH = os.environ.get("WEBUI_BUILD_HASH", "dev-build")
####################################
# DATA/FRONTEND BUILD DIR
####################################
DATA_DIR = str(Path(os.getenv("DATA_DIR", "./data")).resolve())
FRONTEND_BUILD_DIR = str(Path(os.getenv("FRONTEND_BUILD_DIR", "../build")))
DATA_DIR = Path(os.getenv("DATA_DIR", BACKEND_DIR / "data")).resolve()
FRONTEND_BUILD_DIR = Path(os.getenv("FRONTEND_BUILD_DIR", BASE_DIR / "build")).resolve()
try:
with open(f"{DATA_DIR}/config.json", "r") as f:
CONFIG_DATA = json.load(f)
CONFIG_DATA = json.loads((DATA_DIR / "config.json").read_text())
except:
CONFIG_DATA = {}
@ -279,11 +291,11 @@ JWT_EXPIRES_IN = PersistentConfig(
# Static DIR
####################################
STATIC_DIR = str(Path(os.getenv("STATIC_DIR", "./static")).resolve())
STATIC_DIR = Path(os.getenv("STATIC_DIR", BACKEND_DIR / "static")).resolve()
frontend_favicon = f"{FRONTEND_BUILD_DIR}/favicon.png"
if os.path.exists(frontend_favicon):
shutil.copyfile(frontend_favicon, f"{STATIC_DIR}/favicon.png")
frontend_favicon = FRONTEND_BUILD_DIR / "favicon.png"
if frontend_favicon.exists():
shutil.copyfile(frontend_favicon, STATIC_DIR / "favicon.png")
else:
logging.warning(f"Frontend favicon not found at {frontend_favicon}")
@ -368,16 +380,23 @@ def create_config_file(file_path):
LITELLM_CONFIG_PATH = f"{DATA_DIR}/litellm/config.yaml"
if not os.path.exists(LITELLM_CONFIG_PATH):
log.info("Config file doesn't exist. Creating...")
create_config_file(LITELLM_CONFIG_PATH)
log.info("Config file created successfully.")
# if not os.path.exists(LITELLM_CONFIG_PATH):
# log.info("Config file doesn't exist. Creating...")
# create_config_file(LITELLM_CONFIG_PATH)
# log.info("Config file created successfully.")
####################################
# OLLAMA_BASE_URL
####################################
ENABLE_OLLAMA_API = PersistentConfig(
"ENABLE_OLLAMA_API",
"ollama.enable",
os.environ.get("ENABLE_OLLAMA_API", "True").lower() == "true",
)
OLLAMA_API_BASE_URL = os.environ.get(
"OLLAMA_API_BASE_URL", "http://localhost:11434/api"
)
@ -549,6 +568,27 @@ WEBHOOK_URL = PersistentConfig(
ENABLE_ADMIN_EXPORT = os.environ.get("ENABLE_ADMIN_EXPORT", "True").lower() == "true"
ENABLE_COMMUNITY_SHARING = PersistentConfig(
"ENABLE_COMMUNITY_SHARING",
"ui.enable_community_sharing",
os.environ.get("ENABLE_COMMUNITY_SHARING", "True").lower() == "true",
)
class BannerModel(BaseModel):
id: str
type: str
title: Optional[str] = None
content: str
dismissible: bool
timestamp: int
WEBUI_BANNERS = PersistentConfig(
"WEBUI_BANNERS",
"ui.banners",
[BannerModel(**banner) for banner in json.loads("[]")],
)
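PersistentConfig itself is defined outside this hunk; as a rough mental model (an assumption, not the actual implementation), each entry couples an environment variable, a dotted path into DATA_DIR/config.json, and a default value:

# Sketch only -- the real PersistentConfig lives elsewhere in config.py.
class PersistentConfigSketch:
    def __init__(self, env_name, config_path, value):
        self.env_name = env_name        # e.g. "WEBUI_BANNERS"
        self.config_path = config_path  # e.g. "ui.banners", where the value is persisted
        self.value = value              # env-derived default, e.g. a list of BannerModel

Note that WEBUI_BANNERS above parses the literal "[]", so it always starts empty until set through the /banners endpoint.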
####################################
# WEBUI_SECRET_KEY
####################################
@ -813,18 +853,6 @@ AUDIO_OPENAI_API_VOICE = PersistentConfig(
os.getenv("AUDIO_OPENAI_API_VOICE", "alloy"),
)
####################################
# LiteLLM
####################################
ENABLE_LITELLM = os.environ.get("ENABLE_LITELLM", "True").lower() == "true"
LITELLM_PROXY_PORT = int(os.getenv("LITELLM_PROXY_PORT", "14365"))
if LITELLM_PROXY_PORT < 0 or LITELLM_PROXY_PORT > 65535:
raise ValueError("Invalid port number for LITELLM_PROXY_PORT")
LITELLM_PROXY_HOST = os.getenv("LITELLM_PROXY_HOST", "127.0.0.1")
####################################
# Database

View File

@ -32,6 +32,8 @@ class ERROR_MESSAGES(str, Enum):
COMMAND_TAKEN = "Uh-oh! This command is already registered. Please choose another command string."
FILE_EXISTS = "Uh-oh! This file is already registered. Please choose another file."
MODEL_ID_TAKEN = "Uh-oh! This model id is already registered. Please choose another model id string."
NAME_TAG_TAKEN = "Uh-oh! This name tag is already registered. Please choose another name tag string."
INVALID_TOKEN = (
"Your session has expired or the token is invalid. Please sign in again."

View File

@ -8,6 +8,7 @@ import sys
import logging
import aiohttp
import requests
import mimetypes
from fastapi import FastAPI, Request, Depends, status
from fastapi.staticfiles import StaticFiles
@ -18,27 +19,20 @@ from starlette.exceptions import HTTPException as StarletteHTTPException
from starlette.middleware.base import BaseHTTPMiddleware
from starlette.responses import StreamingResponse, Response
from apps.ollama.main import app as ollama_app
from apps.openai.main import app as openai_app
from apps.litellm.main import (
app as litellm_app,
start_litellm_background,
shutdown_litellm_background,
)
from apps.ollama.main import app as ollama_app, get_all_models as get_ollama_models
from apps.openai.main import app as openai_app, get_all_models as get_openai_models
from apps.audio.main import app as audio_app
from apps.images.main import app as images_app
from apps.rag.main import app as rag_app
from apps.web.main import app as webui_app
from apps.webui.main import app as webui_app
import asyncio
from pydantic import BaseModel
from typing import List
from typing import List, Optional
from utils.utils import get_admin_user
from apps.webui.models.models import Models, ModelModel
from utils.utils import get_admin_user, get_verified_user
from apps.rag.utils import rag_messages
from config import (
@ -52,7 +46,8 @@ from config import (
FRONTEND_BUILD_DIR,
CACHE_DIR,
STATIC_DIR,
ENABLE_LITELLM,
ENABLE_OPENAI_API,
ENABLE_OLLAMA_API,
ENABLE_MODEL_FILTER,
MODEL_FILTER_LIST,
GLOBAL_LOG_LEVEL,
@ -60,6 +55,7 @@ from config import (
WEBHOOK_URL,
ENABLE_ADMIN_EXPORT,
AppConfig,
WEBUI_BUILD_HASH,
)
from constants import ERROR_MESSAGES
@ -89,7 +85,8 @@ print(
|_|
v{VERSION} - building the best open-source AI user interface.
v{VERSION} - building the best open-source AI user interface.
{f"Commit: {WEBUI_BUILD_HASH}" if WEBUI_BUILD_HASH != "dev-build" else ""}
https://github.com/open-webui/open-webui
"""
)
@ -97,11 +94,7 @@ https://github.com/open-webui/open-webui
@asynccontextmanager
async def lifespan(app: FastAPI):
if ENABLE_LITELLM:
asyncio.create_task(start_litellm_background())
yield
if ENABLE_LITELLM:
await shutdown_litellm_background()
app = FastAPI(
@ -109,11 +102,19 @@ app = FastAPI(
)
app.state.config = AppConfig()
app.state.config.ENABLE_OPENAI_API = ENABLE_OPENAI_API
app.state.config.ENABLE_OLLAMA_API = ENABLE_OLLAMA_API
app.state.config.ENABLE_MODEL_FILTER = ENABLE_MODEL_FILTER
app.state.config.MODEL_FILTER_LIST = MODEL_FILTER_LIST
app.state.config.WEBHOOK_URL = WEBHOOK_URL
app.state.MODELS = {}
origins = ["*"]
@ -230,6 +231,11 @@ app.add_middleware(
@app.middleware("http")
async def check_url(request: Request, call_next):
if len(app.state.MODELS) == 0:
await get_all_models()
else:
pass
start_time = int(time.time())
response = await call_next(request)
process_time = int(time.time()) - start_time
@ -246,9 +252,8 @@ async def update_embedding_function(request: Request, call_next):
return response
app.mount("/litellm/api", litellm_app)
app.mount("/ollama", ollama_app)
app.mount("/openai/api", openai_app)
app.mount("/openai", openai_app)
app.mount("/images/api/v1", images_app)
app.mount("/audio/api/v1", audio_app)
@ -259,6 +264,87 @@ app.mount("/api/v1", webui_app)
webui_app.state.EMBEDDING_FUNCTION = rag_app.state.EMBEDDING_FUNCTION
async def get_all_models():
openai_models = []
ollama_models = []
if app.state.config.ENABLE_OPENAI_API:
openai_models = await get_openai_models()
openai_models = openai_models["data"]
if app.state.config.ENABLE_OLLAMA_API:
ollama_models = await get_ollama_models()
ollama_models = [
{
"id": model["model"],
"name": model["name"],
"object": "model",
"created": int(time.time()),
"owned_by": "ollama",
"ollama": model,
}
for model in ollama_models["models"]
]
models = openai_models + ollama_models
custom_models = Models.get_all_models()
for custom_model in custom_models:
if custom_model.base_model_id is None:
for model in models:
if (
custom_model.id == model["id"]
or custom_model.id == model["id"].split(":")[0]
):
model["name"] = custom_model.name
model["info"] = custom_model.model_dump()
else:
owned_by = "openai"
for model in models:
if (
custom_model.base_model_id == model["id"]
or custom_model.base_model_id == model["id"].split(":")[0]
):
owned_by = model["owned_by"]
break
models.append(
{
"id": custom_model.id,
"name": custom_model.name,
"object": "model",
"created": custom_model.created_at,
"owned_by": owned_by,
"info": custom_model.model_dump(),
"preset": True,
}
)
app.state.MODELS = {model["id"]: model for model in models}
webui_app.state.MODELS = app.state.MODELS
return models
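The merge logic above produces one flat dict keyed by id: raw OpenAI entries, adapted Ollama entries, and preset entries whose owned_by is inherited from their resolved base model. Illustrative shapes only; the values are made up, the keys follow the code:

ollama_entry = {
    "id": "llama3:latest",
    "name": "llama3",
    "object": "model",
    "created": 1716800000,
    "owned_by": "ollama",
    "ollama": {"model": "llama3:latest", "name": "llama3"},  # raw Ollama record
}
preset_entry = {
    "id": "my-preset",
    "name": "My Preset",
    "object": "model",
    "created": 1716800000,
    "owned_by": "ollama",  # copied from the matched base model
    "info": {"id": "my-preset", "base_model_id": "llama3:latest"},
    "preset": True,
}
MODELS = {m["id"]: m for m in (ollama_entry, preset_entry)}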
@app.get("/api/models")
async def get_models(user=Depends(get_verified_user)):
models = await get_all_models()
if app.state.config.ENABLE_MODEL_FILTER:
if user.role == "user":
models = list(
filter(
lambda model: model["id"] in app.state.config.MODEL_FILTER_LIST,
models,
)
)
return {"data": models}
return {"data": models}
@app.get("/api/config")
async def get_app_config():
# Check for and handle the absence of 'ui' in CONFIG_DATA
@ -272,13 +358,17 @@ async def get_app_config():
"status": True,
"name": WEBUI_NAME,
"version": VERSION,
"auth": WEBUI_AUTH,
"default_locale": default_locale,
"images": images_app.state.config.ENABLED,
"default_models": webui_app.state.config.DEFAULT_MODELS,
"default_prompt_suggestions": webui_app.state.config.DEFAULT_PROMPT_SUGGESTIONS,
"trusted_header_auth": bool(webui_app.state.AUTH_TRUSTED_EMAIL_HEADER),
"admin_export_enabled": ENABLE_ADMIN_EXPORT,
"features": {
"auth": WEBUI_AUTH,
"auth_trusted_header": bool(webui_app.state.AUTH_TRUSTED_EMAIL_HEADER),
"enable_signup": webui_app.state.config.ENABLE_SIGNUP,
"enable_image_generation": images_app.state.config.ENABLED,
"enable_admin_export": ENABLE_ADMIN_EXPORT,
"enable_community_sharing": webui_app.state.config.ENABLE_COMMUNITY_SHARING,
},
}
@ -302,15 +392,6 @@ async def update_model_filter_config(
app.state.config.ENABLE_MODEL_FILTER = form_data.enabled
app.state.config.MODEL_FILTER_LIST = form_data.models
ollama_app.state.config.ENABLE_MODEL_FILTER = app.state.config.ENABLE_MODEL_FILTER
ollama_app.state.config.MODEL_FILTER_LIST = app.state.config.MODEL_FILTER_LIST
openai_app.state.config.ENABLE_MODEL_FILTER = app.state.config.ENABLE_MODEL_FILTER
openai_app.state.config.MODEL_FILTER_LIST = app.state.config.MODEL_FILTER_LIST
litellm_app.state.ENABLE_MODEL_FILTER = app.state.config.ENABLE_MODEL_FILTER
litellm_app.state.MODEL_FILTER_LIST = app.state.config.MODEL_FILTER_LIST
return {
"enabled": app.state.config.ENABLE_MODEL_FILTER,
"models": app.state.config.MODEL_FILTER_LIST,
@ -331,7 +412,6 @@ class UrlForm(BaseModel):
@app.post("/api/webhook")
async def update_webhook_url(form_data: UrlForm, user=Depends(get_admin_user)):
app.state.config.WEBHOOK_URL = form_data.url
webui_app.state.WEBHOOK_URL = app.state.config.WEBHOOK_URL
return {
@ -339,6 +419,19 @@ async def update_webhook_url(form_data: UrlForm, user=Depends(get_admin_user)):
}
@app.get("/api/community_sharing", response_model=bool)
async def get_community_sharing_status(request: Request, user=Depends(get_admin_user)):
return webui_app.state.config.ENABLE_COMMUNITY_SHARING
@app.get("/api/community_sharing/toggle", response_model=bool)
async def toggle_community_sharing(request: Request, user=Depends(get_admin_user)):
webui_app.state.config.ENABLE_COMMUNITY_SHARING = (
not webui_app.state.config.ENABLE_COMMUNITY_SHARING
)
return webui_app.state.config.ENABLE_COMMUNITY_SHARING
@app.get("/api/version")
async def get_app_config():
return {
@ -408,6 +501,7 @@ app.mount("/static", StaticFiles(directory=STATIC_DIR), name="static")
app.mount("/cache", StaticFiles(directory=CACHE_DIR), name="cache")
if os.path.exists(FRONTEND_BUILD_DIR):
mimetypes.add_type("text/javascript", ".js")
app.mount(
"/",
SPAStaticFiles(directory=FRONTEND_BUILD_DIR, html=True),

View File

@ -0,0 +1,60 @@
import base64
import os
import random
from pathlib import Path
import typer
import uvicorn
app = typer.Typer()
KEY_FILE = Path.cwd() / ".webui_secret_key"
if (frontend_build_dir := Path(__file__).parent / "frontend").exists():
os.environ["FRONTEND_BUILD_DIR"] = str(frontend_build_dir)
@app.command()
def serve(
host: str = "0.0.0.0",
port: int = 8080,
):
if os.getenv("WEBUI_SECRET_KEY") is None:
typer.echo(
"Loading WEBUI_SECRET_KEY from file, not provided as an environment variable."
)
if not KEY_FILE.exists():
typer.echo(f"Generating a new secret key and saving it to {KEY_FILE}")
KEY_FILE.write_bytes(base64.b64encode(random.randbytes(12)))
typer.echo(f"Loading WEBUI_SECRET_KEY from {KEY_FILE}")
os.environ["WEBUI_SECRET_KEY"] = KEY_FILE.read_text()
if os.getenv("USE_CUDA_DOCKER", "false") == "true":
typer.echo(
"CUDA is enabled, appending LD_LIBRARY_PATH to include torch/cudnn & cublas libraries."
)
LD_LIBRARY_PATH = os.getenv("LD_LIBRARY_PATH", "").split(":")
os.environ["LD_LIBRARY_PATH"] = ":".join(
LD_LIBRARY_PATH
+ [
"/usr/local/lib/python3.11/site-packages/torch/lib",
"/usr/local/lib/python3.11/site-packages/nvidia/cudnn/lib",
]
)
import main  # we need to set environment variables before importing main
uvicorn.run(main.app, host=host, port=port, forwarded_allow_ips="*")
@app.command()
def dev(
host: str = "0.0.0.0",
port: int = 8080,
reload: bool = True,
):
uvicorn.run(
"main:app", host=host, port=port, reload=reload, forwarded_allow_ips="*"
)
if __name__ == "__main__":
app()
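With the open-webui entry point declared in pyproject.toml below, this module gives the installed package a CLI: something like open-webui serve --port 8080 boots the bundled frontend and backend, and open-webui dev runs uvicorn with auto-reload (command names follow the typer commands above; the install itself is assumed). Note that the first serve on a machine without WEBUI_SECRET_KEY writes a generated .webui_secret_key into the current working directory and reuses it on later runs.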

View File

@ -1,42 +1,40 @@
fastapi==0.109.2
fastapi==0.111.0
uvicorn[standard]==0.22.0
pydantic==2.7.1
python-multipart==0.0.9
Flask==3.0.3
Flask-Cors==4.0.0
Flask-Cors==4.0.1
python-socketio==5.11.2
python-jose==3.3.0
passlib[bcrypt]==1.7.4
requests==2.31.0
requests==2.32.2
aiohttp==3.9.5
peewee==3.17.3
peewee==3.17.5
peewee-migrate==1.12.2
psycopg2-binary==2.9.9
PyMySQL==1.1.0
bcrypt==4.1.2
PyMySQL==1.1.1
bcrypt==4.1.3
litellm[proxy]==1.35.28
boto3==1.34.95
boto3==1.34.110
argon2-cffi==23.1.0
APScheduler==3.10.4
google-generativeai==0.5.2
google-generativeai==0.5.4
langchain==0.1.16
langchain-community==0.0.34
langchain-chroma==0.1.0
langchain==0.2.0
langchain-community==0.2.0
langchain-chroma==0.1.1
fake-useragent==1.5.1
chromadb==0.4.24
chromadb==0.5.0
sentence-transformers==2.7.0
pypdf==4.2.0
docx2txt==0.8
python-pptx==0.6.23
unstructured==0.11.8
unstructured==0.14.0
Markdown==3.6
pypandoc==1.13
pandas==2.2.2
@ -46,16 +44,16 @@ xlrd==2.0.1
validators==0.28.1
opencv-python-headless==4.9.0.80
rapidocr-onnxruntime==1.2.3
rapidocr-onnxruntime==1.3.22
fpdf2==2.7.8
fpdf2==2.7.9
rank-bm25==0.2.2
faster-whisper==1.0.1
faster-whisper==1.0.2
PyJWT[crypto]==2.8.0
black==24.4.2
langfuse==2.27.3
langfuse==2.33.0
youtube-transcript-api==0.6.2
pytube
pytube==15.0.0

View File

@ -30,4 +30,29 @@ if [ "$USE_CUDA_DOCKER" = "true" ]; then
export LD_LIBRARY_PATH="$LD_LIBRARY_PATH:/usr/local/lib/python3.11/site-packages/torch/lib:/usr/local/lib/python3.11/site-packages/nvidia/cudnn/lib"
fi
# Check if SPACE_ID is set, if so, configure for space
if [ -n "$SPACE_ID" ]; then
echo "Configuring for HuggingFace Space deployment"
if [ -n "$ADMIN_USER_EMAIL" ] && [ -n "$ADMIN_USER_PASSWORD" ]; then
echo "Admin user configured, creating"
WEBUI_SECRET_KEY="$WEBUI_SECRET_KEY" uvicorn main:app --host "$HOST" --port "$PORT" --forwarded-allow-ips '*' &
webui_pid=$!
echo "Waiting for webui to start..."
while ! curl -s http://localhost:8080/health > /dev/null; do
sleep 1
done
echo "Creating admin user..."
curl \
-X POST "http://localhost:8080/api/v1/auths/signup" \
-H "accept: application/json" \
-H "Content-Type: application/json" \
-d "{ \"email\": \"${ADMIN_USER_EMAIL}\", \"password\": \"${ADMIN_USER_PASSWORD}\", \"name\": \"Admin\" }"
echo "Shutting down webui..."
kill $webui_pid
fi
export WEBUI_URL=${SPACE_HOST}
fi
WEBUI_SECRET_KEY="$WEBUI_SECRET_KEY" exec uvicorn main:app --host "$HOST" --port "$PORT" --forwarded-allow-ips '*'
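The Space-specific branch bootstraps an admin account by starting uvicorn in the background, polling /health until the app answers, POSTing the configured credentials to the signup endpoint, and killing the temporary server before the final exec starts the real one. The health poll targets port 8080 directly, so it assumes the default PORT.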

View File

@ -1,5 +1,6 @@
from pathlib import Path
import hashlib
import json
import re
from datetime import timedelta
from typing import Optional
@ -110,3 +111,76 @@ def parse_duration(duration: str) -> Optional[timedelta]:
total_duration += timedelta(weeks=number)
return total_duration
def parse_ollama_modelfile(model_text):
parameters_meta = {
"mirostat": int,
"mirostat_eta": float,
"mirostat_tau": float,
"num_ctx": int,
"repeat_last_n": int,
"repeat_penalty": float,
"temperature": float,
"seed": int,
"stop": str,
"tfs_z": float,
"num_predict": int,
"top_k": int,
"top_p": float,
}
data = {"base_model_id": None, "params": {}}
# Parse base model
base_model_match = re.search(
r"^FROM\s+(\w+)", model_text, re.MULTILINE | re.IGNORECASE
)
if base_model_match:
data["base_model_id"] = base_model_match.group(1)
# Parse template
template_match = re.search(
r'TEMPLATE\s+"""(.+?)"""', model_text, re.DOTALL | re.IGNORECASE
)
if template_match:
data["params"] = {"template": template_match.group(1).strip()}
# Parse stops
stops = re.findall(r'PARAMETER stop "(.*?)"', model_text, re.IGNORECASE)
if stops:
data["params"]["stop"] = stops
# Parse other parameters from the provided list
for param, param_type in parameters_meta.items():
param_match = re.search(rf"PARAMETER {param} (.+)", model_text, re.IGNORECASE)
if param_match:
value = param_match.group(1)
if param_type == int:
value = int(value)
elif param_type == float:
value = float(value)
data["params"][param] = value
# Parse adapter
adapter_match = re.search(r"ADAPTER (.+)", model_text, re.IGNORECASE)
if adapter_match:
data["params"]["adapter"] = adapter_match.group(1)
# Parse system description
system_desc_match = re.search(
r'SYSTEM\s+"""(.+?)"""', model_text, re.DOTALL | re.IGNORECASE
)
if system_desc_match:
data["params"]["system"] = system_desc_match.group(1).strip()
# Parse messages
messages = []
message_matches = re.findall(r"MESSAGE (\w+) (.+)", model_text, re.IGNORECASE)
for role, content in message_matches:
messages.append({"role": role, "content": content})
if messages:
data["params"]["messages"] = messages
return data
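A worked example of the parser above (the Modelfile text is illustrative; note the FROM regex captures \w+ only, so a tag suffix like :latest would be truncated):

sample = '''
FROM llama3
PARAMETER temperature 0.7
PARAMETER num_ctx 4096
SYSTEM """You are a terse assistant."""
'''
print(parse_ollama_modelfile(sample))
# {'base_model_id': 'llama3',
#  'params': {'num_ctx': 4096, 'temperature': 0.7,
#             'system': 'You are a terse assistant.'}}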

10
backend/utils/models.py Normal file
View File

@ -0,0 +1,10 @@
from apps.webui.models.models import Models, ModelModel, ModelForm, ModelResponse
def get_model_id_from_custom_model_id(id: str):
model = Models.get_model_by_id(id)
if model:
return model.id
else:
return id

View File

@ -1,7 +1,7 @@
from fastapi.security import HTTPBearer, HTTPAuthorizationCredentials
from fastapi import HTTPException, status, Depends
from apps.web.models.users import Users
from apps.webui.models.users import Users
from pydantic import BaseModel
from typing import Union, Optional

View File

@ -74,5 +74,28 @@ describe('Settings', () => {
expect(spy).to.be.callCount(2);
});
});
it('user can generate image', () => {
// Click on the model selector
cy.get('button[aria-label="Select a model"]').click();
// Select the first model
cy.get('button[aria-label="model-item"]').first().click();
// Type a message
cy.get('#chat-textarea').type('Hi, what can you do? A single sentence only please.', {
force: true
});
// Send the message
cy.get('button[type="submit"]').click();
// User's message should be visible
cy.get('.chat-user').should('exist');
// Wait for the response
cy.get('.chat-assistant', { timeout: 120_000 }) // .chat-assistant is created after the first token is received
.find('div[aria-label="Generation Info"]', { timeout: 120_000 }) // Generation Info is created after the stop token is received
.should('exist');
// Click on the generate image button
cy.get('[aria-label="Generate Image"]').click();
// Wait for image to be visible
cy.get('img[data-cy="image"]', { timeout: 60_000 }).should('be.visible');
});
});
});

View File

@ -0,0 +1,31 @@
# This is an overlay that spins up stable-diffusion-webui for integration testing
# This is not designed to be used in production
services:
stable-diffusion-webui:
# Not built for ARM64
platform: linux/amd64
image: ghcr.io/neggles/sd-webui-docker:latest
restart: unless-stopped
environment:
CLI_ARGS: "--api --use-cpu all --precision full --no-half --skip-torch-cuda-test --ckpt /empty.pt --do-not-download-clip --disable-nan-check --disable-opt-split-attention"
PYTHONUNBUFFERED: "1"
TERM: "vt100"
SD_WEBUI_VARIANT: "default"
# Hack to get container working on Apple Silicon
# Rosetta creates a conflicting ${HOME}/.cache folder
entrypoint: /bin/bash
command:
- -c
- |
export HOME=/root-home
rm -rf $${HOME}/.cache
/docker/entrypoint.sh python -u webui.py --listen --port $${WEBUI_PORT} --skip-version-check $${CLI_ARGS}
volumes:
- ./test/test_files/image_gen/sd-empty.pt:/empty.pt
open-webui:
environment:
ENABLE_IMAGE_GENERATION: "true"
AUTOMATIC1111_BASE_URL: http://stable-diffusion-webui:7860
IMAGE_SIZE: "64x64"
IMAGE_STEPS: "3"
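Assuming this overlay is saved next to the main compose file, something like docker compose -f docker-compose.yaml -f docker-compose.a1111-test.yaml up (file names are placeholders) layers a CPU-only stable-diffusion-webui onto the stack; the empty checkpoint, --skip-torch-cuda-test, and the tiny 64x64 / 3-step settings keep the image-generation E2E test cheap enough to run without a GPU or real model weights.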

23
hatch_build.py Normal file
View File

@ -0,0 +1,23 @@
# noqa: INP001
import os
import shutil
import subprocess
from sys import stderr
from hatchling.builders.hooks.plugin.interface import BuildHookInterface
class CustomBuildHook(BuildHookInterface):
def initialize(self, version, build_data):
super().initialize(version, build_data)
stderr.write(">>> Building Open Webui frontend\n")
npm = shutil.which("npm")
if npm is None:
raise RuntimeError(
"NodeJS `npm` is required for building Open Webui but it was not found"
)
stderr.write("### npm install\n")
subprocess.run([npm, "install"], check=True) # noqa: S603
stderr.write("\n### npm run build\n")
os.environ["APP_BUILD_HASH"] = version
subprocess.run([npm, "run", "build"], check=True) # noqa: S603
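In effect, any standard wheel build of the package (for example pip wheel . or hatch build, assuming those frontends) now shells out to npm, making NodeJS a build-time dependency; APP_BUILD_HASH is exported so the frontend build can embed the version being packaged, and the result lands in the wheel as open_webui/frontend via the force-include rule in pyproject.toml below.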

4
package-lock.json generated
View File

@ -1,12 +1,12 @@
{
"name": "open-webui",
"version": "0.1.125",
"version": "0.2.0.dev2",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"name": "open-webui",
"version": "0.1.125",
"version": "0.2.0.dev2",
"dependencies": {
"@pyscript/core": "^0.4.32",
"@sveltejs/adapter-node": "^1.3.1",

View File

@ -1,6 +1,6 @@
{
"name": "open-webui",
"version": "0.1.125",
"version": "0.2.0.dev2",
"private": true,
"scripts": {
"dev": "npm run pyodide:fetch && vite dev --host",
@ -13,7 +13,7 @@
"lint:types": "npm run check",
"lint:backend": "pylint backend/",
"format": "prettier --plugin-search-dir --write \"**/*.{js,ts,svelte,css,md,html,json}\"",
"format:backend": "black . --exclude \"/venv/\"",
"format:backend": "black . --exclude \".venv/|/venv/\"",
"i18n:parse": "i18next --config i18next-parser.config.ts && prettier --write \"src/lib/i18n/**/*.{js,json}\"",
"cy:open": "cypress open",
"test:frontend": "vitest",

115
pyproject.toml Normal file
View File

@ -0,0 +1,115 @@
[project]
name = "open-webui"
description = "Open WebUI (Formerly Ollama WebUI)"
authors = [
{ name = "Timothy Jaeryang Baek", email = "tim@openwebui.com" }
]
license = { file = "LICENSE" }
dependencies = [
"fastapi==0.111.0",
"uvicorn[standard]==0.22.0",
"pydantic==2.7.1",
"python-multipart==0.0.9",
"Flask==3.0.3",
"Flask-Cors==4.0.1",
"python-socketio==5.11.2",
"python-jose==3.3.0",
"passlib[bcrypt]==1.7.4",
"requests==2.32.2",
"aiohttp==3.9.5",
"peewee==3.17.5",
"peewee-migrate==1.12.2",
"psycopg2-binary==2.9.9",
"PyMySQL==1.1.0",
"bcrypt==4.1.3",
"litellm[proxy]==1.37.20",
"boto3==1.34.110",
"argon2-cffi==23.1.0",
"APScheduler==3.10.4",
"google-generativeai==0.5.4",
"langchain==0.2.0",
"langchain-community==0.2.0",
"langchain-chroma==0.1.1",
"fake-useragent==1.5.1",
"chromadb==0.5.0",
"sentence-transformers==2.7.0",
"pypdf==4.2.0",
"docx2txt==0.8",
"unstructured==0.14.0",
"Markdown==3.6",
"pypandoc==1.13",
"pandas==2.2.2",
"openpyxl==3.1.2",
"pyxlsb==1.0.10",
"xlrd==2.0.1",
"validators==0.28.1",
"opencv-python-headless==4.9.0.80",
"rapidocr-onnxruntime==1.3.22",
"fpdf2==2.7.9",
"rank-bm25==0.2.2",
"faster-whisper==1.0.2",
"PyJWT[crypto]==2.8.0",
"black==24.4.2",
"langfuse==2.33.0",
"youtube-transcript-api==0.6.2",
"pytube==15.0.0",
]
readme = "README.md"
requires-python = ">= 3.11, < 3.12.0a1"
dynamic = ["version"]
classifiers = [
"Development Status :: 4 - Beta",
"License :: OSI Approved :: MIT License",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.11",
"Topic :: Communications :: Chat",
"Topic :: Multimedia",
]
[project.scripts]
open-webui = "open_webui:app"
[build-system]
requires = ["hatchling"]
build-backend = "hatchling.build"
[tool.rye]
managed = true
dev-dependencies = []
[tool.hatch.metadata]
allow-direct-references = true
[tool.hatch.version]
path = "package.json"
pattern = '"version":\s*"(?P<version>[^"]+)"'
[tool.hatch.build.hooks.custom] # keep this for reading hooks from `hatch_build.py`
[tool.hatch.build.targets.wheel]
sources = ["backend"]
exclude = [
".dockerignore",
".gitignore",
".webui_secret_key",
"dev.sh",
"requirements.txt",
"start.sh",
"start_windows.bat",
"webui.db",
"chroma.sqlite3",
]
force-include = { "CHANGELOG.md" = "open_webui/CHANGELOG.md", build = "open_webui/frontend" }
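The [tool.hatch.version] table reads the package version straight out of package.json, so the npm and PyPI artifacts cannot drift apart. A minimal sketch of what that pattern extracts:

import re

pattern = r'"version":\s*"(?P<version>[^"]+)"'  # same pattern as [tool.hatch.version]
package_json = '{ "name": "open-webui", "version": "0.2.0.dev2" }'
print(re.search(pattern, package_json).group("version"))  # -> 0.2.0.dev2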

688
requirements-dev.lock Normal file
View File

@ -0,0 +1,688 @@
# generated by rye
# use `rye lock` or `rye sync` to update this lockfile
#
# last locked with the following flags:
# pre: false
# features: []
# all-features: false
# with-sources: false
# generate-hashes: false
-e file:.
aiohttp==3.9.5
# via langchain
# via langchain-community
# via litellm
# via open-webui
aiosignal==1.3.1
# via aiohttp
annotated-types==0.6.0
# via pydantic
anyio==4.3.0
# via httpx
# via openai
# via starlette
# via watchfiles
apscheduler==3.10.4
# via litellm
# via open-webui
argon2-cffi==23.1.0
# via open-webui
argon2-cffi-bindings==21.2.0
# via argon2-cffi
asgiref==3.8.1
# via opentelemetry-instrumentation-asgi
attrs==23.2.0
# via aiohttp
av==11.0.0
# via faster-whisper
backoff==2.2.1
# via langfuse
# via litellm
# via posthog
# via unstructured
bcrypt==4.1.3
# via chromadb
# via open-webui
# via passlib
beautifulsoup4==4.12.3
# via unstructured
bidict==0.23.1
# via python-socketio
black==24.4.2
# via open-webui
blinker==1.8.2
# via flask
boto3==1.34.110
# via open-webui
botocore==1.34.110
# via boto3
# via s3transfer
build==1.2.1
# via chromadb
cachetools==5.3.3
# via google-auth
certifi==2024.2.2
# via httpcore
# via httpx
# via kubernetes
# via requests
# via unstructured-client
cffi==1.16.0
# via argon2-cffi-bindings
# via cryptography
chardet==5.2.0
# via unstructured
charset-normalizer==3.3.2
# via requests
# via unstructured-client
chroma-hnswlib==0.7.3
# via chromadb
chromadb==0.5.0
# via langchain-chroma
# via open-webui
click==8.1.7
# via black
# via flask
# via litellm
# via nltk
# via peewee-migrate
# via rq
# via typer
# via uvicorn
coloredlogs==15.0.1
# via onnxruntime
cryptography==42.0.7
# via litellm
# via pyjwt
ctranslate2==4.2.1
# via faster-whisper
dataclasses-json==0.6.6
# via langchain
# via langchain-community
# via unstructured
# via unstructured-client
deepdiff==7.0.1
# via unstructured-client
defusedxml==0.7.1
# via fpdf2
deprecated==1.2.14
# via opentelemetry-api
# via opentelemetry-exporter-otlp-proto-grpc
distro==1.9.0
# via openai
dnspython==2.6.1
# via email-validator
docx2txt==0.8
# via open-webui
ecdsa==0.19.0
# via python-jose
email-validator==2.1.1
# via fastapi
# via pydantic
emoji==2.11.1
# via unstructured
et-xmlfile==1.1.0
# via openpyxl
fake-useragent==1.5.1
# via open-webui
fastapi==0.111.0
# via chromadb
# via fastapi-sso
# via langchain-chroma
# via litellm
# via open-webui
fastapi-cli==0.0.4
# via fastapi
fastapi-sso==0.10.0
# via litellm
faster-whisper==1.0.2
# via open-webui
filelock==3.14.0
# via huggingface-hub
# via torch
# via transformers
filetype==1.2.0
# via unstructured
flask==3.0.3
# via flask-cors
# via open-webui
flask-cors==4.0.1
# via open-webui
flatbuffers==24.3.25
# via onnxruntime
fonttools==4.51.0
# via fpdf2
fpdf2==2.7.9
# via open-webui
frozenlist==1.4.1
# via aiohttp
# via aiosignal
fsspec==2024.3.1
# via huggingface-hub
# via torch
google-ai-generativelanguage==0.6.4
# via google-generativeai
google-api-core==2.19.0
# via google-ai-generativelanguage
# via google-api-python-client
# via google-generativeai
google-api-python-client==2.129.0
# via google-generativeai
google-auth==2.29.0
# via google-ai-generativelanguage
# via google-api-core
# via google-api-python-client
# via google-auth-httplib2
# via google-generativeai
# via kubernetes
google-auth-httplib2==0.2.0
# via google-api-python-client
google-generativeai==0.5.4
# via open-webui
googleapis-common-protos==1.63.0
# via google-api-core
# via grpcio-status
# via opentelemetry-exporter-otlp-proto-grpc
grpcio==1.63.0
# via chromadb
# via google-api-core
# via grpcio-status
# via opentelemetry-exporter-otlp-proto-grpc
grpcio-status==1.62.2
# via google-api-core
gunicorn==22.0.0
# via litellm
h11==0.14.0
# via httpcore
# via uvicorn
# via wsproto
httpcore==1.0.5
# via httpx
httplib2==0.22.0
# via google-api-python-client
# via google-auth-httplib2
httptools==0.6.1
# via uvicorn
httpx==0.27.0
# via fastapi
# via fastapi-sso
# via langfuse
# via openai
huggingface-hub==0.23.0
# via faster-whisper
# via sentence-transformers
# via tokenizers
# via transformers
humanfriendly==10.0
# via coloredlogs
idna==3.7
# via anyio
# via email-validator
# via httpx
# via langfuse
# via requests
# via unstructured-client
# via yarl
importlib-metadata==7.0.0
# via litellm
# via opentelemetry-api
importlib-resources==6.4.0
# via chromadb
itsdangerous==2.2.0
# via flask
jinja2==3.1.4
# via fastapi
# via flask
# via litellm
# via torch
jmespath==1.0.1
# via boto3
# via botocore
joblib==1.4.2
# via nltk
# via scikit-learn
jsonpatch==1.33
# via langchain-core
jsonpath-python==1.0.6
# via unstructured-client
jsonpointer==2.4
# via jsonpatch
kubernetes==29.0.0
# via chromadb
langchain==0.2.0
# via langchain-community
# via open-webui
langchain-chroma==0.1.1
# via open-webui
langchain-community==0.2.0
# via open-webui
langchain-core==0.2.1
# via langchain
# via langchain-chroma
# via langchain-community
# via langchain-text-splitters
langchain-text-splitters==0.2.0
# via langchain
langdetect==1.0.9
# via unstructured
langfuse==2.33.0
# via open-webui
langsmith==0.1.57
# via langchain
# via langchain-community
# via langchain-core
litellm==1.37.20
# via open-webui
lxml==5.2.2
# via unstructured
markdown==3.6
# via open-webui
markdown-it-py==3.0.0
# via rich
markupsafe==2.1.5
# via jinja2
# via werkzeug
marshmallow==3.21.2
# via dataclasses-json
# via unstructured-client
mdurl==0.1.2
# via markdown-it-py
mmh3==4.1.0
# via chromadb
monotonic==1.6
# via posthog
mpmath==1.3.0
# via sympy
multidict==6.0.5
# via aiohttp
# via yarl
mypy-extensions==1.0.0
# via black
# via typing-inspect
# via unstructured-client
networkx==3.3
# via torch
nltk==3.8.1
# via unstructured
numpy==1.26.4
# via chroma-hnswlib
# via chromadb
# via ctranslate2
# via langchain
# via langchain-chroma
# via langchain-community
# via onnxruntime
# via opencv-python
# via opencv-python-headless
# via pandas
# via rank-bm25
# via rapidocr-onnxruntime
# via scikit-learn
# via scipy
# via sentence-transformers
# via shapely
# via transformers
# via unstructured
oauthlib==3.2.2
# via fastapi-sso
# via kubernetes
# via requests-oauthlib
onnxruntime==1.17.3
# via chromadb
# via faster-whisper
# via rapidocr-onnxruntime
openai==1.28.1
# via litellm
opencv-python==4.9.0.80
# via rapidocr-onnxruntime
opencv-python-headless==4.9.0.80
# via open-webui
openpyxl==3.1.2
# via open-webui
opentelemetry-api==1.24.0
# via chromadb
# via opentelemetry-exporter-otlp-proto-grpc
# via opentelemetry-instrumentation
# via opentelemetry-instrumentation-asgi
# via opentelemetry-instrumentation-fastapi
# via opentelemetry-sdk
opentelemetry-exporter-otlp-proto-common==1.24.0
# via opentelemetry-exporter-otlp-proto-grpc
opentelemetry-exporter-otlp-proto-grpc==1.24.0
# via chromadb
opentelemetry-instrumentation==0.45b0
# via opentelemetry-instrumentation-asgi
# via opentelemetry-instrumentation-fastapi
opentelemetry-instrumentation-asgi==0.45b0
# via opentelemetry-instrumentation-fastapi
opentelemetry-instrumentation-fastapi==0.45b0
# via chromadb
opentelemetry-proto==1.24.0
# via opentelemetry-exporter-otlp-proto-common
# via opentelemetry-exporter-otlp-proto-grpc
opentelemetry-sdk==1.24.0
# via chromadb
# via opentelemetry-exporter-otlp-proto-grpc
opentelemetry-semantic-conventions==0.45b0
# via opentelemetry-instrumentation-asgi
# via opentelemetry-instrumentation-fastapi
# via opentelemetry-sdk
opentelemetry-util-http==0.45b0
# via opentelemetry-instrumentation-asgi
# via opentelemetry-instrumentation-fastapi
ordered-set==4.1.0
# via deepdiff
orjson==3.10.3
# via chromadb
# via fastapi
# via langsmith
# via litellm
overrides==7.7.0
# via chromadb
packaging==23.2
# via black
# via build
# via gunicorn
# via huggingface-hub
# via langchain-core
# via langfuse
# via marshmallow
# via onnxruntime
# via transformers
# via unstructured-client
pandas==2.2.2
# via open-webui
passlib==1.7.4
# via open-webui
pathspec==0.12.1
# via black
peewee==3.17.5
# via open-webui
# via peewee-migrate
peewee-migrate==1.12.2
# via open-webui
pillow==10.3.0
# via fpdf2
# via rapidocr-onnxruntime
# via sentence-transformers
platformdirs==4.2.1
# via black
posthog==3.5.0
# via chromadb
proto-plus==1.23.0
# via google-ai-generativelanguage
# via google-api-core
protobuf==4.25.3
# via google-ai-generativelanguage
# via google-api-core
# via google-generativeai
# via googleapis-common-protos
# via grpcio-status
# via onnxruntime
# via opentelemetry-proto
# via proto-plus
psycopg2-binary==2.9.9
# via open-webui
pyasn1==0.6.0
# via pyasn1-modules
# via python-jose
# via rsa
pyasn1-modules==0.4.0
# via google-auth
pyclipper==1.3.0.post5
# via rapidocr-onnxruntime
pycparser==2.22
# via cffi
pydantic==2.7.1
# via chromadb
# via fastapi
# via fastapi-sso
# via google-generativeai
# via langchain
# via langchain-core
# via langfuse
# via langsmith
# via open-webui
# via openai
pydantic-core==2.18.2
# via pydantic
pygments==2.18.0
# via rich
pyjwt==2.8.0
# via litellm
# via open-webui
pymysql==1.1.0
# via open-webui
pypandoc==1.13
# via open-webui
pyparsing==3.1.2
# via httplib2
pypdf==4.2.0
# via open-webui
# via unstructured-client
pypika==0.48.9
# via chromadb
pyproject-hooks==1.1.0
# via build
python-dateutil==2.9.0.post0
# via botocore
# via kubernetes
# via pandas
# via posthog
# via unstructured-client
python-dotenv==1.0.1
# via litellm
# via uvicorn
python-engineio==4.9.0
# via python-socketio
python-iso639==2024.4.27
# via unstructured
python-jose==3.3.0
# via open-webui
python-magic==0.4.27
# via unstructured
python-multipart==0.0.9
# via fastapi
# via litellm
# via open-webui
python-socketio==5.11.2
# via open-webui
pytube==15.0.0
# via open-webui
pytz==2024.1
# via apscheduler
# via pandas
pyxlsb==1.0.10
# via open-webui
pyyaml==6.0.1
# via chromadb
# via ctranslate2
# via huggingface-hub
# via kubernetes
# via langchain
# via langchain-community
# via langchain-core
# via litellm
# via rapidocr-onnxruntime
# via transformers
# via uvicorn
rank-bm25==0.2.2
# via open-webui
rapidfuzz==3.9.0
# via unstructured
rapidocr-onnxruntime==1.3.22
# via open-webui
redis==5.0.4
# via rq
regex==2024.5.10
# via nltk
# via tiktoken
# via transformers
requests==2.32.2
# via chromadb
# via google-api-core
# via huggingface-hub
# via kubernetes
# via langchain
# via langchain-community
# via langsmith
# via litellm
# via open-webui
# via posthog
# via requests-oauthlib
# via tiktoken
# via transformers
# via unstructured
# via unstructured-client
# via youtube-transcript-api
requests-oauthlib==2.0.0
# via kubernetes
rich==13.7.1
# via typer
rq==1.16.2
# via litellm
rsa==4.9
# via google-auth
# via python-jose
s3transfer==0.10.1
# via boto3
safetensors==0.4.3
# via transformers
scikit-learn==1.4.2
# via sentence-transformers
scipy==1.13.0
# via scikit-learn
# via sentence-transformers
sentence-transformers==2.7.0
# via open-webui
setuptools==69.5.1
# via ctranslate2
# via opentelemetry-instrumentation
shapely==2.0.4
# via rapidocr-onnxruntime
shellingham==1.5.4
# via typer
simple-websocket==1.0.0
# via python-engineio
six==1.16.0
# via apscheduler
# via ecdsa
# via kubernetes
# via langdetect
# via posthog
# via python-dateutil
# via rapidocr-onnxruntime
# via unstructured-client
sniffio==1.3.1
# via anyio
# via httpx
# via openai
soupsieve==2.5
# via beautifulsoup4
sqlalchemy==2.0.30
# via langchain
# via langchain-community
starlette==0.37.2
# via fastapi
sympy==1.12
# via onnxruntime
# via torch
tabulate==0.9.0
# via unstructured
tenacity==8.3.0
# via chromadb
# via langchain
# via langchain-community
# via langchain-core
threadpoolctl==3.5.0
# via scikit-learn
tiktoken==0.6.0
# via litellm
tokenizers==0.15.2
# via chromadb
# via faster-whisper
# via litellm
# via transformers
torch==2.3.0
# via sentence-transformers
tqdm==4.66.4
# via chromadb
# via google-generativeai
# via huggingface-hub
# via nltk
# via openai
# via sentence-transformers
# via transformers
transformers==4.39.3
# via sentence-transformers
typer==0.12.3
# via chromadb
# via fastapi-cli
typing-extensions==4.11.0
# via chromadb
# via fastapi
# via google-generativeai
# via huggingface-hub
# via openai
# via opentelemetry-sdk
# via pydantic
# via pydantic-core
# via sqlalchemy
# via torch
# via typer
# via typing-inspect
# via unstructured
# via unstructured-client
typing-inspect==0.9.0
# via dataclasses-json
# via unstructured-client
tzdata==2024.1
# via pandas
tzlocal==5.2
# via apscheduler
ujson==5.10.0
# via fastapi
unstructured==0.14.0
# via open-webui
unstructured-client==0.22.0
# via unstructured
uritemplate==4.1.1
# via google-api-python-client
urllib3==2.2.1
# via botocore
# via kubernetes
# via requests
# via unstructured-client
uvicorn==0.22.0
# via chromadb
# via fastapi
# via litellm
# via open-webui
uvloop==0.19.0
# via uvicorn
validators==0.28.1
# via open-webui
watchfiles==0.21.0
# via uvicorn
websocket-client==1.8.0
# via kubernetes
websockets==12.0
# via uvicorn
werkzeug==3.0.3
# via flask
wrapt==1.16.0
# via deprecated
# via langfuse
# via opentelemetry-instrumentation
# via unstructured
wsproto==1.2.0
# via simple-websocket
xlrd==2.0.1
# via open-webui
yarl==1.9.4
# via aiohttp
youtube-transcript-api==0.6.2
# via open-webui
zipp==3.18.1
# via importlib-metadata

688
requirements.lock Normal file
View File

@ -0,0 +1,688 @@
# generated by rye
# use `rye lock` or `rye sync` to update this lockfile
#
# last locked with the following flags:
# pre: false
# features: []
# all-features: false
# with-sources: false
# generate-hashes: false
-e file:.
aiohttp==3.9.5
# via langchain
# via langchain-community
# via litellm
# via open-webui
aiosignal==1.3.1
# via aiohttp
annotated-types==0.6.0
# via pydantic
anyio==4.3.0
# via httpx
# via openai
# via starlette
# via watchfiles
apscheduler==3.10.4
# via litellm
# via open-webui
argon2-cffi==23.1.0
# via open-webui
argon2-cffi-bindings==21.2.0
# via argon2-cffi
asgiref==3.8.1
# via opentelemetry-instrumentation-asgi
attrs==23.2.0
# via aiohttp
av==11.0.0
# via faster-whisper
backoff==2.2.1
# via langfuse
# via litellm
# via posthog
# via unstructured
bcrypt==4.1.3
# via chromadb
# via open-webui
# via passlib
beautifulsoup4==4.12.3
# via unstructured
bidict==0.23.1
# via python-socketio
black==24.4.2
# via open-webui
blinker==1.8.2
# via flask
boto3==1.34.110
# via open-webui
botocore==1.34.110
# via boto3
# via s3transfer
build==1.2.1
# via chromadb
cachetools==5.3.3
# via google-auth
certifi==2024.2.2
# via httpcore
# via httpx
# via kubernetes
# via requests
# via unstructured-client
cffi==1.16.0
# via argon2-cffi-bindings
# via cryptography
chardet==5.2.0
# via unstructured
charset-normalizer==3.3.2
# via requests
# via unstructured-client
chroma-hnswlib==0.7.3
# via chromadb
chromadb==0.5.0
# via langchain-chroma
# via open-webui
click==8.1.7
# via black
# via flask
# via litellm
# via nltk
# via peewee-migrate
# via rq
# via typer
# via uvicorn
coloredlogs==15.0.1
# via onnxruntime
cryptography==42.0.7
# via litellm
# via pyjwt
ctranslate2==4.2.1
# via faster-whisper
dataclasses-json==0.6.6
# via langchain
# via langchain-community
# via unstructured
# via unstructured-client
deepdiff==7.0.1
# via unstructured-client
defusedxml==0.7.1
# via fpdf2
deprecated==1.2.14
# via opentelemetry-api
# via opentelemetry-exporter-otlp-proto-grpc
distro==1.9.0
# via openai
dnspython==2.6.1
# via email-validator
docx2txt==0.8
# via open-webui
ecdsa==0.19.0
# via python-jose
email-validator==2.1.1
# via fastapi
# via pydantic
emoji==2.11.1
# via unstructured
et-xmlfile==1.1.0
# via openpyxl
fake-useragent==1.5.1
# via open-webui
fastapi==0.111.0
# via chromadb
# via fastapi-sso
# via langchain-chroma
# via litellm
# via open-webui
fastapi-cli==0.0.4
# via fastapi
fastapi-sso==0.10.0
# via litellm
faster-whisper==1.0.2
# via open-webui
filelock==3.14.0
# via huggingface-hub
# via torch
# via transformers
filetype==1.2.0
# via unstructured
flask==3.0.3
# via flask-cors
# via open-webui
flask-cors==4.0.1
# via open-webui
flatbuffers==24.3.25
# via onnxruntime
fonttools==4.51.0
# via fpdf2
fpdf2==2.7.9
# via open-webui
frozenlist==1.4.1
# via aiohttp
# via aiosignal
fsspec==2024.3.1
# via huggingface-hub
# via torch
google-ai-generativelanguage==0.6.4
# via google-generativeai
google-api-core==2.19.0
# via google-ai-generativelanguage
# via google-api-python-client
# via google-generativeai
google-api-python-client==2.129.0
# via google-generativeai
google-auth==2.29.0
# via google-ai-generativelanguage
# via google-api-core
# via google-api-python-client
# via google-auth-httplib2
# via google-generativeai
# via kubernetes
google-auth-httplib2==0.2.0
# via google-api-python-client
google-generativeai==0.5.4
# via open-webui
googleapis-common-protos==1.63.0
# via google-api-core
# via grpcio-status
# via opentelemetry-exporter-otlp-proto-grpc
grpcio==1.63.0
# via chromadb
# via google-api-core
# via grpcio-status
# via opentelemetry-exporter-otlp-proto-grpc
grpcio-status==1.62.2
# via google-api-core
gunicorn==22.0.0
# via litellm
h11==0.14.0
# via httpcore
# via uvicorn
# via wsproto
httpcore==1.0.5
# via httpx
httplib2==0.22.0
# via google-api-python-client
# via google-auth-httplib2
httptools==0.6.1
# via uvicorn
httpx==0.27.0
# via fastapi
# via fastapi-sso
# via langfuse
# via openai
huggingface-hub==0.23.0
# via faster-whisper
# via sentence-transformers
# via tokenizers
# via transformers
humanfriendly==10.0
# via coloredlogs
idna==3.7
# via anyio
# via email-validator
# via httpx
# via langfuse
# via requests
# via unstructured-client
# via yarl
importlib-metadata==7.0.0
# via litellm
# via opentelemetry-api
importlib-resources==6.4.0
# via chromadb
itsdangerous==2.2.0
# via flask
jinja2==3.1.4
# via fastapi
# via flask
# via litellm
# via torch
jmespath==1.0.1
# via boto3
# via botocore
joblib==1.4.2
# via nltk
# via scikit-learn
jsonpatch==1.33
# via langchain-core
jsonpath-python==1.0.6
# via unstructured-client
jsonpointer==2.4
# via jsonpatch
kubernetes==29.0.0
# via chromadb
langchain==0.2.0
# via langchain-community
# via open-webui
langchain-chroma==0.1.1
# via open-webui
langchain-community==0.2.0
# via open-webui
langchain-core==0.2.1
# via langchain
# via langchain-chroma
# via langchain-community
# via langchain-text-splitters
langchain-text-splitters==0.2.0
# via langchain
langdetect==1.0.9
# via unstructured
langfuse==2.33.0
# via open-webui
langsmith==0.1.57
# via langchain
# via langchain-community
# via langchain-core
litellm==1.37.20
# via open-webui
lxml==5.2.2
# via unstructured
markdown==3.6
# via open-webui
markdown-it-py==3.0.0
# via rich
markupsafe==2.1.5
# via jinja2
# via werkzeug
marshmallow==3.21.2
# via dataclasses-json
# via unstructured-client
mdurl==0.1.2
# via markdown-it-py
mmh3==4.1.0
# via chromadb
monotonic==1.6
# via posthog
mpmath==1.3.0
# via sympy
multidict==6.0.5
# via aiohttp
# via yarl
mypy-extensions==1.0.0
# via black
# via typing-inspect
# via unstructured-client
networkx==3.3
# via torch
nltk==3.8.1
# via unstructured
numpy==1.26.4
# via chroma-hnswlib
# via chromadb
# via ctranslate2
# via langchain
# via langchain-chroma
# via langchain-community
# via onnxruntime
# via opencv-python
# via opencv-python-headless
# via pandas
# via rank-bm25
# via rapidocr-onnxruntime
# via scikit-learn
# via scipy
# via sentence-transformers
# via shapely
# via transformers
# via unstructured
oauthlib==3.2.2
# via fastapi-sso
# via kubernetes
# via requests-oauthlib
onnxruntime==1.17.3
# via chromadb
# via faster-whisper
# via rapidocr-onnxruntime
openai==1.28.1
# via litellm
opencv-python==4.9.0.80
# via rapidocr-onnxruntime
opencv-python-headless==4.9.0.80
# via open-webui
openpyxl==3.1.2
# via open-webui
opentelemetry-api==1.24.0
# via chromadb
# via opentelemetry-exporter-otlp-proto-grpc
# via opentelemetry-instrumentation
# via opentelemetry-instrumentation-asgi
# via opentelemetry-instrumentation-fastapi
# via opentelemetry-sdk
opentelemetry-exporter-otlp-proto-common==1.24.0
# via opentelemetry-exporter-otlp-proto-grpc
opentelemetry-exporter-otlp-proto-grpc==1.24.0
# via chromadb
opentelemetry-instrumentation==0.45b0
# via opentelemetry-instrumentation-asgi
# via opentelemetry-instrumentation-fastapi
opentelemetry-instrumentation-asgi==0.45b0
# via opentelemetry-instrumentation-fastapi
opentelemetry-instrumentation-fastapi==0.45b0
# via chromadb
opentelemetry-proto==1.24.0
# via opentelemetry-exporter-otlp-proto-common
# via opentelemetry-exporter-otlp-proto-grpc
opentelemetry-sdk==1.24.0
# via chromadb
# via opentelemetry-exporter-otlp-proto-grpc
opentelemetry-semantic-conventions==0.45b0
# via opentelemetry-instrumentation-asgi
# via opentelemetry-instrumentation-fastapi
# via opentelemetry-sdk
opentelemetry-util-http==0.45b0
# via opentelemetry-instrumentation-asgi
# via opentelemetry-instrumentation-fastapi
ordered-set==4.1.0
# via deepdiff
orjson==3.10.3
# via chromadb
# via fastapi
# via langsmith
# via litellm
overrides==7.7.0
# via chromadb
packaging==23.2
# via black
# via build
# via gunicorn
# via huggingface-hub
# via langchain-core
# via langfuse
# via marshmallow
# via onnxruntime
# via transformers
# via unstructured-client
pandas==2.2.2
# via open-webui
passlib==1.7.4
# via open-webui
pathspec==0.12.1
# via black
peewee==3.17.5
# via open-webui
# via peewee-migrate
peewee-migrate==1.12.2
# via open-webui
pillow==10.3.0
# via fpdf2
# via rapidocr-onnxruntime
# via sentence-transformers
platformdirs==4.2.1
# via black
posthog==3.5.0
# via chromadb
proto-plus==1.23.0
# via google-ai-generativelanguage
# via google-api-core
protobuf==4.25.3
# via google-ai-generativelanguage
# via google-api-core
# via google-generativeai
# via googleapis-common-protos
# via grpcio-status
# via onnxruntime
# via opentelemetry-proto
# via proto-plus
psycopg2-binary==2.9.9
# via open-webui
pyasn1==0.6.0
# via pyasn1-modules
# via python-jose
# via rsa
pyasn1-modules==0.4.0
# via google-auth
pyclipper==1.3.0.post5
# via rapidocr-onnxruntime
pycparser==2.22
# via cffi
pydantic==2.7.1
# via chromadb
# via fastapi
# via fastapi-sso
# via google-generativeai
# via langchain
# via langchain-core
# via langfuse
# via langsmith
# via open-webui
# via openai
pydantic-core==2.18.2
# via pydantic
pygments==2.18.0
# via rich
pyjwt==2.8.0
# via litellm
# via open-webui
pymysql==1.1.0
# via open-webui
pypandoc==1.13
# via open-webui
pyparsing==3.1.2
# via httplib2
pypdf==4.2.0
# via open-webui
# via unstructured-client
pypika==0.48.9
# via chromadb
pyproject-hooks==1.1.0
# via build
python-dateutil==2.9.0.post0
# via botocore
# via kubernetes
# via pandas
# via posthog
# via unstructured-client
python-dotenv==1.0.1
# via litellm
# via uvicorn
python-engineio==4.9.0
# via python-socketio
python-iso639==2024.4.27
# via unstructured
python-jose==3.3.0
# via open-webui
python-magic==0.4.27
# via unstructured
python-multipart==0.0.9
# via fastapi
# via litellm
# via open-webui
python-socketio==5.11.2
# via open-webui
pytube==15.0.0
# via open-webui
pytz==2024.1
# via apscheduler
# via pandas
pyxlsb==1.0.10
# via open-webui
pyyaml==6.0.1
# via chromadb
# via ctranslate2
# via huggingface-hub
# via kubernetes
# via langchain
# via langchain-community
# via langchain-core
# via litellm
# via rapidocr-onnxruntime
# via transformers
# via uvicorn
rank-bm25==0.2.2
# via open-webui
rapidfuzz==3.9.0
# via unstructured
rapidocr-onnxruntime==1.3.22
# via open-webui
redis==5.0.4
# via rq
regex==2024.5.10
# via nltk
# via tiktoken
# via transformers
requests==2.32.2
# via chromadb
# via google-api-core
# via huggingface-hub
# via kubernetes
# via langchain
# via langchain-community
# via langsmith
# via litellm
# via open-webui
# via posthog
# via requests-oauthlib
# via tiktoken
# via transformers
# via unstructured
# via unstructured-client
# via youtube-transcript-api
requests-oauthlib==2.0.0
# via kubernetes
rich==13.7.1
# via typer
rq==1.16.2
# via litellm
rsa==4.9
# via google-auth
# via python-jose
s3transfer==0.10.1
# via boto3
safetensors==0.4.3
# via transformers
scikit-learn==1.4.2
# via sentence-transformers
scipy==1.13.0
# via scikit-learn
# via sentence-transformers
sentence-transformers==2.7.0
# via open-webui
setuptools==69.5.1
# via ctranslate2
# via opentelemetry-instrumentation
shapely==2.0.4
# via rapidocr-onnxruntime
shellingham==1.5.4
# via typer
simple-websocket==1.0.0
# via python-engineio
six==1.16.0
# via apscheduler
# via ecdsa
# via kubernetes
# via langdetect
# via posthog
# via python-dateutil
# via rapidocr-onnxruntime
# via unstructured-client
sniffio==1.3.1
# via anyio
# via httpx
# via openai
soupsieve==2.5
# via beautifulsoup4
sqlalchemy==2.0.30
# via langchain
# via langchain-community
starlette==0.37.2
# via fastapi
sympy==1.12
# via onnxruntime
# via torch
tabulate==0.9.0
# via unstructured
tenacity==8.3.0
# via chromadb
# via langchain
# via langchain-community
# via langchain-core
threadpoolctl==3.5.0
# via scikit-learn
tiktoken==0.6.0
# via litellm
tokenizers==0.15.2
# via chromadb
# via faster-whisper
# via litellm
# via transformers
torch==2.3.0
# via sentence-transformers
tqdm==4.66.4
# via chromadb
# via google-generativeai
# via huggingface-hub
# via nltk
# via openai
# via sentence-transformers
# via transformers
transformers==4.39.3
# via sentence-transformers
typer==0.12.3
# via chromadb
# via fastapi-cli
typing-extensions==4.11.0
# via chromadb
# via fastapi
# via google-generativeai
# via huggingface-hub
# via openai
# via opentelemetry-sdk
# via pydantic
# via pydantic-core
# via sqlalchemy
# via torch
# via typer
# via typing-inspect
# via unstructured
# via unstructured-client
typing-inspect==0.9.0
# via dataclasses-json
# via unstructured-client
tzdata==2024.1
# via pandas
tzlocal==5.2
# via apscheduler
ujson==5.10.0
# via fastapi
unstructured==0.14.0
# via open-webui
unstructured-client==0.22.0
# via unstructured
uritemplate==4.1.1
# via google-api-python-client
urllib3==2.2.1
# via botocore
# via kubernetes
# via requests
# via unstructured-client
uvicorn==0.22.0
# via chromadb
# via fastapi
# via litellm
# via open-webui
uvloop==0.19.0
# via uvicorn
validators==0.28.1
# via open-webui
watchfiles==0.21.0
# via uvicorn
websocket-client==1.8.0
# via kubernetes
websockets==12.0
# via uvicorn
werkzeug==3.0.3
# via flask
wrapt==1.16.0
# via deprecated
# via langfuse
# via opentelemetry-instrumentation
# via unstructured
wsproto==1.2.0
# via simple-websocket
xlrd==2.0.1
# via open-webui
yarl==1.9.4
# via aiohttp
youtube-transcript-api==0.6.2
# via open-webui
zipp==3.18.1
# via importlib-metadata

View File

@ -654,3 +654,35 @@ export const deleteAllChats = async (token: string) => {
return res;
};
export const archiveAllChats = async (token: string) => {
let error = null;
const res = await fetch(`${WEBUI_API_BASE_URL}/chats/archive/all`, {
method: 'POST',
headers: {
Accept: 'application/json',
'Content-Type': 'application/json',
...(token && { authorization: `Bearer ${token}` })
}
})
.then(async (res) => {
if (!res.ok) throw await res.json();
return res.json();
})
.then((json) => {
return json;
})
.catch((err) => {
error = err.detail;
console.log(err);
return null;
});
if (error) {
throw error;
}
return res;
};
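
A minimal usage sketch for the new archive endpoint; the toast calls and handler context are illustrative, not part of this diff:

import { toast } from 'svelte-sonner';
import { archiveAllChats } from '$lib/apis/chats';

// Hypothetical caller: archive every chat for the current user, then report the outcome.
const archiveAllHandler = async () => {
	try {
		await archiveAllChats(localStorage.token);
		toast.success('All chats archived');
	} catch (error) {
		toast.error(`${error}`);
	}
};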

View File

@ -1,4 +1,5 @@
import { WEBUI_API_BASE_URL } from '$lib/constants';
import type { Banner } from '$lib/types';
export const setDefaultModels = async (token: string, models: string) => {
let error = null;
@ -59,3 +60,60 @@ export const setDefaultPromptSuggestions = async (token: string, promptSuggestio
return res;
};
export const getBanners = async (token: string): Promise<Banner[]> => {
let error = null;
const res = await fetch(`${WEBUI_API_BASE_URL}/configs/banners`, {
method: 'GET',
headers: {
'Content-Type': 'application/json',
Authorization: `Bearer ${token}`
}
})
.then(async (res) => {
if (!res.ok) throw await res.json();
return res.json();
})
.catch((err) => {
console.log(err);
error = err.detail;
return null;
});
if (error) {
throw error;
}
return res;
};
export const setBanners = async (token: string, banners: Banner[]) => {
let error = null;
const res = await fetch(`${WEBUI_API_BASE_URL}/configs/banners`, {
method: 'POST',
headers: {
'Content-Type': 'application/json',
Authorization: `Bearer ${token}`
},
body: JSON.stringify({
banners: banners
})
})
.then(async (res) => {
if (!res.ok) throw await res.json();
return res.json();
})
.catch((err) => {
console.log(err);
error = err.detail;
return null;
});
if (error) {
throw error;
}
return res;
};
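
A sketch of the banner round trip, assuming the Banner shape used by the admin UI later in this commit (id, type, title, content, dismissible, timestamp); the banner content is invented for illustration:

import { v4 as uuidv4 } from 'uuid';
import { getBanners, setBanners } from '$lib/apis/configs';
import type { Banner } from '$lib/types';

// Append one illustrative banner to whatever is already configured.
const banner: Banner = {
	id: uuidv4(),
	type: 'info',
	title: '',
	content: 'Scheduled maintenance tonight at 22:00 UTC',
	dismissible: true,
	timestamp: Math.floor(Date.now() / 1000)
};

const existing = await getBanners(localStorage.token);
await setBanners(localStorage.token, [...existing, banner]);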

View File

@ -1,4 +1,53 @@
import { WEBUI_BASE_URL } from '$lib/constants';
import { WEBUI_API_BASE_URL, WEBUI_BASE_URL } from '$lib/constants';
export const getModels = async (token: string = '') => {
let error = null;
const res = await fetch(`${WEBUI_BASE_URL}/api/models`, {
method: 'GET',
headers: {
Accept: 'application/json',
'Content-Type': 'application/json',
...(token && { authorization: `Bearer ${token}` })
}
})
.then(async (res) => {
if (!res.ok) throw await res.json();
return res.json();
})
.catch((err) => {
console.log(err);
error = err;
return null;
});
if (error) {
throw error;
}
let models = res?.data ?? [];
models = models
.filter((model) => model)
.sort((a, b) => {
// Compare case-insensitively
const lowerA = a.name.toLowerCase();
const lowerB = b.name.toLowerCase();
if (lowerA < lowerB) return -1;
if (lowerA > lowerB) return 1;
// If equal case-insensitively, fall back to the original strings;
// uppercase will come before lowercase due to ASCII/UTF-16 code unit order
if (a.name < b.name) return -1;
if (a.name > b.name) return 1;
return 0; // They are equal
});
console.log(models);
return models;
};
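
The comparator above sorts case-insensitively first and only falls back to a raw comparison on ties; a standalone example applying the same two-stage comparison to plain strings:

// Same two-stage comparison, shown on plain strings.
const names = ['beta', 'alpha', 'Alpha'];
names.sort((a, b) => {
	const lowerA = a.toLowerCase();
	const lowerB = b.toLowerCase();
	if (lowerA < lowerB) return -1;
	if (lowerA > lowerB) return 1;
	if (a < b) return -1;
	if (a > b) return 1;
	return 0;
});
// => ['Alpha', 'alpha', 'beta']: 'A' (code unit 65) precedes 'a' (97)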
export const getBackendConfig = async () => {
let error = null;
@ -196,3 +245,131 @@ export const updateWebhookUrl = async (token: string, url: string) => {
return res.url;
};
export const getCommunitySharingEnabledStatus = async (token: string) => {
let error = null;
const res = await fetch(`${WEBUI_BASE_URL}/api/community_sharing`, {
method: 'GET',
headers: {
'Content-Type': 'application/json',
Authorization: `Bearer ${token}`
}
})
.then(async (res) => {
if (!res.ok) throw await res.json();
return res.json();
})
.catch((err) => {
console.log(err);
error = err;
return null;
});
if (error) {
throw error;
}
return res;
};
export const toggleCommunitySharingEnabledStatus = async (token: string) => {
let error = null;
const res = await fetch(`${WEBUI_BASE_URL}/api/community_sharing/toggle`, {
method: 'GET',
headers: {
'Content-Type': 'application/json',
Authorization: `Bearer ${token}`
}
})
.then(async (res) => {
if (!res.ok) throw await res.json();
return res.json();
})
.catch((err) => {
console.log(err);
error = err.detail;
return null;
});
if (error) {
throw error;
}
return res;
};
export const getModelConfig = async (token: string): Promise<GlobalModelConfig> => {
let error = null;
const res = await fetch(`${WEBUI_BASE_URL}/api/config/models`, {
method: 'GET',
headers: {
'Content-Type': 'application/json',
Authorization: `Bearer ${token}`
}
})
.then(async (res) => {
if (!res.ok) throw await res.json();
return res.json();
})
.catch((err) => {
console.log(err);
error = err;
return null;
});
if (error) {
throw error;
}
return res.models;
};
export interface ModelConfig {
id: string;
name: string;
meta: ModelMeta;
base_model_id?: string;
params: ModelParams;
}
export interface ModelMeta {
description?: string;
capabilities?: object;
}
export interface ModelParams {}
export type GlobalModelConfig = ModelConfig[];
export const updateModelConfig = async (token: string, config: GlobalModelConfig) => {
let error = null;
const res = await fetch(`${WEBUI_BASE_URL}/api/config/models`, {
method: 'POST',
headers: {
'Content-Type': 'application/json',
Authorization: `Bearer ${token}`
},
body: JSON.stringify({
models: config
})
})
.then(async (res) => {
if (!res.ok) throw await res.json();
return res.json();
})
.catch((err) => {
console.log(err);
error = err;
return null;
});
if (error) {
throw error;
}
return res;
};
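
A hypothetical payload for updateModelConfig using the interfaces above; the id, base model, and description are invented for illustration:

const config: GlobalModelConfig = [
	{
		id: 'llama3-assistant',
		name: 'Llama 3 Assistant',
		base_model_id: 'llama3:latest',
		meta: {
			description: 'General-purpose assistant preset',
			capabilities: { vision: false }
		},
		params: {}
	}
];

await updateModelConfig(localStorage.token, config);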

View File

@ -1,150 +0,0 @@
import { LITELLM_API_BASE_URL } from '$lib/constants';
export const getLiteLLMModels = async (token: string = '') => {
let error = null;
const res = await fetch(`${LITELLM_API_BASE_URL}/v1/models`, {
method: 'GET',
headers: {
Accept: 'application/json',
'Content-Type': 'application/json',
...(token && { authorization: `Bearer ${token}` })
}
})
.then(async (res) => {
if (!res.ok) throw await res.json();
return res.json();
})
.catch((err) => {
console.log(err);
error = `LiteLLM: ${err?.error?.message ?? 'Network Problem'}`;
return [];
});
if (error) {
throw error;
}
const models = Array.isArray(res) ? res : res?.data ?? null;
return models
? models
.map((model) => ({
id: model.id,
name: model.name ?? model.id,
external: true,
source: 'LiteLLM'
}))
.sort((a, b) => {
return a.name.localeCompare(b.name);
})
: models;
};
export const getLiteLLMModelInfo = async (token: string = '') => {
let error = null;
const res = await fetch(`${LITELLM_API_BASE_URL}/model/info`, {
method: 'GET',
headers: {
Accept: 'application/json',
'Content-Type': 'application/json',
...(token && { authorization: `Bearer ${token}` })
}
})
.then(async (res) => {
if (!res.ok) throw await res.json();
return res.json();
})
.catch((err) => {
console.log(err);
error = `LiteLLM: ${err?.error?.message ?? 'Network Problem'}`;
return [];
});
if (error) {
throw error;
}
const models = Array.isArray(res) ? res : res?.data ?? null;
return models;
};
type AddLiteLLMModelForm = {
name: string;
model: string;
api_base: string;
api_key: string;
rpm: string;
max_tokens: string;
};
export const addLiteLLMModel = async (token: string = '', payload: AddLiteLLMModelForm) => {
let error = null;
const res = await fetch(`${LITELLM_API_BASE_URL}/model/new`, {
method: 'POST',
headers: {
Accept: 'application/json',
'Content-Type': 'application/json',
...(token && { authorization: `Bearer ${token}` })
},
body: JSON.stringify({
model_name: payload.name,
litellm_params: {
model: payload.model,
...(payload.api_base === '' ? {} : { api_base: payload.api_base }),
...(payload.api_key === '' ? {} : { api_key: payload.api_key }),
...(isNaN(parseInt(payload.rpm)) ? {} : { rpm: parseInt(payload.rpm) }),
...(payload.max_tokens === '' ? {} : { max_tokens: payload.max_tokens })
}
})
})
.then(async (res) => {
if (!res.ok) throw await res.json();
return res.json();
})
.catch((err) => {
console.log(err);
error = `LiteLLM: ${err?.error?.message ?? 'Network Problem'}`;
return [];
});
if (error) {
throw error;
}
return res;
};
export const deleteLiteLLMModel = async (token: string = '', id: string) => {
let error = null;
const res = await fetch(`${LITELLM_API_BASE_URL}/model/delete`, {
method: 'POST',
headers: {
Accept: 'application/json',
'Content-Type': 'application/json',
...(token && { authorization: `Bearer ${token}` })
},
body: JSON.stringify({
id: id
})
})
.then(async (res) => {
if (!res.ok) throw await res.json();
return res.json();
})
.catch((err) => {
console.log(err);
error = `LiteLLM: ${err?.error?.message ?? 'Network Problem'}`;
return [];
});
if (error) {
throw error;
}
return res;
};

View File

@ -1,18 +1,16 @@
import { WEBUI_API_BASE_URL } from '$lib/constants';
export const createNewModelfile = async (token: string, modelfile: object) => {
export const addNewModel = async (token: string, model: object) => {
let error = null;
const res = await fetch(`${WEBUI_API_BASE_URL}/modelfiles/create`, {
const res = await fetch(`${WEBUI_API_BASE_URL}/models/add`, {
method: 'POST',
headers: {
Accept: 'application/json',
'Content-Type': 'application/json',
authorization: `Bearer ${token}`
},
body: JSON.stringify({
modelfile: modelfile
})
body: JSON.stringify(model)
})
.then(async (res) => {
if (!res.ok) throw await res.json();
@ -31,10 +29,10 @@ export const createNewModelfile = async (token: string, modelfile: object) => {
return res;
};
export const getModelfiles = async (token: string = '') => {
export const getModelInfos = async (token: string = '') => {
let error = null;
const res = await fetch(`${WEBUI_API_BASE_URL}/modelfiles/`, {
const res = await fetch(`${WEBUI_API_BASE_URL}/models`, {
method: 'GET',
headers: {
Accept: 'application/json',
@ -59,62 +57,22 @@ export const getModelfiles = async (token: string = '') => {
throw error;
}
return res.map((modelfile) => modelfile.modelfile);
return res;
};
export const getModelfileByTagName = async (token: string, tagName: string) => {
export const getModelById = async (token: string, id: string) => {
let error = null;
const res = await fetch(`${WEBUI_API_BASE_URL}/modelfiles/`, {
method: 'POST',
const searchParams = new URLSearchParams();
searchParams.append('id', id);
const res = await fetch(`${WEBUI_API_BASE_URL}/models?${searchParams.toString()}`, {
method: 'GET',
headers: {
Accept: 'application/json',
'Content-Type': 'application/json',
authorization: `Bearer ${token}`
},
body: JSON.stringify({
tag_name: tagName
})
})
.then(async (res) => {
if (!res.ok) throw await res.json();
return res.json();
})
.then((json) => {
return json;
})
.catch((err) => {
error = err;
console.log(err);
return null;
});
if (error) {
throw error;
}
return res.modelfile;
};
export const updateModelfileByTagName = async (
token: string,
tagName: string,
modelfile: object
) => {
let error = null;
const res = await fetch(`${WEBUI_API_BASE_URL}/modelfiles/update`, {
method: 'POST',
headers: {
Accept: 'application/json',
'Content-Type': 'application/json',
authorization: `Bearer ${token}`
},
body: JSON.stringify({
tag_name: tagName,
modelfile: modelfile
})
}
})
.then(async (res) => {
if (!res.ok) throw await res.json();
@ -137,19 +95,55 @@ export const updateModelfileByTagName = async (
return res;
};
export const deleteModelfileByTagName = async (token: string, tagName: string) => {
export const updateModelById = async (token: string, id: string, model: object) => {
let error = null;
const res = await fetch(`${WEBUI_API_BASE_URL}/modelfiles/delete`, {
const searchParams = new URLSearchParams();
searchParams.append('id', id);
const res = await fetch(`${WEBUI_API_BASE_URL}/models/update?${searchParams.toString()}`, {
method: 'POST',
headers: {
Accept: 'application/json',
'Content-Type': 'application/json',
authorization: `Bearer ${token}`
},
body: JSON.stringify(model)
})
.then(async (res) => {
if (!res.ok) throw await res.json();
return res.json();
})
.then((json) => {
return json;
})
.catch((err) => {
error = err;
console.log(err);
return null;
});
if (error) {
throw error;
}
return res;
};
export const deleteModelById = async (token: string, id: string) => {
let error = null;
const searchParams = new URLSearchParams();
searchParams.append('id', id);
const res = await fetch(`${WEBUI_API_BASE_URL}/models/delete?${searchParams.toString()}`, {
method: 'DELETE',
headers: {
Accept: 'application/json',
'Content-Type': 'application/json',
authorization: `Bearer ${token}`
},
body: JSON.stringify({
tag_name: tagName
})
}
})
.then(async (res) => {
if (!res.ok) throw await res.json();

View File

@ -1,6 +1,73 @@
import { OLLAMA_API_BASE_URL } from '$lib/constants';
import { promptTemplate } from '$lib/utils';
export const getOllamaConfig = async (token: string = '') => {
let error = null;
const res = await fetch(`${OLLAMA_API_BASE_URL}/config`, {
method: 'GET',
headers: {
Accept: 'application/json',
'Content-Type': 'application/json',
...(token && { authorization: `Bearer ${token}` })
}
})
.then(async (res) => {
if (!res.ok) throw await res.json();
return res.json();
})
.catch((err) => {
console.log(err);
if ('detail' in err) {
error = err.detail;
} else {
error = 'Server connection failed';
}
return null;
});
if (error) {
throw error;
}
return res;
};
export const updateOllamaConfig = async (token: string = '', enable_ollama_api: boolean) => {
let error = null;
const res = await fetch(`${OLLAMA_API_BASE_URL}/config/update`, {
method: 'POST',
headers: {
Accept: 'application/json',
'Content-Type': 'application/json',
...(token && { authorization: `Bearer ${token}` })
},
body: JSON.stringify({
enable_ollama_api: enable_ollama_api
})
})
.then(async (res) => {
if (!res.ok) throw await res.json();
return res.json();
})
.catch((err) => {
console.log(err);
if ('detail' in err) {
error = err.detail;
} else {
error = 'Server connection failed';
}
return null;
});
if (error) {
throw error;
}
return res;
};
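
A sketch of reading and flipping the flag; the ENABLE_OLLAMA_API field name in the response is an assumption here, not confirmed by this diff:

import { getOllamaConfig, updateOllamaConfig } from '$lib/apis/ollama';

// Read current state, then toggle the Ollama API on or off.
const ollamaConfig = await getOllamaConfig(localStorage.token);
const enabled = ollamaConfig?.ENABLE_OLLAMA_API ?? true; // assumed field name
await updateOllamaConfig(localStorage.token, !enabled);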
export const getOllamaUrls = async (token: string = '') => {
let error = null;
@ -97,7 +164,7 @@ export const getOllamaVersion = async (token: string = '') => {
throw error;
}
return res?.version ?? '';
return res?.version ?? false;
};
export const getOllamaModels = async (token: string = '') => {

View File

@ -230,7 +230,12 @@ export const getOpenAIModels = async (token: string = '') => {
return models
? models
.map((model) => ({ id: model.id, name: model.name ?? model.id, external: true }))
.map((model) => ({
id: model.id,
name: model.name ?? model.id,
external: true,
custom_info: model.custom_info
}))
.sort((a, b) => {
return a.name.localeCompare(b.name);
})

View File

@ -115,6 +115,62 @@ export const getUsers = async (token: string) => {
return res ? res : [];
};
export const getUserSettings = async (token: string) => {
let error = null;
const res = await fetch(`${WEBUI_API_BASE_URL}/users/user/settings`, {
method: 'GET',
headers: {
'Content-Type': 'application/json',
Authorization: `Bearer ${token}`
}
})
.then(async (res) => {
if (!res.ok) throw await res.json();
return res.json();
})
.catch((err) => {
console.log(err);
error = err.detail;
return null;
});
if (error) {
throw error;
}
return res;
};
export const updateUserSettings = async (token: string, settings: object) => {
let error = null;
const res = await fetch(`${WEBUI_API_BASE_URL}/users/user/settings/update`, {
method: 'POST',
headers: {
'Content-Type': 'application/json',
Authorization: `Bearer ${token}`
},
body: JSON.stringify({
...settings
})
})
.then(async (res) => {
if (!res.ok) throw await res.json();
return res.json();
})
.catch((err) => {
console.log(err);
error = err.detail;
return null;
});
if (error) {
throw error;
}
return res;
};
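
These two endpoints let the client persist UI settings per user instead of only in localStorage; a sketch of the intended flow, with the `ui` envelope borrowed from how ModelSelector.svelte calls it later in this commit:

import { getUserSettings, updateUserSettings } from '$lib/apis/users';

// On startup: prefer server-side settings, fall back to the localStorage copy.
const userSettings = await getUserSettings(localStorage.token);
const ui = userSettings?.ui ?? JSON.parse(localStorage.getItem('settings') ?? '{}');

// On change: write the whole UI settings object back.
await updateUserSettings(localStorage.token, { ui });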
export const getUserById = async (token: string, userId: string) => {
let error = null;

View File

@ -0,0 +1,137 @@
<script lang="ts">
import { v4 as uuidv4 } from 'uuid';
import { getContext, onMount } from 'svelte';
import { banners as _banners } from '$lib/stores';
import type { Banner } from '$lib/types';
import { getBanners, setBanners } from '$lib/apis/configs';
import type { Writable } from 'svelte/store';
import type { i18n as i18nType } from 'i18next';
import Tooltip from '$lib/components/common/Tooltip.svelte';
import Switch from '$lib/components/common/Switch.svelte';
const i18n: Writable<i18nType> = getContext('i18n');
export let saveHandler: Function;
let banners: Banner[] = [];
onMount(async () => {
banners = await getBanners(localStorage.token);
});
const updateBanners = async () => {
_banners.set(await setBanners(localStorage.token, banners));
};
</script>
<form
class="flex flex-col h-full justify-between space-y-3 text-sm"
on:submit|preventDefault={async () => {
updateBanners();
saveHandler();
}}
>
<div class=" space-y-3 pr-1.5 overflow-y-scroll max-h-80 h-full">
<div class=" space-y-3 pr-1.5">
<div class="flex w-full justify-between mb-2">
<div class=" self-center text-sm font-semibold">
{$i18n.t('Banners')}
</div>
<button
class="p-1 px-3 text-xs flex rounded transition"
type="button"
on:click={() => {
if (banners.length === 0 || banners.at(-1).content !== '') {
banners = [
...banners,
{
id: uuidv4(),
type: '',
title: '',
content: '',
dismissible: true,
timestamp: Math.floor(Date.now() / 1000)
}
];
}
}}
>
<svg
xmlns="http://www.w3.org/2000/svg"
viewBox="0 0 20 20"
fill="currentColor"
class="w-4 h-4"
>
<path
d="M10.75 4.75a.75.75 0 00-1.5 0v4.5h-4.5a.75.75 0 000 1.5h4.5v4.5a.75.75 0 001.5 0v-4.5h4.5a.75.75 0 000-1.5h-4.5v-4.5z"
/>
</svg>
</button>
</div>
<div class="flex flex-col space-y-1">
{#each banners as banner, bannerIdx}
<div class=" flex justify-between">
<div class="flex flex-row flex-1 border rounded-xl dark:border-gray-800">
<select
class="w-fit capitalize rounded-xl py-2 px-4 text-xs bg-transparent outline-none"
bind:value={banner.type}
>
{#if banner.type == ''}
<option value="" selected disabled class="text-gray-900">{$i18n.t('Type')}</option
>
{/if}
<option value="info" class="text-gray-900">{$i18n.t('Info')}</option>
<option value="warning" class="text-gray-900">{$i18n.t('Warning')}</option>
<option value="error" class="text-gray-900">{$i18n.t('Error')}</option>
<option value="success" class="text-gray-900">{$i18n.t('Success')}</option>
</select>
<input
class="pr-5 py-1.5 text-xs w-full bg-transparent outline-none"
placeholder={$i18n.t('Content')}
bind:value={banner.content}
/>
<div class="relative top-1.5 -left-2">
<Tooltip content="Dismissible" className="flex h-fit items-center">
<Switch bind:state={banner.dismissible} />
</Tooltip>
</div>
</div>
<button
class="px-2"
type="button"
on:click={() => {
banners.splice(bannerIdx, 1);
banners = banners;
}}
>
<svg
xmlns="http://www.w3.org/2000/svg"
viewBox="0 0 20 20"
fill="currentColor"
class="w-4 h-4"
>
<path
d="M6.28 5.22a.75.75 0 00-1.06 1.06L8.94 10l-3.72 3.72a.75.75 0 101.06 1.06L10 11.06l3.72 3.72a.75.75 0 101.06-1.06L11.06 10l3.72-3.72a.75.75 0 00-1.06-1.06L10 8.94 6.28 5.22z"
/>
</svg>
</button>
</div>
{/each}
</div>
</div>
</div>
<div class="flex justify-end pt-3 text-sm font-medium">
<button
class=" px-4 py-2 bg-emerald-700 hover:bg-emerald-800 text-gray-100 transition rounded-lg"
type="submit"
>
Save
</button>
</div>
</form>

View File

@ -1,13 +1,24 @@
<script lang="ts">
import fileSaver from 'file-saver';
const { saveAs } = fileSaver;
import { downloadDatabase } from '$lib/apis/utils';
import { onMount, getContext } from 'svelte';
import { config } from '$lib/stores';
import { config, user } from '$lib/stores';
import { toast } from 'svelte-sonner';
import { getAllUserChats } from '$lib/apis/chats';
const i18n = getContext('i18n');
export let saveHandler: Function;
const exportAllUserChats = async () => {
let blob = new Blob([JSON.stringify(await getAllUserChats(localStorage.token))], {
type: 'application/json'
});
saveAs(blob, `all-chats-export-${Date.now()}.json`);
};
onMount(async () => {
// permissions = await getUserPermissions(localStorage.token);
});
@ -23,10 +34,10 @@
<div>
<div class=" mb-2 text-sm font-medium">{$i18n.t('Database')}</div>
<div class=" flex w-full justify-between">
<!-- <div class=" self-center text-xs font-medium">{$i18n.t('Allow Chat Deletion')}</div> -->
{#if $config?.features.enable_admin_export ?? true}
<div class=" flex w-full justify-between">
<!-- <div class=" self-center text-xs font-medium">{$i18n.t('Allow Chat Deletion')}</div> -->
{#if $config?.admin_export_enabled ?? true}
<button
class=" flex rounded-md py-1.5 px-3 w-full hover:bg-gray-200 dark:hover:bg-gray-800 transition"
type="button"
@ -55,8 +66,36 @@
</div>
<div class=" self-center text-sm font-medium">{$i18n.t('Download Database')}</div>
</button>
{/if}
</div>
</div>
<hr class=" dark:border-gray-700 my-1" />
<button
class=" flex rounded-md py-2 px-3.5 w-full hover:bg-gray-200 dark:hover:bg-gray-800 transition"
on:click={() => {
exportAllUserChats();
}}
>
<div class=" self-center mr-3">
<svg
xmlns="http://www.w3.org/2000/svg"
viewBox="0 0 16 16"
fill="currentColor"
class="w-4 h-4"
>
<path d="M2 3a1 1 0 0 1 1-1h10a1 1 0 0 1 1 1v1a1 1 0 0 1-1 1H3a1 1 0 0 1-1-1V3Z" />
<path
fill-rule="evenodd"
d="M13 6H3v6a2 2 0 0 0 2 2h6a2 2 0 0 0 2-2V6ZM8.75 7.75a.75.75 0 0 0-1.5 0v2.69L6.03 9.22a.75.75 0 0 0-1.06 1.06l2.5 2.5a.75.75 0 0 0 1.06 0l2.5-2.5a.75.75 0 1 0-1.06-1.06l-1.22 1.22V7.75Z"
clip-rule="evenodd"
/>
</svg>
</div>
<div class=" self-center text-sm font-medium">
{$i18n.t('Export All Chats (All Users)')}
</div>
</button>
{/if}
</div>
</div>

View File

@ -1,5 +1,10 @@
<script lang="ts">
import { getWebhookUrl, updateWebhookUrl } from '$lib/apis';
import {
getCommunitySharingEnabledStatus,
getWebhookUrl,
toggleCommunitySharingEnabledStatus,
updateWebhookUrl
} from '$lib/apis';
import {
getDefaultUserRole,
getJWTExpiresDuration,
@ -18,6 +23,7 @@
let JWTExpiresIn = '';
let webhookUrl = '';
let communitySharingEnabled = true;
const toggleSignUpEnabled = async () => {
signUpEnabled = await toggleSignUpEnabledStatus(localStorage.token);
@ -35,11 +41,28 @@
webhookUrl = await updateWebhookUrl(localStorage.token, webhookUrl);
};
const toggleCommunitySharingEnabled = async () => {
communitySharingEnabled = await toggleCommunitySharingEnabledStatus(localStorage.token);
};
onMount(async () => {
signUpEnabled = await getSignUpEnabledStatus(localStorage.token);
defaultUserRole = await getDefaultUserRole(localStorage.token);
JWTExpiresIn = await getJWTExpiresDuration(localStorage.token);
webhookUrl = await getWebhookUrl(localStorage.token);
await Promise.all([
(async () => {
signUpEnabled = await getSignUpEnabledStatus(localStorage.token);
})(),
(async () => {
defaultUserRole = await getDefaultUserRole(localStorage.token);
})(),
(async () => {
JWTExpiresIn = await getJWTExpiresDuration(localStorage.token);
})(),
(async () => {
webhookUrl = await getWebhookUrl(localStorage.token);
})(),
(async () => {
communitySharingEnabled = await getCommunitySharingEnabledStatus(localStorage.token);
})()
]);
});
</script>
@ -114,6 +137,47 @@
</div>
</div>
<div class=" flex w-full justify-between">
<div class=" self-center text-xs font-medium">{$i18n.t('Enable Community Sharing')}</div>
<button
class="p-1 px-3 text-xs flex rounded transition"
on:click={() => {
toggleCommunitySharingEnabled();
}}
type="button"
>
{#if communitySharingEnabled}
<svg
xmlns="http://www.w3.org/2000/svg"
viewBox="0 0 16 16"
fill="currentColor"
class="w-4 h-4"
>
<path
d="M11.5 1A3.5 3.5 0 0 0 8 4.5V7H2.5A1.5 1.5 0 0 0 1 8.5v5A1.5 1.5 0 0 0 2.5 15h7a1.5 1.5 0 0 0 1.5-1.5v-5A1.5 1.5 0 0 0 9.5 7V4.5a2 2 0 1 1 4 0v1.75a.75.75 0 0 0 1.5 0V4.5A3.5 3.5 0 0 0 11.5 1Z"
/>
</svg>
<span class="ml-2 self-center">{$i18n.t('Enabled')}</span>
{:else}
<svg
xmlns="http://www.w3.org/2000/svg"
viewBox="0 0 16 16"
fill="currentColor"
class="w-4 h-4"
>
<path
fill-rule="evenodd"
d="M8 1a3.5 3.5 0 0 0-3.5 3.5V7A1.5 1.5 0 0 0 3 8.5v5A1.5 1.5 0 0 0 4.5 15h7a1.5 1.5 0 0 0 1.5-1.5v-5A1.5 1.5 0 0 0 11.5 7V4.5A3.5 3.5 0 0 0 8 1Zm2 6V4.5a2 2 0 1 0-4 0V7h4Z"
clip-rule="evenodd"
/>
</svg>
<span class="ml-2 self-center">{$i18n.t('Disabled')}</span>
{/if}
</button>
</div>
<hr class=" dark:border-gray-700 my-3" />
<div class=" w-full justify-between">

View File

@ -1,15 +1,19 @@
<script lang="ts">
import { getModelFilterConfig, updateModelFilterConfig } from '$lib/apis';
import { getBackendConfig, getModelFilterConfig, updateModelFilterConfig } from '$lib/apis';
import { getSignUpEnabledStatus, toggleSignUpEnabledStatus } from '$lib/apis/auths';
import { getUserPermissions, updateUserPermissions } from '$lib/apis/users';
import { onMount, getContext } from 'svelte';
import { models } from '$lib/stores';
import { models, config } from '$lib/stores';
import Switch from '$lib/components/common/Switch.svelte';
import { setDefaultModels } from '$lib/apis/configs';
const i18n = getContext('i18n');
export let saveHandler: Function;
let defaultModelId = '';
let whitelistEnabled = false;
let whitelistModels = [''];
let permissions = {
@ -24,9 +28,10 @@
const res = await getModelFilterConfig(localStorage.token);
if (res) {
whitelistEnabled = res.enabled;
whitelistModels = res.models.length > 0 ? res.models : [''];
}
defaultModelId = $config.default_models ? $config?.default_models.split(',')[0] : '';
});
</script>
@ -34,10 +39,13 @@
class="flex flex-col h-full justify-between space-y-3 text-sm"
on:submit|preventDefault={async () => {
// console.log('submit');
await updateUserPermissions(localStorage.token, permissions);
await setDefaultModels(localStorage.token, defaultModelId);
await updateUserPermissions(localStorage.token, permissions);
await updateModelFilterConfig(localStorage.token, whitelistEnabled, whitelistModels);
saveHandler();
await config.set(await getBackendConfig());
}}
>
<div class=" space-y-3 pr-1.5 overflow-y-scroll max-h-80">
@ -88,26 +96,40 @@
<hr class=" dark:border-gray-700 my-2" />
<div class="mt-2 space-y-3 pr-1.5">
<div class="mt-2 space-y-3">
<div>
<div class="mb-2">
<div class="flex justify-between items-center text-xs">
<div class=" text-sm font-medium">{$i18n.t('Manage Models')}</div>
</div>
</div>
<div class=" space-y-1 mb-3">
<div class="mb-2">
<div class="flex justify-between items-center text-xs">
<div class=" text-xs font-medium">{$i18n.t('Default Model')}</div>
</div>
</div>
<div class=" space-y-3">
<div>
<div class="flex-1 mr-2">
<select
class="w-full rounded-lg py-2 px-4 text-sm dark:text-gray-300 dark:bg-gray-850 outline-none"
bind:value={defaultModelId}
placeholder="Select a model"
>
<option value="" disabled selected>{$i18n.t('Select a model')}</option>
{#each $models.filter((model) => model.id) as model}
<option value={model.id} class="bg-gray-100 dark:bg-gray-700">{model.name}</option>
{/each}
</select>
</div>
</div>
<div class=" space-y-1">
<div class="mb-2">
<div class="flex justify-between items-center text-xs">
<div class=" text-xs font-medium">{$i18n.t('Model Whitelisting')}</div>
<button
class=" text-xs font-medium text-gray-500"
type="button"
on:click={() => {
whitelistEnabled = !whitelistEnabled;
}}>{whitelistEnabled ? $i18n.t('On') : $i18n.t('Off')}</button
>
<Switch bind:state={whitelistEnabled} />
</div>
</div>

View File

@ -6,6 +6,9 @@
import General from './Settings/General.svelte';
import Users from './Settings/Users.svelte';
import Banners from '$lib/components/admin/Settings/Banners.svelte';
import { toast } from 'svelte-sonner';
const i18n = getContext('i18n');
export let show = false;
@ -117,24 +120,63 @@
</div>
<div class=" self-center">{$i18n.t('Database')}</div>
</button>
<button
class="px-2.5 py-2.5 min-w-fit rounded-lg flex-1 md:flex-none flex text-right transition {selectedTab ===
'banners'
? 'bg-gray-200 dark:bg-gray-700'
: ' hover:bg-gray-300 dark:hover:bg-gray-800'}"
on:click={() => {
selectedTab = 'banners';
}}
>
<div class=" self-center mr-2">
<svg
xmlns="http://www.w3.org/2000/svg"
viewBox="0 0 24 24"
fill="currentColor"
class="size-4"
>
<path
d="M5.85 3.5a.75.75 0 0 0-1.117-1 9.719 9.719 0 0 0-2.348 4.876.75.75 0 0 0 1.479.248A8.219 8.219 0 0 1 5.85 3.5ZM19.267 2.5a.75.75 0 1 0-1.118 1 8.22 8.22 0 0 1 1.987 4.124.75.75 0 0 0 1.48-.248A9.72 9.72 0 0 0 19.266 2.5Z"
/>
<path
fill-rule="evenodd"
d="M12 2.25A6.75 6.75 0 0 0 5.25 9v.75a8.217 8.217 0 0 1-2.119 5.52.75.75 0 0 0 .298 1.206c1.544.57 3.16.99 4.831 1.243a3.75 3.75 0 1 0 7.48 0 24.583 24.583 0 0 0 4.83-1.244.75.75 0 0 0 .298-1.205 8.217 8.217 0 0 1-2.118-5.52V9A6.75 6.75 0 0 0 12 2.25ZM9.75 18c0-.034 0-.067.002-.1a25.05 25.05 0 0 0 4.496 0l.002.1a2.25 2.25 0 1 1-4.5 0Z"
clip-rule="evenodd"
/>
</svg>
</div>
<div class=" self-center">{$i18n.t('Banners')}</div>
</button>
</div>
<div class="flex-1 md:min-h-[380px]">
{#if selectedTab === 'general'}
<General
saveHandler={() => {
show = false;
toast.success($i18n.t('Settings saved successfully!'));
}}
/>
{:else if selectedTab === 'users'}
<Users
saveHandler={() => {
show = false;
toast.success($i18n.t('Settings saved successfully!'));
}}
/>
{:else if selectedTab === 'db'}
<Database
saveHandler={() => {
show = false;
toast.success($i18n.t('Settings saved successfully!'));
}}
/>
{:else if selectedTab === 'banners'}
<Banners
saveHandler={() => {
show = false;
toast.success($i18n.t('Settings saved successfully!'));
}}
/>
{/if}

File diff suppressed because it is too large

View File

@ -1,7 +1,7 @@
<script lang="ts">
import { toast } from 'svelte-sonner';
import { onMount, tick, getContext } from 'svelte';
import { mobile, modelfiles, settings, showSidebar } from '$lib/stores';
import { type Model, mobile, settings, showSidebar, models } from '$lib/stores';
import { blobToFile, calculateSHA256, findWordIndices } from '$lib/utils';
import {
@ -27,7 +27,9 @@
export let stopResponse: Function;
export let autoScroll = true;
export let selectedModel = '';
export let atSelectedModel: Model | undefined;
export let selectedModels = [''];
let chatTextAreaElement: HTMLTextAreaElement;
let filesInputElement;
@ -52,6 +54,11 @@
let speechRecognition;
let visionCapableModels = [];
$: visionCapableModels = [...(atSelectedModel ? [atSelectedModel] : selectedModels)].filter(
(model) => $models.find((m) => m.id === model)?.info?.meta?.capabilities?.vision ?? true
);
$: if (prompt) {
if (chatTextAreaElement) {
chatTextAreaElement.style.height = '';
@ -358,6 +365,10 @@
inputFiles.forEach((file) => {
console.log(file, file.name.split('.').at(-1));
if (['image/gif', 'image/webp', 'image/jpeg', 'image/png'].includes(file['type'])) {
if (visionCapableModels.length === 0) {
toast.error($i18n.t('Selected model(s) do not support image inputs'));
return;
}
let reader = new FileReader();
reader.onload = (event) => {
files = [
@ -429,8 +440,8 @@
<div class="fixed bottom-0 {$showSidebar ? 'left-0 md:left-[260px]' : 'left-0'} right-0">
<div class="w-full">
<div class="px-2.5 md:px-16 -mb-0.5 mx-auto inset-x-0 bg-transparent flex justify-center">
<div class="flex flex-col max-w-5xl w-full">
<div class=" -mb-0.5 mx-auto inset-x-0 bg-transparent flex justify-center">
<div class="flex flex-col max-w-6xl px-2.5 md:px-6 w-full">
<div class="relative">
{#if autoScroll === false && messages.length > 0}
<div class=" absolute -top-12 left-0 right-0 flex justify-center z-30">
@ -494,12 +505,12 @@
bind:chatInputPlaceholder
{messages}
on:select={(e) => {
selectedModel = e.detail;
atSelectedModel = e.detail;
chatTextAreaElement?.focus();
}}
/>
{#if selectedModel !== ''}
{#if atSelectedModel !== undefined}
<div
class="px-3 py-2.5 text-left w-full flex justify-between items-center absolute bottom-0 left-0 right-0 bg-gradient-to-t from-50% from-white dark:from-gray-900"
>
@ -508,21 +519,21 @@
crossorigin="anonymous"
alt="model profile"
class="size-5 max-w-[28px] object-cover rounded-full"
src={$modelfiles.find((modelfile) => modelfile.tagName === selectedModel.id)
?.imageUrl ??
src={$models.find((model) => model.id === atSelectedModel.id)?.info?.meta
?.profile_image_url ??
($i18n.language === 'dg-DG'
? `/doge.png`
: `${WEBUI_BASE_URL}/static/favicon.png`)}
/>
<div>
Talking to <span class=" font-medium">{selectedModel.name} </span>
Talking to <span class=" font-medium">{atSelectedModel.name}</span>
</div>
</div>
<div>
<button
class="flex items-center"
on:click={() => {
selectedModel = '';
atSelectedModel = undefined;
}}
>
<XMark />
@ -535,7 +546,7 @@
</div>
<div class="bg-white dark:bg-gray-900">
<div class="max-w-6xl px-2.5 md:px-16 mx-auto inset-x-0">
<div class="max-w-6xl px-2.5 md:px-6 mx-auto inset-x-0">
<div class=" pb-2">
<input
bind:this={filesInputElement}
@ -550,6 +561,12 @@
if (
['image/gif', 'image/webp', 'image/jpeg', 'image/png'].includes(file['type'])
) {
if (visionCapableModels.length === 0) {
toast.error($i18n.t('Selected model(s) do not support image inputs'));
inputFiles = null;
filesInputElement.value = '';
return;
}
let reader = new FileReader();
reader.onload = (event) => {
files = [
@ -589,6 +606,7 @@
dir={$settings?.chatDirection ?? 'LTR'}
class=" flex flex-col relative w-full rounded-3xl px-1.5 bg-gray-50 dark:bg-gray-850 dark:text-gray-100"
on:submit|preventDefault={() => {
// check if selectedModels support image input
submitPrompt(prompt, user);
}}
>
@ -597,7 +615,36 @@
{#each files as file, fileIdx}
<div class=" relative group">
{#if file.type === 'image'}
<img src={file.url} alt="input" class=" h-16 w-16 rounded-xl object-cover" />
<div class="relative">
<img
src={file.url}
alt="input"
class=" h-16 w-16 rounded-xl object-cover"
/>
{#if atSelectedModel ? visionCapableModels.length === 0 : selectedModels.length !== visionCapableModels.length}
<Tooltip
className=" absolute top-1 left-1"
content={$i18n.t('{{ models }}', {
models: [...(atSelectedModel ? [atSelectedModel] : selectedModels)]
.filter((id) => !visionCapableModels.includes(id))
.join(', ')
})}
>
<svg
xmlns="http://www.w3.org/2000/svg"
viewBox="0 0 24 24"
fill="currentColor"
class="size-4 fill-yellow-300"
>
<path
fill-rule="evenodd"
d="M9.401 3.003c1.155-2 4.043-2 5.197 0l7.355 12.748c1.154 2-.29 4.5-2.599 4.5H4.645c-2.309 0-3.752-2.5-2.598-4.5L9.4 3.003ZM12 8.25a.75.75 0 0 1 .75.75v3.75a.75.75 0 0 1-1.5 0V9a.75.75 0 0 1 .75-.75Zm0 8.25a.75.75 0 1 0 0-1.5.75.75 0 0 0 0 1.5Z"
clip-rule="evenodd"
/>
</svg>
</Tooltip>
{/if}
</div>
{:else if file.type === 'doc'}
<div
class="h-16 w-[15rem] flex items-center space-x-3 px-2.5 dark:bg-gray-600 rounded-xl border border-gray-200 dark:border-none"
@ -883,7 +930,7 @@
if (e.key === 'Escape') {
console.log('Escape');
selectedModel = '';
atSelectedModel = undefined;
}
}}
rows="1"

View File

@ -1,7 +1,7 @@
<script lang="ts">
import { v4 as uuidv4 } from 'uuid';
import { chats, config, modelfiles, settings, user as _user, mobile } from '$lib/stores';
import { chats, config, settings, user as _user, mobile } from '$lib/stores';
import { tick, getContext } from 'svelte';
import { toast } from 'svelte-sonner';
@ -26,7 +26,6 @@
export let user = $_user;
export let prompt;
export let suggestionPrompts = [];
export let processing = '';
export let bottomPadding = false;
export let autoScroll;
@ -34,7 +33,6 @@
export let messages = [];
export let selectedModels;
export let selectedModelfiles = [];
$: if (autoScroll && bottomPadding) {
(async () => {
@ -247,9 +245,7 @@
<div class="h-full flex mb-16">
{#if messages.length == 0}
<Placeholder
models={selectedModels}
modelfiles={selectedModelfiles}
{suggestionPrompts}
modelIds={selectedModels}
submitPrompt={async (p) => {
let text = p;
@ -316,7 +312,6 @@
{#key message.id}
<ResponseMessage
{message}
modelfiles={selectedModelfiles}
siblings={history.messages[message.parentId]?.childrenIds ?? []}
isLastMessage={messageIdx + 1 === messages.length}
{readOnly}
@ -348,7 +343,6 @@
{chatId}
parentMessage={history.messages[message.parentId]}
{messageIdx}
{selectedModelfiles}
{updateChatMessages}
{confirmEditResponseMessage}
{rateMessage}

View File

@ -4,7 +4,7 @@
import hljs from 'highlight.js';
import 'highlight.js/styles/github-dark.min.css';
import { loadPyodide } from 'pyodide';
import { tick } from 'svelte';
import { onMount, tick } from 'svelte';
import PyodideWorker from '$lib/workers/pyodide.worker?worker';
export let id = '';
@ -12,6 +12,7 @@
export let lang = '';
export let code = '';
let highlightedCode = null;
let executing = false;
let stdout = null;
@ -202,60 +203,60 @@ __builtins__.input = input`);
};
};
$: highlightedCode = code ? hljs.highlightAuto(code, hljs.getLanguage(lang)?.aliases).value : '';
$: if (code) {
highlightedCode = hljs.highlightAuto(code, hljs.getLanguage(lang)?.aliases).value || code;
}
</script>
{#if code}
<div class="mb-4" dir="ltr">
<div
class="flex justify-between bg-[#202123] text-white text-xs px-4 pt-1 pb-0.5 rounded-t-lg overflow-x-auto"
>
<div class="p-1">{@html lang}</div>
<div class="mb-4" dir="ltr">
<div
class="flex justify-between bg-[#202123] text-white text-xs px-4 pt-1 pb-0.5 rounded-t-lg overflow-x-auto"
>
<div class="p-1">{@html lang}</div>
<div class="flex items-center">
{#if lang.toLowerCase() === 'python' || lang.toLowerCase() === 'py' || (lang === '' && checkPythonCode(code))}
{#if executing}
<div class="copy-code-button bg-none border-none p-1 cursor-not-allowed">Running</div>
{:else}
<button
class="copy-code-button bg-none border-none p-1"
on:click={() => {
executePython(code);
}}>Run</button
>
{/if}
<div class="flex items-center">
{#if lang.toLowerCase() === 'python' || lang.toLowerCase() === 'py' || (lang === '' && checkPythonCode(code))}
{#if executing}
<div class="copy-code-button bg-none border-none p-1 cursor-not-allowed">Running</div>
{:else}
<button
class="copy-code-button bg-none border-none p-1"
on:click={() => {
executePython(code);
}}>Run</button
>
{/if}
<button class="copy-code-button bg-none border-none p-1" on:click={copyCode}
>{copied ? 'Copied' : 'Copy Code'}</button
>
</div>
{/if}
<button class="copy-code-button bg-none border-none p-1" on:click={copyCode}
>{copied ? 'Copied' : 'Copy Code'}</button
>
</div>
<pre
class=" hljs p-4 px-5 overflow-x-auto"
style="border-top-left-radius: 0px; border-top-right-radius: 0px; {(executing ||
stdout ||
stderr ||
result) &&
'border-bottom-left-radius: 0px; border-bottom-right-radius: 0px;'}"><code
class="language-{lang} rounded-t-none whitespace-pre">{@html highlightedCode || code}</code
></pre>
<div
id="plt-canvas-{id}"
class="bg-[#202123] text-white max-w-full overflow-x-auto scrollbar-hidden"
/>
{#if executing}
<div class="bg-[#202123] text-white px-4 py-4 rounded-b-lg">
<div class=" text-gray-500 text-xs mb-1">STDOUT/STDERR</div>
<div class="text-sm">Running...</div>
</div>
{:else if stdout || stderr || result}
<div class="bg-[#202123] text-white px-4 py-4 rounded-b-lg">
<div class=" text-gray-500 text-xs mb-1">STDOUT/STDERR</div>
<div class="text-sm">{stdout || stderr || result}</div>
</div>
{/if}
</div>
{/if}
<pre
class=" hljs p-4 px-5 overflow-x-auto"
style="border-top-left-radius: 0px; border-top-right-radius: 0px; {(executing ||
stdout ||
stderr ||
result) &&
'border-bottom-left-radius: 0px; border-bottom-right-radius: 0px;'}"><code
class="language-{lang} rounded-t-none whitespace-pre">{@html highlightedCode || code}</code
></pre>
<div
id="plt-canvas-{id}"
class="bg-[#202123] text-white max-w-full overflow-x-auto scrollbar-hidden"
/>
{#if executing}
<div class="bg-[#202123] text-white px-4 py-4 rounded-b-lg">
<div class=" text-gray-500 text-xs mb-1">STDOUT/STDERR</div>
<div class="text-sm">Running...</div>
</div>
{:else if stdout || stderr || result}
<div class="bg-[#202123] text-white px-4 py-4 rounded-b-lg">
<div class=" text-gray-500 text-xs mb-1">STDOUT/STDERR</div>
<div class="text-sm">{stdout || stderr || result}</div>
</div>
{/if}
</div>
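
The Run button hands Python code to a dedicated Pyodide worker. The message protocol below is a sketch: the id/code request and the stdout/stderr/result reply mirror the component state above, but the exact shape lives in pyodide.worker.ts, which this diff does not show:

// Hypothetical round trip with the Pyodide worker.
const worker = new PyodideWorker();

worker.postMessage({ id, code }); // run this block's code off the main thread

worker.onmessage = (event) => {
	// Field names assumed to match the component state they populate.
	const { stdout: out, stderr: err, result: res } = event.data;
	stdout = out;
	stderr = err;
	result = res;
	executing = false;
};

worker.onerror = (event) => {
	stderr = `${event.message}`;
	executing = false;
};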

View File

@ -13,8 +13,6 @@
export let parentMessage;
export let selectedModelfiles;
export let updateChatMessages: Function;
export let confirmEditResponseMessage: Function;
export let rateMessage: Function;
@ -130,7 +128,6 @@
>
<ResponseMessage
message={groupedMessages[model].messages[groupedMessagesIdx[model]]}
modelfiles={selectedModelfiles}
siblings={groupedMessages[model].messages.map((m) => m.id)}
isLastMessage={true}
{updateChatMessages}

View File

@ -1,6 +1,6 @@
<script lang="ts">
import { WEBUI_BASE_URL } from '$lib/constants';
import { user } from '$lib/stores';
import { config, user, models as _models } from '$lib/stores';
import { onMount, getContext } from 'svelte';
import { blur, fade } from 'svelte/transition';
@ -9,23 +9,20 @@
const i18n = getContext('i18n');
export let modelIds = [];
export let models = [];
export let modelfiles = [];
export let submitPrompt;
export let suggestionPrompts;
let mounted = false;
let modelfile = null;
let selectedModelIdx = 0;
$: modelfile =
models[selectedModelIdx] in modelfiles ? modelfiles[models[selectedModelIdx]] : null;
$: if (models.length > 0) {
$: if (modelIds.length > 0) {
selectedModelIdx = models.length - 1;
}
$: models = modelIds.map((id) => $_models.find((m) => m.id === id));
onMount(() => {
mounted = true;
});
@ -41,25 +38,14 @@
selectedModelIdx = modelIdx;
}}
>
{#if model in modelfiles}
<img
crossorigin="anonymous"
src={modelfiles[model]?.imageUrl ?? `${WEBUI_BASE_URL}/static/favicon.png`}
alt="modelfile"
class=" size-[2.7rem] rounded-full border-[1px] border-gray-200 dark:border-none"
draggable="false"
/>
{:else}
<img
crossorigin="anonymous"
src={$i18n.language === 'dg-DG'
? `/doge.png`
: `${WEBUI_BASE_URL}/static/favicon.png`}
class=" size-[2.7rem] rounded-full border-[1px] border-gray-200 dark:border-none"
alt="logo"
draggable="false"
/>
{/if}
<img
crossorigin="anonymous"
src={model?.info?.meta?.profile_image_url ??
($i18n.language === 'dg-DG' ? `/doge.png` : `${WEBUI_BASE_URL}/static/favicon.png`)}
class=" size-[2.7rem] rounded-full border-[1px] border-gray-200 dark:border-none"
alt="logo"
draggable="false"
/>
</button>
{/each}
</div>
@ -70,23 +56,32 @@
>
<div>
<div class=" capitalize line-clamp-1" in:fade={{ duration: 200 }}>
{#if modelfile}
{modelfile.title}
{#if models[selectedModelIdx]?.info}
{models[selectedModelIdx]?.info?.name}
{:else}
{$i18n.t('Hello, {{name}}', { name: $user.name })}
{/if}
</div>
<div in:fade={{ duration: 200, delay: 200 }}>
{#if modelfile}
<div class="mt-0.5 text-base font-normal text-gray-500 dark:text-gray-400">
{modelfile.desc}
{#if models[selectedModelIdx]?.info}
<div class="mt-0.5 text-base font-normal text-gray-500 dark:text-gray-400 line-clamp-3">
{models[selectedModelIdx]?.info?.meta?.description}
</div>
{#if modelfile.user}
{#if models[selectedModelIdx]?.info?.meta?.user}
<div class="mt-0.5 text-sm font-normal text-gray-400 dark:text-gray-500">
By <a href="https://openwebui.com/m/{modelfile.user.username}"
>{modelfile.user.name ? modelfile.user.name : `@${modelfile.user.username}`}</a
>
By
{#if models[selectedModelIdx]?.info?.meta?.user.community}
<a
href="https://openwebui.com/m/{models[selectedModelIdx]?.info?.meta?.user
.username}"
>{models[selectedModelIdx]?.info?.meta?.user.name
? models[selectedModelIdx]?.info?.meta?.user.name
: `@${models[selectedModelIdx]?.info?.meta?.user.username}`}</a
>
{:else}
{models[selectedModelIdx]?.info?.meta?.user.name}
{/if}
</div>
{/if}
{:else}
@ -99,7 +94,11 @@
</div>
<div class=" w-full" in:fade={{ duration: 200, delay: 300 }}>
<Suggestions {suggestionPrompts} {submitPrompt} />
<Suggestions
suggestionPrompts={models[selectedModelIdx]?.info?.meta?.suggestion_prompts ??
$config.default_prompt_suggestions}
{submitPrompt}
/>
</div>
</div>
{/key}

View File

@ -14,7 +14,7 @@
const dispatch = createEventDispatcher();
import { config, settings } from '$lib/stores';
import { config, models, settings } from '$lib/stores';
import { synthesizeOpenAISpeech } from '$lib/apis/audio';
import { imageGenerations } from '$lib/apis/images';
import {
@ -34,7 +34,6 @@
import RateComment from './RateComment.svelte';
import CitationsModal from '$lib/components/chat/Messages/CitationsModal.svelte';
export let modelfiles = [];
export let message;
export let siblings;
@ -52,6 +51,9 @@
export let continueGeneration: Function;
export let regenerateResponse: Function;
let model = null;
$: model = $models.find((m) => m.id === message.model);
let edit = false;
let editedContent = '';
let editTextAreaElement: HTMLTextAreaElement;
@ -78,6 +80,13 @@
return `<code>${code.replaceAll('&amp;', '&')}</code>`;
};
// Open all links in a new tab/window (from https://github.com/markedjs/marked/issues/655#issuecomment-383226346)
const origLinkRenderer = renderer.link;
renderer.link = (href, title, text) => {
const html = origLinkRenderer.call(renderer, href, title, text);
return html.replace(/^<a /, '<a target="_blank" rel="nofollow" ');
};
const { extensions, ...defaults } = marked.getDefaults() as marked.MarkedOptions & {
// eslint-disable-next-line @typescript-eslint/no-explicit-any
extensions: any;
@ -338,17 +347,13 @@
dir={$settings.chatDirection}
>
<ProfileImage
src={modelfiles[message.model]?.imageUrl ??
src={model?.info?.meta?.profile_image_url ??
($i18n.language === 'dg-DG' ? `/doge.png` : `${WEBUI_BASE_URL}/static/favicon.png`)}
/>
<div class="w-full overflow-hidden pl-1">
<Name>
{#if message.model in modelfiles}
{modelfiles[message.model]?.title}
{:else}
{message.model ? ` ${message.model}` : ''}
{/if}
{model?.name ?? message.model}
{#if message.timestamp}
<span
@ -391,7 +396,7 @@
<div class=" mt-2 mb-1 flex justify-end space-x-1.5 text-sm font-medium">
<button
id="close-edit-message-button"
class=" px-4 py-2 bg-gray-900 hover:bg-gray-850 text-gray-100 transition rounded-3xl"
class="px-4 py-2 bg-white hover:bg-gray-100 text-gray-800 transition rounded-3xl"
on:click={() => {
cancelEditMessage();
}}
@ -401,7 +406,7 @@
<button
id="save-edit-message-button"
class="px-4 py-2 bg-white hover:bg-gray-100 text-gray-800 transition rounded-3xl"
class=" px-4 py-2 bg-gray-900 hover:bg-gray-850 text-gray-100 transition rounded-3xl"
on:click={() => {
editMessageConfirmHandler();
}}
@ -442,8 +447,8 @@
{#if token.type === 'code'}
<CodeBlock
id={`${message.id}-${tokenIdx}`}
lang={token.lang}
code={revertSanitizedResponseContent(token.text)}
lang={token?.lang ?? ''}
code={revertSanitizedResponseContent(token?.text ?? '')}
/>
{:else}
{@html marked.parse(token.raw, {
@ -688,7 +693,7 @@
</button>
</Tooltip>
{#if $config.images && !readOnly}
{#if $config?.features.enable_image_generation && !readOnly}
<Tooltip content="Generate Image" placement="bottom">
<button
class="{isLastMessage

View File

@ -4,7 +4,7 @@
import { tick, createEventDispatcher, getContext } from 'svelte';
import Name from './Name.svelte';
import ProfileImage from './ProfileImage.svelte';
import { modelfiles, settings } from '$lib/stores';
import { models, settings } from '$lib/stores';
import Tooltip from '$lib/components/common/Tooltip.svelte';
import { user as _user } from '$lib/stores';
@ -60,8 +60,7 @@
{#if !($settings?.chatBubble ?? true)}
<ProfileImage
src={message.user
? $modelfiles.find((modelfile) => modelfile.tagName === message.user)?.imageUrl ??
'/user.png'
? $models.find((m) => m.id === message.user)?.info?.meta?.profile_image_url ?? '/user.png'
: user?.profile_image_url ?? '/user.png'}
/>
{/if}
@ -70,12 +69,8 @@
<div>
<Name>
{#if message.user}
{#if $modelfiles.map((modelfile) => modelfile.tagName).includes(message.user)}
{$modelfiles.find((modelfile) => modelfile.tagName === message.user)?.title}
{:else}
{$i18n.t('You')}
<span class=" text-gray-500 text-sm font-medium">{message?.user ?? ''}</span>
{/if}
{$i18n.t('You')}
<span class=" text-gray-500 text-sm font-medium">{message?.user ?? ''}</span>
{:else if $settings.showUsername || $_user.name !== user.name}
{user.name}
{:else}
@ -201,7 +196,7 @@
<div class=" mt-2 mb-1 flex justify-end space-x-1.5 text-sm font-medium">
<button
id="close-edit-message-button"
class=" px-4 py-2 bg-gray-900 hover:bg-gray-850 text-gray-100 transition rounded-3xl"
class="px-4 py-2 bg-white hover:bg-gray-100 text-gray-800 transition rounded-3xl"
on:click={() => {
cancelEditMessage();
}}
@ -211,7 +206,7 @@
<button
id="save-edit-message-button"
class="px-4 py-2 bg-white hover:bg-gray-100 text-gray-800 transition rounded-3xl"
class=" px-4 py-2 bg-gray-900 hover:bg-gray-850 text-gray-100 transition rounded-3xl"
on:click={() => {
editMessageConfirmHandler();
}}

View File

@ -1,13 +1,13 @@
<script lang="ts">
import { Collapsible } from 'bits-ui';
import { setDefaultModels } from '$lib/apis/configs';
import { models, showSettings, settings, user, mobile } from '$lib/stores';
import { onMount, tick, getContext } from 'svelte';
import { toast } from 'svelte-sonner';
import Selector from './ModelSelector/Selector.svelte';
import Tooltip from '../common/Tooltip.svelte';
import { setDefaultModels } from '$lib/apis/configs';
import { updateUserSettings } from '$lib/apis/users';
const i18n = getContext('i18n');
export let selectedModels = [''];
@ -22,12 +22,8 @@
return;
}
settings.set({ ...$settings, models: selectedModels });
localStorage.setItem('settings', JSON.stringify($settings));
await updateUserSettings(localStorage.token, { ui: $settings });
if ($user.role === 'admin') {
console.log('setting default models globally');
await setDefaultModels(localStorage.token, selectedModels.join(','));
}
toast.success($i18n.t('Default model updated'));
};
@ -45,13 +41,11 @@
<div class="mr-1 max-w-full">
<Selector
placeholder={$i18n.t('Select a model')}
items={$models
.filter((model) => model.name !== 'hr')
.map((model) => ({
value: model.id,
label: model.name,
info: model
}))}
items={$models.map((model) => ({
value: model.id,
label: model.name,
model: model
}))}
bind:value={selectedModel}
/>
</div>

View File

@ -12,7 +12,9 @@
import { user, MODEL_DOWNLOAD_POOL, models, mobile } from '$lib/stores';
import { toast } from 'svelte-sonner';
import { capitalizeFirstLetter, getModels, splitStream } from '$lib/utils';
import { capitalizeFirstLetter, sanitizeResponseContent, splitStream } from '$lib/utils';
import { getModels } from '$lib/apis';
import Tooltip from '$lib/components/common/Tooltip.svelte';
const i18n = getContext('i18n');
@ -23,7 +25,12 @@
export let searchEnabled = true;
export let searchPlaceholder = $i18n.t('Search a model');
export let items = [{ value: 'mango', label: 'Mango' }];
export let items: {
label: string;
value: string;
// eslint-disable-next-line @typescript-eslint/no-explicit-any
[key: string]: any;
}[] = [];
export let className = 'w-[30rem]';
@ -239,19 +246,37 @@
}}
>
<div class="flex items-center gap-2">
<div class="line-clamp-1">
{item.label}
<span class=" text-xs font-medium text-gray-600 dark:text-gray-400"
>{item.info?.details?.parameter_size ?? ''}</span
>
<div class="flex items-center">
<div class="line-clamp-1">
{item.label}
</div>
{#if item.model.owned_by === 'ollama' && (item.model.ollama?.details?.parameter_size ?? '') !== ''}
<div class="flex ml-1 items-center">
<Tooltip
content={`${
item.model.ollama?.details?.quantization_level
? item.model.ollama?.details?.quantization_level + ' '
: ''
}${
item.model.ollama?.size
? `(${(item.model.ollama?.size / 1024 ** 3).toFixed(1)}GB)`
: ''
}`}
className="self-end"
>
<span class=" text-xs font-medium text-gray-600 dark:text-gray-400"
>{item.model.ollama?.details?.parameter_size ?? ''}</span
>
</Tooltip>
</div>
{/if}
</div>
<!-- {JSON.stringify(item.info)} -->
{#if item.info.external}
<Tooltip content={item.info?.source ?? 'External'}>
<div class=" mr-2">
{#if item.model.owned_by === 'openai'}
<Tooltip content={`${'External'}`}>
<div class="">
<svg
xmlns="http://www.w3.org/2000/svg"
viewBox="0 0 16 16"
@ -271,15 +296,15 @@
</svg>
</div>
</Tooltip>
{:else}
{/if}
{#if item.model?.info?.meta?.description}
<Tooltip
content={`${
item.info?.details?.quantization_level
? item.info?.details?.quantization_level + ' '
: ''
}${item.info.size ? `(${(item.info.size / 1024 ** 3).toFixed(1)}GB)` : ''}`}
content={`${sanitizeResponseContent(
item.model?.info?.meta?.description
).replaceAll('\n', '<br>')}`}
>
<div class=" mr-2">
<div class="">
<svg
xmlns="http://www.w3.org/2000/svg"
fill="none"

View File

@ -1,7 +1,7 @@
<script lang="ts">
import { getVersionUpdates } from '$lib/apis';
import { getOllamaVersion } from '$lib/apis/ollama';
import { WEBUI_VERSION } from '$lib/constants';
import { WEBUI_BUILD_HASH, WEBUI_VERSION } from '$lib/constants';
import { WEBUI_NAME, config, showChangelog } from '$lib/stores';
import { compareVersion } from '$lib/utils';
import { onMount, getContext } from 'svelte';
@ -54,7 +54,7 @@
<div class="flex w-full justify-between items-center">
<div class="flex flex-col text-xs text-gray-700 dark:text-gray-200">
<div class="flex gap-1">
<Tooltip content={WEBUI_VERSION === '0.1.117' ? "🪖 We're just getting started." : ''}>
<Tooltip content={WEBUI_BUILD_HASH}>
v{WEBUI_VERSION}
</Tooltip>

View File

@ -1,155 +0,0 @@
<script lang="ts">
import { createEventDispatcher, onMount, getContext } from 'svelte';
import AdvancedParams from './Advanced/AdvancedParams.svelte';
const i18n = getContext('i18n');
const dispatch = createEventDispatcher();
export let saveSettings: Function;
// Advanced
let requestFormat = '';
let keepAlive = null;
let options = {
// Advanced
seed: 0,
temperature: '',
repeat_penalty: '',
repeat_last_n: '',
mirostat: '',
mirostat_eta: '',
mirostat_tau: '',
top_k: '',
top_p: '',
stop: '',
tfs_z: '',
num_ctx: '',
num_predict: ''
};
const toggleRequestFormat = async () => {
if (requestFormat === '') {
requestFormat = 'json';
} else {
requestFormat = '';
}
saveSettings({ requestFormat: requestFormat !== '' ? requestFormat : undefined });
};
onMount(() => {
let settings = JSON.parse(localStorage.getItem('settings') ?? '{}');
requestFormat = settings.requestFormat ?? '';
keepAlive = settings.keepAlive ?? null;
options.seed = settings.seed ?? 0;
options.temperature = settings.temperature ?? '';
options.repeat_penalty = settings.repeat_penalty ?? '';
options.top_k = settings.top_k ?? '';
options.top_p = settings.top_p ?? '';
options.num_ctx = settings.num_ctx ?? '';
options = { ...options, ...settings.options };
options.stop = (settings?.options?.stop ?? []).join(',');
});
</script>
<div class="flex flex-col h-full justify-between text-sm">
<div class=" space-y-3 pr-1.5 overflow-y-scroll max-h-80">
<div class=" text-sm font-medium">{$i18n.t('Parameters')}</div>
<AdvancedParams bind:options />
<hr class=" dark:border-gray-700" />
<div class=" py-1 w-full justify-between">
<div class="flex w-full justify-between">
<div class=" self-center text-xs font-medium">{$i18n.t('Keep Alive')}</div>
<button
class="p-1 px-3 text-xs flex rounded transition"
type="button"
on:click={() => {
keepAlive = keepAlive === null ? '5m' : null;
}}
>
{#if keepAlive === null}
<span class="ml-2 self-center">{$i18n.t('Default')}</span>
{:else}
<span class="ml-2 self-center">{$i18n.t('Custom')}</span>
{/if}
</button>
</div>
{#if keepAlive !== null}
<div class="flex mt-1 space-x-2">
<input
class="w-full rounded-lg py-2 px-4 text-sm dark:text-gray-300 dark:bg-gray-850 outline-none"
type="text"
placeholder={$i18n.t("e.g. '30s','10m'. Valid time units are 's', 'm', 'h'.")}
bind:value={keepAlive}
/>
</div>
{/if}
</div>
<div>
<div class=" py-1 flex w-full justify-between">
<div class=" self-center text-sm font-medium">{$i18n.t('Request Mode')}</div>
<button
class="p-1 px-3 text-xs flex rounded transition"
on:click={() => {
toggleRequestFormat();
}}
>
{#if requestFormat === ''}
<span class="ml-2 self-center"> {$i18n.t('Default')} </span>
{:else if requestFormat === 'json'}
<!-- <svg
xmlns="http://www.w3.org/2000/svg"
viewBox="0 0 20 20"
fill="currentColor"
class="w-4 h-4 self-center"
>
<path
d="M10 2a.75.75 0 01.75.75v1.5a.75.75 0 01-1.5 0v-1.5A.75.75 0 0110 2zM10 15a.75.75 0 01.75.75v1.5a.75.75 0 01-1.5 0v-1.5A.75.75 0 0110 15zM10 7a3 3 0 100 6 3 3 0 000-6zM15.657 5.404a.75.75 0 10-1.06-1.06l-1.061 1.06a.75.75 0 001.06 1.06l1.06-1.06zM6.464 14.596a.75.75 0 10-1.06-1.06l-1.06 1.06a.75.75 0 001.06 1.06l1.06-1.06zM18 10a.75.75 0 01-.75.75h-1.5a.75.75 0 010-1.5h1.5A.75.75 0 0118 10zM5 10a.75.75 0 01-.75.75h-1.5a.75.75 0 010-1.5h1.5A.75.75 0 015 10zM14.596 15.657a.75.75 0 001.06-1.06l-1.06-1.061a.75.75 0 10-1.06 1.06l1.06 1.06zM5.404 6.464a.75.75 0 001.06-1.06l-1.06-1.06a.75.75 0 10-1.061 1.06l1.06 1.06z"
/>
</svg> -->
<span class="ml-2 self-center">{$i18n.t('JSON')}</span>
{/if}
</button>
</div>
</div>
</div>
<div class="flex justify-end pt-3 text-sm font-medium">
<button
class=" px-4 py-2 bg-emerald-700 hover:bg-emerald-800 text-gray-100 transition rounded-lg"
on:click={() => {
saveSettings({
options: {
seed: (options.seed !== 0 ? options.seed : undefined) ?? undefined,
stop: options.stop !== '' ? options.stop.split(',').filter((e) => e) : undefined,
temperature: options.temperature !== '' ? options.temperature : undefined,
repeat_penalty: options.repeat_penalty !== '' ? options.repeat_penalty : undefined,
repeat_last_n: options.repeat_last_n !== '' ? options.repeat_last_n : undefined,
mirostat: options.mirostat !== '' ? options.mirostat : undefined,
mirostat_eta: options.mirostat_eta !== '' ? options.mirostat_eta : undefined,
mirostat_tau: options.mirostat_tau !== '' ? options.mirostat_tau : undefined,
top_k: options.top_k !== '' ? options.top_k : undefined,
top_p: options.top_p !== '' ? options.top_p : undefined,
tfs_z: options.tfs_z !== '' ? options.tfs_z : undefined,
num_ctx: options.num_ctx !== '' ? options.num_ctx : undefined,
num_predict: options.num_predict !== '' ? options.num_predict : undefined
},
keepAlive: keepAlive ? (isNaN(keepAlive) ? keepAlive : parseInt(keepAlive)) : undefined
});
dispatch('save');
}}
>
{$i18n.t('Save')}
</button>
</div>
</div>

View File

@ -1,14 +1,16 @@
<script lang="ts">
import { getContext } from 'svelte';
import { getContext, createEventDispatcher } from 'svelte';
const dispatch = createEventDispatcher();
const i18n = getContext('i18n');
export let options = {
export let params = {
// Advanced
seed: 0,
stop: '',
stop: null,
temperature: '',
repeat_penalty: '',
frequency_penalty: '',
repeat_last_n: '',
mirostat: '',
mirostat_eta: '',
@ -17,40 +19,86 @@
top_p: '',
tfs_z: '',
num_ctx: '',
num_predict: ''
max_tokens: '',
template: null
};
let customFieldName = '';
let customFieldValue = '';
$: if (params) {
dispatch('change', params);
}
</script>
<div class=" space-y-3 text-xs">
<div>
<div class=" py-0.5 flex w-full justify-between">
<div class=" w-20 text-xs font-medium self-center">{$i18n.t('Seed')}</div>
<div class=" flex-1 self-center">
<input
class="w-full rounded-lg py-2 px-4 text-sm dark:text-gray-300 dark:bg-gray-850 outline-none"
type="number"
placeholder="Enter Seed"
bind:value={options.seed}
autocomplete="off"
min="0"
/>
</div>
<div class=" space-y-1 text-xs">
<div class=" py-0.5 w-full justify-between">
<div class="flex w-full justify-between">
<div class=" self-center text-xs font-medium">{$i18n.t('Seed')}</div>
<button
class="p-1 px-3 text-xs flex rounded transition"
type="button"
on:click={() => {
params.seed = (params?.seed ?? null) === null ? 0 : null;
}}
>
{#if (params?.seed ?? null) === null}
<span class="ml-2 self-center"> {$i18n.t('Default')} </span>
{:else}
<span class="ml-2 self-center"> {$i18n.t('Custom')} </span>
{/if}
</button>
</div>
{#if (params?.seed ?? null) !== null}
<div class="flex mt-0.5 space-x-2">
<div class=" flex-1">
<input
class="w-full rounded-lg py-2 px-4 text-sm dark:text-gray-300 dark:bg-gray-850 outline-none"
type="number"
placeholder="Enter Seed"
bind:value={params.seed}
autocomplete="off"
min="0"
/>
</div>
</div>
{/if}
</div>
<div>
<div class=" py-0.5 flex w-full justify-between">
<div class=" w-20 text-xs font-medium self-center">{$i18n.t('Stop Sequence')}</div>
<div class=" flex-1 self-center">
<input
class="w-full rounded-lg py-2 px-4 text-sm dark:text-gray-300 dark:bg-gray-850 outline-none"
type="text"
placeholder={$i18n.t('Enter stop sequence')}
bind:value={options.stop}
autocomplete="off"
/>
</div>
<div class=" py-0.5 w-full justify-between">
<div class="flex w-full justify-between">
<div class=" self-center text-xs font-medium">{$i18n.t('Stop Sequence')}</div>
<button
class="p-1 px-3 text-xs flex rounded transition"
type="button"
on:click={() => {
params.stop = (params?.stop ?? null) === null ? '' : null;
}}
>
{#if (params?.stop ?? null) === null}
<span class="ml-2 self-center"> {$i18n.t('Default')} </span>
{:else}
<span class="ml-2 self-center"> {$i18n.t('Custom')} </span>
{/if}
</button>
</div>
{#if (params?.stop ?? null) !== null}
<div class="flex mt-0.5 space-x-2">
<div class=" flex-1">
<input
class="w-full rounded-lg py-2 px-4 text-sm dark:text-gray-300 dark:bg-gray-850 outline-none"
type="text"
placeholder={$i18n.t('Enter stop sequence')}
bind:value={params.stop}
autocomplete="off"
/>
</div>
</div>
{/if}
</div>
<div class=" py-0.5 w-full justify-between">
@ -61,10 +109,10 @@
class="p-1 px-3 text-xs flex rounded transition"
type="button"
on:click={() => {
options.temperature = options.temperature === '' ? 0.8 : '';
params.temperature = (params?.temperature ?? '') === '' ? 0.8 : '';
}}
>
{#if options.temperature === ''}
{#if (params?.temperature ?? '') === ''}
<span class="ml-2 self-center"> {$i18n.t('Default')} </span>
{:else}
<span class="ml-2 self-center"> {$i18n.t('Custom')} </span>
@ -72,7 +120,7 @@
</button>
</div>
{#if options.temperature !== ''}
{#if (params?.temperature ?? '') !== ''}
<div class="flex mt-0.5 space-x-2">
<div class=" flex-1">
<input
@ -81,13 +129,13 @@
min="0"
max="1"
step="0.05"
bind:value={options.temperature}
bind:value={params.temperature}
class="w-full h-2 rounded-lg appearance-none cursor-pointer dark:bg-gray-700"
/>
</div>
<div>
<input
bind:value={options.temperature}
bind:value={params.temperature}
type="number"
class=" bg-transparent text-center w-14"
min="0"
@ -107,18 +155,18 @@
class="p-1 px-3 text-xs flex rounded transition"
type="button"
on:click={() => {
options.mirostat = options.mirostat === '' ? 0 : '';
params.mirostat = (params?.mirostat ?? '') === '' ? 0 : '';
}}
>
{#if options.mirostat === ''}
{#if (params?.mirostat ?? '') === ''}
<span class="ml-2 self-center">{$i18n.t('Default')}</span>
{:else}
<span class="ml-2 self-center">{$i18n.t('Default')}</span>
<span class="ml-2 self-center">{$i18n.t('Custom')}</span>
{/if}
</button>
</div>
{#if options.mirostat !== ''}
{#if (params?.mirostat ?? '') !== ''}
<div class="flex mt-0.5 space-x-2">
<div class=" flex-1">
<input
@ -127,13 +175,13 @@
min="0"
max="2"
step="1"
bind:value={options.mirostat}
bind:value={params.mirostat}
class="w-full h-2 rounded-lg appearance-none cursor-pointer dark:bg-gray-700"
/>
</div>
<div>
<input
bind:value={options.mirostat}
bind:value={params.mirostat}
type="number"
class=" bg-transparent text-center w-14"
min="0"
@ -153,18 +201,18 @@
class="p-1 px-3 text-xs flex rounded transition"
type="button"
on:click={() => {
options.mirostat_eta = options.mirostat_eta === '' ? 0.1 : '';
params.mirostat_eta = (params?.mirostat_eta ?? '') === '' ? 0.1 : '';
}}
>
{#if options.mirostat_eta === ''}
{#if (params?.mirostat_eta ?? '') === ''}
<span class="ml-2 self-center">{$i18n.t('Default')}</span>
{:else}
<span class="ml-2 self-center">{$i18n.t('Default')}</span>
<span class="ml-2 self-center">{$i18n.t('Custom')}</span>
{/if}
</button>
</div>
{#if options.mirostat_eta !== ''}
{#if (params?.mirostat_eta ?? '') !== ''}
<div class="flex mt-0.5 space-x-2">
<div class=" flex-1">
<input
@ -173,13 +221,13 @@
min="0"
max="1"
step="0.05"
bind:value={options.mirostat_eta}
bind:value={params.mirostat_eta}
class="w-full h-2 rounded-lg appearance-none cursor-pointer dark:bg-gray-700"
/>
</div>
<div>
<input
bind:value={options.mirostat_eta}
bind:value={params.mirostat_eta}
type="number"
class=" bg-transparent text-center w-14"
min="0"
@ -199,10 +247,10 @@
class="p-1 px-3 text-xs flex rounded transition"
type="button"
on:click={() => {
options.mirostat_tau = options.mirostat_tau === '' ? 5.0 : '';
params.mirostat_tau = (params?.mirostat_tau ?? '') === '' ? 5.0 : '';
}}
>
{#if options.mirostat_tau === ''}
{#if (params?.mirostat_tau ?? '') === ''}
<span class="ml-2 self-center">{$i18n.t('Default')}</span>
{:else}
<span class="ml-2 self-center">{$i18n.t('Custom')}</span>
@ -210,7 +258,7 @@
</button>
</div>
{#if options.mirostat_tau !== ''}
{#if (params?.mirostat_tau ?? '') !== ''}
<div class="flex mt-0.5 space-x-2">
<div class=" flex-1">
<input
@ -219,13 +267,13 @@
min="0"
max="10"
step="0.5"
bind:value={options.mirostat_tau}
bind:value={params.mirostat_tau}
class="w-full h-2 rounded-lg appearance-none cursor-pointer dark:bg-gray-700"
/>
</div>
<div>
<input
bind:value={options.mirostat_tau}
bind:value={params.mirostat_tau}
type="number"
class=" bg-transparent text-center w-14"
min="0"
@ -245,18 +293,18 @@
class="p-1 px-3 text-xs flex rounded transition"
type="button"
on:click={() => {
options.top_k = options.top_k === '' ? 40 : '';
params.top_k = (params?.top_k ?? '') === '' ? 40 : '';
}}
>
{#if options.top_k === ''}
{#if (params?.top_k ?? '') === ''}
<span class="ml-2 self-center">{$i18n.t('Default')}</span>
{:else}
<span class="ml-2 self-center">{$i18n.t('Default')}</span>
<span class="ml-2 self-center">{$i18n.t('Custom')}</span>
{/if}
</button>
</div>
{#if options.top_k !== ''}
{#if (params?.top_k ?? '') !== ''}
<div class="flex mt-0.5 space-x-2">
<div class=" flex-1">
<input
@ -265,13 +313,13 @@
min="0"
max="100"
step="0.5"
bind:value={options.top_k}
bind:value={params.top_k}
class="w-full h-2 rounded-lg appearance-none cursor-pointer dark:bg-gray-700"
/>
</div>
<div>
<input
bind:value={options.top_k}
bind:value={params.top_k}
type="number"
class=" bg-transparent text-center w-14"
min="0"
@ -291,18 +339,18 @@
class="p-1 px-3 text-xs flex rounded transition"
type="button"
on:click={() => {
options.top_p = options.top_p === '' ? 0.9 : '';
params.top_p = (params?.top_p ?? '') === '' ? 0.9 : '';
}}
>
{#if options.top_p === ''}
{#if (params?.top_p ?? '') === ''}
<span class="ml-2 self-center">{$i18n.t('Default')}</span>
{:else}
<span class="ml-2 self-center">{$i18n.t('Default')}</span>
<span class="ml-2 self-center">{$i18n.t('Custom')}</span>
{/if}
</button>
</div>
{#if options.top_p !== ''}
{#if (params?.top_p ?? '') !== ''}
<div class="flex mt-0.5 space-x-2">
<div class=" flex-1">
<input
@ -311,13 +359,13 @@
min="0"
max="1"
step="0.05"
bind:value={options.top_p}
bind:value={params.top_p}
class="w-full h-2 rounded-lg appearance-none cursor-pointer dark:bg-gray-700"
/>
</div>
<div>
<input
bind:value={options.top_p}
bind:value={params.top_p}
type="number"
class=" bg-transparent text-center w-14"
min="0"
@ -331,24 +379,24 @@
<div class=" py-0.5 w-full justify-between">
<div class="flex w-full justify-between">
<div class=" self-center text-xs font-medium">{$i18n.t('Repeat Penalty')}</div>
<div class=" self-center text-xs font-medium">{$i18n.t('Frequencey Penalty')}</div>
<button
class="p-1 px-3 text-xs flex rounded transition"
type="button"
on:click={() => {
options.repeat_penalty = options.repeat_penalty === '' ? 1.1 : '';
params.frequency_penalty = (params?.frequency_penalty ?? '') === '' ? 1.1 : '';
}}
>
{#if options.repeat_penalty === ''}
{#if (params?.frequency_penalty ?? '') === ''}
<span class="ml-2 self-center">{$i18n.t('Default')}</span>
{:else}
<span class="ml-2 self-center">{$i18n.t('Default')}</span>
<span class="ml-2 self-center">{$i18n.t('Custom')}</span>
{/if}
</button>
</div>
{#if options.repeat_penalty !== ''}
{#if (params?.frequency_penalty ?? '') !== ''}
<div class="flex mt-0.5 space-x-2">
<div class=" flex-1">
<input
@ -357,13 +405,13 @@
min="0"
max="2"
step="0.05"
bind:value={options.repeat_penalty}
bind:value={params.frequency_penalty}
class="w-full h-2 rounded-lg appearance-none cursor-pointer dark:bg-gray-700"
/>
</div>
<div>
<input
bind:value={options.repeat_penalty}
bind:value={params.frequency_penalty}
type="number"
class=" bg-transparent text-center w-14"
min="0"
@ -383,18 +431,18 @@
class="p-1 px-3 text-xs flex rounded transition"
type="button"
on:click={() => {
options.repeat_last_n = options.repeat_last_n === '' ? 64 : '';
params.repeat_last_n = (params?.repeat_last_n ?? '') === '' ? 64 : '';
}}
>
{#if options.repeat_last_n === ''}
{#if (params?.repeat_last_n ?? '') === ''}
<span class="ml-2 self-center">{$i18n.t('Default')}</span>
{:else}
<span class="ml-2 self-center">{$i18n.t('Default')}</span>
<span class="ml-2 self-center">{$i18n.t('Custom')}</span>
{/if}
</button>
</div>
{#if options.repeat_last_n !== ''}
{#if (params?.repeat_last_n ?? '') !== ''}
<div class="flex mt-0.5 space-x-2">
<div class=" flex-1">
<input
@ -403,13 +451,13 @@
min="-1"
max="128"
step="1"
bind:value={options.repeat_last_n}
bind:value={params.repeat_last_n}
class="w-full h-2 rounded-lg appearance-none cursor-pointer dark:bg-gray-700"
/>
</div>
<div>
<input
bind:value={options.repeat_last_n}
bind:value={params.repeat_last_n}
type="number"
class=" bg-transparent text-center w-14"
min="-1"
@ -429,18 +477,18 @@
class="p-1 px-3 text-xs flex rounded transition"
type="button"
on:click={() => {
options.tfs_z = options.tfs_z === '' ? 1 : '';
params.tfs_z = (params?.tfs_z ?? '') === '' ? 1 : '';
}}
>
{#if options.tfs_z === ''}
{#if (params?.tfs_z ?? '') === ''}
<span class="ml-2 self-center">{$i18n.t('Default')}</span>
{:else}
<span class="ml-2 self-center">{$i18n.t('Default')}</span>
<span class="ml-2 self-center">{$i18n.t('Custom')}</span>
{/if}
</button>
</div>
{#if options.tfs_z !== ''}
{#if (params?.tfs_z ?? '') !== ''}
<div class="flex mt-0.5 space-x-2">
<div class=" flex-1">
<input
@ -449,13 +497,13 @@
min="0"
max="2"
step="0.05"
bind:value={options.tfs_z}
bind:value={params.tfs_z}
class="w-full h-2 rounded-lg appearance-none cursor-pointer dark:bg-gray-700"
/>
</div>
<div>
<input
bind:value={options.tfs_z}
bind:value={params.tfs_z}
type="number"
class=" bg-transparent text-center w-14"
min="0"
@ -475,18 +523,18 @@
class="p-1 px-3 text-xs flex rounded transition"
type="button"
on:click={() => {
options.num_ctx = options.num_ctx === '' ? 2048 : '';
params.num_ctx = (params?.num_ctx ?? '') === '' ? 2048 : '';
}}
>
{#if options.num_ctx === ''}
{#if (params?.num_ctx ?? '') === ''}
<span class="ml-2 self-center">{$i18n.t('Default')}</span>
{:else}
<span class="ml-2 self-center">{$i18n.t('Default')}</span>
<span class="ml-2 self-center">{$i18n.t('Custom')}</span>
{/if}
</button>
</div>
{#if options.num_ctx !== ''}
{#if (params?.num_ctx ?? '') !== ''}
<div class="flex mt-0.5 space-x-2">
<div class=" flex-1">
<input
@ -495,13 +543,13 @@
min="-1"
max="10240000"
step="1"
bind:value={options.num_ctx}
bind:value={params.num_ctx}
class="w-full h-2 rounded-lg appearance-none cursor-pointer dark:bg-gray-700"
/>
</div>
<div class="">
<input
bind:value={options.num_ctx}
bind:value={params.num_ctx}
type="number"
class=" bg-transparent text-center w-14"
min="-1"
@ -513,24 +561,24 @@
</div>
<div class=" py-0.5 w-full justify-between">
<div class="flex w-full justify-between">
<div class=" self-center text-xs font-medium">{$i18n.t('Max Tokens')}</div>
<div class=" self-center text-xs font-medium">{$i18n.t('Max Tokens (num_predict)')}</div>
<button
class="p-1 px-3 text-xs flex rounded transition"
type="button"
on:click={() => {
options.num_predict = options.num_predict === '' ? 128 : '';
params.max_tokens = (params?.max_tokens ?? '') === '' ? 128 : '';
}}
>
{#if options.num_predict === ''}
{#if (params?.max_tokens ?? '') === ''}
<span class="ml-2 self-center">{$i18n.t('Default')}</span>
{:else}
<span class="ml-2 self-center">{$i18n.t('Default')}</span>
<span class="ml-2 self-center">{$i18n.t('Custom')}</span>
{/if}
</button>
</div>
{#if options.num_predict !== ''}
{#if (params?.max_tokens ?? '') !== ''}
<div class="flex mt-0.5 space-x-2">
<div class=" flex-1">
<input
@ -539,13 +587,13 @@
min="-2"
max="16000"
step="1"
bind:value={options.num_predict}
bind:value={params.max_tokens}
class="w-full h-2 rounded-lg appearance-none cursor-pointer dark:bg-gray-700"
/>
</div>
<div class="">
<input
bind:value={options.num_predict}
bind:value={params.max_tokens}
type="number"
class=" bg-transparent text-center w-14"
min="-2"
@ -556,4 +604,36 @@
</div>
{/if}
</div>
<div class=" py-0.5 w-full justify-between">
<div class="flex w-full justify-between">
<div class=" self-center text-xs font-medium">{$i18n.t('Template')}</div>
<button
class="p-1 px-3 text-xs flex rounded transition"
type="button"
on:click={() => {
params.template = (params?.template ?? null) === null ? '' : null;
}}
>
{#if (params?.template ?? null) === null}
<span class="ml-2 self-center">{$i18n.t('Default')}</span>
{:else}
<span class="ml-2 self-center">{$i18n.t('Custom')}</span>
{/if}
</button>
</div>
{#if (params?.template ?? null) !== null}
<div class="flex mt-0.5 space-x-2">
<div class=" flex-1">
<textarea
class="px-3 py-1.5 text-sm w-full bg-transparent border dark:border-gray-600 outline-none rounded-lg -mb-1"
placeholder="Write your model template content here"
rows="4"
bind:value={params.template}
/>
</div>
</div>
{/if}
</div>
</div>
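Throughout this component the convention is that `null` (or `''` for slider-backed fields) means 'Default' and any concrete value means 'Custom', with a `change` event dispatched whenever `params` mutates. A minimal sketch of the filtering a consumer would apply before sending params upstream (`compactParams` is a hypothetical helper, not part of this diff):

// Drop 'Default' sentinels (null / '') so only user-overridden params remain.
const compactParams = (params: Record<string, unknown>): Record<string, unknown> =>
	Object.fromEntries(
		Object.entries(params).filter(([, value]) => value !== null && value !== '')
	);

// compactParams({ seed: null, temperature: 0.8, stop: '' }) -> { temperature: 0.8 }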

View File

@ -1,6 +1,6 @@
<script lang="ts">
import { getAudioConfig, updateAudioConfig } from '$lib/apis/audio';
import { user } from '$lib/stores';
import { user, settings } from '$lib/stores';
import { createEventDispatcher, onMount, getContext } from 'svelte';
import { toast } from 'svelte-sonner';
const dispatch = createEventDispatcher();
@ -99,16 +99,14 @@
};
onMount(async () => {
let settings = JSON.parse(localStorage.getItem('settings') ?? '{}');
conversationMode = $settings.conversationMode ?? false;
speechAutoSend = $settings.speechAutoSend ?? false;
responseAutoPlayback = $settings.responseAutoPlayback ?? false;
conversationMode = settings.conversationMode ?? false;
speechAutoSend = settings.speechAutoSend ?? false;
responseAutoPlayback = settings.responseAutoPlayback ?? false;
STTEngine = settings?.audio?.STTEngine ?? '';
TTSEngine = settings?.audio?.TTSEngine ?? '';
speaker = settings?.audio?.speaker ?? '';
model = settings?.audio?.model ?? '';
STTEngine = $settings?.audio?.STTEngine ?? '';
TTSEngine = $settings?.audio?.TTSEngine ?? '';
speaker = $settings?.audio?.speaker ?? '';
model = $settings?.audio?.model ?? '';
if (TTSEngine === 'openai') {
getOpenAIVoices();

View File

@ -2,9 +2,10 @@
import fileSaver from 'file-saver';
const { saveAs } = fileSaver;
import { chats, user, config } from '$lib/stores';
import { chats, user, settings } from '$lib/stores';
import {
archiveAllChats,
createNewChat,
deleteAllChats,
getAllChats,
@ -22,7 +23,10 @@
// Chats
let saveChatHistory = true;
let importFiles;
let showArchiveConfirm = false;
let showDeleteConfirm = false;
let chatImportInputElement: HTMLInputElement;
$: if (importFiles) {
@ -68,14 +72,15 @@
saveAs(blob, `chat-export-${Date.now()}.json`);
};
const exportAllUserChats = async () => {
let blob = new Blob([JSON.stringify(await getAllUserChats(localStorage.token))], {
type: 'application/json'
const archiveAllChatsHandler = async () => {
await goto('/');
await archiveAllChats(localStorage.token).catch((error) => {
toast.error(error);
});
saveAs(blob, `all-chats-export-${Date.now()}.json`);
await chats.set(await getChatList(localStorage.token));
};
const deleteChats = async () => {
const deleteAllChatsHandler = async () => {
await goto('/');
await deleteAllChats(localStorage.token).catch((error) => {
toast.error(error);
@ -94,9 +99,7 @@
};
onMount(async () => {
let settings = JSON.parse(localStorage.getItem('settings') ?? '{}');
saveChatHistory = settings.saveChatHistory ?? true;
saveChatHistory = $settings.saveChatHistory ?? true;
});
</script>
@ -217,118 +220,177 @@
<hr class=" dark:border-gray-700" />
{#if showDeleteConfirm}
<div class="flex justify-between rounded-md items-center py-2 px-3.5 w-full transition">
<div class="flex items-center space-x-3">
<svg
xmlns="http://www.w3.org/2000/svg"
viewBox="0 0 16 16"
fill="currentColor"
class="w-4 h-4"
>
<path d="M2 3a1 1 0 0 1 1-1h10a1 1 0 0 1 1 1v1a1 1 0 0 1-1 1H3a1 1 0 0 1-1-1V3Z" />
<path
fill-rule="evenodd"
d="M13 6H3v6a2 2 0 0 0 2 2h6a2 2 0 0 0 2-2V6ZM5.72 7.47a.75.75 0 0 1 1.06 0L8 8.69l1.22-1.22a.75.75 0 1 1 1.06 1.06L9.06 9.75l1.22 1.22a.75.75 0 1 1-1.06 1.06L8 10.81l-1.22 1.22a.75.75 0 0 1-1.06-1.06l1.22-1.22-1.22-1.22a.75.75 0 0 1 0-1.06Z"
clip-rule="evenodd"
/>
</svg>
<span>{$i18n.t('Are you sure?')}</span>
</div>
<div class="flex space-x-1.5 items-center">
<button
class="hover:text-white transition"
on:click={() => {
deleteChats();
showDeleteConfirm = false;
}}
>
<div class="flex flex-col">
{#if showArchiveConfirm}
<div class="flex justify-between rounded-md items-center py-2 px-3.5 w-full transition">
<div class="flex items-center space-x-3">
<svg
xmlns="http://www.w3.org/2000/svg"
viewBox="0 0 20 20"
viewBox="0 0 16 16"
fill="currentColor"
class="w-4 h-4"
>
<path d="M2 3a1 1 0 0 1 1-1h10a1 1 0 0 1 1 1v1a1 1 0 0 1-1 1H3a1 1 0 0 1-1-1V3Z" />
<path
fill-rule="evenodd"
d="M13 6H3v6a2 2 0 0 0 2 2h6a2 2 0 0 0 2-2V6ZM5.72 7.47a.75.75 0 0 1 1.06 0L8 8.69l1.22-1.22a.75.75 0 1 1 1.06 1.06L9.06 9.75l1.22 1.22a.75.75 0 1 1-1.06 1.06L8 10.81l-1.22 1.22a.75.75 0 0 1-1.06-1.06l1.22-1.22-1.22-1.22a.75.75 0 0 1 0-1.06Z"
clip-rule="evenodd"
/>
</svg>
<span>{$i18n.t('Are you sure?')}</span>
</div>
<div class="flex space-x-1.5 items-center">
<button
class="hover:text-white transition"
on:click={() => {
archiveAllChatsHandler();
showArchiveConfirm = false;
}}
>
<svg
xmlns="http://www.w3.org/2000/svg"
viewBox="0 0 20 20"
fill="currentColor"
class="w-4 h-4"
>
<path
fill-rule="evenodd"
d="M16.704 4.153a.75.75 0 01.143 1.052l-8 10.5a.75.75 0 01-1.127.075l-4.5-4.5a.75.75 0 011.06-1.06l3.894 3.893 7.48-9.817a.75.75 0 011.05-.143z"
clip-rule="evenodd"
/>
</svg>
</button>
<button
class="hover:text-white transition"
on:click={() => {
showArchiveConfirm = false;
}}
>
<svg
xmlns="http://www.w3.org/2000/svg"
viewBox="0 0 20 20"
fill="currentColor"
class="w-4 h-4"
>
<path
d="M6.28 5.22a.75.75 0 00-1.06 1.06L8.94 10l-3.72 3.72a.75.75 0 101.06 1.06L10 11.06l3.72 3.72a.75.75 0 101.06-1.06L11.06 10l3.72-3.72a.75.75 0 00-1.06-1.06L10 8.94 6.28 5.22z"
/>
</svg>
</button>
</div>
</div>
{:else}
<button
class=" flex rounded-md py-2 px-3.5 w-full hover:bg-gray-200 dark:hover:bg-gray-800 transition"
on:click={() => {
showArchiveConfirm = true;
}}
>
<div class=" self-center mr-3">
<svg
xmlns="http://www.w3.org/2000/svg"
viewBox="0 0 24 24"
fill="currentColor"
class="size-4"
>
<path
d="M3.375 3C2.339 3 1.5 3.84 1.5 4.875v.75c0 1.036.84 1.875 1.875 1.875h17.25c1.035 0 1.875-.84 1.875-1.875v-.75C22.5 3.839 21.66 3 20.625 3H3.375Z"
/>
<path
fill-rule="evenodd"
d="m3.087 9 .54 9.176A3 3 0 0 0 6.62 21h10.757a3 3 0 0 0 2.995-2.824L20.913 9H3.087Zm6.163 3.75A.75.75 0 0 1 10 12h4a.75.75 0 0 1 0 1.5h-4a.75.75 0 0 1-.75-.75Z"
clip-rule="evenodd"
/>
</svg>
</div>
<div class=" self-center text-sm font-medium">{$i18n.t('Archive All Chats')}</div>
</button>
{/if}
{#if showDeleteConfirm}
<div class="flex justify-between rounded-md items-center py-2 px-3.5 w-full transition">
<div class="flex items-center space-x-3">
<svg
xmlns="http://www.w3.org/2000/svg"
viewBox="0 0 16 16"
fill="currentColor"
class="w-4 h-4"
>
<path d="M2 3a1 1 0 0 1 1-1h10a1 1 0 0 1 1 1v1a1 1 0 0 1-1 1H3a1 1 0 0 1-1-1V3Z" />
<path
fill-rule="evenodd"
d="M13 6H3v6a2 2 0 0 0 2 2h6a2 2 0 0 0 2-2V6ZM5.72 7.47a.75.75 0 0 1 1.06 0L8 8.69l1.22-1.22a.75.75 0 1 1 1.06 1.06L9.06 9.75l1.22 1.22a.75.75 0 1 1-1.06 1.06L8 10.81l-1.22 1.22a.75.75 0 0 1-1.06-1.06l1.22-1.22-1.22-1.22a.75.75 0 0 1 0-1.06Z"
clip-rule="evenodd"
/>
</svg>
<span>{$i18n.t('Are you sure?')}</span>
</div>
<div class="flex space-x-1.5 items-center">
<button
class="hover:text-white transition"
on:click={() => {
deleteAllChatsHandler();
showDeleteConfirm = false;
}}
>
<svg
xmlns="http://www.w3.org/2000/svg"
viewBox="0 0 20 20"
fill="currentColor"
class="w-4 h-4"
>
<path
fill-rule="evenodd"
d="M16.704 4.153a.75.75 0 01.143 1.052l-8 10.5a.75.75 0 01-1.127.075l-4.5-4.5a.75.75 0 011.06-1.06l3.894 3.893 7.48-9.817a.75.75 0 011.05-.143z"
clip-rule="evenodd"
/>
</svg>
</button>
<button
class="hover:text-white transition"
on:click={() => {
showDeleteConfirm = false;
}}
>
<svg
xmlns="http://www.w3.org/2000/svg"
viewBox="0 0 20 20"
fill="currentColor"
class="w-4 h-4"
>
<path
d="M6.28 5.22a.75.75 0 00-1.06 1.06L8.94 10l-3.72 3.72a.75.75 0 101.06 1.06L10 11.06l3.72 3.72a.75.75 0 101.06-1.06L11.06 10l3.72-3.72a.75.75 0 00-1.06-1.06L10 8.94 6.28 5.22z"
/>
</svg>
</button>
</div>
</div>
{:else}
<button
class=" flex rounded-md py-2 px-3.5 w-full hover:bg-gray-200 dark:hover:bg-gray-800 transition"
on:click={() => {
showDeleteConfirm = true;
}}
>
<div class=" self-center mr-3">
<svg
xmlns="http://www.w3.org/2000/svg"
viewBox="0 0 16 16"
fill="currentColor"
class="w-4 h-4"
>
<path
fill-rule="evenodd"
d="M16.704 4.153a.75.75 0 01.143 1.052l-8 10.5a.75.75 0 01-1.127.075l-4.5-4.5a.75.75 0 011.06-1.06l3.894 3.893 7.48-9.817a.75.75 0 011.05-.143z"
d="M4 2a1.5 1.5 0 0 0-1.5 1.5v9A1.5 1.5 0 0 0 4 14h8a1.5 1.5 0 0 0 1.5-1.5V6.621a1.5 1.5 0 0 0-.44-1.06L9.94 2.439A1.5 1.5 0 0 0 8.878 2H4Zm7 7a.75.75 0 0 1-.75.75h-4.5a.75.75 0 0 1 0-1.5h4.5A.75.75 0 0 1 11 9Z"
clip-rule="evenodd"
/>
</svg>
</button>
<button
class="hover:text-white transition"
on:click={() => {
showDeleteConfirm = false;
}}
>
<svg
xmlns="http://www.w3.org/2000/svg"
viewBox="0 0 20 20"
fill="currentColor"
class="w-4 h-4"
>
<path
d="M6.28 5.22a.75.75 0 00-1.06 1.06L8.94 10l-3.72 3.72a.75.75 0 101.06 1.06L10 11.06l3.72 3.72a.75.75 0 101.06-1.06L11.06 10l3.72-3.72a.75.75 0 00-1.06-1.06L10 8.94 6.28 5.22z"
/>
</svg>
</button>
</div>
</div>
{:else}
<button
class=" flex rounded-md py-2 px-3.5 w-full hover:bg-gray-200 dark:hover:bg-gray-800 transition"
on:click={() => {
showDeleteConfirm = true;
}}
>
<div class=" self-center mr-3">
<svg
xmlns="http://www.w3.org/2000/svg"
viewBox="0 0 16 16"
fill="currentColor"
class="w-4 h-4"
>
<path
fill-rule="evenodd"
d="M4 2a1.5 1.5 0 0 0-1.5 1.5v9A1.5 1.5 0 0 0 4 14h8a1.5 1.5 0 0 0 1.5-1.5V6.621a1.5 1.5 0 0 0-.44-1.06L9.94 2.439A1.5 1.5 0 0 0 8.878 2H4Zm7 7a.75.75 0 0 1-.75.75h-4.5a.75.75 0 0 1 0-1.5h4.5A.75.75 0 0 1 11 9Z"
clip-rule="evenodd"
/>
</svg>
</div>
<div class=" self-center text-sm font-medium">{$i18n.t('Delete Chats')}</div>
</button>
{/if}
{#if $user?.role === 'admin' && ($config?.admin_export_enabled ?? true)}
<hr class=" dark:border-gray-700" />
<button
class=" flex rounded-md py-2 px-3.5 w-full hover:bg-gray-200 dark:hover:bg-gray-800 transition"
on:click={() => {
exportAllUserChats();
}}
>
<div class=" self-center mr-3">
<svg
xmlns="http://www.w3.org/2000/svg"
viewBox="0 0 16 16"
fill="currentColor"
class="w-4 h-4"
>
<path d="M2 3a1 1 0 0 1 1-1h10a1 1 0 0 1 1 1v1a1 1 0 0 1-1 1H3a1 1 0 0 1-1-1V3Z" />
<path
fill-rule="evenodd"
d="M13 6H3v6a2 2 0 0 0 2 2h6a2 2 0 0 0 2-2V6ZM8.75 7.75a.75.75 0 0 0-1.5 0v2.69L6.03 9.22a.75.75 0 0 0-1.06 1.06l2.5 2.5a.75.75 0 0 0 1.06 0l2.5-2.5a.75.75 0 1 0-1.06-1.06l-1.22 1.22V7.75Z"
clip-rule="evenodd"
/>
</svg>
</div>
<div class=" self-center text-sm font-medium">
{$i18n.t('Export All Chats (All Users)')}
</div>
</button>
{/if}
</div>
<div class=" self-center text-sm font-medium">{$i18n.t('Delete All Chats')}</div>
</button>
{/if}
</div>
</div>
</div>
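The new archive flow mirrors the delete flow: navigate home, call the API, surface any error as a toast, then refresh the chat list. A sketch of what `archiveAllChats` in `$lib/apis/chats` plausibly looks like, inferred from its call site; the endpoint path is an assumption:

import { WEBUI_API_BASE_URL } from '$lib/constants';

export const archiveAllChats = async (token: string) => {
	// Endpoint path assumed from the handler above; not confirmed by this diff.
	const res = await fetch(`${WEBUI_API_BASE_URL}/chats/archive/all`, {
		method: 'POST',
		headers: { Authorization: `Bearer ${token}` }
	});
	if (!res.ok) throw await res.json();
	return res.json();
};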

View File

@ -3,7 +3,13 @@
import { createEventDispatcher, onMount, getContext } from 'svelte';
const dispatch = createEventDispatcher();
import { getOllamaUrls, getOllamaVersion, updateOllamaUrls } from '$lib/apis/ollama';
import {
getOllamaConfig,
getOllamaUrls,
getOllamaVersion,
updateOllamaConfig,
updateOllamaUrls
} from '$lib/apis/ollama';
import {
getOpenAIConfig,
getOpenAIKeys,
@ -14,6 +20,7 @@
} from '$lib/apis/openai';
import { toast } from 'svelte-sonner';
import Switch from '$lib/components/common/Switch.svelte';
import Spinner from '$lib/components/common/Spinner.svelte';
const i18n = getContext('i18n');
@ -25,7 +32,8 @@
let OPENAI_API_KEYS = [''];
let OPENAI_API_BASE_URLS = [''];
let ENABLE_OPENAI_API = false;
let ENABLE_OPENAI_API = null;
let ENABLE_OLLAMA_API = null;
const updateOpenAIHandler = async () => {
OPENAI_API_BASE_URLS = await updateOpenAIUrls(localStorage.token, OPENAI_API_BASE_URLS);
@ -50,13 +58,23 @@
onMount(async () => {
if ($user.role === 'admin') {
OLLAMA_BASE_URLS = await getOllamaUrls(localStorage.token);
await Promise.all([
(async () => {
OLLAMA_BASE_URLS = await getOllamaUrls(localStorage.token);
})(),
(async () => {
OPENAI_API_BASE_URLS = await getOpenAIUrls(localStorage.token);
})(),
(async () => {
OPENAI_API_KEYS = await getOpenAIKeys(localStorage.token);
})()
]);
const config = await getOpenAIConfig(localStorage.token);
ENABLE_OPENAI_API = config.ENABLE_OPENAI_API;
const ollamaConfig = await getOllamaConfig(localStorage.token);
const openaiConfig = await getOpenAIConfig(localStorage.token);
OPENAI_API_BASE_URLS = await getOpenAIUrls(localStorage.token);
OPENAI_API_KEYS = await getOpenAIKeys(localStorage.token);
ENABLE_OPENAI_API = openaiConfig.ENABLE_OPENAI_API;
ENABLE_OLLAMA_API = ollamaConfig.ENABLE_OLLAMA_API;
}
});
</script>
@ -68,189 +86,212 @@
dispatch('save');
}}
>
<div class=" pr-1.5 overflow-y-scroll max-h-[25rem] space-y-3">
<div class=" space-y-3">
<div class="mt-2 space-y-2 pr-1.5">
<div class="space-y-3 pr-1.5 overflow-y-scroll h-[24rem] max-h-[25rem]">
{#if ENABLE_OPENAI_API !== null && ENABLE_OLLAMA_API !== null}
<div class=" space-y-3">
<div class="mt-2 space-y-2 pr-1.5">
<div class="flex justify-between items-center text-sm">
<div class=" font-medium">{$i18n.t('OpenAI API')}</div>
<div class="mt-1">
<Switch
bind:state={ENABLE_OPENAI_API}
on:change={async () => {
updateOpenAIConfig(localStorage.token, ENABLE_OPENAI_API);
}}
/>
</div>
</div>
{#if ENABLE_OPENAI_API}
<div class="flex flex-col gap-1">
{#each OPENAI_API_BASE_URLS as url, idx}
<div class="flex w-full gap-2">
<div class="flex-1">
<input
class="w-full rounded-lg py-2 px-4 text-sm dark:text-gray-300 dark:bg-gray-850 outline-none"
placeholder={$i18n.t('API Base URL')}
bind:value={url}
autocomplete="off"
/>
</div>
<div class="flex-1">
<input
class="w-full rounded-lg py-2 px-4 text-sm dark:text-gray-300 dark:bg-gray-850 outline-none"
placeholder={$i18n.t('API Key')}
bind:value={OPENAI_API_KEYS[idx]}
autocomplete="off"
/>
</div>
<div class="self-center flex items-center">
{#if idx === 0}
<button
class="px-1"
on:click={() => {
OPENAI_API_BASE_URLS = [...OPENAI_API_BASE_URLS, ''];
OPENAI_API_KEYS = [...OPENAI_API_KEYS, ''];
}}
type="button"
>
<svg
xmlns="http://www.w3.org/2000/svg"
viewBox="0 0 16 16"
fill="currentColor"
class="w-4 h-4"
>
<path
d="M8.75 3.75a.75.75 0 0 0-1.5 0v3.5h-3.5a.75.75 0 0 0 0 1.5h3.5v3.5a.75.75 0 0 0 1.5 0v-3.5h3.5a.75.75 0 0 0 0-1.5h-3.5v-3.5Z"
/>
</svg>
</button>
{:else}
<button
class="px-1"
on:click={() => {
OPENAI_API_BASE_URLS = OPENAI_API_BASE_URLS.filter(
(url, urlIdx) => idx !== urlIdx
);
OPENAI_API_KEYS = OPENAI_API_KEYS.filter((key, keyIdx) => idx !== keyIdx);
}}
type="button"
>
<svg
xmlns="http://www.w3.org/2000/svg"
viewBox="0 0 16 16"
fill="currentColor"
class="w-4 h-4"
>
<path d="M3.75 7.25a.75.75 0 0 0 0 1.5h8.5a.75.75 0 0 0 0-1.5h-8.5Z" />
</svg>
</button>
{/if}
</div>
</div>
<div class=" mb-1 text-xs text-gray-400 dark:text-gray-500">
{$i18n.t('WebUI will make requests to')}
<span class=" text-gray-200">'{url}/models'</span>
</div>
{/each}
</div>
{/if}
</div>
</div>
<hr class=" dark:border-gray-700" />
<div class="pr-1.5 space-y-2">
<div class="flex justify-between items-center text-sm">
<div class=" font-medium">{$i18n.t('OpenAI API')}</div>
<div class=" font-medium">{$i18n.t('Ollama API')}</div>
<div class="mt-1">
<Switch
bind:state={ENABLE_OPENAI_API}
bind:state={ENABLE_OLLAMA_API}
on:change={async () => {
updateOpenAIConfig(localStorage.token, ENABLE_OPENAI_API);
updateOllamaConfig(localStorage.token, ENABLE_OLLAMA_API);
}}
/>
</div>
</div>
{#if ENABLE_OPENAI_API}
<div class="flex flex-col gap-1">
{#each OPENAI_API_BASE_URLS as url, idx}
<div class="flex w-full gap-2">
<div class="flex-1">
{#if ENABLE_OLLAMA_API}
<div class="flex w-full gap-1.5">
<div class="flex-1 flex flex-col gap-2">
{#each OLLAMA_BASE_URLS as url, idx}
<div class="flex gap-1.5">
<input
class="w-full rounded-lg py-2 px-4 text-sm dark:text-gray-300 dark:bg-gray-850 outline-none"
placeholder={$i18n.t('API Base URL')}
placeholder={$i18n.t('Enter URL (e.g. http://localhost:11434)')}
bind:value={url}
autocomplete="off"
/>
</div>
<div class="flex-1">
<input
class="w-full rounded-lg py-2 px-4 text-sm dark:text-gray-300 dark:bg-gray-850 outline-none"
placeholder={$i18n.t('API Key')}
bind:value={OPENAI_API_KEYS[idx]}
autocomplete="off"
<div class="self-center flex items-center">
{#if idx === 0}
<button
class="px-1"
on:click={() => {
OLLAMA_BASE_URLS = [...OLLAMA_BASE_URLS, ''];
}}
type="button"
>
<svg
xmlns="http://www.w3.org/2000/svg"
viewBox="0 0 16 16"
fill="currentColor"
class="w-4 h-4"
>
<path
d="M8.75 3.75a.75.75 0 0 0-1.5 0v3.5h-3.5a.75.75 0 0 0 0 1.5h3.5v3.5a.75.75 0 0 0 1.5 0v-3.5h3.5a.75.75 0 0 0 0-1.5h-3.5v-3.5Z"
/>
</svg>
</button>
{:else}
<button
class="px-1"
on:click={() => {
OLLAMA_BASE_URLS = OLLAMA_BASE_URLS.filter(
(url, urlIdx) => idx !== urlIdx
);
}}
type="button"
>
<svg
xmlns="http://www.w3.org/2000/svg"
viewBox="0 0 16 16"
fill="currentColor"
class="w-4 h-4"
>
<path d="M3.75 7.25a.75.75 0 0 0 0 1.5h8.5a.75.75 0 0 0 0-1.5h-8.5Z" />
</svg>
</button>
{/if}
</div>
</div>
{/each}
</div>
<div class="flex">
<button
class="self-center p-2 bg-gray-200 hover:bg-gray-300 dark:bg-gray-900 dark:hover:bg-gray-850 rounded-lg transition"
on:click={() => {
updateOllamaUrlsHandler();
}}
type="button"
>
<svg
xmlns="http://www.w3.org/2000/svg"
viewBox="0 0 20 20"
fill="currentColor"
class="w-4 h-4"
>
<path
fill-rule="evenodd"
d="M15.312 11.424a5.5 5.5 0 01-9.201 2.466l-.312-.311h2.433a.75.75 0 000-1.5H3.989a.75.75 0 00-.75.75v4.242a.75.75 0 001.5 0v-2.43l.31.31a7 7 0 0011.712-3.138.75.75 0 00-1.449-.39zm1.23-3.723a.75.75 0 00.219-.53V2.929a.75.75 0 00-1.5 0V5.36l-.31-.31A7 7 0 003.239 8.188a.75.75 0 101.448.389A5.5 5.5 0 0113.89 6.11l.311.31h-2.432a.75.75 0 000 1.5h4.243a.75.75 0 00.53-.219z"
clip-rule="evenodd"
/>
</div>
<div class="self-center flex items-center">
{#if idx === 0}
<button
class="px-1"
on:click={() => {
OPENAI_API_BASE_URLS = [...OPENAI_API_BASE_URLS, ''];
OPENAI_API_KEYS = [...OPENAI_API_KEYS, ''];
}}
type="button"
>
<svg
xmlns="http://www.w3.org/2000/svg"
viewBox="0 0 16 16"
fill="currentColor"
class="w-4 h-4"
>
<path
d="M8.75 3.75a.75.75 0 0 0-1.5 0v3.5h-3.5a.75.75 0 0 0 0 1.5h3.5v3.5a.75.75 0 0 0 1.5 0v-3.5h3.5a.75.75 0 0 0 0-1.5h-3.5v-3.5Z"
/>
</svg>
</button>
{:else}
<button
class="px-1"
on:click={() => {
OPENAI_API_BASE_URLS = OPENAI_API_BASE_URLS.filter(
(url, urlIdx) => idx !== urlIdx
);
OPENAI_API_KEYS = OPENAI_API_KEYS.filter((key, keyIdx) => idx !== keyIdx);
}}
type="button"
>
<svg
xmlns="http://www.w3.org/2000/svg"
viewBox="0 0 16 16"
fill="currentColor"
class="w-4 h-4"
>
<path d="M3.75 7.25a.75.75 0 0 0 0 1.5h8.5a.75.75 0 0 0 0-1.5h-8.5Z" />
</svg>
</button>
{/if}
</div>
</div>
<div class=" mb-1 text-xs text-gray-400 dark:text-gray-500">
{$i18n.t('WebUI will make requests to')}
<span class=" text-gray-200">'{url}/models'</span>
</div>
{/each}
</svg>
</button>
</div>
</div>
<div class="mt-2 text-xs text-gray-400 dark:text-gray-500">
{$i18n.t('Trouble accessing Ollama?')}
<a
class=" text-gray-300 font-medium underline"
href="https://github.com/open-webui/open-webui#troubleshooting"
target="_blank"
>
{$i18n.t('Click here for help.')}
</a>
</div>
{/if}
</div>
</div>
<hr class=" dark:border-gray-700" />
<div>
<div class=" mb-2.5 text-sm font-medium">{$i18n.t('Ollama Base URL')}</div>
<div class="flex w-full gap-1.5">
<div class="flex-1 flex flex-col gap-2">
{#each OLLAMA_BASE_URLS as url, idx}
<div class="flex gap-1.5">
<input
class="w-full rounded-lg py-2 px-4 text-sm dark:text-gray-300 dark:bg-gray-850 outline-none"
placeholder={$i18n.t('Enter URL (e.g. http://localhost:11434)')}
bind:value={url}
/>
<div class="self-center flex items-center">
{#if idx === 0}
<button
class="px-1"
on:click={() => {
OLLAMA_BASE_URLS = [...OLLAMA_BASE_URLS, ''];
}}
type="button"
>
<svg
xmlns="http://www.w3.org/2000/svg"
viewBox="0 0 16 16"
fill="currentColor"
class="w-4 h-4"
>
<path
d="M8.75 3.75a.75.75 0 0 0-1.5 0v3.5h-3.5a.75.75 0 0 0 0 1.5h3.5v3.5a.75.75 0 0 0 1.5 0v-3.5h3.5a.75.75 0 0 0 0-1.5h-3.5v-3.5Z"
/>
</svg>
</button>
{:else}
<button
class="px-1"
on:click={() => {
OLLAMA_BASE_URLS = OLLAMA_BASE_URLS.filter((url, urlIdx) => idx !== urlIdx);
}}
type="button"
>
<svg
xmlns="http://www.w3.org/2000/svg"
viewBox="0 0 16 16"
fill="currentColor"
class="w-4 h-4"
>
<path d="M3.75 7.25a.75.75 0 0 0 0 1.5h8.5a.75.75 0 0 0 0-1.5h-8.5Z" />
</svg>
</button>
{/if}
</div>
</div>
{/each}
</div>
<div class="">
<button
class="p-2.5 bg-gray-200 hover:bg-gray-300 dark:bg-gray-850 dark:hover:bg-gray-800 rounded-lg transition"
on:click={() => {
updateOllamaUrlsHandler();
}}
type="button"
>
<svg
xmlns="http://www.w3.org/2000/svg"
viewBox="0 0 20 20"
fill="currentColor"
class="w-4 h-4"
>
<path
fill-rule="evenodd"
d="M15.312 11.424a5.5 5.5 0 01-9.201 2.466l-.312-.311h2.433a.75.75 0 000-1.5H3.989a.75.75 0 00-.75.75v4.242a.75.75 0 001.5 0v-2.43l.31.31a7 7 0 0011.712-3.138.75.75 0 00-1.449-.39zm1.23-3.723a.75.75 0 00.219-.53V2.929a.75.75 0 00-1.5 0V5.36l-.31-.31A7 7 0 003.239 8.188a.75.75 0 101.448.389A5.5 5.5 0 0113.89 6.11l.311.31h-2.432a.75.75 0 000 1.5h4.243a.75.75 0 00.53-.219z"
clip-rule="evenodd"
/>
</svg>
</button>
{:else}
<div class="flex h-full justify-center">
<div class="my-auto">
<Spinner className="size-6" />
</div>
</div>
<div class="mt-2 text-xs text-gray-400 dark:text-gray-500">
{$i18n.t('Trouble accessing Ollama?')}
<a
class=" text-gray-300 font-medium underline"
href="https://github.com/open-webui/open-webui#troubleshooting"
target="_blank"
>
{$i18n.t('Click here for help.')}
</a>
</div>
</div>
{/if}
</div>
<div class="flex justify-end pt-3 text-sm font-medium">

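Wrapping the three URL/key fetches in one `Promise.all` lets them run concurrently, and initializing both enable flags to `null` keeps the form behind the spinner until the Ollama and OpenAI configs have loaded. A sketch of the assumed `updateOllamaConfig` helper driven by the Switch above (the endpoint path and payload key are assumptions):

import { OLLAMA_API_BASE_URL } from '$lib/constants';

export const updateOllamaConfig = async (token: string, enable_ollama_api: boolean) => {
	const res = await fetch(`${OLLAMA_API_BASE_URL}/config/update`, {
		method: 'POST',
		headers: {
			'Content-Type': 'application/json',
			Authorization: `Bearer ${token}`
		},
		body: JSON.stringify({ enable_ollama_api })
	});
	if (!res.ok) throw await res.json();
	return res.json();
};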
View File

@ -4,7 +4,7 @@
import { getLanguages } from '$lib/i18n';
const dispatch = createEventDispatcher();
import { models, user, theme } from '$lib/stores';
import { models, settings, theme } from '$lib/stores';
const i18n = getContext('i18n');
@ -41,21 +41,21 @@
let requestFormat = '';
let keepAlive = null;
let options = {
let params = {
// Advanced
seed: 0,
temperature: '',
repeat_penalty: '',
frequency_penalty: '',
repeat_last_n: '',
mirostat: '',
mirostat_eta: '',
mirostat_tau: '',
top_k: '',
top_p: '',
stop: '',
stop: null,
tfs_z: '',
num_ctx: '',
num_predict: ''
max_tokens: ''
};
const toggleRequestFormat = async () => {
@ -71,23 +71,22 @@
onMount(async () => {
selectedTheme = localStorage.theme ?? 'system';
let settings = JSON.parse(localStorage.getItem('settings') ?? '{}');
languages = await getLanguages();
notificationEnabled = settings.notificationEnabled ?? false;
system = settings.system ?? '';
notificationEnabled = $settings.notificationEnabled ?? false;
system = $settings.system ?? '';
requestFormat = settings.requestFormat ?? '';
keepAlive = settings.keepAlive ?? null;
requestFormat = $settings.requestFormat ?? '';
keepAlive = $settings.keepAlive ?? null;
options.seed = settings.seed ?? 0;
options.temperature = settings.temperature ?? '';
options.repeat_penalty = settings.repeat_penalty ?? '';
options.top_k = settings.top_k ?? '';
options.top_p = settings.top_p ?? '';
options.num_ctx = settings.num_ctx ?? '';
options = { ...options, ...settings.options };
options.stop = (settings?.options?.stop ?? []).join(',');
params.seed = $settings.seed ?? 0;
params.temperature = $settings.temperature ?? '';
params.frequency_penalty = $settings.frequency_penalty ?? '';
params.top_k = $settings.top_k ?? '';
params.top_p = $settings.top_p ?? '';
params.num_ctx = $settings.num_ctx ?? '';
params = { ...params, ...$settings.params };
params.stop = $settings?.params?.stop ? ($settings?.params?.stop ?? []).join(',') : null;
});
const applyTheme = (_theme: string) => {
@ -228,7 +227,7 @@
</div>
{#if showAdvanced}
<AdvancedParams bind:options />
<AdvancedParams bind:params />
<hr class=" dark:border-gray-700" />
<div class=" py-1 w-full justify-between">
@ -300,20 +299,21 @@
on:click={() => {
saveSettings({
system: system !== '' ? system : undefined,
options: {
seed: (options.seed !== 0 ? options.seed : undefined) ?? undefined,
stop: options.stop !== '' ? options.stop.split(',').filter((e) => e) : undefined,
temperature: options.temperature !== '' ? options.temperature : undefined,
repeat_penalty: options.repeat_penalty !== '' ? options.repeat_penalty : undefined,
repeat_last_n: options.repeat_last_n !== '' ? options.repeat_last_n : undefined,
mirostat: options.mirostat !== '' ? options.mirostat : undefined,
mirostat_eta: options.mirostat_eta !== '' ? options.mirostat_eta : undefined,
mirostat_tau: options.mirostat_tau !== '' ? options.mirostat_tau : undefined,
top_k: options.top_k !== '' ? options.top_k : undefined,
top_p: options.top_p !== '' ? options.top_p : undefined,
tfs_z: options.tfs_z !== '' ? options.tfs_z : undefined,
num_ctx: options.num_ctx !== '' ? options.num_ctx : undefined,
num_predict: options.num_predict !== '' ? options.num_predict : undefined
params: {
seed: (params.seed !== 0 ? params.seed : undefined) ?? undefined,
stop: params.stop ? params.stop.split(',').filter((e) => e) : undefined,
temperature: params.temperature !== '' ? params.temperature : undefined,
frequency_penalty:
params.frequency_penalty !== '' ? params.frequency_penalty : undefined,
repeat_last_n: params.repeat_last_n !== '' ? params.repeat_last_n : undefined,
mirostat: params.mirostat !== '' ? params.mirostat : undefined,
mirostat_eta: params.mirostat_eta !== '' ? params.mirostat_eta : undefined,
mirostat_tau: params.mirostat_tau !== '' ? params.mirostat_tau : undefined,
top_k: params.top_k !== '' ? params.top_k : undefined,
top_p: params.top_p !== '' ? params.top_p : undefined,
tfs_z: params.tfs_z !== '' ? params.tfs_z : undefined,
num_ctx: params.num_ctx !== '' ? params.num_ctx : undefined,
max_tokens: params.max_tokens !== '' ? params.max_tokens : undefined
},
keepAlive: keepAlive ? (isNaN(keepAlive) ? keepAlive : parseInt(keepAlive)) : undefined
});
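One subtlety in the save handler above: `keepAlive` holds a string, so `isNaN(keepAlive)` relies on implicit string-to-number coercion — `'5m'` is kept as a duration string while `'300'` is parsed to an integer. The same logic with the coercion made explicit (a sketch, not part of this diff):

const normalizeKeepAlive = (value: string | null): string | number | undefined =>
	value ? (Number.isNaN(Number(value)) ? value : parseInt(value, 10)) : undefined;

// normalizeKeepAlive('5m')  -> '5m'
// normalizeKeepAlive('300') -> 300
// normalizeKeepAlive(null)  -> undefined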

View File

@ -104,23 +104,18 @@
promptSuggestions = $config?.default_prompt_suggestions;
}
let settings = JSON.parse(localStorage.getItem('settings') ?? '{}');
titleAutoGenerate = settings?.title?.auto ?? true;
titleAutoGenerateModel = settings?.title?.model ?? '';
titleAutoGenerateModelExternal = settings?.title?.modelExternal ?? '';
titleAutoGenerate = $settings?.title?.auto ?? true;
titleAutoGenerateModel = $settings?.title?.model ?? '';
titleAutoGenerateModelExternal = $settings?.title?.modelExternal ?? '';
titleGenerationPrompt =
settings?.title?.prompt ??
$i18n.t(
"Create a concise, 3-5 word phrase as a header for the following query, strictly adhering to the 3-5 word limit and avoiding the use of the word 'title':"
) + ' {{prompt}}';
responseAutoCopy = settings.responseAutoCopy ?? false;
showUsername = settings.showUsername ?? false;
chatBubble = settings.chatBubble ?? true;
fullScreenMode = settings.fullScreenMode ?? false;
splitLargeChunks = settings.splitLargeChunks ?? false;
chatDirection = settings.chatDirection ?? 'LTR';
$settings?.title?.prompt ??
`Create a concise, 3-5 word phrase as a header for the following query, strictly adhering to the 3-5 word limit and avoiding the use of the word 'title': {{prompt}}`;
responseAutoCopy = $settings.responseAutoCopy ?? false;
showUsername = $settings.showUsername ?? false;
chatBubble = $settings.chatBubble ?? true;
fullScreenMode = $settings.fullScreenMode ?? false;
splitLargeChunks = $settings.splitLargeChunks ?? false;
chatDirection = $settings.chatDirection ?? 'LTR';
});
</script>

View File

@ -1,5 +1,4 @@
<script lang="ts">
import queue from 'async/queue';
import { toast } from 'svelte-sonner';
import {
@ -12,32 +11,20 @@
cancelOllamaRequest,
uploadModel
} from '$lib/apis/ollama';
import { WEBUI_API_BASE_URL, WEBUI_BASE_URL } from '$lib/constants';
import { WEBUI_NAME, models, MODEL_DOWNLOAD_POOL, user } from '$lib/stores';
import { WEBUI_NAME, models, MODEL_DOWNLOAD_POOL, user, config } from '$lib/stores';
import { splitStream } from '$lib/utils';
import { onMount, getContext } from 'svelte';
import { addLiteLLMModel, deleteLiteLLMModel, getLiteLLMModelInfo } from '$lib/apis/litellm';
import Tooltip from '$lib/components/common/Tooltip.svelte';
import Spinner from '$lib/components/common/Spinner.svelte';
const i18n = getContext('i18n');
export let getModels: Function;
let showLiteLLM = false;
let showLiteLLMParams = false;
let modelUploadInputElement: HTMLInputElement;
let liteLLMModelInfo = [];
let liteLLMModel = '';
let liteLLMModelName = '';
let liteLLMAPIBase = '';
let liteLLMAPIKey = '';
let liteLLMRPM = '';
let liteLLMMaxTokens = '';
let deleteLiteLLMModelName = '';
$: liteLLMModelName = liteLLMModel;
// Models
@ -48,7 +35,8 @@
let updateProgress = null;
let showExperimentalOllama = false;
let ollamaVersion = '';
let ollamaVersion = null;
const MAX_PARALLEL_DOWNLOADS = 3;
let modelTransferring = false;
@ -70,8 +58,11 @@
const updateModelsHandler = async () => {
for (const model of $models.filter(
(m) =>
m.size != null &&
(selectedOllamaUrlIdx === null ? true : (m?.urls ?? []).includes(selectedOllamaUrlIdx))
!(m?.preset ?? false) &&
m.owned_by === 'ollama' &&
(selectedOllamaUrlIdx === null
? true
: (m?.ollama?.urls ?? []).includes(selectedOllamaUrlIdx))
)) {
console.log(model);
@ -439,77 +430,28 @@
}
};
const addLiteLLMModelHandler = async () => {
if (!liteLLMModelInfo.find((info) => info.model_name === liteLLMModelName)) {
const res = await addLiteLLMModel(localStorage.token, {
name: liteLLMModelName,
model: liteLLMModel,
api_base: liteLLMAPIBase,
api_key: liteLLMAPIKey,
rpm: liteLLMRPM,
max_tokens: liteLLMMaxTokens
}).catch((error) => {
toast.error(error);
return null;
});
if (res) {
if (res.message) {
toast.success(res.message);
}
}
} else {
toast.error($i18n.t(`Model {{modelName}} already exists.`, { modelName: liteLLMModelName }));
}
liteLLMModelName = '';
liteLLMModel = '';
liteLLMAPIBase = '';
liteLLMAPIKey = '';
liteLLMRPM = '';
liteLLMMaxTokens = '';
liteLLMModelInfo = await getLiteLLMModelInfo(localStorage.token);
models.set(await getModels());
};
const deleteLiteLLMModelHandler = async () => {
const res = await deleteLiteLLMModel(localStorage.token, deleteLiteLLMModelName).catch(
(error) => {
toast.error(error);
return null;
}
);
if (res) {
if (res.message) {
toast.success(res.message);
}
}
deleteLiteLLMModelName = '';
liteLLMModelInfo = await getLiteLLMModelInfo(localStorage.token);
models.set(await getModels());
};
onMount(async () => {
OLLAMA_URLS = await getOllamaUrls(localStorage.token).catch((error) => {
toast.error(error);
return [];
});
await Promise.all([
(async () => {
OLLAMA_URLS = await getOllamaUrls(localStorage.token).catch((error) => {
toast.error(error);
return [];
});
if (OLLAMA_URLS.length > 0) {
selectedOllamaUrlIdx = 0;
}
ollamaVersion = await getOllamaVersion(localStorage.token).catch((error) => false);
liteLLMModelInfo = await getLiteLLMModelInfo(localStorage.token);
if (OLLAMA_URLS.length > 0) {
selectedOllamaUrlIdx = 0;
}
})(),
(async () => {
ollamaVersion = await getOllamaVersion(localStorage.token).catch((error) => false);
})()
]);
});
</script>
<div class="flex flex-col h-full justify-between text-sm">
<div class=" space-y-3 pr-1.5 overflow-y-scroll h-[24rem]">
{#if ollamaVersion}
{#if ollamaVersion !== null}
<div class="space-y-2 pr-1.5">
<div class="text-sm font-medium">{$i18n.t('Manage Ollama Models')}</div>
@ -587,24 +529,28 @@
viewBox="0 0 24 24"
fill="currentColor"
xmlns="http://www.w3.org/2000/svg"
><style>
>
<style>
.spinner_ajPY {
transform-origin: center;
animation: spinner_AtaB 0.75s infinite linear;
}
@keyframes spinner_AtaB {
100% {
transform: rotate(360deg);
}
}
</style><path
</style>
<path
d="M12,1A11,11,0,1,0,23,12,11,11,0,0,0,12,1Zm0,19a8,8,0,1,1,8-8A8,8,0,0,1,12,20Z"
opacity=".25"
/><path
/>
<path
d="M10.14,1.16a11,11,0,0,0-9,8.92A1.59,1.59,0,0,0,2.46,12,1.52,1.52,0,0,0,4.11,10.7a8,8,0,0,1,6.66-6.61A1.42,1.42,0,0,0,12,2.69h0A1.57,1.57,0,0,0,10.14,1.16Z"
class="spinner_ajPY"
/></svg
>
/>
</svg>
</div>
{:else}
<svg
@ -703,9 +649,12 @@
{#if !deleteModelTag}
<option value="" disabled selected>{$i18n.t('Select a model')}</option>
{/if}
{#each $models.filter((m) => m.size != null && (selectedOllamaUrlIdx === null ? true : (m?.urls ?? []).includes(selectedOllamaUrlIdx))) as model}
{#each $models.filter((m) => !(m?.preset ?? false) && m.owned_by === 'ollama' && (selectedOllamaUrlIdx === null ? true : (m?.ollama?.urls ?? []).includes(selectedOllamaUrlIdx))) as model}
<option value={model.name} class="bg-gray-100 dark:bg-gray-700"
>{model.name + ' (' + (model.size / 1024 ** 3).toFixed(1) + ' GB)'}</option
>{model.name +
' (' +
(model.ollama.size / 1024 ** 3).toFixed(1) +
' GB)'}</option
>
{/each}
</select>
@ -833,24 +782,28 @@
viewBox="0 0 24 24"
fill="currentColor"
xmlns="http://www.w3.org/2000/svg"
><style>
>
<style>
.spinner_ajPY {
transform-origin: center;
animation: spinner_AtaB 0.75s infinite linear;
}
@keyframes spinner_AtaB {
100% {
transform: rotate(360deg);
}
}
</style><path
</style>
<path
d="M12,1A11,11,0,1,0,23,12,11,11,0,0,0,12,1Zm0,19a8,8,0,1,1,8-8A8,8,0,0,1,12,20Z"
opacity=".25"
/><path
/>
<path
d="M10.14,1.16a11,11,0,0,0-9,8.92A1.59,1.59,0,0,0,2.46,12,1.52,1.52,0,0,0,4.11,10.7a8,8,0,0,1,6.66-6.61A1.42,1.42,0,0,0,12,2.69h0A1.57,1.57,0,0,0,10.14,1.16Z"
class="spinner_ajPY"
/></svg
>
/>
</svg>
</div>
{:else}
<svg
@ -929,203 +882,14 @@
{/if}
</div>
</div>
<hr class=" dark:border-gray-700 my-2" />
{/if}
<div class=" space-y-3">
<div class="mt-2 space-y-3 pr-1.5">
<div>
<div class="mb-2">
<div class="flex justify-between items-center text-xs">
<div class=" text-sm font-medium">{$i18n.t('Manage LiteLLM Models')}</div>
<button
class=" text-xs font-medium text-gray-500"
type="button"
on:click={() => {
showLiteLLM = !showLiteLLM;
}}>{showLiteLLM ? $i18n.t('Hide') : $i18n.t('Show')}</button
>
</div>
</div>
{#if showLiteLLM}
<div>
<div class="flex justify-between items-center text-xs">
<div class=" text-sm font-medium">{$i18n.t('Add a model')}</div>
<button
class=" text-xs font-medium text-gray-500"
type="button"
on:click={() => {
showLiteLLMParams = !showLiteLLMParams;
}}
>{showLiteLLMParams
? $i18n.t('Hide Additional Params')
: $i18n.t('Show Additional Params')}</button
>
</div>
</div>
<div class="my-2 space-y-2">
<div class="flex w-full mb-1.5">
<div class="flex-1 mr-2">
<input
class="w-full rounded-lg py-2 px-4 text-sm dark:text-gray-300 dark:bg-gray-850 outline-none"
placeholder={$i18n.t('Enter LiteLLM Model (litellm_params.model)')}
bind:value={liteLLMModel}
autocomplete="off"
/>
</div>
<button
class="px-2.5 bg-gray-100 hover:bg-gray-200 text-gray-800 dark:bg-gray-850 dark:hover:bg-gray-800 dark:text-gray-100 rounded-lg transition"
on:click={() => {
addLiteLLMModelHandler();
}}
>
<svg
xmlns="http://www.w3.org/2000/svg"
viewBox="0 0 16 16"
fill="currentColor"
class="w-4 h-4"
>
<path
d="M8.75 3.75a.75.75 0 0 0-1.5 0v3.5h-3.5a.75.75 0 0 0 0 1.5h3.5v3.5a.75.75 0 0 0 1.5 0v-3.5h3.5a.75.75 0 0 0 0-1.5h-3.5v-3.5Z"
/>
</svg>
</button>
</div>
{#if showLiteLLMParams}
<div>
<div class=" mb-1.5 text-sm font-medium">{$i18n.t('Model Name')}</div>
<div class="flex w-full">
<div class="flex-1">
<input
class="w-full rounded-lg py-2 px-4 text-sm dark:text-gray-300 dark:bg-gray-850 outline-none"
placeholder="Enter Model Name (model_name)"
bind:value={liteLLMModelName}
autocomplete="off"
/>
</div>
</div>
</div>
<div>
<div class=" mb-1.5 text-sm font-medium">{$i18n.t('API Base URL')}</div>
<div class="flex w-full">
<div class="flex-1">
<input
class="w-full rounded-lg py-2 px-4 text-sm dark:text-gray-300 dark:bg-gray-850 outline-none"
placeholder={$i18n.t(
'Enter LiteLLM API Base URL (litellm_params.api_base)'
)}
bind:value={liteLLMAPIBase}
autocomplete="off"
/>
</div>
</div>
</div>
<div>
<div class=" mb-1.5 text-sm font-medium">{$i18n.t('API Key')}</div>
<div class="flex w-full">
<div class="flex-1">
<input
class="w-full rounded-lg py-2 px-4 text-sm dark:text-gray-300 dark:bg-gray-850 outline-none"
placeholder={$i18n.t('Enter LiteLLM API Key (litellm_params.api_key)')}
bind:value={liteLLMAPIKey}
autocomplete="off"
/>
</div>
</div>
</div>
<div>
<div class="mb-1.5 text-sm font-medium">{$i18n.t('API RPM')}</div>
<div class="flex w-full">
<div class="flex-1">
<input
class="w-full rounded-lg py-2 px-4 text-sm dark:text-gray-300 dark:bg-gray-850 outline-none"
placeholder={$i18n.t('Enter LiteLLM API RPM (litellm_params.rpm)')}
bind:value={liteLLMRPM}
autocomplete="off"
/>
</div>
</div>
</div>
<div>
<div class="mb-1.5 text-sm font-medium">{$i18n.t('Max Tokens')}</div>
<div class="flex w-full">
<div class="flex-1">
<input
class="w-full rounded-lg py-2 px-4 text-sm dark:text-gray-300 dark:bg-gray-850 outline-none"
placeholder={$i18n.t('Enter Max Tokens (litellm_params.max_tokens)')}
bind:value={liteLLMMaxTokens}
type="number"
min="1"
autocomplete="off"
/>
</div>
</div>
</div>
{/if}
</div>
<div class="mb-2 text-xs text-gray-400 dark:text-gray-500">
{$i18n.t('Not sure what to add?')}
<a
class=" text-gray-300 font-medium underline"
href="https://litellm.vercel.app/docs/proxy/configs#quick-start"
target="_blank"
>
{$i18n.t('Click here for help.')}
</a>
</div>
<div>
<div class=" mb-2.5 text-sm font-medium">{$i18n.t('Delete a model')}</div>
<div class="flex w-full">
<div class="flex-1 mr-2">
<select
class="w-full rounded-lg py-2 px-4 text-sm dark:text-gray-300 dark:bg-gray-850 outline-none"
bind:value={deleteLiteLLMModelName}
placeholder={$i18n.t('Select a model')}
>
{#if !deleteLiteLLMModelName}
<option value="" disabled selected>{$i18n.t('Select a model')}</option>
{/if}
{#each liteLLMModelInfo as model}
<option value={model.model_name} class="bg-gray-100 dark:bg-gray-700"
>{model.model_name}</option
>
{/each}
</select>
</div>
<button
class="px-2.5 bg-gray-100 hover:bg-gray-200 text-gray-800 dark:bg-gray-850 dark:hover:bg-gray-800 dark:text-gray-100 rounded-lg transition"
on:click={() => {
deleteLiteLLMModelHandler();
}}
>
<svg
xmlns="http://www.w3.org/2000/svg"
viewBox="0 0 16 16"
fill="currentColor"
class="w-4 h-4"
>
<path
fill-rule="evenodd"
d="M5 3.25V4H2.75a.75.75 0 0 0 0 1.5h.3l.815 8.15A1.5 1.5 0 0 0 5.357 15h5.285a1.5 1.5 0 0 0 1.493-1.35l.815-8.15h.3a.75.75 0 0 0 0-1.5H11v-.75A2.25 2.25 0 0 0 8.75 1h-1.5A2.25 2.25 0 0 0 5 3.25Zm2.25-.75a.75.75 0 0 0-.75.75V4h3v-.75a.75.75 0 0 0-.75-.75h-1.5ZM6.05 6a.75.75 0 0 1 .787.713l.275 5.5a.75.75 0 0 1-1.498.075l-.275-5.5A.75.75 0 0 1 6.05 6Zm3.9 0a.75.75 0 0 1 .712.787l-.275 5.5a.75.75 0 0 1-1.498-.075l.275-5.5a.75.75 0 0 1 .786-.711Z"
clip-rule="evenodd"
/>
</svg>
</button>
</div>
</div>
{/if}
{:else if ollamaVersion === false}
<div>Ollama Not Detected</div>
{:else}
<div class="flex h-full justify-center">
<div class="my-auto">
<Spinner className="size-6" />
</div>
</div>
</div>
{/if}
</div>
</div>
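Note that `ollamaVersion` changed its initial value from `''` to `null`, making it effectively tri-state across the template branches above: `null` while loading (spinner), `false` when the version check fails ('Ollama Not Detected'), and a version string once detected. As a type sketch:

// Tri-state status mirrored by the {#if}/{:else if}/{:else} branches above.
type OllamaVersion = string | false | null; // null = loading, false = not detected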

View File

@ -19,8 +19,7 @@
let enableMemory = false;
onMount(async () => {
let settings = JSON.parse(localStorage.getItem('settings') ?? '{}');
enableMemory = settings?.memory ?? false;
enableMemory = $settings?.memory ?? false;
});
</script>

View File

@ -54,7 +54,7 @@
class=" flex flex-col w-full sm:flex-row sm:justify-center sm:space-x-6 h-[28rem] max-h-screen outline outline-1 rounded-xl outline-gray-100 dark:outline-gray-800 mb-4 mt-1"
>
{#if memories.length > 0}
<div class="text-left text-sm w-full mb-4 max-h-[22rem] overflow-y-scroll">
<div class="text-left text-sm w-full mb-4 overflow-y-scroll">
<div class="relative overflow-x-auto">
<table class="w-full text-sm text-left text-gray-600 dark:text-gray-400 table-auto">
<thead

View File

@ -3,7 +3,7 @@
import { toast } from 'svelte-sonner';
import { models, settings, user } from '$lib/stores';
import { getModels as _getModels } from '$lib/utils';
import { getModels as _getModels } from '$lib/apis';
import Modal from '../common/Modal.svelte';
import Account from './Settings/Account.svelte';
@ -17,6 +17,7 @@
import Images from './Settings/Images.svelte';
import User from '../icons/User.svelte';
import Personalization from './Settings/Personalization.svelte';
import { updateUserSettings } from '$lib/apis/users';
const i18n = getContext('i18n');
@ -26,7 +27,7 @@
console.log(updated);
await settings.set({ ...$settings, ...updated });
await models.set(await getModels());
localStorage.setItem('settings', JSON.stringify($settings));
await updateUserSettings(localStorage.token, { ui: $settings });
};
const getModels = async () => {

View File
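Settings are now persisted server-side per user via `updateUserSettings` instead of only to `localStorage`. A sketch of the assumed helper in `$lib/apis/users`, inferred from the call above (the endpoint path is an assumption):

import { WEBUI_API_BASE_URL } from '$lib/constants';

export const updateUserSettings = async (token: string, settings: object) => {
	const res = await fetch(`${WEBUI_API_BASE_URL}/users/user/settings/update`, {
		method: 'POST',
		headers: {
			'Content-Type': 'application/json',
			Authorization: `Bearer ${token}`
		},
		body: JSON.stringify(settings)
	});
	if (!res.ok) throw await res.json();
	return res.json();
};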

@ -1,9 +1,9 @@
<script lang="ts">
import { getContext, onMount } from 'svelte';
import { models, config } from '$lib/stores';
import { toast } from 'svelte-sonner';
import { deleteSharedChatById, getChatById, shareChatById } from '$lib/apis/chats';
import { modelfiles } from '$lib/stores';
import { copyToClipboard } from '$lib/utils';
import Modal from '../common/Modal.svelte';
@ -43,9 +43,7 @@
tab.postMessage(
JSON.stringify({
chat: _chat,
modelfiles: $modelfiles.filter((modelfile) =>
_chat.models.includes(modelfile.tagName)
)
models: $models.filter((m) => _chat.models.includes(m.id))
}),
'*'
);
@ -136,16 +134,18 @@
<div class="flex justify-end">
<div class="flex flex-col items-end space-x-1 mt-1.5">
<div class="flex gap-1">
<button
class=" self-center px-3.5 py-2 rounded-xl text-sm font-medium bg-gray-100 hover:bg-gray-200 text-gray-800 dark:bg-gray-850 dark:hover:bg-gray-800 dark:text-white"
type="button"
on:click={() => {
shareChat();
show = false;
}}
>
{$i18n.t('Share to OpenWebUI Community')}
</button>
{#if $config?.features.enable_community_sharing}
<button
class=" self-center px-3.5 py-2 rounded-xl text-sm font-medium bg-gray-100 hover:bg-gray-200 text-gray-800 dark:bg-gray-850 dark:hover:bg-gray-800 dark:text-white"
type="button"
on:click={() => {
shareChat();
show = false;
}}
>
{$i18n.t('Share to OpenWebUI Community')}
</button>
{/if}
<button
class=" self-center flex items-center gap-1 px-3.5 py-2 rounded-xl text-sm font-medium bg-emerald-600 hover:bg-emerald-500 text-white"

Some files were not shown because too many files have changed in this diff