diff --git a/backend/open_webui/utils/task.py b/backend/open_webui/utils/task.py
index ebb7483ba..f5ba75ebe 100644
--- a/backend/open_webui/utils/task.py
+++ b/backend/open_webui/utils/task.py
@@ -217,6 +217,24 @@ def tags_generation_template(
return template
+def image_prompt_generation_template(
+ template: str, messages: list[dict], user: Optional[dict] = None
+) -> str:
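+    """Render the image prompt generation template: substitute the last user
+    message, the conversation history, and optional user info (name, location)
+    into the configured template string."""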
+ prompt = get_last_user_message(messages)
+ template = replace_prompt_variable(template, prompt)
+ template = replace_messages_variable(template, messages)
+
+ template = prompt_template(
+ template,
+ **(
+ {"user_name": user.get("name"), "user_location": user.get("location")}
+ if user
+ else {}
+ ),
+ )
+ return template
+
+
def emoji_generation_template(
template: str, prompt: str, user: Optional[dict] = None
) -> str:
diff --git a/backend/requirements.txt b/backend/requirements.txt
index f951d78db..eecb9c4a5 100644
--- a/backend/requirements.txt
+++ b/backend/requirements.txt
@@ -40,14 +40,15 @@ tiktoken
langchain==0.3.7
langchain-community==0.3.7
-langchain-chroma==0.1.4
fake-useragent==1.5.1
-chromadb==0.5.15
+chromadb==0.6.2
pymilvus==2.5.0
qdrant-client~=1.12.0
opensearch-py==2.7.1
+
+transformers
sentence-transformers==3.3.1
colbert-ai==0.2.21
einops==0.8.0
@@ -88,7 +89,7 @@ pytube==15.0.0
extract_msg
pydub
-duckduckgo-search~=6.3.5
+duckduckgo-search~=7.2.1
## Google Drive
google-api-python-client
@@ -101,6 +102,7 @@ pytest~=8.3.2
pytest-docker~=3.1.1
googleapis-common-protos==1.63.2
+google-cloud-storage==2.19.0
## LDAP
ldap3==2.9.1
diff --git a/docs/apache.md b/docs/apache.md
index ebbcc17f4..1bd920593 100644
--- a/docs/apache.md
+++ b/docs/apache.md
@@ -16,6 +16,9 @@ For the UI configuration, you can set up the Apache VirtualHost as follows:
ProxyPass / http://server.com:3000/ nocanon
ProxyPassReverse / http://server.com:3000/
+    # Needed after Open WebUI 0.5 for WebSocket support
+ ProxyPass / ws://server.com:3000/ nocanon
+ ProxyPassReverse / ws://server.com:3000/
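+    # Requires mod_proxy and mod_proxy_wstunnel to be enabled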
```
@@ -32,6 +35,9 @@ Enable the site first before you can request SSL:
ProxyPass / http://server.com:3000/ nocanon
ProxyPassReverse / http://server.com:3000/
+    # Needed after Open WebUI 0.5 for WebSocket support
+ ProxyPass / ws://server.com:3000/ nocanon
+ ProxyPassReverse / ws://server.com:3000/
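+    # Requires mod_proxy and mod_proxy_wstunnel to be enabled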
SSLEngine on
SSLCertificateFile /etc/ssl/virtualmin/170514456861234/ssl.cert
diff --git a/package-lock.json b/package-lock.json
index 0d78397ef..c98e814d9 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -1,12 +1,12 @@
{
"name": "open-webui",
- "version": "0.5.4",
+ "version": "0.5.7",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"name": "open-webui",
- "version": "0.5.4",
+ "version": "0.5.7",
"dependencies": {
"@codemirror/lang-javascript": "^6.2.2",
"@codemirror/lang-python": "^6.1.6",
@@ -41,7 +41,7 @@
"i18next-resources-to-backend": "^1.2.0",
"idb": "^7.1.1",
"js-sha256": "^0.10.1",
- "katex": "^0.16.9",
+ "katex": "^0.16.21",
"marked": "^9.1.0",
"mermaid": "^10.9.3",
"paneforge": "^0.0.6",
@@ -89,7 +89,7 @@
"tailwindcss": "^3.3.3",
"tslib": "^2.4.1",
"typescript": "^5.5.4",
- "vite": "^5.3.5",
+ "vite": "^5.4.14",
"vitest": "^1.6.0"
},
"engines": {
@@ -7110,13 +7110,14 @@
}
},
"node_modules/katex": {
- "version": "0.16.10",
- "resolved": "https://registry.npmjs.org/katex/-/katex-0.16.10.tgz",
- "integrity": "sha512-ZiqaC04tp2O5utMsl2TEZTXxa6WSC4yo0fv5ML++D3QZv/vx2Mct0mTlRx3O+uUkjfuAgOkzsCmq5MiUEsDDdA==",
+ "version": "0.16.21",
+ "resolved": "https://registry.npmjs.org/katex/-/katex-0.16.21.tgz",
+ "integrity": "sha512-XvqR7FgOHtWupfMiigNzmh+MgUVmDGU2kXZm899ZkPfcuoPuFxyHmXsgATDpFZDAXCI8tvinaVcDo8PIIJSo4A==",
"funding": [
"https://opencollective.com/katex",
"https://github.com/sponsors/katex"
],
+ "license": "MIT",
"dependencies": {
"commander": "^8.3.0"
},
@@ -11677,9 +11678,10 @@
}
},
"node_modules/vite": {
- "version": "5.4.6",
- "resolved": "https://registry.npmjs.org/vite/-/vite-5.4.6.tgz",
- "integrity": "sha512-IeL5f8OO5nylsgzd9tq4qD2QqI0k2CQLGrWD0rCN0EQJZpBK5vJAx0I+GDkMOXxQX/OfFHMuLIx6ddAxGX/k+Q==",
+ "version": "5.4.14",
+ "resolved": "https://registry.npmjs.org/vite/-/vite-5.4.14.tgz",
+ "integrity": "sha512-EK5cY7Q1D8JNhSaPKVK4pwBFvaTmZxEnoKXLG/U9gmdDcihQGNzFlgIvaxezFR4glP1LsuiedwMBqCXH3wZccA==",
+ "license": "MIT",
"dependencies": {
"esbuild": "^0.21.3",
"postcss": "^8.4.43",
diff --git a/package.json b/package.json
index 6a0f451fc..a2463d9e3 100644
--- a/package.json
+++ b/package.json
@@ -1,9 +1,10 @@
{
"name": "open-webui",
- "version": "0.5.4",
+ "version": "0.5.7",
"private": true,
"scripts": {
"dev": "npm run pyodide:fetch && vite dev --host",
+ "dev:5050": "npm run pyodide:fetch && vite dev --port 5050",
"build": "npm run pyodide:fetch && vite build",
"preview": "vite preview",
"check": "svelte-kit sync && svelte-check --tsconfig ./tsconfig.json",
@@ -44,7 +45,7 @@
"tailwindcss": "^3.3.3",
"tslib": "^2.4.1",
"typescript": "^5.5.4",
- "vite": "^5.3.5",
+ "vite": "^5.4.14",
"vitest": "^1.6.0"
},
"type": "module",
@@ -82,7 +83,7 @@
"i18next-resources-to-backend": "^1.2.0",
"idb": "^7.1.1",
"js-sha256": "^0.10.1",
- "katex": "^0.16.9",
+ "katex": "^0.16.21",
"marked": "^9.1.0",
"mermaid": "^10.9.3",
"paneforge": "^0.0.6",
diff --git a/pyproject.toml b/pyproject.toml
index 63a97e69a..edd01db8f 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -47,14 +47,14 @@ dependencies = [
"langchain==0.3.7",
"langchain-community==0.3.7",
- "langchain-chroma==0.1.4",
"fake-useragent==1.5.1",
- "chromadb==0.5.15",
+ "chromadb==0.6.2",
"pymilvus==2.5.0",
"qdrant-client~=1.12.0",
"opensearch-py==2.7.1",
+ "transformers",
"sentence-transformers==3.3.1",
"colbert-ai==0.2.21",
"einops==0.8.0",
@@ -94,15 +94,22 @@ dependencies = [
"extract_msg",
"pydub",
- "duckduckgo-search~=6.3.5",
+ "duckduckgo-search~=7.2.1",
+
+ "google-api-python-client",
+ "google-auth-httplib2",
+ "google-auth-oauthlib",
"docker~=7.1.0",
"pytest~=8.3.2",
"pytest-docker~=3.1.1",
+ "moto[s3]>=5.0.26",
"googleapis-common-protos==1.63.2",
+ "google-cloud-storage==2.19.0",
- "ldap3==2.9.1"
+ "ldap3==2.9.1",
+ "gcp-storage-emulator>=2024.8.3",
]
readme = "README.md"
requires-python = ">= 3.11, < 3.13.0a1"
diff --git a/src/app.css b/src/app.css
index fcc438bea..dadfda78f 100644
--- a/src/app.css
+++ b/src/app.css
@@ -53,11 +53,11 @@ math {
}
.markdown-prose {
- @apply prose dark:prose-invert prose-headings:font-semibold prose-hr:my-4 prose-hr:border-gray-100 prose-hr:dark:border-gray-800 prose-p:my-0 prose-img:my-1 prose-headings:my-1 prose-pre:my-0 prose-table:my-0 prose-blockquote:my-0 prose-ul:-my-0 prose-ol:-my-0 prose-li:-my-0 whitespace-pre-line;
+ @apply prose dark:prose-invert prose-blockquote:border-gray-100 prose-blockquote:dark:border-gray-800 prose-blockquote:border-l-2 prose-blockquote:not-italic prose-blockquote:font-normal prose-headings:font-semibold prose-hr:my-4 prose-hr:border-gray-100 prose-hr:dark:border-gray-800 prose-p:my-0 prose-img:my-1 prose-headings:my-1 prose-pre:my-0 prose-table:my-0 prose-blockquote:my-0 prose-ul:-my-0 prose-ol:-my-0 prose-li:-my-0 whitespace-pre-line;
}
.markdown-prose-xs {
- @apply text-xs prose dark:prose-invert prose-headings:font-semibold prose-hr:my-0 prose-hr:border-gray-100 prose-hr:dark:border-gray-800 prose-p:my-0 prose-img:my-1 prose-headings:my-1 prose-pre:my-0 prose-table:my-0 prose-blockquote:my-0 prose-ul:-my-0 prose-ol:-my-0 prose-li:-my-0 whitespace-pre-line;
+ @apply text-xs prose dark:prose-invert prose-blockquote:border-gray-100 prose-blockquote:dark:border-gray-800 prose-blockquote:border-l-2 prose-blockquote:not-italic prose-blockquote:font-normal prose-headings:font-semibold prose-hr:my-0 prose-hr:border-gray-100 prose-hr:dark:border-gray-800 prose-p:my-0 prose-img:my-1 prose-headings:my-1 prose-pre:my-0 prose-table:my-0 prose-blockquote:my-0 prose-ul:-my-0 prose-ol:-my-0 prose-li:-my-0 whitespace-pre-line;
}
.markdown a {
@@ -68,6 +68,19 @@ math {
font-family: 'Archivo', sans-serif;
}
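+/* -webkit-app-region marks parts of a frameless window (Chromium/Electron-style desktop shells) as draggable; interactive children opt back out via no-drag */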
+.drag-region {
+ -webkit-app-region: drag;
+}
+
+.drag-region a,
+.drag-region button {
+ -webkit-app-region: no-drag;
+}
+
+.no-drag-region {
+ -webkit-app-region: no-drag;
+}
+
iframe {
@apply rounded-lg;
}
@@ -102,18 +115,62 @@ li p {
select {
background-image: url("data:image/svg+xml;charset=utf-8,%3Csvg xmlns='http://www.w3.org/2000/svg' fill='none' viewBox='0 0 20 20'%3E%3Cpath stroke='%236B7280' stroke-linecap='round' stroke-linejoin='round' stroke-width='1.5' d='m6 8 4 4 4-4'/%3E%3C/svg%3E");
- background-position: right 0.5rem center;
+ background-position: right 0rem center;
background-repeat: no-repeat;
background-size: 1.5em 1.5em;
- padding-right: 2.5rem;
-webkit-print-color-adjust: exact;
print-color-adjust: exact;
+ /* padding-right: 2.5rem; */
/* for Firefox */
-moz-appearance: none;
/* for Chrome */
-webkit-appearance: none;
}
+@keyframes shimmer {
+ 0% {
+ background-position: 200% 0;
+ }
+ 100% {
+ background-position: -200% 0;
+ }
+}
+
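+/* Animated gradient "shimmer" text effect (e.g. for in-progress status text); the plain color is a fallback where background-clip: text is unsupported */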
+.shimmer {
+ background: linear-gradient(90deg, #9a9b9e 25%, #2a2929 50%, #9a9b9e 75%);
+ background-size: 200% 100%;
+ background-clip: text;
+ -webkit-background-clip: text;
+ -webkit-text-fill-color: transparent;
+ animation: shimmer 4s linear infinite;
+ color: #818286; /* Fallback color */
+}
+
+:global(.dark) .shimmer {
+ background: linear-gradient(90deg, #818286 25%, #eae5e5 50%, #818286 75%);
+ background-size: 200% 100%;
+ background-clip: text;
+ -webkit-background-clip: text;
+ -webkit-text-fill-color: transparent;
+ animation: shimmer 4s linear infinite;
+ color: #a1a3a7; /* Darker fallback color for dark mode */
+}
+
+@keyframes smoothFadeIn {
+ 0% {
+ opacity: 0;
+ transform: translateY(-10px);
+ }
+ 100% {
+ opacity: 1;
+ transform: translateY(0);
+ }
+}
+
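+/* Fade status descriptions in from slightly above as they appear */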
+.status-description {
+ animation: smoothFadeIn 0.2s forwards;
+}
+
.katex-mathml {
display: none;
}
diff --git a/src/lib/apis/ollama/index.ts b/src/lib/apis/ollama/index.ts
index 16eed9f21..b96567e63 100644
--- a/src/lib/apis/ollama/index.ts
+++ b/src/lib/apis/ollama/index.ts
@@ -360,12 +360,7 @@ export const generateChatCompletion = async (token: string = '', body: object) =
return [res, controller];
};
-export const createModel = async (
- token: string,
- tagName: string,
- content: string,
- urlIdx: string | null = null
-) => {
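+// `payload` is now forwarded verbatim as the request body, so callers can pass whichever
+// fields Ollama's /api/create endpoint expects instead of the old fixed { name, modelfile } shape.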
+export const createModel = async (token: string, payload: object, urlIdx: string | null = null) => {
let error = null;
const res = await fetch(
@@ -377,10 +372,7 @@ export const createModel = async (
'Content-Type': 'application/json',
Authorization: `Bearer ${token}`
},
- body: JSON.stringify({
- name: tagName,
- modelfile: content
- })
+ body: JSON.stringify(payload)
}
).catch((err) => {
error = err;
diff --git a/src/lib/components/admin/Functions.svelte b/src/lib/components/admin/Functions.svelte
index 03da04ea7..80c7d11cd 100644
--- a/src/lib/components/admin/Functions.svelte
+++ b/src/lib/components/admin/Functions.svelte
@@ -61,7 +61,7 @@
const shareHandler = async (func) => {
const item = await getFunctionById(localStorage.token, func.id).catch((error) => {
- toast.error(error);
+ toast.error(`${error}`);
return null;
});
@@ -88,7 +88,7 @@
const cloneHandler = async (func) => {
const _function = await getFunctionById(localStorage.token, func.id).catch((error) => {
- toast.error(error);
+ toast.error(`${error}`);
return null;
});
@@ -104,7 +104,7 @@
const exportHandler = async (func) => {
const _function = await getFunctionById(localStorage.token, func.id).catch((error) => {
- toast.error(error);
+ toast.error(`${error}`);
return null;
});
@@ -118,7 +118,7 @@
const deleteHandler = async (func) => {
const res = await deleteFunctionById(localStorage.token, func.id).catch((error) => {
- toast.error(error);
+ toast.error(`${error}`);
return null;
});
@@ -132,7 +132,7 @@
const toggleGlobalHandler = async (func) => {
const res = await toggleGlobalById(localStorage.token, func.id).catch((error) => {
- toast.error(error);
+ toast.error(`${error}`);
});
if (res) {
@@ -418,7 +418,7 @@
class="flex text-xs items-center space-x-1 px-3 py-1.5 rounded-xl bg-gray-50 hover:bg-gray-100 dark:bg-gray-800 dark:hover:bg-gray-700 dark:text-gray-200 transition"
on:click={async () => {
const _functions = await exportFunctions(localStorage.token).catch((error) => {
- toast.error(error);
+ toast.error(`${error}`);
return null;
});
@@ -510,7 +510,7 @@
for (const func of _functions) {
const res = await createNewFunction(localStorage.token, func).catch((error) => {
- toast.error(error);
+ toast.error(`${error}`);
return null;
});
}
diff --git a/src/lib/components/admin/Settings/Connections.svelte b/src/lib/components/admin/Settings/Connections.svelte
index ddc19bb8f..35e6e0293 100644
--- a/src/lib/components/admin/Settings/Connections.svelte
+++ b/src/lib/components/admin/Settings/Connections.svelte
@@ -43,9 +43,8 @@
const updateOpenAIHandler = async () => {
if (ENABLE_OPENAI_API !== null) {
- OPENAI_API_BASE_URLS = OPENAI_API_BASE_URLS.filter(
- (url, urlIdx) => OPENAI_API_BASE_URLS.indexOf(url) === urlIdx && url !== ''
- ).map((url) => url.replace(/\/$/, ''));
+ // Remove trailing slashes
+ OPENAI_API_BASE_URLS = OPENAI_API_BASE_URLS.map((url) => url.replace(/\/$/, ''));
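+			// Note: duplicate/empty URLs are no longer filtered out here, keeping entries aligned with OPENAI_API_KEYS and the index-keyed OPENAI_API_CONFIGS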
// Check if API KEYS length is same than API URLS length
if (OPENAI_API_KEYS.length !== OPENAI_API_BASE_URLS.length) {
@@ -69,7 +68,7 @@
OPENAI_API_KEYS: OPENAI_API_KEYS,
OPENAI_API_CONFIGS: OPENAI_API_CONFIGS
}).catch((error) => {
- toast.error(error);
+ toast.error(`${error}`);
});
if (res) {
@@ -81,24 +80,15 @@
const updateOllamaHandler = async () => {
if (ENABLE_OLLAMA_API !== null) {
- // Remove duplicate URLs
- OLLAMA_BASE_URLS = OLLAMA_BASE_URLS.filter(
- (url, urlIdx) => OLLAMA_BASE_URLS.indexOf(url) === urlIdx && url !== ''
- ).map((url) => url.replace(/\/$/, ''));
-
- console.log(OLLAMA_BASE_URLS);
-
- if (OLLAMA_BASE_URLS.length === 0) {
- ENABLE_OLLAMA_API = false;
- toast.info($i18n.t('Ollama API disabled'));
- }
+ // Remove trailing slashes
+ OLLAMA_BASE_URLS = OLLAMA_BASE_URLS.map((url) => url.replace(/\/$/, ''));
const res = await updateOllamaConfig(localStorage.token, {
ENABLE_OLLAMA_API: ENABLE_OLLAMA_API,
OLLAMA_BASE_URLS: OLLAMA_BASE_URLS,
OLLAMA_API_CONFIGS: OLLAMA_API_CONFIGS
}).catch((error) => {
- toast.error(error);
+ toast.error(`${error}`);
});
if (res) {
@@ -111,14 +101,14 @@
const addOpenAIConnectionHandler = async (connection) => {
OPENAI_API_BASE_URLS = [...OPENAI_API_BASE_URLS, connection.url];
OPENAI_API_KEYS = [...OPENAI_API_KEYS, connection.key];
- OPENAI_API_CONFIGS[connection.url] = connection.config;
+		OPENAI_API_CONFIGS[OPENAI_API_BASE_URLS.length - 1] = connection.config;
await updateOpenAIHandler();
};
const addOllamaConnectionHandler = async (connection) => {
OLLAMA_BASE_URLS = [...OLLAMA_BASE_URLS, connection.url];
- OLLAMA_API_CONFIGS[connection.url] = connection.config;
+		OLLAMA_API_CONFIGS[OLLAMA_BASE_URLS.length - 1] = connection.config;
await updateOllamaHandler();
};
@@ -148,15 +138,17 @@
OLLAMA_API_CONFIGS = ollamaConfig.OLLAMA_API_CONFIGS;
if (ENABLE_OPENAI_API) {
- for (const url of OPENAI_API_BASE_URLS) {
- if (!OPENAI_API_CONFIGS[url]) {
- OPENAI_API_CONFIGS[url] = {};
+			// Iterate with the index so configs can be keyed by idx
+ for (const [idx, url] of OPENAI_API_BASE_URLS.entries()) {
+ if (!OPENAI_API_CONFIGS[idx]) {
+ // Legacy support, url as key
+ OPENAI_API_CONFIGS[idx] = OPENAI_API_CONFIGS[url] || {};
}
}
OPENAI_API_BASE_URLS.forEach(async (url, idx) => {
- OPENAI_API_CONFIGS[url] = OPENAI_API_CONFIGS[url] || {};
- if (!(OPENAI_API_CONFIGS[url]?.enable ?? true)) {
+ OPENAI_API_CONFIGS[idx] = OPENAI_API_CONFIGS[idx] || {};
+ if (!(OPENAI_API_CONFIGS[idx]?.enable ?? true)) {
return;
}
const res = await getOpenAIModels(localStorage.token, idx);
@@ -167,9 +159,9 @@
}
if (ENABLE_OLLAMA_API) {
- for (const url of OLLAMA_BASE_URLS) {
- if (!OLLAMA_API_CONFIGS[url]) {
- OLLAMA_API_CONFIGS[url] = {};
+ for (const [idx, url] of OLLAMA_BASE_URLS.entries()) {
+ if (!OLLAMA_API_CONFIGS[idx]) {
+ OLLAMA_API_CONFIGS[idx] = OLLAMA_API_CONFIGS[url] || {};
}
}
}
@@ -242,7 +234,7 @@
pipeline={pipelineUrls[url] ? true : false}
bind:url
bind:key={OPENAI_API_KEYS[idx]}
- bind:config={OPENAI_API_CONFIGS[url]}
+ bind:config={OPENAI_API_CONFIGS[idx]}
onSubmit={() => {
updateOpenAIHandler();
}}
@@ -251,6 +243,8 @@
(url, urlIdx) => idx !== urlIdx
);
OPENAI_API_KEYS = OPENAI_API_KEYS.filter((key, keyIdx) => idx !== keyIdx);
+
+ delete OPENAI_API_CONFIGS[idx];
}}
/>
{/each}
@@ -301,13 +295,14 @@
{#each OLLAMA_BASE_URLS as url, idx}
{
updateOllamaHandler();
}}
onDelete={() => {
OLLAMA_BASE_URLS = OLLAMA_BASE_URLS.filter((url, urlIdx) => idx !== urlIdx);
+ delete OLLAMA_API_CONFIGS[idx];
}}
/>
{/each}
diff --git a/src/lib/components/admin/Settings/Connections/AddConnectionModal.svelte b/src/lib/components/admin/Settings/Connections/AddConnectionModal.svelte
index 3f24dc6d7..a8726a546 100644
--- a/src/lib/components/admin/Settings/Connections/AddConnectionModal.svelte
+++ b/src/lib/components/admin/Settings/Connections/AddConnectionModal.svelte
@@ -37,7 +37,7 @@
const verifyOllamaHandler = async () => {
const res = await verifyOllamaConnection(localStorage.token, url, key).catch((error) => {
- toast.error(error);
+ toast.error(`${error}`);
});
if (res) {
@@ -47,7 +47,7 @@
const verifyOpenAIHandler = async () => {
const res = await verifyOpenAIConnection(localStorage.token, url, key).catch((error) => {
- toast.error(error);
+ toast.error(`${error}`);
});
if (res) {
diff --git a/src/lib/components/admin/Settings/Connections/ManageOllamaModal.svelte b/src/lib/components/admin/Settings/Connections/ManageOllamaModal.svelte
index 220214ed1..543db060e 100644
--- a/src/lib/components/admin/Settings/Connections/ManageOllamaModal.svelte
+++ b/src/lib/components/admin/Settings/Connections/ManageOllamaModal.svelte
@@ -3,527 +3,13 @@
import { getContext, onMount } from 'svelte';
const i18n = getContext('i18n');
- import { WEBUI_NAME, models, MODEL_DOWNLOAD_POOL, user, config } from '$lib/stores';
- import { splitStream } from '$lib/utils';
-
- import {
- createModel,
- deleteModel,
- downloadModel,
- getOllamaUrls,
- getOllamaVersion,
- pullModel,
- uploadModel,
- getOllamaConfig,
- getOllamaModels
- } from '$lib/apis/ollama';
- import { getModels } from '$lib/apis';
-
import Modal from '$lib/components/common/Modal.svelte';
- import Tooltip from '$lib/components/common/Tooltip.svelte';
- import ModelDeleteConfirmDialog from '$lib/components/common/ConfirmDialog.svelte';
- import Spinner from '$lib/components/common/Spinner.svelte';
+ import ManageOllama from '../Models/Manage/ManageOllama.svelte';
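+	// The model management logic previously implemented in this file now lives in the shared ManageOllama component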
export let show = false;
-
- let modelUploadInputElement: HTMLInputElement;
- let showModelDeleteConfirm = false;
-
- let loading = true;
-
- // Models
export let urlIdx: number | null = null;
-
- let ollamaModels = [];
-
- let updateModelId = null;
- let updateProgress = null;
- let showExperimentalOllama = false;
-
- const MAX_PARALLEL_DOWNLOADS = 3;
-
- let modelTransferring = false;
- let modelTag = '';
-
- let createModelLoading = false;
- let createModelTag = '';
- let createModelContent = '';
- let createModelDigest = '';
- let createModelPullProgress = null;
-
- let digest = '';
- let pullProgress = null;
-
- let modelUploadMode = 'file';
- let modelInputFile: File[] | null = null;
- let modelFileUrl = '';
- let modelFileContent = `TEMPLATE """{{ .System }}\nUSER: {{ .Prompt }}\nASSISTANT: """\nPARAMETER num_ctx 4096\nPARAMETER stop ""\nPARAMETER stop "USER:"\nPARAMETER stop "ASSISTANT:"`;
- let modelFileDigest = '';
-
- let uploadProgress = null;
- let uploadMessage = '';
-
- let deleteModelTag = '';
-
- const updateModelsHandler = async () => {
- for (const model of ollamaModels) {
- console.log(model);
-
- updateModelId = model.id;
- const [res, controller] = await pullModel(localStorage.token, model.id, urlIdx).catch(
- (error) => {
- toast.error(error);
- return null;
- }
- );
-
- if (res) {
- const reader = res.body
- .pipeThrough(new TextDecoderStream())
- .pipeThrough(splitStream('\n'))
- .getReader();
-
- while (true) {
- try {
- const { value, done } = await reader.read();
- if (done) break;
-
- let lines = value.split('\n');
-
- for (const line of lines) {
- if (line !== '') {
- let data = JSON.parse(line);
-
- console.log(data);
- if (data.error) {
- throw data.error;
- }
- if (data.detail) {
- throw data.detail;
- }
- if (data.status) {
- if (data.digest) {
- updateProgress = 0;
- if (data.completed) {
- updateProgress = Math.round((data.completed / data.total) * 1000) / 10;
- } else {
- updateProgress = 100;
- }
- } else {
- toast.success(data.status);
- }
- }
- }
- }
- } catch (error) {
- console.log(error);
- }
- }
- }
- }
-
- updateModelId = null;
- updateProgress = null;
- };
-
- const pullModelHandler = async () => {
- const sanitizedModelTag = modelTag.trim().replace(/^ollama\s+(run|pull)\s+/, '');
- console.log($MODEL_DOWNLOAD_POOL);
- if ($MODEL_DOWNLOAD_POOL[sanitizedModelTag]) {
- toast.error(
- $i18n.t(`Model '{{modelTag}}' is already in queue for downloading.`, {
- modelTag: sanitizedModelTag
- })
- );
- return;
- }
- if (Object.keys($MODEL_DOWNLOAD_POOL).length === MAX_PARALLEL_DOWNLOADS) {
- toast.error(
- $i18n.t('Maximum of 3 models can be downloaded simultaneously. Please try again later.')
- );
- return;
- }
-
- const [res, controller] = await pullModel(localStorage.token, sanitizedModelTag, urlIdx).catch(
- (error) => {
- toast.error(error);
- return null;
- }
- );
-
- if (res) {
- const reader = res.body
- .pipeThrough(new TextDecoderStream())
- .pipeThrough(splitStream('\n'))
- .getReader();
-
- MODEL_DOWNLOAD_POOL.set({
- ...$MODEL_DOWNLOAD_POOL,
- [sanitizedModelTag]: {
- ...$MODEL_DOWNLOAD_POOL[sanitizedModelTag],
- abortController: controller,
- reader,
- done: false
- }
- });
-
- while (true) {
- try {
- const { value, done } = await reader.read();
- if (done) break;
-
- let lines = value.split('\n');
-
- for (const line of lines) {
- if (line !== '') {
- let data = JSON.parse(line);
- console.log(data);
- if (data.error) {
- throw data.error;
- }
- if (data.detail) {
- throw data.detail;
- }
-
- if (data.status) {
- if (data.digest) {
- let downloadProgress = 0;
- if (data.completed) {
- downloadProgress = Math.round((data.completed / data.total) * 1000) / 10;
- } else {
- downloadProgress = 100;
- }
-
- MODEL_DOWNLOAD_POOL.set({
- ...$MODEL_DOWNLOAD_POOL,
- [sanitizedModelTag]: {
- ...$MODEL_DOWNLOAD_POOL[sanitizedModelTag],
- pullProgress: downloadProgress,
- digest: data.digest
- }
- });
- } else {
- toast.success(data.status);
-
- MODEL_DOWNLOAD_POOL.set({
- ...$MODEL_DOWNLOAD_POOL,
- [sanitizedModelTag]: {
- ...$MODEL_DOWNLOAD_POOL[sanitizedModelTag],
- done: data.status === 'success'
- }
- });
- }
- }
- }
- }
- } catch (error) {
- console.log(error);
- if (typeof error !== 'string') {
- error = error.message;
- }
-
- toast.error(error);
- // opts.callback({ success: false, error, modelName: opts.modelName });
- }
- }
-
- console.log($MODEL_DOWNLOAD_POOL[sanitizedModelTag]);
-
- if ($MODEL_DOWNLOAD_POOL[sanitizedModelTag].done) {
- toast.success(
- $i18n.t(`Model '{{modelName}}' has been successfully downloaded.`, {
- modelName: sanitizedModelTag
- })
- );
-
- models.set(await getModels(localStorage.token));
- } else {
- toast.error($i18n.t('Download canceled'));
- }
-
- delete $MODEL_DOWNLOAD_POOL[sanitizedModelTag];
-
- MODEL_DOWNLOAD_POOL.set({
- ...$MODEL_DOWNLOAD_POOL
- });
- }
-
- modelTag = '';
- modelTransferring = false;
- };
-
- const uploadModelHandler = async () => {
- modelTransferring = true;
-
- let uploaded = false;
- let fileResponse = null;
- let name = '';
-
- if (modelUploadMode === 'file') {
- const file = modelInputFile ? modelInputFile[0] : null;
-
- if (file) {
- uploadMessage = 'Uploading...';
-
- fileResponse = await uploadModel(localStorage.token, file, urlIdx).catch((error) => {
- toast.error(error);
- return null;
- });
- }
- } else {
- uploadProgress = 0;
- fileResponse = await downloadModel(localStorage.token, modelFileUrl, urlIdx).catch(
- (error) => {
- toast.error(error);
- return null;
- }
- );
- }
-
- if (fileResponse && fileResponse.ok) {
- const reader = fileResponse.body
- .pipeThrough(new TextDecoderStream())
- .pipeThrough(splitStream('\n'))
- .getReader();
-
- while (true) {
- const { value, done } = await reader.read();
- if (done) break;
-
- try {
- let lines = value.split('\n');
-
- for (const line of lines) {
- if (line !== '') {
- let data = JSON.parse(line.replace(/^data: /, ''));
-
- if (data.progress) {
- if (uploadMessage) {
- uploadMessage = '';
- }
- uploadProgress = data.progress;
- }
-
- if (data.error) {
- throw data.error;
- }
-
- if (data.done) {
- modelFileDigest = data.blob;
- name = data.name;
- uploaded = true;
- }
- }
- }
- } catch (error) {
- console.log(error);
- }
- }
- } else {
- const error = await fileResponse?.json();
- toast.error(error?.detail ?? error);
- }
-
- if (uploaded) {
- const res = await createModel(
- localStorage.token,
- `${name}:latest`,
- `FROM @${modelFileDigest}\n${modelFileContent}`
- );
-
- if (res && res.ok) {
- const reader = res.body
- .pipeThrough(new TextDecoderStream())
- .pipeThrough(splitStream('\n'))
- .getReader();
-
- while (true) {
- const { value, done } = await reader.read();
- if (done) break;
-
- try {
- let lines = value.split('\n');
-
- for (const line of lines) {
- if (line !== '') {
- console.log(line);
- let data = JSON.parse(line);
- console.log(data);
-
- if (data.error) {
- throw data.error;
- }
- if (data.detail) {
- throw data.detail;
- }
-
- if (data.status) {
- if (
- !data.digest &&
- !data.status.includes('writing') &&
- !data.status.includes('sha256')
- ) {
- toast.success(data.status);
- } else {
- if (data.digest) {
- digest = data.digest;
-
- if (data.completed) {
- pullProgress = Math.round((data.completed / data.total) * 1000) / 10;
- } else {
- pullProgress = 100;
- }
- }
- }
- }
- }
- }
- } catch (error) {
- console.log(error);
- toast.error(error);
- }
- }
- }
- }
-
- modelFileUrl = '';
-
- if (modelUploadInputElement) {
- modelUploadInputElement.value = '';
- }
- modelInputFile = null;
- modelTransferring = false;
- uploadProgress = null;
-
- models.set(await getModels(localStorage.token));
- };
-
- const deleteModelHandler = async () => {
- const res = await deleteModel(localStorage.token, deleteModelTag, urlIdx).catch((error) => {
- toast.error(error);
- });
-
- if (res) {
- toast.success($i18n.t(`Deleted {{deleteModelTag}}`, { deleteModelTag }));
- }
-
- deleteModelTag = '';
- models.set(await getModels(localStorage.token));
- };
-
- const cancelModelPullHandler = async (model: string) => {
- const { reader, abortController } = $MODEL_DOWNLOAD_POOL[model];
- if (abortController) {
- abortController.abort();
- }
- if (reader) {
- await reader.cancel();
- delete $MODEL_DOWNLOAD_POOL[model];
- MODEL_DOWNLOAD_POOL.set({
- ...$MODEL_DOWNLOAD_POOL
- });
- await deleteModel(localStorage.token, model);
- toast.success(`${model} download has been canceled`);
- }
- };
-
- const createModelHandler = async () => {
- createModelLoading = true;
- const res = await createModel(
- localStorage.token,
- createModelTag,
- createModelContent,
- urlIdx
- ).catch((error) => {
- toast.error(error);
- return null;
- });
-
- if (res && res.ok) {
- const reader = res.body
- .pipeThrough(new TextDecoderStream())
- .pipeThrough(splitStream('\n'))
- .getReader();
-
- while (true) {
- const { value, done } = await reader.read();
- if (done) break;
-
- try {
- let lines = value.split('\n');
-
- for (const line of lines) {
- if (line !== '') {
- console.log(line);
- let data = JSON.parse(line);
- console.log(data);
-
- if (data.error) {
- throw data.error;
- }
- if (data.detail) {
- throw data.detail;
- }
-
- if (data.status) {
- if (
- !data.digest &&
- !data.status.includes('writing') &&
- !data.status.includes('sha256')
- ) {
- toast.success(data.status);
- } else {
- if (data.digest) {
- createModelDigest = data.digest;
-
- if (data.completed) {
- createModelPullProgress =
- Math.round((data.completed / data.total) * 1000) / 10;
- } else {
- createModelPullProgress = 100;
- }
- }
- }
- }
- }
- }
- } catch (error) {
- console.log(error);
- toast.error(error);
- }
- }
- }
-
- models.set(await getModels(localStorage.token));
-
- createModelLoading = false;
-
- createModelTag = '';
- createModelContent = '';
- createModelDigest = '';
- createModelPullProgress = null;
- };
-
- const init = async () => {
- loading = true;
- ollamaModels = await getOllamaModels(localStorage.token, urlIdx);
-
- console.log(ollamaModels);
- loading = false;
- };
-
- $: if (show) {
- init();
- }
- {
- deleteModelHandler();
- }}
-/>
-
@@ -533,31 +19,6 @@
{$i18n.t('Manage Ollama')}
-
-
-
-
-
-
diff --git a/src/lib/components/admin/Settings/Connections/OllamaConnection.svelte b/src/lib/components/admin/Settings/Connections/OllamaConnection.svelte
index dec88ccc7..45aaa8efc 100644
--- a/src/lib/components/admin/Settings/Connections/OllamaConnection.svelte
+++ b/src/lib/components/admin/Settings/Connections/OllamaConnection.svelte
@@ -9,6 +9,7 @@
import Cog6 from '$lib/components/icons/Cog6.svelte';
import Wrench from '$lib/components/icons/Wrench.svelte';
import ManageOllamaModal from './ManageOllamaModal.svelte';
+ import ArrowDownTray from '$lib/components/icons/ArrowDownTray.svelte';
export let onDelete = () => {};
export let onSubmit = () => {};
@@ -70,7 +71,7 @@
}}
type="button"
>
-
+
diff --git a/src/lib/components/admin/Settings/Database.svelte b/src/lib/components/admin/Settings/Database.svelte
index 71fda7642..a35d0b83e 100644
--- a/src/lib/components/admin/Settings/Database.svelte
+++ b/src/lib/components/admin/Settings/Database.svelte
@@ -47,7 +47,7 @@
reader.onload = async (e) => {
const res = await importConfig(localStorage.token, JSON.parse(e.target.result)).catch(
(error) => {
- toast.error(error);
+ toast.error(`${error}`);
}
);
@@ -132,7 +132,7 @@
// exportAllUserChats();
downloadDatabase(localStorage.token).catch((error) => {
- toast.error(error);
+ toast.error(`${error}`);
});
}}
>
diff --git a/src/lib/components/admin/Settings/Documents.svelte b/src/lib/components/admin/Settings/Documents.svelte
index f14401d99..d3b7cfa01 100644
--- a/src/lib/components/admin/Settings/Documents.svelte
+++ b/src/lib/components/admin/Settings/Documents.svelte
@@ -119,7 +119,7 @@
url: OpenAIUrl
}
}).catch(async (error) => {
- toast.error(error);
+ toast.error(`${error}`);
await setEmbeddingConfig();
return null;
});
@@ -142,7 +142,7 @@
const res = await updateRerankingConfig(localStorage.token, {
reranking_model: rerankingModel
}).catch(async (error) => {
- toast.error(error);
+ toast.error(`${error}`);
await setRerankingConfig();
return null;
});
@@ -258,7 +258,7 @@
bind:show={showResetUploadDirConfirm}
on:confirm={async () => {
const res = await deleteAllFiles(localStorage.token).catch((error) => {
- toast.error(error);
+ toast.error(`${error}`);
return null;
});
@@ -272,7 +272,7 @@
bind:show={showResetConfirm}
on:confirm={() => {
const res = resetVectorDB(localStorage.token).catch((error) => {
- toast.error(error);
+ toast.error(`${error}`);
return null;
});
diff --git a/src/lib/components/admin/Settings/General.svelte b/src/lib/components/admin/Settings/General.svelte
index a77bd0fd5..0dea5a1e8 100644
--- a/src/lib/components/admin/Settings/General.svelte
+++ b/src/lib/components/admin/Settings/General.svelte
@@ -28,6 +28,7 @@
label: '',
host: '',
port: '',
+ attribute_for_mail: 'mail',
attribute_for_username: 'uid',
app_dn: '',
app_dn_password: '',
@@ -41,7 +42,7 @@
const updateLdapServerHandler = async () => {
if (!ENABLE_LDAP) return;
const res = await updateLdapServer(localStorage.token, LDAP_SERVER).catch((error) => {
- toast.error(error);
+ toast.error(`${error}`);
return null;
});
if (res) {
@@ -342,6 +343,26 @@
/>