diff --git a/backend/open_webui/config.py b/backend/open_webui/config.py
index beeac535e..8b50df69b 100644
--- a/backend/open_webui/config.py
+++ b/backend/open_webui/config.py
@@ -1317,6 +1317,20 @@ Your task is to synthesize these responses into a single, high-quality response
 Responses from models: {{responses}}"""
+
+DEFAULT_CODE_INTERPRETER_PROMPT = """
+#### Tools Available
+
+1. **Code Interpreter**: `<code_interpreter type="code" lang="python"></code_interpreter>`
+   - You have access to a Python shell that runs directly in the user's browser, enabling fast execution of code for analysis, calculations, or problem-solving. Use it in this response.
+   - The Python code you write can incorporate a wide array of libraries, handle data manipulation or visualization, perform API calls for web-related tasks, or tackle virtually any computational challenge. Use this flexibility to **think outside the box, craft elegant solutions, and harness Python's full potential**.
+   - To use it, **you must enclose your code within `<code_interpreter type="code" lang="python">` XML tags** and stop right away. If you don't, the code won't execute. Do NOT use triple backticks.
+   - When coding, **always aim to print meaningful outputs** (e.g., results, tables, summaries, or visuals) to better interpret and verify the findings. Avoid relying on implicit outputs; prioritize explicit and clear print statements so the results are effectively communicated to the user.
+   - After obtaining the printed output, **always provide a concise analysis, interpretation, or next steps to help the user understand the findings or refine the outcome further.**
+   - If the results are unclear, unexpected, or require validation, refine the code and execute it again as needed. Always aim to deliver meaningful insights from the results, iterating if necessary.
+
+Ensure that the tools are effectively utilized to achieve the highest-quality analysis for the user."""
+
 ####################################
 # Vector Database
 ####################################
diff --git a/backend/open_webui/env.py b/backend/open_webui/env.py
index b589e9490..00605e15d 100644
--- a/backend/open_webui/env.py
+++ b/backend/open_webui/env.py
@@ -358,14 +358,21 @@ WEBUI_SECRET_KEY = os.environ.get(
 WEBUI_SESSION_COOKIE_SAME_SITE = os.environ.get("WEBUI_SESSION_COOKIE_SAME_SITE", "lax")
 
-WEBUI_SESSION_COOKIE_SECURE = os.environ.get("WEBUI_SESSION_COOKIE_SECURE", "false").lower() == "true"
+WEBUI_SESSION_COOKIE_SECURE = (
+    os.environ.get("WEBUI_SESSION_COOKIE_SECURE", "false").lower() == "true"
+)
 
-WEBUI_AUTH_COOKIE_SAME_SITE = os.environ.get("WEBUI_AUTH_COOKIE_SAME_SITE", WEBUI_SESSION_COOKIE_SAME_SITE)
+WEBUI_AUTH_COOKIE_SAME_SITE = os.environ.get(
+    "WEBUI_AUTH_COOKIE_SAME_SITE", WEBUI_SESSION_COOKIE_SAME_SITE
+)
 
-WEBUI_AUTH_COOKIE_SECURE = os.environ.get(
-    "WEBUI_AUTH_COOKIE_SECURE",
-    os.environ.get("WEBUI_SESSION_COOKIE_SECURE", "false")
-).lower() == "true"
+WEBUI_AUTH_COOKIE_SECURE = (
+    os.environ.get(
+        "WEBUI_AUTH_COOKIE_SECURE",
+        os.environ.get("WEBUI_SESSION_COOKIE_SECURE", "false"),
+    ).lower()
+    == "true"
+)
 
 if WEBUI_AUTH and WEBUI_SECRET_KEY == "":
     raise ValueError(ERROR_MESSAGES.ENV_VAR_NOT_FOUND)
diff --git a/backend/open_webui/main.py b/backend/open_webui/main.py
index fd8a4c957..94aef214c 100644
--- a/backend/open_webui/main.py
+++ b/backend/open_webui/main.py
@@ -1012,10 +1012,6 @@ async def get_app_config(request: Request):
                 else {}
             ),
         },
-        "google_drive": {
-            "client_id": GOOGLE_DRIVE_CLIENT_ID.value,
-            "api_key": GOOGLE_DRIVE_API_KEY.value,
-        },
         **(
             {
                 "default_models": app.state.config.DEFAULT_MODELS,
@@ -1035,6 +1031,10 @@ async def 
get_app_config(request: Request): "max_count": app.state.config.FILE_MAX_COUNT, }, "permissions": {**app.state.config.USER_PERMISSIONS}, + "google_drive": { + "client_id": GOOGLE_DRIVE_CLIENT_ID.value, + "api_key": GOOGLE_DRIVE_API_KEY.value, + }, } if user is not None else {} @@ -1068,7 +1068,7 @@ async def get_app_version(): @app.get("/api/version/updates") -async def get_app_latest_release_version(): +async def get_app_latest_release_version(user=Depends(get_verified_user)): if OFFLINE_MODE: log.debug( f"Offline mode is enabled, returning current version as latest version" diff --git a/backend/open_webui/socket/main.py b/backend/open_webui/socket/main.py index 2d12f5803..3788139ea 100644 --- a/backend/open_webui/socket/main.py +++ b/backend/open_webui/socket/main.py @@ -325,7 +325,7 @@ def get_event_emitter(request_info): def get_event_call(request_info): - async def __event_call__(event_data): + async def __event_caller__(event_data): response = await sio.call( "chat-events", { @@ -337,7 +337,10 @@ def get_event_call(request_info): ) return response - return __event_call__ + return __event_caller__ + + +get_event_caller = get_event_call def get_user_id_from_session_pool(sid): diff --git a/backend/open_webui/utils/middleware.py b/backend/open_webui/utils/middleware.py index be301ec10..961e57b9e 100644 --- a/backend/open_webui/utils/middleware.py +++ b/backend/open_webui/utils/middleware.py @@ -7,7 +7,10 @@ from aiocache import cached from typing import Any, Optional import random import json +import html import inspect +import re + from uuid import uuid4 from concurrent.futures import ThreadPoolExecutor @@ -54,6 +57,7 @@ from open_webui.utils.task import ( from open_webui.utils.misc import ( get_message_list, add_or_update_system_message, + add_or_update_user_message, get_last_user_message, get_last_assistant_message, prepend_to_first_user_message_content, @@ -64,7 +68,10 @@ from open_webui.utils.plugin import load_function_module_by_id from open_webui.tasks import create_task -from open_webui.config import DEFAULT_TOOLS_FUNCTION_CALLING_PROMPT_TEMPLATE +from open_webui.config import ( + DEFAULT_TOOLS_FUNCTION_CALLING_PROMPT_TEMPLATE, + DEFAULT_CODE_INTERPRETER_PROMPT, +) from open_webui.env import ( SRC_LOG_LEVELS, GLOBAL_LOG_LEVEL, @@ -768,6 +775,11 @@ async def process_chat_payload(request, form_data, metadata, user, model): request, form_data, extra_params, user ) + if "code_interpreter" in features and features["code_interpreter"]: + form_data["messages"] = add_or_update_user_message( + DEFAULT_CODE_INTERPRETER_PROMPT, form_data["messages"] + ) + try: form_data, flags = await chat_completion_filter_functions_handler( request, form_data, model, extra_params @@ -982,6 +994,7 @@ async def process_chat_response( pass event_emitter = None + event_caller = None if ( "session_id" in metadata and metadata["session_id"] @@ -991,10 +1004,11 @@ async def process_chat_response( and metadata["message_id"] ): event_emitter = get_event_emitter(metadata) + event_caller = get_event_call(metadata) + # Non-streaming response if not isinstance(response, StreamingResponse): if event_emitter: - if "selected_model_id" in response: Chats.upsert_message_to_chat_by_id_and_message_id( metadata["chat_id"], @@ -1059,22 +1073,156 @@ async def process_chat_response( else: return response + # Non standard response if not any( content_type in response.headers["Content-Type"] for content_type in ["text/event-stream", "application/x-ndjson"] ): return response - if event_emitter: - + # Streaming response + if 
event_emitter and event_caller:
         task_id = str(uuid4())  # Create a unique task ID.
+        model_id = form_data.get("model", "")
 
         # Handle as a background task
         async def post_response_handler(response, events):
+            def serialize_content_blocks(content_blocks, raw=False):
+                content = ""
+
+                for block in content_blocks:
+                    if block["type"] == "text":
+                        content = f"{content}{block['content'].strip()}\n"
+                    elif block["type"] == "reasoning":
+                        reasoning_display_content = "\n".join(
+                            (f"> {line}" if not line.startswith(">") else line)
+                            for line in block["content"].splitlines()
+                        )
+
+                        reasoning_duration = block.get("duration", None)
+
+                        if reasoning_duration:
+                            content = f'{content}<details type="reasoning" done="true" duration="{reasoning_duration}">\n<summary>Thought for {reasoning_duration} seconds</summary>\n{reasoning_display_content}\n</details>\n'
+                        else:
+                            content = f'{content}<details type="reasoning" done="false">\n<summary>Thinking…</summary>\n{reasoning_display_content}\n</details>\n'
+
+                    elif block["type"] == "code_interpreter":
+                        attributes = block.get("attributes", {})
+                        output = block.get("output", None)
+                        lang = attributes.get("lang", "")
+
+                        if output:
+                            output = html.escape(json.dumps(output))
+
+                            if raw:
+                                content = f'{content}<details type="code_interpreter" done="true">\n<summary>Analyzed</summary>\n```{lang}\n{block["content"]}\n```\n```output\n{output}\n```\n</details>\n'
+                            else:
+                                content = f'{content}<details type="code_interpreter" done="true">\n<summary>Analyzed</summary>\n```{lang}\n{block["content"]}\n```\n</details>\n'
+                        else:
+                            content = f'{content}<details type="code_interpreter" done="false">\n<summary>Analyzing...</summary>\n```{lang}\n{block["content"]}\n```\n</details>\n'
+
+                    else:
+                        block_content = str(block["content"]).strip()
+                        content = f"{content}{block['type']}: {block_content}\n"
+
+                return content
+
+            def tag_content_handler(content_type, tags, content, content_blocks):
+                end_flag = False
+
+                def extract_attributes(tag_content):
+                    """Extract attributes from a tag if they exist."""
+                    attributes = {}
+                    # Match attributes in the format: key="value" (ignores single quotes for simplicity)
+                    matches = re.findall(r'(\w+)\s*=\s*"([^"]+)"', tag_content)
+                    for key, value in matches:
+                        attributes[key] = value
+                    return attributes
+
+                if content_blocks[-1]["type"] == "text":
+                    for tag in tags:
+                        # Match start tag e.g., <tag> or <tag attr="value">
+                        start_tag_pattern = rf"<{tag}(.*?)>"
+                        match = re.search(start_tag_pattern, content)
+                        if match:
+                            # Extract attributes in the tag (if present)
+                            attributes = extract_attributes(match.group(1))
+                            # Remove the start tag from the currently handling text block
+                            content_blocks[-1]["content"] = content_blocks[-1][
+                                "content"
+                            ].replace(match.group(0), "")
+                            if not content_blocks[-1]["content"]:
+                                content_blocks.pop()
+                            # Append the new block
+                            content_blocks.append(
+                                {
+                                    "type": content_type,
+                                    "tag": tag,
+                                    "attributes": attributes,
+                                    "content": "",
+                                    "started_at": time.time(),
+                                }
+                            )
+                            break
+                elif content_blocks[-1]["type"] == content_type:
+                    tag = content_blocks[-1]["tag"]
+                    # Match end tag e.g., </tag>
+                    end_tag_pattern = rf"</{tag}>"
+                    if re.search(end_tag_pattern, content):
+                        block_content = content_blocks[-1]["content"]
+                        # Strip start and end tags from the content
+                        start_tag_pattern = rf"<{tag}(.*?)>"
+                        block_content = re.sub(
+                            start_tag_pattern, "", block_content
+                        ).strip()
+                        block_content = re.sub(
+                            end_tag_pattern, "", block_content
+                        ).strip()
+                        if block_content:
+                            end_flag = True
+                            content_blocks[-1]["content"] = block_content
+                            content_blocks[-1]["ended_at"] = time.time()
+                            content_blocks[-1]["duration"] = int(
+                                content_blocks[-1]["ended_at"]
+                                - content_blocks[-1]["started_at"]
+                            )
+                            # Reset the content_blocks by appending a new text block
+                            content_blocks.append(
+                                {
+                                    "type": "text",
+                                    "content": "",
+                                }
+                            )
+                            # Clean processed content
+                            content = re.sub(
+                                rf"<{tag}(.*?)>(.|\n)*?</{tag}>",
+                                "",
+                                content,
+                                flags=re.DOTALL,
+                            )
+                        else:
+                            # Remove the block if content is empty
+                            content_blocks.pop()
+                return content, content_blocks, end_flag
+
             message = Chats.get_message_by_id_and_message_id(
                 metadata["chat_id"], metadata["message_id"]
             )
+            content = message.get("content", "") if message else ""
+            content_blocks = [
+                {
+                    "type": "text",
+                    "content": content,
+                }
+            ]
+
+            # We might want to disable this by default
+            DETECT_REASONING = True
+            DETECT_CODE_INTERPRETER = True
+
+            reasoning_tags = ["think", "reason", "reasoning", "thought", "Thought"]
+            code_interpreter_tags = ["code_interpreter"]
 
             try:
                 for event in events:
@@ -1094,148 +1242,193 @@ async def process_chat_response(
                         },
                     )
 
-                # We might want to disable this by default
-                detect_reasoning = True
-                reasoning_tags = ["think", "reason", "reasoning", "thought", "Thought"]
-                current_tag = None
+            async def stream_body_handler(response):
+                nonlocal content
+                nonlocal content_blocks
-                reasoning_start_time = None
+                async for line in response.body_iterator:
+                    line = line.decode("utf-8") if isinstance(line, bytes) else line
+                    data = line
-                reasoning_content = ""
-                ongoing_content = ""
-
-                async for line in response.body_iterator:
-                    line = line.decode("utf-8") if isinstance(line, bytes) else line
-                    data = line
-
-                    # Skip empty lines
-                    if not data.strip():
-                        continue
-
-                    # "data:" is the prefix for each event
-                    if not data.startswith("data:"):
-                        continue
-
-                    # Remove the prefix
-                    data = data[len("data:") :].strip()
-
-                    try:
-                        data = json.loads(data)
-
-                        if "selected_model_id" in data:
-                            Chats.upsert_message_to_chat_by_id_and_message_id(
-                                metadata["chat_id"],
-                                metadata["message_id"],
-                                {
-                                    "selectedModelId": data["selected_model_id"],
-                                },
-                            )
-                        else:
-                            value = (
-                                data.get("choices", [])[0]
-                                .get("delta", {})
-                                .get("content")
-                            )
-
-                            if value:
-                                content = f"{content}{value}"
-
-                                if detect_reasoning:
-                                    for tag in reasoning_tags:
-                                        start_tag = f"<{tag}>\n"
-                                        end_tag = f"</{tag}>\n"
-
-                                        if start_tag in content:
-                                            # Remove the start tag
-                                            content = content.replace(start_tag, "")
-                                            ongoing_content = content
-
-                                            reasoning_start_time = time.time()
-                                            reasoning_content = ""
-
-                                            current_tag = tag
-                                            break
-
-                                    if reasoning_start_time is not None:
-                                        # Remove the last value from the content
-                                        content = content[: -len(value)]
-
-                                        reasoning_content += value
-
-                                        end_tag = f"</{tag}>\n"
-                                        if end_tag in reasoning_content:
-                                            reasoning_end_time = time.time()
-                                            reasoning_duration = int(
-                                                reasoning_end_time
-                                                - reasoning_start_time
-                                            )
-                                            reasoning_content = (
-                                                reasoning_content.strip(
-                                                    f"<{current_tag}>\n"
-                                                )
-                                                .strip(end_tag)
-                                                .strip()
-                                            )
-
-                                            if reasoning_content:
-                                                reasoning_display_content = "\n".join(
-                                                    (
-                                                        f"> {line}"
-                                                        if not line.startswith(">")
-                                                        else line
-                                                    )
-                                                    for line in reasoning_content.splitlines()
-                                                )
-
-                                                # Format reasoning with <details> tag
-                                                content = f'{ongoing_content}<details type="reasoning" done="true" duration="{reasoning_duration}">\n<summary>Thought for {reasoning_duration} seconds</summary>\n{reasoning_display_content}\n</details>\n'
-                                            else:
-                                                content = ""
-
-                                            reasoning_start_time = None
-                                        else:
-
-                                            reasoning_display_content = "\n".join(
-                                                (
-                                                    f"> {line}"
-                                                    if not line.startswith(">")
-                                                    else line
-                                                )
-                                                for line in reasoning_content.splitlines()
-                                            )
-
-                                            # Show ongoing thought process
-                                            content = f'{ongoing_content}<details type="reasoning" done="false">\n<summary>Thinking…</summary>\n{reasoning_display_content}\n</details>
\n' - - if ENABLE_REALTIME_CHAT_SAVE: - # Save message in the database - Chats.upsert_message_to_chat_by_id_and_message_id( - metadata["chat_id"], - metadata["message_id"], - { - "content": content, - }, - ) - else: - data = { - "content": content, - } - - await event_emitter( - { - "type": "chat:completion", - "data": data, - } - ) - except Exception as e: - done = "data: [DONE]" in line - if done: - pass - else: + # Skip empty lines + if not data.strip(): continue + # "data:" is the prefix for each event + if not data.startswith("data:"): + continue + + # Remove the prefix + data = data[len("data:") :].strip() + + try: + data = json.loads(data) + + if "selected_model_id" in data: + model_id = data["selected_model_id"] + Chats.upsert_message_to_chat_by_id_and_message_id( + metadata["chat_id"], + metadata["message_id"], + { + "selectedModelId": model_id, + }, + ) + else: + choices = data.get("choices", []) + if not choices: + continue + + value = choices[0].get("delta", {}).get("content") + + if value: + content = f"{content}{value}" + content_blocks[-1]["content"] = ( + content_blocks[-1]["content"] + value + ) + + if DETECT_REASONING: + content, content_blocks, _ = ( + tag_content_handler( + "reasoning", + reasoning_tags, + content, + content_blocks, + ) + ) + + if DETECT_CODE_INTERPRETER: + content, content_blocks, end = ( + tag_content_handler( + "code_interpreter", + code_interpreter_tags, + content, + content_blocks, + ) + ) + + if end: + break + + if ENABLE_REALTIME_CHAT_SAVE: + # Save message in the database + Chats.upsert_message_to_chat_by_id_and_message_id( + metadata["chat_id"], + metadata["message_id"], + { + "content": serialize_content_blocks( + content_blocks + ), + }, + ) + else: + data = { + "content": serialize_content_blocks( + content_blocks + ), + } + + await event_emitter( + { + "type": "chat:completion", + "data": data, + } + ) + except Exception as e: + done = "data: [DONE]" in line + if done: + pass + else: + log.debug("Error: ", e) + continue + + # Clean up the last text block + if content_blocks[-1]["type"] == "text": + content_blocks[-1]["content"] = content_blocks[-1][ + "content" + ].strip() + + if not content_blocks[-1]["content"]: + content_blocks.pop() + + if response.background: + await response.background() + + await stream_body_handler(response) + + MAX_RETRIES = 5 + retries = 0 + + while ( + content_blocks[-1]["type"] == "code_interpreter" + and retries < MAX_RETRIES + ): + retries += 1 + log.debug(f"Attempt count: {retries}") + + try: + if content_blocks[-1]["attributes"].get("type") == "code": + output = await event_caller( + { + "type": "execute:python", + "data": { + "id": str(uuid4()), + "code": content_blocks[-1]["content"], + }, + } + ) + except Exception as e: + output = str(e) + + content_blocks[-1]["output"] = output + content_blocks.append( + { + "type": "text", + "content": "", + } + ) + + await event_emitter( + { + "type": "chat:completion", + "data": { + "content": serialize_content_blocks(content_blocks), + }, + } + ) + + try: + res = await generate_chat_completion( + request, + { + "model": model_id, + "stream": True, + "messages": [ + *form_data["messages"], + { + "role": "assistant", + "content": serialize_content_blocks( + content_blocks, raw=True + ), + }, + ], + }, + user, + ) + + if isinstance(res, StreamingResponse): + await stream_body_handler(res) + else: + break + except Exception as e: + log.debug(e) + break + title = Chats.get_chat_title_by_id(metadata["chat_id"]) - data = {"done": True, "content": content, "title": title} + 
data = { + "done": True, + "content": serialize_content_blocks(content_blocks), + "title": title, + } if not ENABLE_REALTIME_CHAT_SAVE: # Save message in the database @@ -1243,7 +1436,7 @@ async def process_chat_response( metadata["chat_id"], metadata["message_id"], { - "content": content, + "content": serialize_content_blocks(content_blocks), }, ) @@ -1280,7 +1473,7 @@ async def process_chat_response( metadata["chat_id"], metadata["message_id"], { - "content": content, + "content": serialize_content_blocks(content_blocks), }, ) diff --git a/backend/open_webui/utils/misc.py b/backend/open_webui/utils/misc.py index 71e1cbbfb..c2a3945d0 100644 --- a/backend/open_webui/utils/misc.py +++ b/backend/open_webui/utils/misc.py @@ -131,6 +131,25 @@ def add_or_update_system_message(content: str, messages: list[dict]): return messages +def add_or_update_user_message(content: str, messages: list[dict]): + """ + Adds a new user message at the end of the messages list + or updates the existing user message at the end. + + :param msg: The message to be added or appended. + :param messages: The list of message dictionaries. + :return: The updated list of message dictionaries. + """ + + if messages and messages[-1].get("role") == "user": + messages[-1]["content"] = f"{messages[-1]['content']}\n{content}" + else: + # Insert at the end + messages.append({"role": "user", "content": content}) + + return messages + + def append_or_update_assistant_message(content: str, messages: list[dict]): """ Adds a new assistant message at the end of the messages list diff --git a/package-lock.json b/package-lock.json index c98e814d9..57bcf2906 100644 --- a/package-lock.json +++ b/package-lock.json @@ -56,7 +56,7 @@ "prosemirror-schema-list": "^1.4.1", "prosemirror-state": "^1.4.3", "prosemirror-view": "^1.34.3", - "pyodide": "^0.26.1", + "pyodide": "^0.27.2", "socket.io-client": "^4.2.0", "sortablejs": "^1.15.2", "svelte-sonner": "^0.3.19", @@ -9366,9 +9366,10 @@ } }, "node_modules/pyodide": { - "version": "0.26.1", - "resolved": "https://registry.npmjs.org/pyodide/-/pyodide-0.26.1.tgz", - "integrity": "sha512-P+Gm88nwZqY7uBgjbQH8CqqU6Ei/rDn7pS1t02sNZsbyLJMyE2OVXjgNuqVT3KqYWnyGREUN0DbBUCJqk8R0ew==", + "version": "0.27.2", + "resolved": "https://registry.npmjs.org/pyodide/-/pyodide-0.27.2.tgz", + "integrity": "sha512-sfA2kiUuQVRpWI4BYnU3sX5PaTTt/xrcVEmRzRcId8DzZXGGtPgCBC0gCqjUTUYSa8ofPaSjXmzESc86yvvCHg==", + "license": "Apache-2.0", "dependencies": { "ws": "^8.5.0" }, diff --git a/package.json b/package.json index a2463d9e3..a28091668 100644 --- a/package.json +++ b/package.json @@ -98,7 +98,7 @@ "prosemirror-schema-list": "^1.4.1", "prosemirror-state": "^1.4.3", "prosemirror-view": "^1.34.3", - "pyodide": "^0.26.1", + "pyodide": "^0.27.2", "socket.io-client": "^4.2.0", "sortablejs": "^1.15.2", "svelte-sonner": "^0.3.19", diff --git a/src/lib/apis/index.ts b/src/lib/apis/index.ts index 22d3c6ba4..c7fd78819 100644 --- a/src/lib/apis/index.ts +++ b/src/lib/apis/index.ts @@ -880,13 +880,14 @@ export const getChangelog = async () => { return res; }; -export const getVersionUpdates = async () => { +export const getVersionUpdates = async (token: string) => { let error = null; const res = await fetch(`${WEBUI_BASE_URL}/api/version/updates`, { method: 'GET', headers: { - 'Content-Type': 'application/json' + 'Content-Type': 'application/json', + Authorization: `Bearer ${token}` } }) .then(async (res) => { diff --git a/src/lib/components/admin/Users/UserList.svelte b/src/lib/components/admin/Users/UserList.svelte index 
ce730571f..b309eec70 100644 --- a/src/lib/components/admin/Users/UserList.svelte +++ b/src/lib/components/admin/Users/UserList.svelte @@ -6,9 +6,9 @@ import dayjs from 'dayjs'; import relativeTime from 'dayjs/plugin/relativeTime'; - import localizedFormat from 'dayjs/plugin/localizedFormat'; + import localizedFormat from 'dayjs/plugin/localizedFormat'; dayjs.extend(relativeTime); - dayjs.extend(localizedFormat); + dayjs.extend(localizedFormat); import { toast } from 'svelte-sonner'; diff --git a/src/lib/components/admin/Users/UserList/EditUserModal.svelte b/src/lib/components/admin/Users/UserList/EditUserModal.svelte index 9b2edb407..1599a7687 100644 --- a/src/lib/components/admin/Users/UserList/EditUserModal.svelte +++ b/src/lib/components/admin/Users/UserList/EditUserModal.svelte @@ -7,11 +7,11 @@ import { updateUserById } from '$lib/apis/users'; import Modal from '$lib/components/common/Modal.svelte'; - import localizedFormat from 'dayjs/plugin/localizedFormat'; + import localizedFormat from 'dayjs/plugin/localizedFormat'; const i18n = getContext('i18n'); const dispatch = createEventDispatcher(); - dayjs.extend(localizedFormat); + dayjs.extend(localizedFormat); export let show = false; export let selectedUser; diff --git a/src/lib/components/admin/Users/UserList/UserChatsModal.svelte b/src/lib/components/admin/Users/UserList/UserChatsModal.svelte index a95df8291..d1e3a8623 100644 --- a/src/lib/components/admin/Users/UserList/UserChatsModal.svelte +++ b/src/lib/components/admin/Users/UserList/UserChatsModal.svelte @@ -2,10 +2,10 @@ import { toast } from 'svelte-sonner'; import dayjs from 'dayjs'; import { getContext, createEventDispatcher } from 'svelte'; - import localizedFormat from 'dayjs/plugin/localizedFormat'; + import localizedFormat from 'dayjs/plugin/localizedFormat'; const dispatch = createEventDispatcher(); - dayjs.extend(localizedFormat); + dayjs.extend(localizedFormat); import { getChatListByUserId, deleteChatById, getArchivedChatList } from '$lib/apis/chats'; diff --git a/src/lib/components/channel/Messages/Message.svelte b/src/lib/components/channel/Messages/Message.svelte index a84077823..e0f3497fa 100644 --- a/src/lib/components/channel/Messages/Message.svelte +++ b/src/lib/components/channel/Messages/Message.svelte @@ -3,12 +3,12 @@ import relativeTime from 'dayjs/plugin/relativeTime'; import isToday from 'dayjs/plugin/isToday'; import isYesterday from 'dayjs/plugin/isYesterday'; - import localizedFormat from 'dayjs/plugin/localizedFormat'; + import localizedFormat from 'dayjs/plugin/localizedFormat'; dayjs.extend(relativeTime); dayjs.extend(isToday); dayjs.extend(isYesterday); - dayjs.extend(localizedFormat); + dayjs.extend(localizedFormat); import { getContext, onMount } from 'svelte'; const i18n = getContext>('i18n'); @@ -155,9 +155,7 @@ @@ -176,9 +174,7 @@ diff --git a/src/lib/components/chat/Chat.svelte b/src/lib/components/chat/Chat.svelte index 3cfb61880..89fedb919 100644 --- a/src/lib/components/chat/Chat.svelte +++ b/src/lib/components/chat/Chat.svelte @@ -116,6 +116,7 @@ let selectedToolIds = []; let imageGenerationEnabled = false; + let codeInterpreterEnabled = false; let webSearchEnabled = false; let chat = null; @@ -826,14 +827,14 @@ } }; - const createMessagesList = (responseMessageId) => { + const createMessagesList = (history, responseMessageId) => { if (responseMessageId === null) { return []; } const message = history.messages[responseMessageId]; if (message?.parentId) { - return [...createMessagesList(message.parentId), message]; + return 
[...createMessagesList(history, message.parentId), message]; } else { return [message]; } @@ -895,7 +896,7 @@ }; const chatActionHandler = async (chatId, actionId, modelId, responseMessageId, event = null) => { - const messages = createMessagesList(responseMessageId); + const messages = createMessagesList(history, responseMessageId); const res = await chatAction(localStorage.token, actionId, { model: modelId, @@ -964,7 +965,7 @@ const modelId = selectedModels[0]; const model = $models.filter((m) => m.id === modelId).at(0); - const messages = createMessagesList(history.currentId); + const messages = createMessagesList(history, history.currentId); const parentMessage = messages.length !== 0 ? messages.at(-1) : null; const userMessageId = uuidv4(); @@ -1209,7 +1210,12 @@ ); history.messages[message.id] = message; - await chatCompletedHandler(chatId, message.model, message.id, createMessagesList(message.id)); + await chatCompletedHandler( + chatId, + message.model, + message.id, + createMessagesList(history, message.id) + ); } console.log(data); @@ -1225,7 +1231,7 @@ const submitPrompt = async (userPrompt, { _raw = false } = {}) => { console.log('submitPrompt', userPrompt, $chatId); - const messages = createMessagesList(history.currentId); + const messages = createMessagesList(history, history.currentId); const _selectedModels = selectedModels.map((modelId) => $models.map((m) => m.id).includes(modelId) ? modelId : '' ); @@ -1324,7 +1330,7 @@ saveSessionSelectedModels(); - await sendPrompt(userPrompt, userMessageId, { newChat: true }); + sendPrompt(userPrompt, userMessageId, { newChat: true }); }; const sendPrompt = async ( @@ -1332,6 +1338,8 @@ parentId: string, { modelId = null, modelIdx = null, newChat = false } = {} ) => { + const _chatId = JSON.parse(JSON.stringify($chatId)); + // Create new chat if newChat is true and first user message if ( newChat && @@ -1340,7 +1348,7 @@ ) { await initChatHandler(); } else { - await saveChatHandler($chatId); + await saveChatHandler(_chatId); } // If modelId is provided, use it, else use selected model @@ -1389,19 +1397,18 @@ await tick(); // Save chat after all messages have been created - await saveChatHandler($chatId); + saveChatHandler(_chatId); - const _chatId = JSON.parse(JSON.stringify($chatId)); await Promise.all( selectedModelIds.map(async (modelId, _modelIdx) => { console.log('modelId', modelId); const model = $models.filter((m) => m.id === modelId).at(0); if (model) { - const messages = createMessagesList(parentId); + const messages = createMessagesList(history, parentId); // If there are image files, check if model is vision capable const hasImages = messages.some((message) => - message.files?.some((file) => file.type === 'image') + message?.files?.some((file) => file.type === 'image') ); if (hasImages && !(model.info?.meta?.capabilities?.vision ?? 
true)) { @@ -1443,7 +1450,7 @@ const chatEventEmitter = await getChatEventEmitter(model.id, _chatId); scrollToBottom(); - await sendPromptSocket(model, responseMessageId, _chatId); + await sendPromptSocket(history, model, responseMessageId, _chatId); if (chatEventEmitter) clearInterval(chatEventEmitter); } else { @@ -1456,7 +1463,7 @@ chats.set(await getChatList(localStorage.token, $currentChatPage)); }; - const sendPromptSocket = async (model, responseMessageId, _chatId) => { + const sendPromptSocket = async (history, model, responseMessageId, _chatId) => { const responseMessage = history.messages[responseMessageId]; const userMessage = history.messages[responseMessage.parentId]; @@ -1506,7 +1513,7 @@ }` } : undefined, - ...createMessagesList(responseMessageId).map((message) => ({ + ...createMessagesList(history, responseMessageId).map((message) => ({ ...message, content: removeDetailsWithReasoning(message.content) })) @@ -1562,6 +1569,7 @@ features: { image_generation: imageGenerationEnabled, + code_interpreter: codeInterpreterEnabled, web_search: webSearchEnabled }, variables: { @@ -1740,7 +1748,7 @@ .at(0); if (model) { - await sendPromptSocket(model, responseMessage.id, _chatId); + await sendPromptSocket(history, model, responseMessage.id, _chatId); } } }; @@ -1801,7 +1809,7 @@ system: $settings.system ?? undefined, params: params, history: history, - messages: createMessagesList(history.currentId), + messages: createMessagesList(history, history.currentId), tags: [], timestamp: Date.now() }); @@ -1823,7 +1831,7 @@ chat = await updateChatById(localStorage.token, _chatId, { models: selectedModels, history: history, - messages: createMessagesList(history.currentId), + messages: createMessagesList(history, history.currentId), params: params, files: chatFiles }); @@ -1931,7 +1939,7 @@ {/if}
- {#if $settings?.landingPageMode === 'chat' || createMessagesList(history.currentId).length > 0} + {#if $settings?.landingPageMode === 'chat' || createMessagesList(history, history.currentId).length > 0} -
{$i18n.t('Search the web')}
+
{$i18n.t('Search the web')}
{/if} @@ -660,6 +677,7 @@
{/if} + + {#if showWebSearch}
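
For reference while reviewing, here is a minimal standalone sketch of how the regex shapes added in `tag_content_handler` pick apart a streamed `<code_interpreter>` block. The sample string and variable names are invented for illustration; this is not part of the patch.

```python
import re

# Hypothetical streamed assistant output (illustrative only) in the format
# that the new DEFAULT_CODE_INTERPRETER_PROMPT asks the model to emit.
streamed = (
    "Let me check that with Python.\n"
    '<code_interpreter type="code" lang="python">\n'
    "print(6 * 7)\n"
    "</code_interpreter>"
)

tag = "code_interpreter"

# Same patterns as the handler: locate the start tag and read its attributes.
start_match = re.search(rf"<{tag}(.*?)>", streamed)
attributes = dict(re.findall(r'(\w+)\s*=\s*"([^"]+)"', start_match.group(1)))

# Everything between the start and end tags becomes the block content to execute.
body = re.sub(rf"<{tag}(.*?)>|</{tag}>", "", streamed[start_match.start():]).strip()

print(attributes)  # {'type': 'code', 'lang': 'python'}
print(body)        # print(6 * 7)
```

In the middleware itself, a block whose attributes carry `type="code"` is then sent to the browser through the `execute:python` socket event and its output is folded back into the conversation before the completion is retried.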
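
Likewise, a small runnable sketch of how `process_chat_payload` is expected to fold `DEFAULT_CODE_INTERPRETER_PROMPT` into the request via the new `add_or_update_user_message` helper. The helper body is copied from the patch; the sample messages and the abbreviated prompt string are assumptions for illustration.

```python
# Helper copied from backend/open_webui/utils/misc.py so the sketch runs standalone.
def add_or_update_user_message(content: str, messages: list[dict]):
    if messages and messages[-1].get("role") == "user":
        messages[-1]["content"] = f"{messages[-1]['content']}\n{content}"
    else:
        messages.append({"role": "user", "content": content})
    return messages


prompt = "#### Tools Available ..."  # stand-in for DEFAULT_CODE_INTERPRETER_PROMPT

# Case 1: the conversation ends with a user turn -> the prompt is appended to it.
msgs = [{"role": "user", "content": "Plot a sine wave."}]
print(add_or_update_user_message(prompt, msgs)[-1]["content"])

# Case 2: the conversation ends with an assistant turn -> a new user message is added.
msgs = [{"role": "assistant", "content": "Done."}]
print(len(add_or_update_user_message(prompt, msgs)))  # 2
```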