"use client"; import { Id, ToastContainer, toast } from "react-toastify"; import "react-toastify/dist/ReactToastify.css"; import { useRef, useState, useEffect } from "react"; import type { FormEvent } from "react"; import { ChatMessageBubble } from "@/components/ChatMessageBubble"; import { ChatWindowMessage } from "@/schema/ChatWindowMessage"; export function ChatWindow(props: { placeholder?: string }) { const { placeholder } = props; const [messages, setMessages] = useState([]); const [input, setInput] = useState(""); const [isLoading, setIsLoading] = useState(true); const [selectedURL, setSelectedURL] = useState(null); const [firecrawlApiKey, setFirecrawlApiKey] = useState(""); const [readyToChat, setReadyToChat] = useState(false); const initProgressToastId = useRef(null); const titleText = "Local Chat With Websites"; const emoji = "🔥"; const worker = useRef(null); async function queryStore(messages: ChatWindowMessage[]) { if (!worker.current) { throw new Error("Worker is not ready."); } return new ReadableStream({ start(controller) { if (!worker.current) { controller.close(); return; } const ollamaConfig = { baseUrl: "http://localhost:11435", temperature: 0.3, model: "mistral", }; const payload: Record = { messages, modelProvider: "ollama", modelConfig: ollamaConfig, }; if ( process.env.NEXT_PUBLIC_LANGCHAIN_TRACING_V2 === "true" && process.env.NEXT_PUBLIC_LANGCHAIN_API_KEY !== undefined ) { console.warn( "[WARNING]: You have set your LangChain API key publicly. 
This should only be done in local devlopment - remember to remove it before deploying!", ); payload.DEV_LANGCHAIN_TRACING = { LANGCHAIN_TRACING_V2: "true", LANGCHAIN_API_KEY: process.env.NEXT_PUBLIC_LANGCHAIN_API_KEY, LANGCHAIN_PROJECT: process.env.NEXT_PUBLIC_LANGCHAIN_PROJECT, }; } worker.current?.postMessage(payload); const onMessageReceived = async (e: any) => { switch (e.data.type) { case "log": console.log(e.data); break; case "init_progress": if (initProgressToastId.current === null) { initProgressToastId.current = toast( "Loading model weights... This may take a while", { progress: e.data.data.progress || 0.01, theme: "dark", }, ); } else { if (e.data.data.progress === 1) { await new Promise((resolve) => setTimeout(resolve, 2000)); } toast.update(initProgressToastId.current, { progress: e.data.data.progress || 0.01, }); } break; case "chunk": controller.enqueue(e.data.data); break; case "error": worker.current?.removeEventListener("message", onMessageReceived); console.log(e.data.error); const error = new Error(e.data.error); controller.error(error); break; case "complete": worker.current?.removeEventListener("message", onMessageReceived); controller.close(); break; } }; worker.current?.addEventListener("message", onMessageReceived); }, }); } async function sendMessage(e: FormEvent) { e.preventDefault(); if (isLoading || !input) { return; } const initialInput = input; const initialMessages = [...messages]; const newMessages = [ ...initialMessages, { role: "human" as const, content: input }, ]; setMessages(newMessages); setIsLoading(true); setInput(""); try { const stream = await queryStore(newMessages); const reader = stream.getReader(); let chunk = await reader.read(); const aiResponseMessage: ChatWindowMessage = { content: "", role: "ai" as const, }; setMessages([...newMessages, aiResponseMessage]); while (!chunk.done) { aiResponseMessage.content = aiResponseMessage.content + chunk.value; setMessages([...newMessages, aiResponseMessage]); chunk = await 
reader.read(); } setIsLoading(false); } catch (e: any) { setMessages(initialMessages); setIsLoading(false); setInput(initialInput); toast(`There was an issue with querying your website: ${e.message}`, { theme: "dark", }); } } // We use the `useEffect` hook to set up the worker as soon as the `App` component is mounted. useEffect(() => { if (!worker.current) { // Create the worker if it does not yet exist. worker.current = new Worker( new URL("../app/worker.ts", import.meta.url), { type: "module", }, ); setIsLoading(false); } }, []); async function embedWebsite(e: FormEvent) { console.log(e); console.log(selectedURL); console.log(firecrawlApiKey); e.preventDefault(); // const reader = new FileReader(); if (selectedURL === null) { toast(`You must enter a URL to embed.`, { theme: "dark", }); return; } setIsLoading(true); worker.current?.postMessage({ url: selectedURL, firecrawlApiKey: firecrawlApiKey, }); const onMessageReceived = (e: any) => { switch (e.data.type) { case "log": console.log(e.data); break; case "error": worker.current?.removeEventListener("message", onMessageReceived); setIsLoading(false); console.log(e.data.error); toast(`There was an issue embedding your website: ${e.data.error}`, { theme: "dark", }); break; case "complete": worker.current?.removeEventListener("message", onMessageReceived); setIsLoading(false); setReadyToChat(true); toast( `Embedding successful! Now try asking a question about your website.`, { theme: "dark", }, ); break; } }; worker.current?.addEventListener("message", onMessageReceived); } const chooseDataComponent = ( <>

{/*
  NOTE(review): the JSX markup in this section appears to be garbled — every
  `<...>` tag span looks stripped, leaving only text children, attribute
  handler fragments, and `{" "}` spacers. The element tags must be restored
  from the upstream template before this section can compile or render;
  the bytes below are left untouched.
*/}
{emoji} Local Chat With Websites {emoji}

  • 🏡 Welcome to the Local Web Chatbot!



    This is a direct fork of{" "} Jacob Lee's fully local PDF chatbot {" "} replacing the chat with PDF functionality with website support. It is a simple chatbot that allows you to ask questions about a website by embedding it and running queries against the vector store using a local LLM and embeddings.
  • ⚙️ The default LLM is Mistral-7B run locally by Ollama. You'll need to install{" "} the Ollama desktop app {" "} and run the following commands to give this site access to the locally running model:
                    $ OLLAMA_ORIGINS=https://webml-demo.vercel.app
                    OLLAMA_HOST=127.0.0.1:11435 ollama serve
                  

    Then, in another window:
                    $ OLLAMA_HOST=127.0.0.1:11435 ollama pull mistral
                  

    Additionally, you will need a Firecrawl API key for website embedding. Signing up at{" "} firecrawl.dev {" "} is easy and you get 500 credits free. Enter your API key into the box below the URL in the embedding form.
  • 🐙 Both this template and Jacob Lee's template are open source - you can see the source code and deploy your own version{" "} from the GitHub repo or Jacob's{" "} original GitHub repo !
  • 👇 Try embedding a website below, then asking questions! You can even turn off your WiFi after the website is scraped.
{/*
  NOTE(review): the fragments below look like the onChange handlers of two
  stripped <input> elements — one feeding setSelectedURL (the URL to embed)
  and one feeding setFirecrawlApiKey — presumably inside a form that submits
  to embedWebsite. TODO: confirm against the upstream template.
*/}
setSelectedURL(e.target.value)} > setFirecrawlApiKey(e.target.value)} >
); const chatInterfaceComponent = ( <>
{/*
  NOTE(review): messages are rendered newest-first via [...messages].reverse().
  BUG (flagged, not changed): inside the remove handler,
  `previousMessages.reverse()` mutates the React state array in place before
  slicing; it should operate on a copy (e.g. [...previousMessages].reverse()).
  The surrounding ChatMessageBubble tag also appears stripped — see note above.
*/}
{messages.length > 0 ? [...messages].reverse().map((m, i) => ( setMessages((previousMessages) => { const displayOrderedMessages = previousMessages.reverse(); return [ ...displayOrderedMessages.slice(0, i), ...displayOrderedMessages.slice(i + 1), ].reverse(); }) } > )) : ""}
{/* NOTE(review): looks like the chat input's onChange handler (stripped <input> tag). */}
setInput(e.target.value)} />
); return (

{emoji} {titleText}

{/* Embedding form is shown until embedding completes; then the chat UI. */}
{readyToChat ? chatInterfaceComponent : chooseDataComponent}
); }