From 8e6cd7eca8f917968247e7e51cb9e536e1112efd Mon Sep 17 00:00:00 2001
From: n4ze3m
Date: Fri, 16 Feb 2024 18:42:34 +0530
Subject: [PATCH 1/4] Add streamDownload function and handle model pull in background.ts

---
 src/background.ts           | 91 +++++++++++++++++++++++++++++++++++++
 src/contents/ollama-pull.ts | 54 ++++++++++++++++++++++
 2 files changed, 145 insertions(+)
 create mode 100644 src/contents/ollama-pull.ts

diff --git a/src/background.ts b/src/background.ts
index add4d6b..43b8dde 100644
--- a/src/background.ts
+++ b/src/background.ts
@@ -1,5 +1,80 @@
+import { getOllamaURL, isOllamaRunning } from "~services/ollama"
+
 export {}

+const progressHuman = (completed: number, total: number) => {
+  return ((completed / total) * 100).toFixed(0) + "%"
+}
+
+const clearBadge = () => {
+  chrome.action.setBadgeText({ text: "" })
+  chrome.action.setTitle({ title: "" })
+}
+
+const streamDownload = async (url: string, model: string) => {
+  url += "/api/pull"
+  const response = await fetch(url, {
+    method: "POST",
+    headers: {
+      "Content-Type": "application/json"
+    },
+    body: JSON.stringify({ model, stream: true })
+  })
+
+  const reader = response.body?.getReader()
+
+  const decoder = new TextDecoder()
+
+  let isSuccess = false // flipped to true once the stream reports status "success"
+  while (true) {
+    const { done, value } = await reader.read()
+
+    if (done) {
+      break
+    }
+
+    const text = decoder.decode(value)
+    try {
+      const json = JSON.parse(text.trim()) as {
+        status: string
+        total?: number
+        completed?: number
+      }
+      if (json.total && json.completed) {
+        chrome.action.setBadgeText({
+          text: progressHuman(json.completed, json.total)
+        })
+        chrome.action.setBadgeBackgroundColor({ color: "#0000FF" })
+      } else {
+        chrome.action.setBadgeText({ text: "🏋️‍♂️" })
+        chrome.action.setBadgeBackgroundColor({ color: "#FFFFFF" })
+      }
+
+      chrome.action.setTitle({ title: json.status })
+
+      if (json.status === "success") {
+        isSuccess = true
+      }
+    } catch (e) {
+      console.error(e)
+    }
+  }
+
+  if (isSuccess) {
+    chrome.action.setBadgeText({ text: "✅" })
+    chrome.action.setBadgeBackgroundColor({ color: "#00FF00" })
+    chrome.action.setTitle({ title: "Model pulled successfully" })
+  } else {
+    chrome.action.setBadgeText({ text: "❌" })
+    chrome.action.setBadgeBackgroundColor({ color: "#FF0000" })
+    chrome.action.setTitle({ title: "Model pull failed" })
+  }
+
+  setTimeout(() => {
+    clearBadge()
+  }, 5000)
+}
+
 chrome.runtime.onMessage.addListener(async (message) => {
   if (message.type === "sidepanel") {
     chrome.tabs.query({ active: true, currentWindow: true }, async (tabs) => {
@@ -8,6 +83,22 @@ chrome.runtime.onMessage.addListener(async (message) => {
         tabId: tab.id
       })
     })
+  } else if (message.type === "pull_model") {
+    const ollamaURL = await getOllamaURL()
+
+    const isRunning = await isOllamaRunning()
+
+    if (!isRunning) {
+      chrome.action.setBadgeText({ text: "E" })
+      chrome.action.setBadgeBackgroundColor({ color: "#FF0000" })
+      chrome.action.setTitle({ title: "Ollama is not running" })
+      setTimeout(() => {
+        clearBadge()
+      }, 5000)
+      return
+    }
+    console.log("Pulling model", message.modelName)
+
+    await streamDownload(ollamaURL, message.modelName)
   }
 })

diff --git a/src/contents/ollama-pull.ts b/src/contents/ollama-pull.ts
new file mode 100644
index 0000000..e8c2336
--- /dev/null
+++ b/src/contents/ollama-pull.ts
@@ -0,0 +1,54 @@
+import type { PlasmoCSConfig } from "plasmo"
+
+export const config: PlasmoCSConfig = {
+  matches: ["*://ollama.com/library/*"],
+  all_frames: true
+}
+
+const downloadModel = async (modelName: string) => {
+  const ok = confirm(
+    `[Page Assist Extension] Do you
want to pull ${modelName} model? This has nothing to do with Ollama.com website. The model will be pulled locally once you confirm.` + ) + if (ok) { + alert( + `[Page Assist Extension] Pulling ${modelName} model. For more details, check the extension icon.` + ) + + await chrome.runtime.sendMessage({ + type: "pull_model", + modelName + }) + return true + } + return false +} + +const downloadSVG = ` + + +` +const codeDiv = document.querySelectorAll("div.language-none") + +for (let i = 0; i < codeDiv.length; i++) { + const button = codeDiv[i].querySelector("button") + const command = codeDiv[i].querySelector("input") + if (button && command) { + const newButton = document.createElement("button") + newButton.innerHTML = downloadSVG + newButton.className = `border-l ${button.className}` + newButton.id = `download-${i}-pageassist` + const modelName = command?.value + .replace("ollama run", "") + .replace("ollama pull", "") + .trim() + newButton.addEventListener("click", () => { + downloadModel(modelName) + }) + + const span = document.createElement("span") + span.title = "Download model via Page Assist" + span.appendChild(newButton) + + button.parentNode.appendChild(span) + } +} From d763c286c13e3c69d9aa30bd81c767611be40bd1 Mon Sep 17 00:00:00 2001 From: n4ze3m Date: Sat, 17 Feb 2024 00:01:07 +0530 Subject: [PATCH 2/4] Remove unused dependencies and update routing --- package.json | 3 +- src/components/Common/Markdown.tsx | 4 +- src/components/Option/Layout.tsx | 90 ++++++++----------- .../Option/Playground/PlaygroundNewChat.tsx | 1 - src/hooks/useMessageOption.tsx | 4 + src/routes/index.tsx | 2 + src/routes/option-index.tsx | 2 - src/routes/option-model.tsx | 9 ++ yarn.lock | 25 ------ 9 files changed, 57 insertions(+), 83 deletions(-) create mode 100644 src/routes/option-model.tsx diff --git a/package.json b/package.json index 0985d1f..c29ecab 100644 --- a/package.json +++ b/package.json @@ -11,7 +11,6 @@ }, "dependencies": { "@ant-design/cssinjs": "^1.18.4", - "@headlessui/react": "^1.7.18", "@heroicons/react": "^2.1.1", "@langchain/community": "^0.0.21", "@langchain/core": "^0.1.22", @@ -81,4 +80,4 @@ "contextMenus" ] } -} \ No newline at end of file +} diff --git a/src/components/Common/Markdown.tsx b/src/components/Common/Markdown.tsx index 688c6f2..c691ffb 100644 --- a/src/components/Common/Markdown.tsx +++ b/src/components/Common/Markdown.tsx @@ -24,8 +24,8 @@ export default function Markdown({ message }: { message: string }) {
+ {pathname !== "/" && ( +
+ + + +
+ )}
-
- - - +
+ + + + + + + + + + +
+
+
+ + {status === "pending" && } + + {status === "success" && ( + ( + + {`${text?.slice(0, 5)}...${text?.slice(-4)}`} + + ) + }, + { + title: "Modified", + dataIndex: "modified_at", + key: "modified_at", + render: (text: string) => dayjs(text).fromNow(true) + }, + { + title: "Size", + dataIndex: "size", + key: "size", + render: (text: number) => bytePerSecondFormatter(text) + }, + { + title: "Action", + render: (_, record) => ( +
+ + + + + + +
+ ) + } + ]} + expandable={{ + expandedRowRender: (record) => ( +
+ ), + defaultExpandAllRows: false + }} + bordered + dataSource={data} + rowKey={(record) => `${record.model}-${record.digest}`} + /> + )} + + + setOpen(false)}> +
pullOllamaModel(values.model))}> + + + + +
+ + ) +} diff --git a/src/components/Option/Playground/PlaygroundForm.tsx b/src/components/Option/Playground/PlaygroundForm.tsx index 9366e34..291d90c 100644 --- a/src/components/Option/Playground/PlaygroundForm.tsx +++ b/src/components/Option/Playground/PlaygroundForm.tsx @@ -7,7 +7,7 @@ import XMarkIcon from "@heroicons/react/24/outline/XMarkIcon" import { toBase64 } from "~libs/to-base64" import { useMessageOption } from "~hooks/useMessageOption" import { Tooltip } from "antd" -import { MicIcon, MicOffIcon } from "lucide-react" +import { MicIcon, StopCircleIcon } from "lucide-react" import { Image } from "antd" import { useSpeechRecognition } from "~hooks/useSpeechRecognition" @@ -60,8 +60,13 @@ export const PlaygroundForm = ({ dropedFile }: Props) => { useDynamicTextareaSize(textareaRef, form.values.message, 300) - const { onSubmit, selectedModel, chatMode, speechToTextLanguage } = - useMessageOption() + const { + onSubmit, + selectedModel, + chatMode, + speechToTextLanguage, + stopStreamingRequest + } = useMessageOption() const { isListening, start, stop, transcript } = useSpeechRecognition() @@ -208,23 +213,34 @@ export const PlaygroundForm = ({ dropedFile }: Props) => { - + {!isSending ? ( + + ) : ( + + + + )} diff --git a/src/components/Option/Playground/PlaygroundMessage.tsx b/src/components/Option/Playground/PlaygroundMessage.tsx index 1882558..6d5ca19 100644 --- a/src/components/Option/Playground/PlaygroundMessage.tsx +++ b/src/components/Option/Playground/PlaygroundMessage.tsx @@ -25,8 +25,7 @@ export const PlaygroundMessage = (props: Props) => { }, [isBtnPressed]) return ( -
+
diff --git a/src/hooks/useMessageOption.tsx b/src/hooks/useMessageOption.tsx index f22d27b..745b0fe 100644 --- a/src/hooks/useMessageOption.tsx +++ b/src/hooks/useMessageOption.tsx @@ -12,6 +12,7 @@ import { import { useStoreMessageOption } from "~store/option" import { saveHistory, saveMessage } from "~libs/db" import { useNavigate } from "react-router-dom" +import { notification } from "antd" export type BotResponse = { bot: { @@ -253,22 +254,58 @@ export const useMessageOption = () => { setIsProcessing(false) } catch (e) { + console.log(e) + + if (e?.name === "AbortError") { + newMessage[appendingIndex].message = newMessage[ + appendingIndex + ].message.slice(0, -1) + + setHistory([ + ...history, + { + role: "user", + content: message, + image + }, + { + role: "assistant", + content: newMessage[appendingIndex].message + } + ]) + + if (historyId) { + await saveMessage(historyId, selectedModel, "user", message, [image]) + await saveMessage( + historyId, + selectedModel, + "assistant", + newMessage[appendingIndex].message, + [] + ) + } else { + const newHistoryId = await saveHistory(message) + await saveMessage(newHistoryId.id, selectedModel, "user", message, [ + image + ]) + await saveMessage( + newHistoryId.id, + selectedModel, + "assistant", + newMessage[appendingIndex].message, + [] + ) + setHistoryId(newHistoryId.id) + } + } else { + notification.error({ + message: "Error", + description: e?.message || "Something went wrong" + }) + } + setIsProcessing(false) setStreaming(false) - - setMessages([ - ...messages, - { - isBot: true, - name: selectedModel, - message: `Something went wrong. Check out the following logs: - \`\`\` - ${e?.message} - \`\`\` - `, - sources: [] - } - ]) } } diff --git a/src/libs/byte-formater.ts b/src/libs/byte-formater.ts new file mode 100644 index 0000000..3499487 --- /dev/null +++ b/src/libs/byte-formater.ts @@ -0,0 +1,23 @@ +const UNITS = [ + "byte", + "kilobyte", + "megabyte", + "gigabyte", + "terabyte", + "petabyte" +] + +const getValueAndUnit = (n: number) => { + const i = n == 0 ? 
0 : Math.floor(Math.log(n) / Math.log(1024)) + const value = n / Math.pow(1024, i) + return { value, unit: UNITS[i] } +} + +export const bytePerSecondFormatter = (n: number) => { + const { unit, value } = getValueAndUnit(n) + return new Intl.NumberFormat("en", { + notation: "compact", + style: "unit", + unit + }).format(value) +} diff --git a/src/routes/option-model.tsx b/src/routes/option-model.tsx index 1cbbf87..3f5a020 100644 --- a/src/routes/option-model.tsx +++ b/src/routes/option-model.tsx @@ -1,9 +1,10 @@ import OptionLayout from "~components/Option/Layout" +import { ModelsBody } from "~components/Option/Models" export const OptionModal = () => { return ( - yo + ) } diff --git a/src/services/ollama.ts b/src/services/ollama.ts index 5ca0fcd..fb256ed 100644 --- a/src/services/ollama.ts +++ b/src/services/ollama.ts @@ -53,6 +53,47 @@ export const isOllamaRunning = async () => { } } +export const getAllModels = async () => { + const baseUrl = await getOllamaURL() + const response = await fetch(`${cleanUrl(baseUrl)}/api/tags`) + if (!response.ok) { + throw new Error(response.statusText) + } + const json = await response.json() + + return json.models as { + name: string + model: string + modified_at: string + size: number + digest: string + details: { + parent_model: string + format: string + family: string + families: string[] + parameter_size: string + quantization_level: string + } + }[] +} + +export const deleteModel= async (model: string) => { + const baseUrl = await getOllamaURL() + const response = await fetch(`${cleanUrl(baseUrl)}/api/delete`, { + method: "DELETE", + headers: { + "Content-Type": "application/json" + }, + body: JSON.stringify({ name: model }) + }) + + if (!response.ok) { + throw new Error(response.statusText) + } + return response.json() +} + export const fetchModels = async () => { try { const baseUrl = await getOllamaURL() @@ -65,6 +106,17 @@ export const fetchModels = async () => { return json.models as { name: string model: string + modified_at: string + size: number + digest: string + details: { + parent_model: string + format: string + family: string + families: string[] + parameter_size: string + quantization_level: string + } }[] } catch (e) { console.error(e) From 7b4158763f151428a698a9c13df70dc4da9b8462 Mon Sep 17 00:00:00 2001 From: n4ze3m Date: Sun, 18 Feb 2024 13:59:33 +0530 Subject: [PATCH 4/4] Update package.json version to 1.0.2 --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index 40a03f8..38c75c0 100644 --- a/package.json +++ b/package.json @@ -1,7 +1,7 @@ { "name": "pageassist", "displayName": "Page Assist - A Web UI for Local AI Models", - "version": "1.0.1", + "version": "1.0.2", "description": "Use your locally running AI models to assist you in your web browsing.", "author": "n4ze3m", "scripts": {
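A brief, hypothetical usage sketch (not part of the patch series) showing how the pieces above fit together. The message shape matches the "pull_model" listener added to src/background.ts in PATCH 1/4, and bytePerSecondFormatter is the helper added in src/libs/byte-formater.ts; the model tag and byte count below are placeholder example values.

    import { bytePerSecondFormatter } from "~libs/byte-formater"

    // Ask the background service worker to pull a model, the same way the
    // ollama.com content script does ("llama2:latest" is only an example tag).
    await chrome.runtime.sendMessage({
      type: "pull_model",
      modelName: "llama2:latest"
    })

    // Format a raw model size (bytes, as reported by /api/tags) for display;
    // ~4.1e9 bytes is expected to render as roughly "3.8 GB".
    console.log(bytePerSecondFormatter(4_109_865_159))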