Merge pull request #137 from n4ze3m/next

v1.1.15
Muhammed Nazeem 2024-07-16 10:47:47 +05:30 committed by GitHub
commit abc9a0c0be
36 changed files with 1371 additions and 1137 deletions

View File

@ -20,7 +20,8 @@
"status": {
"pending": "Pending",
"finished": "Finished",
"processing": "Processing"
"processing": "Processing",
"failed": "Failed"
},
"addKnowledge": "Add Knowledge",
"form": {

View File

@ -23,6 +23,12 @@
},
"hideCurrentChatModelSettings": {
"label": "Hide the current Chat Model Settings"
},
"restoreLastChatModel": {
"label": "Restore last used model for previous chats"
},
"sendNotificationAfterIndexing": {
"label": "Send Notification After Finishing Processing the Knowledge Base"
}
},
"webSearch": {

View File

@ -20,7 +20,8 @@
"status": {
"pending": "Pendiente",
"finished": "Finalizado",
"processing": "Procesando"
"processing": "Procesando",
"failed": "Fallido"
},
"addKnowledge": "Agregar Conocimiento",
"form": {

View File

@ -23,6 +23,12 @@
},
"hideCurrentChatModelSettings": {
"label": "Ocultar Configuraciones del Modelo de Chat Actual"
},
"restoreLastChatModel": {
"label": "Restaurar el último modelo utilizado para chats anteriores"
},
"sendNotificationAfterIndexing": {
"label": "Enviar notificación después de terminar el procesamiento de la base de conocimientos"
}
},
"webSearch": {

View File

@ -20,7 +20,8 @@
"status": {
"pending": "En attente",
"finished": "Terminé",
"processing": "Traitement"
"processing": "Traitement",
"failed": "Échoué"
},
"addKnowledge": "Ajouter des connaissances",
"form": {

View File

@ -23,6 +23,12 @@
},
"hideCurrentChatModelSettings": {
"label": "Masquer les paramètres actuels du modèle de chat"
},
"restoreLastChatModel": {
"label": "Restaurer le dernier modèle utilisé pour les conversations précédentes"
},
"sendNotificationAfterIndexing": {
"label": "Envoyer une notification après avoir terminé le traitement de la base de connaissances"
}
},
"webSearch": {

View File

@ -20,7 +20,8 @@
"status": {
"pending": "In attesa",
"finished": "Completato",
"processing": "In corso"
"processing": "In corso",
"failed": "Fallito"
},
"addKnowledge": "Aggiungi Knowledge Base",
"form": {

View File

@ -23,6 +23,12 @@
},
"hideCurrentChatModelSettings": {
"label": "Nascondi le impostazioni correnti del modello Chat"
},
"restoreLastChatModel": {
"label": "Ripristina l'ultimo modello utilizzato per le chat precedenti"
},
"sendNotificationAfterIndexing": {
"label": "Inviare notifica dopo aver terminato l'elaborazione della base di conoscenza"
}
},
"webSearch": {

View File

@ -20,7 +20,8 @@
"status": {
"pending": "保留中",
"finished": "完了",
"processing": "処理中"
"processing": "処理中",
"failed": "失敗"
},
"addKnowledge": "知識を追加",
"form": {

View File

@ -26,6 +26,12 @@
},
"hideCurrentChatModelSettings": {
"label": "現在のチャットモデル設定を非表示"
},
"restoreLastChatModel": {
"label": "以前のチャットで最後に使用したモデルを復元する"
},
"sendNotificationAfterIndexing": {
"label": "ナレッジベースの処理完了後に通知を送信"
}
},
"webSearch": {

View File

@ -20,7 +20,8 @@
"status": {
"pending": "തീരുമാനിക്കാനുണ്ട്",
"finished": "പൂർത്തീകരിച്ചു",
"processing": "പ്രോസസ്സിംഗ്"
"processing": "പ്രോസസ്സിംഗ്",
"failed": "പരാജയപ്പെട്ടു"
},
"addKnowledge": "വിജ്ഞാനം ചേര്‍ക്കുക",
"form": {

View File

@ -26,6 +26,12 @@
},
"hideCurrentChatModelSettings": {
"label": "നിലവിലുള്ള ചാറ്റ് മോഡൽ ക്രമീകരണങ്ങൾ മറയ്ക്കുക"
},
"restoreLastChatModel": {
"label": "മുൻപത്തെ ചാറ്റുകൾക്കായി അവസാനം ഉപയോഗിച്ച മോഡൽ പുനഃസ്ഥാപിക്കുക"
},
"sendNotificationAfterIndexing": {
"label": "അറിവ് ശേഖരം പ്രോസസ്സ് ചെയ്ത് കഴിഞ്ഞതിന് ശേഷം അറിയിപ്പ് അയയ്ക്കുക"
}
},
"webSearch": {

View File

@ -20,7 +20,8 @@
"status": {
"pending": "Pendente",
"finished": "Concluído",
"processing": "Processando"
"processing": "Processando",
"failed": "Falhou"
},
"addKnowledge": "Adicionar Conhecimento",
"form": {

View File

@ -23,6 +23,12 @@
},
"hideCurrentChatModelSettings": {
"label": "Ocultar as Configurações Atuais do Modelo de Chat"
},
"restoreLastChatModel": {
"label": "Restaurar o último modelo usado para conversas anteriores"
},
"sendNotificationAfterIndexing": {
"label": "Enviar notificação após concluir o processamento da base de conhecimento"
}
},
"webSearch": {

View File

@ -20,7 +20,8 @@
"status": {
"pending": "Ожидание",
"finished": "Завершено",
"processing": "Обработка"
"processing": "Обработка",
"failed": "Не удалось"
},
"addKnowledge": "Добавить знание",
"form": {

View File

@ -23,6 +23,12 @@
},
"hideCurrentChatModelSettings": {
"label": "Скрыть текущие настройки модели чата"
},
"restoreLastChatModel": {
"label": "Восстановить последнюю использованную модель для предыдущих чатов"
},
"sendNotificationAfterIndexing": {
"label": "Отправить уведомление после завершения обработки базы знаний"
}
},
"webSearch": {

View File

@ -20,7 +20,8 @@
"status": {
"pending": "待定",
"finished": "已完成",
"processing": "处理中"
"processing": "处理中",
"failed": "失败"
},
"addKnowledge": "添加知识",
"form": {

View File

@ -26,6 +26,12 @@
},
"hideCurrentChatModelSettings": {
"label": "隐藏当前聊天模型设置"
},
"restoreLastChatModel": {
"label": "恢复上次用于之前聊天的模型"
},
"sendNotificationAfterIndexing": {
"label": "完成知识库处理后发送通知"
}
},
"webSearch": {

View File

@ -65,3 +65,63 @@
animation: gradient-border 3s infinite;
border-radius: 10px;
}
/* Hide scrollbar by default */
.custom-scrollbar {
scrollbar-width: none;
-ms-overflow-style: none;
}
.custom-scrollbar::-webkit-scrollbar {
display: none;
}
/* Show scrollbar on hover */
.custom-scrollbar:hover {
scrollbar-width: thin;
-ms-overflow-style: auto;
}
.custom-scrollbar:hover::-webkit-scrollbar {
display: block;
width: 8px;
}
/* Custom scrollbar styles for light theme */
.custom-scrollbar:hover::-webkit-scrollbar-track {
@apply bg-gray-50;
border-radius: 4px;
}
.custom-scrollbar:hover::-webkit-scrollbar-thumb {
@apply bg-gray-300;
border-radius: 4px;
transition: background 0.2s ease;
}
.custom-scrollbar:hover::-webkit-scrollbar-thumb:hover {
@apply bg-gray-400;
}
/* Custom scrollbar styles for dark theme */
.dark .custom-scrollbar:hover::-webkit-scrollbar-track {
background-color: #262626;
}
.dark .custom-scrollbar:hover::-webkit-scrollbar-thumb {
background-color: #404040;
}
.dark .custom-scrollbar:hover::-webkit-scrollbar-thumb:hover {
background-color: #525252;
}
/* For Firefox */
.custom-scrollbar {
scrollbar-color: theme('colors.gray.300') theme('colors.gray.50');
scrollbar-width: thin;
}
.dark .custom-scrollbar {
scrollbar-color: #404040 #262626;
}
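The new custom-scrollbar rules are opt-in: a scrollable element only gets the hover-to-reveal scrollbar once the class is applied, which is what the PlaygroundChat change further down does. A minimal sketch, assuming a plain React component; the component name and Tailwind sizing classes are illustrative and not part of this commit:

import React from "react"

// Illustrative only: any vertically scrolling container can opt in to the
// hover-to-reveal scrollbar by adding the "custom-scrollbar" class.
export const ScrollableNotes = ({ notes }: { notes: string[] }) => (
  <div className="custom-scrollbar h-64 overflow-y-auto">
    {notes.map((note, index) => (
      <p key={index}>{note}</p>
    ))}
  </div>
)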

View File

@ -37,8 +37,9 @@ export const KnowledgeSettings = () => {
const statusColor = {
finished: "green",
processing: "blue",
pending: "gray"
processing: "yellow",
pending: "gray",
failed: "red"
}
return (

View File

@ -78,6 +78,7 @@ export const Playground = () => {
dropState === "dragging" ? "bg-gray-100 dark:bg-gray-800 z-10" : ""
} bg-white dark:bg-[#171717]`}>
<PlaygroundChat />
<div className="flex flex-col items-center">
<div className="flex-grow">
<div className="w-full flex justify-center">

View File

@ -3,6 +3,8 @@ import { useMessageOption } from "~/hooks/useMessageOption"
import { PlaygroundEmpty } from "./PlaygroundEmpty"
import { PlaygroundMessage } from "~/components/Common/Playground/Message"
import { MessageSourcePopup } from "@/components/Common/Playground/MessageSourcePopup"
import { useSmartScroll } from "~/hooks/useSmartScroll"
import { ChevronDown } from "lucide-react"
export const PlaygroundChat = () => {
const {
@ -13,24 +15,24 @@ export const PlaygroundChat = () => {
editMessage,
ttsEnabled
} = useMessageOption()
const divRef = React.useRef<HTMLDivElement>(null)
const [isSourceOpen, setIsSourceOpen] = React.useState(false)
const [source, setSource] = React.useState<any>(null)
React.useEffect(() => {
if (divRef.current) {
divRef.current.scrollIntoView({ behavior: "smooth" })
}
})
const { containerRef, isAtBottom, scrollToBottom } = useSmartScroll(
messages,
streaming
)
return (
<>
{" "}
<div className="grow flex flex-col md:translate-x-0 transition-transform duration-300 ease-in-out">
<div
ref={containerRef}
className="custom-scrollbar grow flex flex-col md:translate-x-0 transition-transform duration-300 ease-in-out overflow-y-auto h-[calc(100vh-200px)]">
{messages.length === 0 && (
<div className="mt-32">
<PlaygroundEmpty />
</div>
)}
{/* {messages.length > 0 && <div className="w-full h-16 flex-shrink-0"></div>} */}
{messages.map((message, index) => (
<PlaygroundMessage
key={index}
@ -55,10 +57,18 @@ export const PlaygroundChat = () => {
/>
))}
{messages.length > 0 && (
<div className="w-full h-32 md:h-48 flex-shrink-0"></div>
<div className="w-full h-16 flex-shrink-0"></div>
)}
<div ref={divRef} />
</div>
{!isAtBottom && (
<div className="fixed md:bottom-44 bottom-36 z-[9999999] left-0 right-0 flex justify-center">
<button
onClick={scrollToBottom}
className="bg-gray-100 dark:bg-gray-800 p-1 rounded-full shadow-md hover:bg-gray-200 dark:hover:bg-gray-700 transition-colors duration-200">
<ChevronDown className="size-4 text-gray-600 dark:text-gray-300" />
</button>
</div>
)}
<MessageSourcePopup
open={isSourceOpen}
setOpen={setIsSourceOpen}

View File

@ -24,9 +24,17 @@ export const GeneralSettings = () => {
false
)
const [restoreLastChatModel, setRestoreLastChatModel] = useStorage(
"restoreLastChatModel",
false
)
const [hideCurrentChatModelSettings, setHideCurrentChatModelSettings] =
useStorage("hideCurrentChatModelSettings", false)
const [sendNotificationAfterIndexing, setSendNotificationAfterIndexing] =
useStorage("sendNotificationAfterIndexing", false)
const queryClient = useQueryClient()
const { mode, toggleDarkMode } = useDarkMode()
@ -107,6 +115,32 @@ export const GeneralSettings = () => {
onChange={(checked) => setHideCurrentChatModelSettings(checked)}
/>
</div>
<div className="flex flex-row justify-between">
<div className="inline-flex items-center gap-2">
<span className="text-gray-700 dark:text-neutral-50">
{t("generalSettings.settings.restoreLastChatModel.label")}
</span>
</div>
<Switch
checked={restoreLastChatModel}
onChange={(checked) => setRestoreLastChatModel(checked)}
/>
</div>
<div className="flex flex-row justify-between">
<div className="inline-flex items-center gap-2">
<span className="text-gray-700 dark:text-neutral-50">
{t("generalSettings.settings.sendNotificationAfterIndexing.label")}
</span>
</div>
<Switch
checked={sendNotificationAfterIndexing}
onChange={setSendNotificationAfterIndexing}
/>
</div>
<div className="flex flex-row justify-between">
<span className="text-gray-700 dark:text-neutral-50 ">
{t("generalSettings.settings.darkMode.label")}

View File

@ -11,14 +11,24 @@ import { useMessageOption } from "~/hooks/useMessageOption"
import { PencilIcon, Trash2 } from "lucide-react"
import { useNavigate } from "react-router-dom"
import { useTranslation } from "react-i18next"
import {
getLastUsedChatModel,
lastUsedChatModelEnabled
} from "@/services/model-settings"
type Props = {
onClose: () => void
}
export const Sidebar = ({ onClose }: Props) => {
const { setMessages, setHistory, setHistoryId, historyId, clearChat } =
useMessageOption()
const {
setMessages,
setHistory,
setHistoryId,
historyId,
clearChat,
setSelectedModel
} = useMessageOption()
const { t } = useTranslation(["option", "common"])
const client = useQueryClient()
const navigate = useNavigate()
@ -88,6 +98,13 @@ export const Sidebar = ({ onClose }: Props) => {
setHistoryId(chat.id)
setHistory(formatToChatHistory(history))
setMessages(formatToMessage(history))
const isLastUsedChatModel = await lastUsedChatModelEnabled()
if (isLastUsedChatModel) {
const currentChatModel = await getLastUsedChatModel(chat.id)
if (currentChatModel) {
setSelectedModel(currentChatModel)
}
}
navigate("/")
onClose()
}}>

View File

@ -1,6 +1,7 @@
import { getOllamaURL, isOllamaRunning } from "../services/ollama"
import { browser } from "wxt/browser"
import { setBadgeBackgroundColor, setBadgeText, setTitle } from "@/utils/action"
const progressHuman = (completed: number, total: number) => {
return ((completed / total) * 100).toFixed(0) + "%"
}
@ -75,11 +76,12 @@ const streamDownload = async (url: string, model: string) => {
clearBadge()
}, 5000)
}
export default defineBackground({
main() {
browser.runtime.onMessage.addListener(async (message) => {
if (message.type === "sidepanel") {
browser.sidebarAction.open()
await browser.sidebarAction.open()
} else if (message.type === "pull_model") {
const ollamaURL = await getOllamaURL()
@ -100,7 +102,7 @@ export default defineBackground({
if (import.meta.env.BROWSER === "chrome") {
chrome.action.onClicked.addListener((tab) => {
browser.tabs.create({ url: browser.runtime.getURL("/options.html") })
chrome.tabs.create({ url: chrome.runtime.getURL("/options.html") })
})
} else {
browser.browserAction.onClicked.addListener((tab) => {
@ -109,23 +111,31 @@ export default defineBackground({
})
}
const contextMenuTitle = {
webUi: browser.i18n.getMessage("openOptionToChat"),
sidePanel: browser.i18n.getMessage("openSidePanelToChat")
}
const contextMenuId = {
webUi: "open-web-ui-pa",
sidePanel: "open-side-panel-pa"
}
browser.contextMenus.create({
id: "open-side-panel-pa",
title: browser.i18n.getMessage("openSidePanelToChat"),
id: contextMenuId["sidePanel"],
title: contextMenuTitle["sidePanel"],
contexts: ["all"]
})
if (import.meta.env.BROWSER === "chrome") {
browser.contextMenus.onClicked.addListener((info, tab) => {
if (info.menuItemId === "open-side-panel-pa") {
chrome.tabs.query(
{ active: true, currentWindow: true },
async (tabs) => {
const tab = tabs[0]
chrome.sidePanel.open({
tabId: tab.id!
})
}
)
} else if (info.menuItemId === "open-web-ui-pa") {
browser.tabs.create({
url: browser.runtime.getURL("/options.html")
})
}
})
@ -152,6 +162,10 @@ export default defineBackground({
browser.contextMenus.onClicked.addListener((info, tab) => {
if (info.menuItemId === "open-side-panel-pa") {
browser.sidebarAction.toggle()
} else if (info.menuItemId === "open-web-ui-pa") {
browser.tabs.create({
url: browser.runtime.getURL("/options.html")
})
}
})

View File

@ -1,4 +1,5 @@
import { saveHistory, saveMessage } from "@/db"
import { setLastUsedChatModel } from "@/services/model-settings"
import { ChatHistory } from "@/store/option"
export const saveMessageOnError = async ({
@ -23,7 +24,7 @@ export const saveMessageOnError = async ({
historyId: string | null
selectedModel: string
setHistoryId: (historyId: string) => void
isRegenerating: boolean,
isRegenerating: boolean
message_source?: "copilot" | "web-ui"
}) => {
if (
@ -66,6 +67,7 @@ export const saveMessageOnError = async ({
[],
2
)
await setLastUsedChatModel(historyId, selectedModel)
} else {
const newHistoryId = await saveHistory(userMessage, false, message_source)
if (!isRegenerating) {
@ -89,6 +91,7 @@ export const saveMessageOnError = async ({
2
)
setHistoryId(newHistoryId.id)
await setLastUsedChatModel(newHistoryId.id, selectedModel)
}
return true
@ -115,7 +118,7 @@ export const saveMessageOnSuccess = async ({
message: string
image: string
fullText: string
source: any[],
source: any[]
message_source?: "copilot" | "web-ui"
}) => {
if (historyId) {
@ -139,6 +142,7 @@ export const saveMessageOnSuccess = async ({
source,
2
)
await setLastUsedChatModel(historyId, selectedModel!)
} else {
const newHistoryId = await saveHistory(message, false, message_source)
await saveMessage(
@ -160,5 +164,6 @@ export const saveMessageOnSuccess = async ({
2
)
setHistoryId(newHistoryId.id)
await setLastUsedChatModel(newHistoryId.id, selectedModel!)
}
}

View File

@ -1,106 +0,0 @@
import { useCallback, useEffect, useRef, useState } from "react"
import { useMessageOption } from "./useMessageOption"
export const useScrollAnchor = () => {
const { isProcessing, messages } = useMessageOption()
const [isAtTop, setIsAtTop] = useState(false)
const [isAtBottom, setIsAtBottom] = useState(true)
const [userScrolled, setUserScrolled] = useState(false)
const [isOverflowing, setIsOverflowing] = useState(false)
const messagesStartRef = useRef<HTMLDivElement>(null)
const messagesEndRef = useRef<HTMLDivElement>(null)
const containerRef = useRef<HTMLDivElement>(null)
const isAutoScrolling = useRef(false)
console.log(`isAtTop: ${isAtTop}, isAtBottom: ${isAtBottom}, userScrolled: ${userScrolled}, isOverflowing: ${isOverflowing}`)
useEffect(() => {
if (!isProcessing && userScrolled) {
console.log("userScrolled")
setUserScrolled(false)
}
}, [isProcessing])
useEffect(() => {
if (isProcessing && !userScrolled) {
scrollToBottom()
}
}, [messages])
useEffect(() => {
const container = containerRef.current
if (!container) return
const topObserver = new IntersectionObserver(
([entry]) => {
setIsAtTop(entry.isIntersecting)
},
{ threshold: 1 }
)
const bottomObserver = new IntersectionObserver(
([entry]) => {
setIsAtBottom(entry.isIntersecting)
if (entry.isIntersecting) {
setUserScrolled(false)
} else if (!isAutoScrolling.current) {
setUserScrolled(true)
}
},
{ threshold: 1 }
)
if (messagesStartRef.current) {
topObserver.observe(messagesStartRef.current)
}
if (messagesEndRef.current) {
bottomObserver.observe(messagesEndRef.current)
}
const resizeObserver = new ResizeObserver(() => {
setIsOverflowing(container.scrollHeight > container.clientHeight)
})
resizeObserver.observe(container)
return () => {
topObserver.disconnect()
bottomObserver.disconnect()
resizeObserver.disconnect()
}
}, [])
const scrollToTop = useCallback(() => {
if (messagesStartRef.current) {
messagesStartRef.current.scrollIntoView({ behavior: "smooth" })
}
}, [])
const scrollToBottom = useCallback(() => {
isAutoScrolling.current = true
setTimeout(() => {
if (messagesEndRef.current) {
messagesEndRef.current.scrollIntoView({ behavior: "smooth" })
}
isAutoScrolling.current = false
}, 100)
}, [])
return {
messagesStartRef,
messagesEndRef,
containerRef,
isAtTop,
isAtBottom,
userScrolled,
isOverflowing,
scrollToTop,
scrollToBottom,
setIsAtBottom
}
}

View File

@ -0,0 +1,35 @@
import { useRef, useEffect, useState } from 'react';
export const useSmartScroll = (messages: any[], streaming: boolean) => {
const containerRef = useRef<HTMLDivElement>(null);
const [isAtBottom, setIsAtBottom] = useState(true);
useEffect(() => {
const container = containerRef.current;
if (!container) return;
const handleScroll = () => {
const { scrollTop, scrollHeight, clientHeight } = container;
setIsAtBottom(scrollHeight - scrollTop - clientHeight < 50);
};
container.addEventListener('scroll', handleScroll);
return () => container.removeEventListener('scroll', handleScroll);
}, []);
useEffect(() => {
if (isAtBottom && containerRef.current) {
const scrollOptions: ScrollIntoViewOptions = streaming
? { behavior: 'smooth', block: 'end' }
: { behavior: 'auto', block: 'end' };
containerRef.current.lastElementChild?.scrollIntoView(scrollOptions);
}
}, [messages, streaming, isAtBottom]);
const scrollToBottom = () => {
containerRef.current?.lastElementChild?.scrollIntoView({ behavior: 'smooth', block: 'end' });
};
return { containerRef, isAtBottom, scrollToBottom };
};

View File

@ -12,6 +12,7 @@ import { PageAssisCSVUrlLoader } from "@/loader/csv"
import { PageAssisTXTUrlLoader } from "@/loader/txt"
import { PageAssistDocxLoader } from "@/loader/docx"
import { cleanUrl } from "./clean-url"
import { sendEmbeddingCompleteNotification } from "./send-notification"
export const processKnowledge = async (msg: any, id: string): Promise<void> => {
@ -102,6 +103,8 @@ export const processKnowledge = async (msg: any, id: string): Promise<void> => {
}
await updateKnowledgeStatus(id, "finished")
await sendEmbeddingCompleteNotification()
} catch (error) {
console.error(`Error processing knowledge with id: ${id}`, error)
await updateKnowledgeStatus(id, "failed")

View File

@ -0,0 +1,29 @@
import { Storage } from "@plasmohq/storage"
const storage = new Storage()
export const sendNotification = async (title: string, message: string) => {
try {
const sendNotificationAfterIndexing = await storage.get<boolean>(
"sendNotificationAfterIndexing"
)
if (sendNotificationAfterIndexing) {
console.log("Sending notification")
browser.notifications.create({
type: "basic",
iconUrl: browser.runtime.getURL("/icon/128.png"),
title,
message
})
console.log("Notification sent")
}
} catch (error) {
console.error(error)
}
}
export const sendEmbeddingCompleteNotification = async () => {
await sendNotification(
"Page Assist - Embedding Completed",
"The knowledge base embedding process is complete. You can now use the knowledge base for chatting."
)
}

View File

@ -7,5 +7,8 @@
},
"openSidePanelToChat": {
"message": "Open Copilot to Chat"
},
"openOptionToChat": {
"message": "Open Web UI to Chat"
}
}

View File

@ -3,4 +3,22 @@ import PubSub from "pubsub-js"
export const KNOWLEDGE_QUEUE = Symbol("queue")
PubSub.subscribe(KNOWLEDGE_QUEUE, processKnowledge)
let isProcessing = false
PubSub.subscribe(KNOWLEDGE_QUEUE, async (msg, id) => {
try {
isProcessing = true
await processKnowledge(msg, id)
isProcessing = false
} catch (error) {
console.error(error)
isProcessing = false
}
})
window.addEventListener("beforeunload", (event) => {
if (isProcessing) {
event.preventDefault()
event.returnValue = ""
}
})

View File

@ -80,3 +80,21 @@ export const getCustomOllamaHeaders = async (): Promise<
return headerMap
}
export const getOpenOnIconClick = async (): Promise<string> => {
const openOnIconClick = await storage.get<string>("openOnIconClick");
return openOnIconClick || "webUI";
};
export const setOpenOnIconClick = async (option: "webUI" | "sidePanel"): Promise<void> => {
await storage.set("openOnIconClick", option);
};
export const getOpenOnRightClick = async (): Promise<string> => {
const openOnRightClick = await storage.get<string>("openOnRightClick");
return openOnRightClick || "sidePanel";
};
export const setOpenOnRightClick = async (option: "webUI" | "sidePanel"): Promise<void> => {
await storage.set("openOnRightClick", option);
};
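getOpenOnIconClick and getOpenOnRightClick are introduced here but not consulted anywhere else in this diff; a hedged sketch of how a background handler might read them to choose between the Web UI and the side panel. The handler below is illustrative and assumes the browser.* APIs already used in the background entrypoint:

import { browser } from "wxt/browser"
// Import path assumed: the helpers are added to the same service module
// that exports getCustomOllamaHeaders above.
import { getOpenOnIconClick, setOpenOnIconClick } from "@/services/ollama"

// Illustrative only: route a toolbar-icon click based on the stored
// preference; "webUI" is the default returned by getOpenOnIconClick.
const handleIconClick = async () => {
  const target = await getOpenOnIconClick()
  if (target === "sidePanel") {
    await browser.sidebarAction.open()
  } else {
    await browser.tabs.create({ url: browser.runtime.getURL("/options.html") })
  }
}

// A settings page would persist a new choice like this:
// await setOpenOnIconClick("sidePanel")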

View File

@ -72,7 +72,6 @@ const getAllModelSettings = async () => {
if (!value && key === "keepAlive") {
settings[key] = "5m"
}
}
return settings
} catch (error) {
@ -81,8 +80,10 @@ const getAllModelSettings = async () => {
}
}
const setModelSetting = async (key: string,
value: string | number | boolean) => {
const setModelSetting = async (
key: string,
value: string | number | boolean
) => {
await storage.set(key, value)
}
@ -98,4 +99,30 @@ export const getAllDefaultModelSettings = async (): Promise<ModelSettings> => {
return settings
}
export const lastUsedChatModelEnabled = async (): Promise<boolean> => {
const isLastUsedChatModelEnabled = await storage.get<boolean | undefined>(
"restoreLastChatModel"
)
return isLastUsedChatModelEnabled ?? false
}
export const setLastUsedChatModelEnabled = async (
enabled: boolean
): Promise<void> => {
await storage.set("restoreLastChatModel", enabled)
}
export const getLastUsedChatModel = async (
historyId: string
): Promise<string | undefined> => {
return await storage.get<string | undefined>(`lastUsedChatModel-${historyId}`)
}
export const setLastUsedChatModel = async (
historyId: string,
model: string
): Promise<void> => {
await storage.set(`lastUsedChatModel-${historyId}`, model)
}
export { getAllModelSettings, setModelSetting }
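The per-chat model is stored under a key derived from the history id (lastUsedChatModel-${historyId}), gated by the restoreLastChatModel flag that the new General settings switch writes. A condensed sketch of the round trip performed by the chat-helper and Sidebar changes above:

import {
  getLastUsedChatModel,
  lastUsedChatModelEnabled,
  setLastUsedChatModel
} from "@/services/model-settings"

// After a message is saved: remember which model served this history.
const rememberModelForChat = async (historyId: string, model: string) => {
  await setLastUsedChatModel(historyId, model)
}

// When a previous chat is reopened: restore the model only if the
// "restoreLastChatModel" flag is enabled; otherwise keep the current one.
const modelToRestore = async (historyId: string): Promise<string | undefined> => {
  if (!(await lastUsedChatModelEnabled())) {
    return undefined
  }
  return await getLastUsedChatModel(historyId)
}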

View File

@ -11,7 +11,8 @@ const chromeMV3Permissions = [
"action",
"unlimitedStorage",
"contextMenus",
"tts"
"tts",
"notifications"
]
const firefoxMV2Permissions = [
@ -22,6 +23,7 @@ const firefoxMV2Permissions = [
"contextMenus",
"webRequest",
"webRequestBlocking",
"notifications",
"http://*/*",
"https://*/*",
"file://*/*"
@ -48,7 +50,7 @@ export default defineConfig({
outDir: "build",
manifest: {
version: "1.1.14",
version: "1.1.15",
name:
process.env.TARGET === "firefox"
? "Page Assist - A Web UI for Local AI Models"