Merge pull request #305 from n4ze3m/next

v1.4.4
Muhammed Nazeem, 2025-01-26 19:15:36 +05:30 (committed by GitHub)
commit b6814a9e19
GPG Key ID: B5690EEEBB952194 (no known key found for this signature in database)
62 changed files with 634 additions and 224 deletions

.gitignore (vendored, 2 lines changed)

@@ -45,4 +45,4 @@ keys.json
 # WXT
 .wxt
 # WebStorm
 .idea

bun.lockb (binary file, not shown)

@@ -58,6 +58,7 @@
     "remark-math": "5.1.1",
     "tesseract.js": "^5.1.1",
     "turndown": "^7.1.3",
+    "unist-util-visit": "^5.0.0",
     "yt-transcript": "^0.0.2",
     "zustand": "^4.5.0"
   },

@@ -125,5 +125,9 @@
   "pin": "تثبيت",
   "unpin": "إلغاء التثبيت",
   "generationInfo": "معلومات التوليد",
-  "sidebarChat": "دردشة الشريط الجانبي"
+  "sidebarChat": "دردشة الشريط الجانبي",
+  "reasoning": {
+    "thinking": "جاري التفكير....",
+    "thought": "فكر لمدة {{time}}"
+  }
 }

@@ -118,5 +118,9 @@
   "pin": "Fastgør",
   "unpin": "Frigør",
   "generationInfo": "Genererings Info",
-  "sidebarChat": "Sidepanel Chat"
+  "sidebarChat": "Sidepanel Chat",
+  "reasoning": {
+    "thinking": "Tænker....",
+    "thought": "Tænkte i {{time}}"
+  }
 }

@@ -118,5 +118,9 @@
   "pin": "Anheften",
   "unpin": "Losheften",
   "generationInfo": "Generierungsinformationen",
-  "sidebarChat": "Seitenleisten-Chat"
+  "sidebarChat": "Seitenleisten-Chat",
+  "reasoning": {
+    "thinking": "Denke nach....",
+    "thought": "Gedanke für {{time}}"
+  }
 }

@@ -152,5 +152,9 @@
   "pin": "Pin",
   "unpin": "Unpin",
   "generationInfo": "Generation Info",
-  "sidebarChat": "Sidebar Chat"
+  "sidebarChat": "Sidebar Chat",
+  "reasoning": {
+    "thinking": "Thinking....",
+    "thought": "Thought for {{time}}"
+  }
 }

@@ -117,5 +117,9 @@
   "pin": "Fijar",
   "unpin": "Desfijar",
   "generationInfo": "Información de Generación",
-  "sidebarChat": "Chat lateral"
+  "sidebarChat": "Chat lateral",
+  "reasoning": {
+    "thinking": "Pensando....",
+    "thought": "Pensamiento por {{time}}"
+  }
 }

@@ -111,5 +111,9 @@
   "pin": "پین کردن",
   "unpin": "حذف پین",
   "generationInfo": "اطلاعات تولید",
-  "sidebarChat": "چت کناری"
+  "sidebarChat": "چت کناری",
+  "reasoning": {
+    "thinking": "در حال فکر کردن....",
+    "thought": "فکر کردن برای {{time}}"
+  }
 }

@@ -117,5 +117,9 @@
   "pin": "Épingler",
   "unpin": "Désépingler",
   "generationInfo": "Informations de génération",
-  "sidebarChat": "Chat latéral"
+  "sidebarChat": "Chat latéral",
+  "reasoning": {
+    "thinking": "Réflexion....",
+    "thought": "Réflexion pendant {{time}}"
+  }
 }

@@ -117,5 +117,9 @@
   "pin": "Fissa",
   "unpin": "Rimuovi",
   "generationInfo": "Informazioni sulla Generazione",
-  "sidebarChat": "Chat Laterale"
+  "sidebarChat": "Chat Laterale",
+  "reasoning": {
+    "thinking": "Pensando....",
+    "thought": "Pensato per {{time}}"
+  }
 }

@@ -117,5 +117,9 @@
   "pin": "固定",
   "unpin": "固定解除",
   "generationInfo": "生成情報",
-  "sidebarChat": "サイドバーチャット"
+  "sidebarChat": "サイドバーチャット",
+  "reasoning": {
+    "thinking": "考え中....",
+    "thought": "{{time}}の思考"
+  }
 }

@@ -117,5 +117,9 @@
   "pin": "고정",
   "unpin": "고정 해제",
   "generationInfo": "생성 정보",
-  "sidebarChat": "사이드바 채팅"
+  "sidebarChat": "사이드바 채팅",
+  "reasoning": {
+    "thinking": "생각 중....",
+    "thought": "{{time}} 동안 생각함"
+  }
 }

@@ -116,6 +116,9 @@
   "pin": "പിൻ ചെയ്യുക",
   "unpin": "അൺപിൻ ചെയ്യുക",
   "generationInfo": "ജനറേഷൻ വിവരങ്ങൾ",
-  "sidebarChat": "സൈഡ്ബാർ ചാറ്റ്"
+  "sidebarChat": "സൈഡ്ബാർ ചാറ്റ്",
+  "reasoning": {
+    "thinking": "ചിന്തിക്കുന്നു....",
+    "thought": "{{time}} നേരത്തെ ചിന്ത"
+  }
 }

@@ -118,5 +118,9 @@
   "pin": "Fest",
   "unpin": "Løsne",
   "generationInfo": "Generasjonsinformasjon",
-  "sidebarChat": "Sidepanel-chat"
+  "sidebarChat": "Sidepanel-chat",
+  "reasoning": {
+    "thinking": "Tenker....",
+    "thought": "Tenkte i {{time}}"
+  }
 }

@@ -117,5 +117,9 @@
   "pin": "Fixar",
   "unpin": "Desafixar",
   "generationInfo": "Informações de Geração",
-  "sidebarChat": "Chat Lateral"
+  "sidebarChat": "Chat Lateral",
+  "reasoning": {
+    "thinking": "Pensando....",
+    "thought": "Pensou por {{time}}"
+  }
 }

@@ -117,5 +117,9 @@
   "pin": "Закрепить",
   "unpin": "Открепить",
   "generationInfo": "Информация о генерации",
-  "sidebarChat": "Боковой чат"
+  "sidebarChat": "Боковой чат",
+  "reasoning": {
+    "thinking": "Размышляю...",
+    "thought": "Размышлял {{time}}"
+  }
 }

@@ -122,5 +122,9 @@
   "pin": "Fäst",
   "unpin": "Ta bort fäst",
   "generationInfo": "Generationsinformation",
-  "sidebarChat": "Sidofältschatt"
+  "sidebarChat": "Sidofältschatt",
+  "reasoning": {
+    "thinking": "Tänker....",
+    "thought": "Tänkte i {{time}}"
+  }
 }

@@ -122,5 +122,9 @@
   "pin": "Прикріпити",
   "unpin": "Відкріпити",
   "generationInfo": "Інформація про генерацію",
-  "sidebarChat": "Бічний чат"
+  "sidebarChat": "Бічний чат",
+  "reasoning": {
+    "thinking": "Думаю....",
+    "thought": "Думав протягом {{time}}"
+  }
 }

@@ -117,5 +117,9 @@
   "pin": "置顶",
   "unpin": "取消置顶",
   "generationInfo": "生成信息",
-  "sidebarChat": "侧边栏聊天"
+  "sidebarChat": "侧边栏聊天",
+  "reasoning": {
+    "thinking": "思考中....",
+    "thought": "思考了 {{time}}"
+  }
 }

@@ -18,7 +18,8 @@ import { useTTS } from "@/hooks/useTTS"
 import { tagColors } from "@/utils/color"
 import { removeModelSuffix } from "@/db/models"
 import { GenerationInfo } from "./GenerationInfo"
+import { parseReasoning } from "@/libs/reasoning"
+import { humanizeMilliseconds } from "@/utils/humanize-miliseconds"

 type Props = {
   message: string
   message_type?: string
@@ -40,6 +41,8 @@ type Props = {
   onSourceClick?: (source: any) => void
   isTTSEnabled?: boolean
   generationInfo?: any
+  isStreaming: boolean
+  reasoningTimeTaken?: number
 }

 export const PlaygroundMessage = (props: Props) => {
@@ -48,7 +51,6 @@ export const PlaygroundMessage = (props: Props) => {
   const { t } = useTranslation("common")
   const { cancel, isSpeaking, speak } = useTTS()
-
   return (
     <div className="group w-full text-gray-800 dark:text-gray-100">
       <div className="text-base md:max-w-2xl lg:max-w-xl xl:max-w-3xl flex lg:px-0 m-auto w-full">
@@ -94,7 +96,40 @@
           <div className="flex flex-grow flex-col">
             {!editMode ? (
               props.isBot ? (
-                <Markdown message={props.message} />
+                <>
+                  {parseReasoning(props.message).map((e, i) => {
+                    if (e.type === "reasoning") {
+                      return (
+                        <Collapse
+                          key={i}
+                          className="border-none !mb-3"
+                          items={[
+                            {
+                              key: "reasoning",
+                              label:
+                                props.isStreaming && e?.reasoning_running ? (
+                                  <div className="flex items-center gap-2">
+                                    <span className="italic">
+                                      {t("reasoning.thinking")}
+                                    </span>
+                                  </div>
+                                ) : (
+                                  t("reasoning.thought", {
+                                    time: humanizeMilliseconds(
+                                      props.reasoningTimeTaken
+                                    )
+                                  })
+                                ),
+                              children: <Markdown message={e.content} />
+                            }
+                          ]}
+                        />
+                      )
+                    }
+                    return <Markdown key={i} message={e.content} />
+                  })}
+                </>
               ) : (
                 <p
                   className={`prose dark:prose-invert whitespace-pre-line prose-p:leading-relaxed prose-pre:p-0 dark:prose-dark ${
@@ -220,8 +255,8 @@
                 }
                 title={t("generationInfo")}>
                 <button
                   aria-label={t("generationInfo")}
                   className="flex items-center justify-center w-6 h-6 rounded-full bg-gray-100 dark:bg-gray-800 hover:bg-gray-200 dark:hover:bg-gray-700 transition-colors duration-200 focus:outline-none focus:ring-2 focus:ring-offset-2 focus:ring-gray-500">
                   <InfoIcon className="w-3 h-3 text-gray-400 group-hover:text-gray-500" />
                 </button>
               </Popover>
@@ -231,7 +266,7 @@
             props.currentMessageIndex === props.totalMessages - 1 && (
               <Tooltip title={t("regenerate")}>
                 <button
                   aria-label={t("regenerate")}
                   onClick={props.onRengerate}
                   className="flex items-center justify-center w-6 h-6 rounded-full bg-gray-100 dark:bg-gray-800 hover:bg-gray-200 dark:hover:bg-gray-700 transition-colors duration-200 focus:outline-none focus:ring-2 focus:ring-offset-2 focus:ring-gray-500">
                   <RotateCcw className="w-3 h-3 text-gray-400 group-hover:text-gray-500" />
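Note: the new rendering path above splits a bot message into reasoning and text segments via parseReasoning (added later in this diff as src/libs/reasoning.ts). A minimal sketch of the data driving the Collapse/Markdown branch; the sample strings are hypothetical:

import { parseReasoning } from "@/libs/reasoning"

// While a <think> tag is still open mid-stream, the segment is marked
// reasoning_running, which is what flips the label to "thinking" above.
parseReasoning("<think>Weighing options")
// -> [{ type: "reasoning", content: "Weighing options", reasoning_running: true }]

parseReasoning("<think>Weighing options</think>Answer: 42")
// -> [
//      { type: "reasoning", content: "Weighing options" },
//      { type: "text", content: "Answer: 42", reasoning_running: false }
//    ]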

@@ -16,21 +16,25 @@ import { SaveButton } from "../SaveButton"
 export const AdvanceOllamaSettings = () => {
   const [form] = Form.useForm()
   const watchUrlRewriteEnabled = Form.useWatch("urlRewriteEnabled", form)
+  const { t } = useTranslation("settings")

   const fetchAdvancedData = async () => {
+    try {
       const [urlRewriteEnabled, rewriteUrl, headers] = await Promise.all([
         isUrlRewriteEnabled(),
         getRewriteUrl(),
         customOllamaHeaders()
       ])
       form.setFieldsValue({ urlRewriteEnabled, rewriteUrl, headers })
+    } catch (e) {
+      console.error(e)
+    }
   }

   React.useEffect(() => {
     fetchAdvancedData()
   }, [])

-  const { t } = useTranslation("settings")
-
   return (
     <Form

@@ -120,20 +120,30 @@ export const Header: React.FC<Props> = ({
   {"/"}
 </span>
 <div className="hidden lg:block">
-  <PageAssistSelect
+  <Select
     className="w-80"
     placeholder={t("common:selectAModel")}
-    loadingText={t("common:selectAModel")}
+    // loadingText={t("common:selectAModel")}
     value={selectedModel}
     onChange={(e) => {
-      setSelectedModel(e.value)
-      localStorage.setItem("selectedModel", e.value)
+      setSelectedModel(e)
+      localStorage.setItem("selectedModel", e)
     }}
-    isLoading={isModelsLoading}
+    filterOption={(input, option) => {
+      //@ts-ignore
+      return (
+        option?.label?.props["data-title"]
+          ?.toLowerCase()
+          ?.indexOf(input.toLowerCase()) >= 0
+      )
+    }}
+    showSearch
+    loading={isModelsLoading}
     options={models?.map((model) => ({
       label: (
         <span
           key={model.model}
+          data-title={model.name}
           className="flex flex-row gap-3 items-center ">
           <ProviderIcons
             provider={model?.provider}
@@ -144,9 +154,10 @@
       ),
       value: model.model
     }))}
-    onRefresh={() => {
-      refetch()
-    }}
+    size="large"
+    // onRefresh={() => {
+    //   refetch()
+    // }}
   />
 </div>
 <div className="lg:hidden">
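Note: antd's Select cannot text-search ReactNode labels on its own, so the diff threads the model name through a data-title prop and reads it back in filterOption. A self-contained sketch of the same idea (component and prop names here are illustrative, not from the PR):

import React from "react"
import { Select } from "antd"

type Model = { model: string; name: string }

export const ModelPicker: React.FC<{ models: Model[] }> = ({ models }) => (
  <Select
    showSearch
    filterOption={(input, option) => {
      // @ts-ignore label is a ReactElement, not a string
      const title: string | undefined = option?.label?.props?.["data-title"]
      return (title ?? "").toLowerCase().includes(input.toLowerCase())
    }}
    options={models.map((m) => ({
      value: m.model,
      // searchable text rides along on the rendered label
      label: <span data-title={m.name}>{m.name}</span>
    }))}
  />
)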

@@ -122,7 +122,6 @@ const generateChatImage = async (messages: Message[]) => {
   })

   canvas.height = totalHeight
-  console.log(totalHeight)

   ctx.fillStyle = "#ffffff"
   ctx.fillRect(0, 0, canvas.width, canvas.height)

@@ -18,7 +18,7 @@ export const KnowledgeSettings = () => {
   const { data, status } = useQuery({
     queryKey: ["fetchAllKnowledge"],
     queryFn: () => getAllKnowledge(),
-    refetchInterval: 1000
+    refetchInterval: 1000,
   })

   const { mutate: deleteKnowledgeMutation, isPending: isDeleting } =

@@ -104,7 +104,6 @@ export const Playground = () => {
   const lastUsedPrompt = await getLastUsedChatSystemPrompt(
     recentChat.history.id
   )
-  console.log("lastUsedPrompt", lastUsedPrompt)
   if (lastUsedPrompt) {
     if (lastUsedPrompt.prompt_id) {
       const prompt = await getPromptById(lastUsedPrompt.prompt_id)

@@ -55,6 +55,8 @@ export const PlaygroundChat = () => {
   }}
   isTTSEnabled={ttsEnabled}
   generationInfo={message?.generationInfo}
+  isStreaming={streaming}
+  reasoningTimeTaken={message?.reasoning_time_taken}
 />
 ))}
 {messages.length > 0 && (

@@ -15,6 +15,7 @@ import { KnowledgeSelect } from "../Knowledge/KnowledgeSelect"
 import { useSpeechRecognition } from "@/hooks/useSpeechRecognition"
 import { PiGlobe } from "react-icons/pi"
 import { handleChatInputKeyDown } from "@/utils/key-down"
+import { getIsSimpleInternetSearch } from "@/services/search"

 type Props = {
   dropedFile: File | undefined
@@ -181,7 +182,8 @@ export const PlaygroundForm = ({ dropedFile }: Props) => {
   }
   if (webSearch) {
     const defaultEM = await defaultEmbeddingModelForRag()
-    if (!defaultEM) {
+    const simpleSearch = await getIsSimpleInternetSearch()
+    if (!defaultEM && !simpleSearch) {
       form.setFieldError("message", t("formError.noEmbeddingModel"))
       return
     }
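Note: the gate above now requires an embedding model only when simple internet search is disabled. The logic is roughly equivalent to this standalone sketch (the helper name and the defaultEmbeddingModelForRag import path are assumptions; getIsSimpleInternetSearch comes from "@/services/search" as imported in the diff):

import { getIsSimpleInternetSearch } from "@/services/search"
import { defaultEmbeddingModelForRag } from "@/services/ollama" // path assumed

// Web search may proceed when an embedding model exists OR simple search is on.
const canRunWebSearch = async (): Promise<boolean> => {
  const [defaultEM, simpleSearch] = await Promise.all([
    defaultEmbeddingModelForRag(),
    getIsSimpleInternetSearch()
  ])
  return Boolean(defaultEM) || simpleSearch
}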

@@ -291,7 +291,7 @@ export const GeneralSettings = () => {
     await browser.storage.local.clear()
     await browser.storage.session.clear()
   } catch (e) {
-    console.log("Error clearing storage:", e)
+    console.error("Error clearing storage:", e)
   }
   }
 }}

@@ -15,9 +15,14 @@ export const SettingsOllama = () => {
   const { status } = useQuery({
     queryKey: ["fetchOllamURL"],
     queryFn: async () => {
-      const [ollamaURL] = await Promise.all([getOllamaURL()])
-      setOllamaURL(ollamaURL)
-      return {}
+      try {
+        const [ollamaURL] = await Promise.all([getOllamaURL()])
+        setOllamaURL(ollamaURL)
+        return {}
+      } catch (e) {
+        console.error(e)
+        return {}
+      }
     }
   })

@@ -46,7 +46,7 @@ export const TTSModeSettings = ({ hideBorder }: { hideBorder?: boolean }) => {
       return { voices, models }
     }
   } catch (e) {
-    console.log(e)
+    console.error(e)
     message.error("Error fetching ElevenLabs data")
   }
   return null

@@ -48,6 +48,8 @@ export const SidePanelBody = () => {
   }}
   isTTSEnabled={ttsEnabled}
   generationInfo={message?.generationInfo}
+  isStreaming={streaming}
+  reasoningTimeTaken={message?.reasoning_time_taken}
 />
 ))}
 <div className="w-full h-48 flex-shrink-0"></div>

@@ -20,6 +20,7 @@ import { ModelSelect } from "@/components/Common/ModelSelect"
 import { useSpeechRecognition } from "@/hooks/useSpeechRecognition"
 import { PiGlobeX, PiGlobe } from "react-icons/pi"
 import { handleChatInputKeyDown } from "@/utils/key-down"
+import { getIsSimpleInternetSearch } from "@/services/search"

 type Props = {
   dropedFile: File | undefined
@@ -99,7 +100,8 @@ export const SidepanelForm = ({ dropedFile }: Props) => {
   }
   if (webSearch) {
     const defaultEM = await defaultEmbeddingModelForRag()
-    if (!defaultEM) {
+    const simpleSearch = await getIsSimpleInternetSearch()
+    if (!defaultEM && !simpleSearch) {
       form.setFieldError("message", t("formError.noEmbeddingModel"))
       return
     }

@@ -1,4 +1,4 @@
-import { useMutation, useQuery } from "@tanstack/react-query"
+import { useMutation, useQuery, useQueryClient } from "@tanstack/react-query"
 import React from "react"
 import {
   getOllamaURL,
@@ -7,7 +7,6 @@ import {
   setOllamaURL as saveOllamaURL,
   setPromptForRag,
   setSystemPromptForNonRag,
-  getAllModels,
   defaultEmbeddingChunkOverlap,
   defaultEmbeddingChunkSize,
   defaultEmbeddingModelForRag,
@@ -54,11 +53,12 @@ export const SettingsBody = () => {
   const [hideCurrentChatModelSettings, setHideCurrentChatModelSettings] =
     useStorage("hideCurrentChatModelSettings", false)

-  const [ speechToTextLanguage, setSpeechToTextLanguage ] = useStorage(
+  const [speechToTextLanguage, setSpeechToTextLanguage] = useStorage(
     "speechToTextLanguage",
     "en-US"
   )
   const { mode, toggleDarkMode } = useDarkMode()
+  const queryClient = useQueryClient()

   const { changeLocale, locale, supportLanguage } = useI18n()
@@ -84,7 +84,6 @@
       defaultEmbeddingModelForRag(),
       getTotalFilePerKB()
     ])

     return {
       url: ollamaURL,
       normalSystemPrompt: systemPrompt,
@@ -106,6 +105,7 @@
     overlap: number
   }) => {
     await saveForRag(f.model, f.chunkSize, f.overlap, data.totalFilePerKB)
+    await queryClient.invalidateQueries({ queryKey: ["sidebarSettings"] })
   }
 })

@@ -32,6 +32,7 @@ type Message = {
   sources?: string[]
   search?: WebSearch
   createdAt: number
+  reasoning_time_taken?: number
   messageType?: string
   generationInfo?: any
 }
@@ -256,7 +257,8 @@ export const saveMessage = async (
   source?: any[],
   time?: number,
   message_type?: string,
-  generationInfo?: any
+  generationInfo?: any,
+  reasoning_time_taken?: number
 ) => {
   const id = generateID()
   let createdAt = Date.now()
@@ -273,7 +275,8 @@
     createdAt,
     sources: source,
     messageType: message_type,
-    generationInfo: generationInfo
+    generationInfo: generationInfo,
+    reasoning_time_taken
   }
   const db = new PageAssitDatabase()
   await db.addMessage(message)
@@ -303,6 +306,7 @@ export const formatToMessage = (messages: MessageHistory): MessageType[] => {
     sources: message?.sources || [],
     images: message.images || [],
     generationInfo: message?.generationInfo,
+    reasoning_time_taken: message?.reasoning_time_taken
   }
 })
 }
@@ -369,7 +373,6 @@ export const deleteChatForEdit = async (history_id: string, index: number) => {
   const db = new PageAssitDatabase()
   const chatHistory = (await db.getChatHistory(history_id)).reverse()
   const previousHistory = chatHistory.slice(0, index + 1)
-  // console.log(previousHistory)
   await db.db.set({ [history_id]: previousHistory.reverse() })
 }

@@ -163,8 +163,8 @@ export const getAllKnowledge = async (status?: string) => {
   if (status) {
     return data
-      .filter((d) => d.db_type === "knowledge")
-      .filter((d) => d.status === status)
+      .filter((d) => d?.db_type === "knowledge")
+      .filter((d) => d?.status === status)
       .map((d) => {
         d.source.forEach((s) => {
           delete s.content
@@ -175,9 +175,9 @@
   }

   return data
-    .filter((d) => d.db_type === "knowledge")
+    .filter((d) => d?.db_type === "knowledge")
     .map((d) => {
-      d.source.forEach((s) => {
+      d?.source.forEach((s) => {
        delete s.content
       })
       return d

@@ -293,7 +293,7 @@ export const getModelInfo = async (id: string) => {
 export const getAllCustomModels = async () => {
   const db = new ModelDb()
   const models = (await db.getAll()).filter(
-    (model) => model.db_type === "openai_model"
+    (model) => model?.db_type === "openai_model"
   )
   const modelsWithProvider = await Promise.all(
     models.map(async (model) => {
@@ -324,7 +324,7 @@ export const deleteAllModelsByProviderId = async (provider_id: string) => {
 export const isLookupExist = async (lookup: string) => {
   const db = new ModelDb()
   const models = await db.getAll()
-  const model = models.find((model) => model.lookup === lookup)
+  const model = models.find((model) => model?.lookup === lookup)
   return model ? true : false
 }
@@ -394,85 +394,90 @@ export const dynamicFetchLlamafile = async ({
 export const ollamaFormatAllCustomModels = async (
   modelType: "all" | "chat" | "embedding" = "all"
 ) => {
+  try {
     const [allModles, allProviders] = await Promise.all([
       getAllCustomModels(),
       getAllOpenAIConfig()
     ])

     const lmstudioProviders = allProviders.filter(
       (provider) => provider.provider === "lmstudio"
     )

     const llamafileProviders = allProviders.filter(
       (provider) => provider.provider === "llamafile"
     )

     const ollamaProviders = allProviders.filter(
       (provider) => provider.provider === "ollama2"
     )

     const lmModelsPromises = lmstudioProviders.map((provider) =>
       dynamicFetchLMStudio({
         baseUrl: provider.baseUrl,
         providerId: provider.id
       })
     )

     const llamafileModelsPromises = llamafileProviders.map((provider) =>
       dynamicFetchLlamafile({
         baseUrl: provider.baseUrl,
         providerId: provider.id
       })
     )

     const ollamaModelsPromises = ollamaProviders.map((provider) =>
       dynamicFetchOllama2({
         baseUrl: provider.baseUrl,
         providerId: provider.id
       }))

     const lmModelsFetch = await Promise.all(lmModelsPromises)
     const llamafileModelsFetch = await Promise.all(llamafileModelsPromises)
     const ollamaModelsFetch = await Promise.all(ollamaModelsPromises)

     const lmModels = lmModelsFetch.flat()
     const llamafileModels = llamafileModelsFetch.flat()
     const ollama2Models = ollamaModelsFetch.flat()

     // merge allModels and lmModels
     const allModlesWithLMStudio = [
       ...(modelType !== "all"
         ? allModles.filter((model) => model.model_type === modelType)
         : allModles),
       ...lmModels,
       ...llamafileModels,
       ...ollama2Models
     ]

     const ollamaModels = allModlesWithLMStudio.map((model) => {
       return {
         name: model.name,
         model: model.id,
         modified_at: "",
         provider:
           allProviders.find((provider) => provider.id === model.provider_id)
             ?.provider || "custom",
         size: 0,
         digest: "",
         details: {
           parent_model: "",
           format: "",
           family: "",
           families: [],
           parameter_size: "",
           quantization_level: ""
         }
       }
     })

     return ollamaModels
+  } catch (e) {
+    console.error(e)
+    return []
+  }
 }
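Note: wrapping the aggregator in try/catch makes the custom-model list fail soft: one unreachable provider now yields an empty list instead of breaking every model dropdown. The pattern, condensed into a hypothetical helper (not part of the PR):

// Hypothetical fail-soft wrapper: any error from the underlying fetch
// is logged and surfaces to callers as an empty list.
const safeList = async <T>(fn: () => Promise<T[]>): Promise<T[]> => {
  try {
    return await fn()
  } catch (e) {
    console.error(e)
    return []
  }
}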

@@ -114,7 +114,7 @@ export const addOpenAICofig = async ({ name, baseUrl, apiKey, provider }: { name
 export const getAllOpenAIConfig = async () => {
   const openaiDb = new OpenAIModelDb()
   const configs = await openaiDb.getAll()
-  return configs.filter(config => config.db_type === "openai")
+  return configs.filter(config => config?.db_type === "openai")
 }

 export const updateOpenAIConfig = async ({ id, name, baseUrl, apiKey }: { id: string, name: string, baseUrl: string, apiKey: string }) => {

@@ -41,7 +41,6 @@ export default defineBackground({
   })
 } else {
   browser.browserAction.onClicked.addListener((tab) => {
-    console.log("browser.browserAction.onClicked.addListener")
     browser.tabs.create({ url: browser.runtime.getURL("/options.html") })
   })
 }

@@ -41,7 +41,6 @@ export default defineBackground({
   })
 } else {
   browser.browserAction.onClicked.addListener((tab) => {
-    console.log("browser.browserAction.onClicked.addListener")
     browser.tabs.create({ url: browser.runtime.getURL("/options.html") })
   })
 }

@@ -130,7 +130,8 @@ export const saveMessageOnSuccess = async ({
   message_source = "web-ui",
   message_type, generationInfo,
   prompt_id,
-  prompt_content
+  prompt_content,
+  reasoning_time_taken = 0
 }: {
   historyId: string | null
   setHistoryId: (historyId: string) => void
@@ -145,6 +146,7 @@
   generationInfo?: any
   prompt_id?: string
   prompt_content?: string
+  reasoning_time_taken?: number
 }) => {
   if (historyId) {
     if (!isRegenerate) {
@@ -157,7 +159,8 @@
         [],
         1,
         message_type,
-        generationInfo
+        generationInfo,
+        reasoning_time_taken
       )
     }
     await saveMessage(
@@ -169,7 +172,8 @@
       source,
       2,
       message_type,
-      generationInfo
+      generationInfo,
+      reasoning_time_taken
     )
     await setLastUsedChatModel(historyId, selectedModel!)
     if (prompt_id || prompt_content) {
@@ -187,7 +191,8 @@
       [],
       1,
       message_type,
-      generationInfo
+      generationInfo,
+      reasoning_time_taken
     )
     await saveMessage(
       newHistoryId.id,
@@ -198,7 +203,8 @@
       source,
       2,
       message_type,
-      generationInfo
+      generationInfo,
+      reasoning_time_taken
     )
     setHistoryId(newHistoryId.id)
     await setLastUsedChatModel(newHistoryId.id, selectedModel!)

@@ -36,6 +36,7 @@ import { humanMessageFormatter } from "@/utils/human-message"
 import { pageAssistEmbeddingModel } from "@/models/embedding"
 import { PAMemoryVectorStore } from "@/libs/PAMemoryVectorStore"
 import { getScreenshotFromCurrentTab } from "@/libs/get-screenshot"
+import { isReasoningEnded, isReasoningStarted, removeReasoning } from "@/libs/reasoning"

 export const useMessage = () => {
   const {
@@ -55,14 +56,9 @@
     setWebSearch,
     isSearchingInternet
   } = useStoreMessageOption()
-  const [defaultInternetSearchOn, ] = useStorage(
-    "defaultInternetSearchOn",
-    false
-  )
+  const [defaultInternetSearchOn] = useStorage("defaultInternetSearchOn", false)

-  const [
-    defaultChatWithWebsite,
-  ] = useStorage("defaultChatWithWebsite", false)
+  const [defaultChatWithWebsite] = useStorage("defaultChatWithWebsite", false)

   const [chatWithWebsiteEmbedding] = useStorage(
     "chatWithWebsiteEmbedding",
@@ -115,10 +111,10 @@
     setIsProcessing(false)
     setStreaming(false)
     currentChatModelSettings.reset()
-    if(defaultInternetSearchOn) {
+    if (defaultInternetSearchOn) {
       setWebSearch(true)
     }
-    if(defaultChatWithWebsite) {
+    if (defaultChatWithWebsite) {
       setChatMode("rag")
     }
   }
@@ -329,6 +325,7 @@
       })
       const response = await questionOllama.invoke(promptForQuestion)
       query = response.content.toString()
+      query = removeReasoning(query)
     }

     let context: string = ""
@@ -405,7 +402,7 @@
           try {
             generationInfo = output?.generations?.[0][0]?.generationInfo
           } catch (e) {
-            console.log("handleLLMEnd error", e)
+            console.error("handleLLMEnd error", e)
           }
         }
       }
@@ -413,18 +410,36 @@
       }
     )
     let count = 0
+    let reasoningStartTime: Date | null = null
+    let reasoningEndTime: Date | null = null
+    let timetaken = 0
     for await (const chunk of chunks) {
       contentToSave += chunk?.content
       fullText += chunk?.content
       if (count === 0) {
         setIsProcessing(true)
       }
+      if (isReasoningStarted(fullText) && !reasoningStartTime) {
+        reasoningStartTime = new Date()
+      }
+
+      if (
+        reasoningStartTime &&
+        !reasoningEndTime &&
+        isReasoningEnded(fullText)
+      ) {
+        reasoningEndTime = new Date()
+        const reasoningTime =
+          reasoningEndTime.getTime() - reasoningStartTime.getTime()
+        timetaken = reasoningTime
+      }
+
       setMessages((prev) => {
         return prev.map((message) => {
           if (message.id === generateMessageId) {
             return {
               ...message,
-              message: fullText + "▋"
+              message: fullText + "▋",
+              reasoning_time_taken: timetaken
             }
           }
           return message
@@ -440,7 +455,8 @@
             ...message,
             message: fullText,
             sources: source,
-            generationInfo
+            generationInfo,
+            reasoning_time_taken: timetaken
           }
         }
         return message
@@ -470,7 +486,8 @@
       fullText,
       source,
       message_source: "copilot",
-      generationInfo
+      generationInfo,
+      reasoning_time_taken: timetaken
     })

     setIsProcessing(false)
@@ -601,11 +618,7 @@
     const applicationChatHistory = []

     const data = await getScreenshotFromCurrentTab()
-    console.log(
-      data?.success
-        ? `[PageAssist] Screenshot is taken`
-        : `[PageAssist] Screenshot is not taken`
-    )
     const visionImage = data?.screenshot || ""

     if (visionImage === "") {
@@ -656,7 +669,7 @@
           try {
             generationInfo = output?.generations?.[0][0]?.generationInfo
           } catch (e) {
-            console.log("handleLLMEnd error", e)
+            console.error("handleLLMEnd error", e)
           }
         }
       }
@@ -664,18 +677,36 @@
       }
     )
     let count = 0
+    let reasoningStartTime: Date | undefined = undefined
+    let reasoningEndTime: Date | undefined = undefined
+    let timetaken = 0
     for await (const chunk of chunks) {
       contentToSave += chunk?.content
       fullText += chunk?.content
       if (count === 0) {
         setIsProcessing(true)
       }
+      if (isReasoningStarted(fullText) && !reasoningStartTime) {
+        reasoningStartTime = new Date()
+      }
+
+      if (
+        reasoningStartTime &&
+        !reasoningEndTime &&
+        isReasoningEnded(fullText)
+      ) {
+        reasoningEndTime = new Date()
+        const reasoningTime =
+          reasoningEndTime.getTime() - reasoningStartTime.getTime()
+        timetaken = reasoningTime
+      }
+
       setMessages((prev) => {
         return prev.map((message) => {
           if (message.id === generateMessageId) {
             return {
               ...message,
-              message: fullText + "▋"
+              message: fullText + "▋",
+              reasoning_time_taken: timetaken
             }
           }
           return message
@@ -689,7 +720,8 @@
           return {
             ...message,
             message: fullText,
-            generationInfo
+            generationInfo,
+            reasoning_time_taken: timetaken
           }
         }
         return message
@@ -718,7 +750,8 @@
       fullText,
       source: [],
       message_source: "copilot",
-      generationInfo
+      generationInfo,
+      reasoning_time_taken: timetaken
     })

     setIsProcessing(false)
@@ -906,7 +939,7 @@
           try {
             generationInfo = output?.generations?.[0][0]?.generationInfo
           } catch (e) {
-            console.log("handleLLMEnd error", e)
+            console.error("handleLLMEnd error", e)
           }
         }
       }
@@ -914,18 +947,37 @@
       }
     )
     let count = 0
+    let reasoningStartTime: Date | null = null
+    let reasoningEndTime: Date | null = null
+    let timetaken = 0
     for await (const chunk of chunks) {
       contentToSave += chunk?.content
       fullText += chunk?.content
       if (count === 0) {
         setIsProcessing(true)
       }
+      if (isReasoningStarted(fullText) && !reasoningStartTime) {
+        reasoningStartTime = new Date()
+      }
+
+      if (
+        reasoningStartTime &&
+        !reasoningEndTime &&
+        isReasoningEnded(fullText)
+      ) {
+        reasoningEndTime = new Date()
+        const reasoningTime =
+          reasoningEndTime.getTime() - reasoningStartTime.getTime()
+        timetaken = reasoningTime
+      }
+
       setMessages((prev) => {
         return prev.map((message) => {
           if (message.id === generateMessageId) {
             return {
               ...message,
-              message: fullText + "▋"
+              message: fullText + "▋",
+              reasoning_time_taken: timetaken
             }
           }
           return message
@@ -940,7 +992,8 @@
           return {
             ...message,
             message: fullText,
-            generationInfo
+            generationInfo,
+            reasoning_time_taken: timetaken
           }
         }
         return message
@@ -970,7 +1023,8 @@
       fullText,
       source: [],
       message_source: "copilot",
-      generationInfo
+      generationInfo,
+      reasoning_time_taken: timetaken
     })

     setIsProcessing(false)
@@ -1158,6 +1212,7 @@
       })
       const response = await questionOllama.invoke(promptForQuestion)
       query = response.content.toString()
+      query = removeReasoning(query)
     }

     const { prompt, source } = await getSystemPromptForWeb(query)
@@ -1213,7 +1268,7 @@
           try {
             generationInfo = output?.generations?.[0][0]?.generationInfo
           } catch (e) {
-            console.log("handleLLMEnd error", e)
+            console.error("handleLLMEnd error", e)
           }
         }
       }
@@ -1221,18 +1276,37 @@
       }
     )
     let count = 0
+    let timetaken = 0
+    let reasoningStartTime: Date | undefined = undefined
+    let reasoningEndTime: Date | undefined = undefined
     for await (const chunk of chunks) {
       contentToSave += chunk?.content
       fullText += chunk?.content
       if (count === 0) {
         setIsProcessing(true)
       }
+      if (isReasoningStarted(fullText) && !reasoningStartTime) {
+        reasoningStartTime = new Date()
+      }
+
+      if (
+        reasoningStartTime &&
+        !reasoningEndTime &&
+        isReasoningEnded(fullText)
+      ) {
+        reasoningEndTime = new Date()
+        const reasoningTime =
+          reasoningEndTime.getTime() - reasoningStartTime.getTime()
+        timetaken = reasoningTime
+      }
+
       setMessages((prev) => {
         return prev.map((message) => {
           if (message.id === generateMessageId) {
             return {
               ...message,
-              message: fullText + "▋"
+              message: fullText + "▋",
+              reasoning_time_taken: timetaken
             }
           }
           return message
@@ -1248,7 +1322,8 @@
             ...message,
             message: fullText,
             sources: source,
-            generationInfo
+            generationInfo,
+            reasoning_time_taken: timetaken
           }
         }
         return message
@@ -1277,7 +1352,8 @@
       image,
       fullText,
       source,
-      generationInfo
+      generationInfo,
+      reasoning_time_taken: timetaken
     })

     setIsProcessing(false)
@@ -1441,25 +1517,43 @@
           try {
             generationInfo = output?.generations?.[0][0]?.generationInfo
           } catch (e) {
-            console.log("handleLLMEnd error", e)
+            console.error("handleLLMEnd error", e)
           }
         }
       }
     ]
   })
   let count = 0
+  let reasoningStartTime: Date | null = null
+  let reasoningEndTime: Date | null = null
+  let timetaken = 0
   for await (const chunk of chunks) {
     contentToSave += chunk?.content
     fullText += chunk?.content
     if (count === 0) {
       setIsProcessing(true)
     }
+    if (isReasoningStarted(fullText) && !reasoningStartTime) {
+      reasoningStartTime = new Date()
+    }
+
+    if (
+      reasoningStartTime &&
+      !reasoningEndTime &&
+      isReasoningEnded(fullText)
+    ) {
+      reasoningEndTime = new Date()
+      const reasoningTime =
+        reasoningEndTime.getTime() - reasoningStartTime.getTime()
+      timetaken = reasoningTime
+    }
+
     setMessages((prev) => {
       return prev.map((message) => {
         if (message.id === generateMessageId) {
           return {
             ...message,
-            message: fullText + "▋"
+            message: fullText + "▋",
+            reasoning_time_taken: timetaken
           }
         }
         return message
@@ -1474,7 +1568,8 @@
         return {
           ...message,
           message: fullText,
-          generationInfo
+          generationInfo,
+          reasoning_time_taken: timetaken
        }
       }
       return message
@@ -1506,7 +1601,8 @@
       source: [],
       message_source: "copilot",
       message_type: messageType,
-      generationInfo
+      generationInfo,
+      reasoning_time_taken: timetaken
     })

     setIsProcessing(false)
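Note: each streaming loop in this hook repeats the same stopwatch: record a start time when isReasoningStarted first matches, compute the elapsed milliseconds once isReasoningEnded fires, and carry the result into reasoning_time_taken. The pattern, factored into a hypothetical helper for clarity (the PR itself inlines it in every loop):

import { isReasoningStarted, isReasoningEnded } from "@/libs/reasoning"

// Hypothetical helper: feed it the accumulated stream text on every chunk;
// it returns the reasoning duration in ms (0 until the close tag arrives).
const createReasoningTimer = () => {
  let start: number | null = null
  let ended = false
  let timeTaken = 0
  return {
    update(fullText: string): number {
      if (start === null && isReasoningStarted(fullText)) start = Date.now()
      if (start !== null && !ended && isReasoningEnded(fullText)) {
        ended = true
        timeTaken = Date.now() - start
      }
      return timeTaken
    }
  }
}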

@@ -37,6 +37,11 @@ import { pageAssistModel } from "@/models"
 import { getNoOfRetrievedDocs } from "@/services/app"
 import { humanMessageFormatter } from "@/utils/human-message"
 import { pageAssistEmbeddingModel } from "@/models/embedding"
+import {
+  isReasoningEnded,
+  isReasoningStarted,
+  removeReasoning
+} from "@/libs/reasoning"

 export const useMessageOption = () => {
   const {
@@ -76,10 +81,7 @@
   } = useStoreMessageOption()
   const currentChatModelSettings = useStoreChatModelSettings()
   const [selectedModel, setSelectedModel] = useStorage("selectedModel")
-  const [defaultInternetSearchOn, ] = useStorage(
-    "defaultInternetSearchOn",
-    false
-  )
+  const [defaultInternetSearchOn] = useStorage("defaultInternetSearchOn", false)
   const [speechToTextLanguage, setSpeechToTextLanguage] = useStorage(
     "speechToTextLanguage",
     "en-US"
@@ -102,7 +104,7 @@
     setStreaming(false)
     currentChatModelSettings.reset()
     textareaRef?.current?.focus()
-    if(defaultInternetSearchOn) {
+    if (defaultInternetSearchOn) {
       setWebSearch(true)
     }
   }
@@ -195,6 +197,7 @@
     setMessages(newMessage)
     let fullText = ""
     let contentToSave = ""
+    let timetaken = 0

     try {
       setIsSearchingInternet(true)
@@ -261,6 +264,7 @@
       })
       const response = await questionOllama.invoke(promptForQuestion)
       query = response.content.toString()
+      query = removeReasoning(query)
     }

     const { prompt, source } = await getSystemPromptForWeb(query)
@@ -317,7 +321,7 @@
           try {
             generationInfo = output?.generations?.[0][0]?.generationInfo
           } catch (e) {
-            console.log("handleLLMEnd error", e)
+            console.error("handleLLMEnd error", e)
           }
         }
       }
@@ -325,18 +329,35 @@
       }
     )
     let count = 0
+    let reasoningStartTime: Date | undefined = undefined
+    let reasoningEndTime: Date | undefined = undefined
     for await (const chunk of chunks) {
       contentToSave += chunk?.content
       fullText += chunk?.content
       if (count === 0) {
         setIsProcessing(true)
       }
+      if (isReasoningStarted(fullText) && !reasoningStartTime) {
+        reasoningStartTime = new Date()
+      }
+
+      if (
+        reasoningStartTime &&
+        !reasoningEndTime &&
+        isReasoningEnded(fullText)
+      ) {
+        reasoningEndTime = new Date()
+        const reasoningTime =
+          reasoningEndTime.getTime() - reasoningStartTime.getTime()
+        timetaken = reasoningTime
+      }
+
       setMessages((prev) => {
         return prev.map((message) => {
           if (message.id === generateMessageId) {
             return {
               ...message,
-              message: fullText + "▋"
+              message: fullText + "▋",
+              reasoning_time_taken: timetaken
             }
           }
           return message
@@ -352,7 +373,8 @@
             ...message,
             message: fullText,
             sources: source,
-            generationInfo
+            generationInfo,
+            reasoning_time_taken: timetaken
           }
         }
         return message
@@ -381,7 +403,8 @@
       image,
       fullText,
       source,
-      generationInfo
+      generationInfo,
+      reasoning_time_taken: timetaken
     })

     setIsProcessing(false)
@@ -537,6 +560,7 @@
     setMessages(newMessage)
     let fullText = ""
     let contentToSave = ""
+    let timetaken = 0

     try {
       const prompt = await systemPromptForNonRagOption()
@@ -613,7 +637,7 @@
           try {
             generationInfo = output?.generations?.[0][0]?.generationInfo
           } catch (e) {
-            console.log("handleLLMEnd error", e)
+            console.error("handleLLMEnd error", e)
           }
         }
       }
@@ -622,9 +646,28 @@
     )

     let count = 0
+    let reasoningStartTime: Date | null = null
+    let reasoningEndTime: Date | null = null
     for await (const chunk of chunks) {
       contentToSave += chunk?.content
       fullText += chunk?.content
+      if (isReasoningStarted(fullText) && !reasoningStartTime) {
+        reasoningStartTime = new Date()
+      }
+
+      if (
+        reasoningStartTime &&
+        !reasoningEndTime &&
+        isReasoningEnded(fullText)
+      ) {
+        reasoningEndTime = new Date()
+        const reasoningTime =
+          reasoningEndTime.getTime() - reasoningStartTime.getTime()
+        timetaken = reasoningTime
+      }
+
       if (count === 0) {
         setIsProcessing(true)
       }
@@ -633,7 +676,8 @@
         if (message.id === generateMessageId) {
           return {
             ...message,
-            message: fullText + "▋"
+            message: fullText + "▋",
+            reasoning_time_taken: timetaken
           }
         }
         return message
@@ -648,7 +692,8 @@
         return {
           ...message,
           message: fullText,
-          generationInfo
+          generationInfo,
+          reasoning_time_taken: timetaken
         }
       }
       return message
@@ -679,7 +724,8 @@
       source: [],
       generationInfo,
       prompt_content: promptContent,
-      prompt_id: promptId
+      prompt_id: promptId,
+      reasoning_time_taken: timetaken
     })

     setIsProcessing(false)
@@ -818,7 +864,7 @@
       knownledge_id: selectedKnowledge.id
     }
   )
+  let timetaken = 0
   try {
     let query = message
     const { ragPrompt: systemPrompt, ragQuestionPrompt: questionPrompt } =
@@ -882,6 +928,7 @@
       })
       const response = await questionOllama.invoke(promptForQuestion)
       query = response.content.toString()
+      query = removeReasoning(query)
     }

     const docSize = await getNoOfRetrievedDocs()
@@ -925,7 +972,7 @@
           try {
             generationInfo = output?.generations?.[0][0]?.generationInfo
           } catch (e) {
-            console.log("handleLLMEnd error", e)
+            console.error("handleLLMEnd error", e)
           }
         }
       }
@@ -933,18 +980,36 @@
       }
     )
     let count = 0
+    let reasoningStartTime: Date | undefined = undefined
+    let reasoningEndTime: Date | undefined = undefined
     for await (const chunk of chunks) {
       contentToSave += chunk?.content
       fullText += chunk?.content
       if (count === 0) {
         setIsProcessing(true)
       }
+      if (isReasoningStarted(fullText) && !reasoningStartTime) {
+        reasoningStartTime = new Date()
+      }
+
+      if (
+        reasoningStartTime &&
+        !reasoningEndTime &&
+        isReasoningEnded(fullText)
+      ) {
+        reasoningEndTime = new Date()
+        const reasoningTime =
+          reasoningEndTime.getTime() - reasoningStartTime.getTime()
+        timetaken = reasoningTime
+      }
+
       setMessages((prev) => {
         return prev.map((message) => {
           if (message.id === generateMessageId) {
             return {
               ...message,
-              message: fullText + "▋"
+              message: fullText + "▋",
+              reasoning_time_taken: timetaken
             }
           }
           return message
@@ -960,7 +1025,8 @@
             ...message,
             message: fullText,
             sources: source,
-            generationInfo
+            generationInfo,
+            reasoning_time_taken: timetaken
           }
         }
         return message
@@ -989,7 +1055,8 @@
       image,
       fullText,
       source,
-      generationInfo
+      generationInfo,
+      reasoning_time_taken: timetaken
     })

     setIsProcessing(false)
@@ -1206,6 +1273,6 @@
     setTemporaryChat,
     useOCR,
     setUseOCR,
-    defaultInternetSearchOn,
+    defaultInternetSearchOn
   }
 }

@@ -85,7 +85,6 @@ export class PageAssistVectorStore extends VectorStore {
       metadata: documents[idx].metadata,
       file_id: this.file_id
     }))
-    console.log(`vector:${this.knownledge_id}`)
     await insertVector(`vector:${this.knownledge_id}`, memoryVectors)
   }
@@ -118,7 +117,6 @@
     const data = await getVector(`vector:${this.knownledge_id}`)
     const pgVector = [...data.vectors]
     const filteredMemoryVectors = pgVector.filter(filterFunction)
-    console.log(filteredMemoryVectors)
     const searches = filteredMemoryVectors
       .map((vector, index) => ({
         similarity: this.similarity(query, vector.embedding),
@@ -126,7 +124,6 @@
       }))
       .sort((a, b) => (a.similarity > b.similarity ? -1 : 0))
       .slice(0, k)
-    console.log(searches)
     const result: [Document, number][] = searches.map((search) => [
       new Document({
         metadata: filteredMemoryVectors[search.index].metadata,

@@ -52,7 +52,7 @@ export const getDataFromCurrentTab = async () => {
     resolve(data[0].result)
   }
 } catch (e) {
-  console.log("error", e)
+  console.error("error", e)
   // this is a weird method but it works
   if (import.meta.env.BROWSER === "firefox") {
     // all I need is to get the pdf url but somehow

@@ -41,9 +41,9 @@ export const getAllOpenAIModels = async (baseUrl: string, apiKey?: string) => {
   return data.data
 } catch (e) {
   if (e instanceof DOMException && e.name === 'AbortError') {
-    console.log('Request timed out')
+    console.error('Request timed out')
   } else {
-    console.log(e)
+    console.error(e)
   }
   return []
 }

src/libs/reasoning.ts (new file, 73 lines)

@@ -0,0 +1,73 @@
const tags = ["think", "reason", "reasoning", "thought"];
export function parseReasoning(text: string): { type: 'reasoning' | 'text', content: string, reasoning_running?: boolean }[] {
try {
const result: { type: 'reasoning' | 'text', content: string, reasoning_running?: boolean }[] = []
const tagPattern = new RegExp(`<(${tags.join('|')})>`, 'i')
const closeTagPattern = new RegExp(`</(${tags.join('|')})>`, 'i')
let currentIndex = 0
let isReasoning = false
while (currentIndex < text.length) {
const openTagMatch = text.slice(currentIndex).match(tagPattern)
const closeTagMatch = text.slice(currentIndex).match(closeTagPattern)
if (!isReasoning && openTagMatch) {
const beforeText = text.slice(currentIndex, currentIndex + openTagMatch.index)
if (beforeText.trim()) {
result.push({ type: 'text', content: beforeText.trim() })
}
isReasoning = true
currentIndex += openTagMatch.index! + openTagMatch[0].length
continue
}
if (isReasoning && closeTagMatch) {
const reasoningContent = text.slice(currentIndex, currentIndex + closeTagMatch.index)
if (reasoningContent.trim()) {
result.push({ type: 'reasoning', content: reasoningContent.trim() })
}
isReasoning = false
currentIndex += closeTagMatch.index! + closeTagMatch[0].length
continue
}
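// No further tags: emit the tail. An unclosed tag means the reasoning is
// still streaming, so the segment is flagged as running.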
if (currentIndex < text.length) {
const remainingText = text.slice(currentIndex)
result.push({
type: isReasoning ? 'reasoning' : 'text',
content: remainingText.trim(),
reasoning_running: isReasoning
})
break
}
}
return result
} catch (e) {
console.error(`Error parsing reasoning: ${e}`)
return [
{
type: 'text',
content: text
}
]
}
}
export function isReasoningStarted(text: string): boolean {
const tagPattern = new RegExp(`<(${tags.join('|')})>`, 'i')
return tagPattern.test(text)
}
export function isReasoningEnded(text: string): boolean {
const closeTagPattern = new RegExp(`</(${tags.join('|')})>`, 'i')
return closeTagPattern.test(text)
}
export function removeReasoning(text: string): string {
const tagPattern = new RegExp(`<(${tags.join('|')})>.*?</(${tags.join('|')})>`, 'gis')
return text.replace(tagPattern, '').trim()
}
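
Usage, for illustration: parseReasoning splits a response into alternating segments, and reasoning_running marks a block whose closing tag has not arrived yet (the streaming case).

import { parseReasoning } from "@/libs/reasoning"

parseReasoning("<think>Check both cases.</think>The answer is 42.")
// => [
//      { type: "reasoning", content: "Check both cases." },
//      { type: "text", content: "The answer is 42.", reasoning_running: false }
//    ]

parseReasoning("<think>Still working on it")
// => [{ type: "reasoning", content: "Still working on it", reasoning_running: true }]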

View File

@ -7,14 +7,12 @@ export const sendNotification = async (title: string, message: string) => {
"sendNotificationAfterIndexing" "sendNotificationAfterIndexing"
) )
if (sendNotificationAfterIndexing) { if (sendNotificationAfterIndexing) {
console.log("Sending notification")
browser.notifications.create({ browser.notifications.create({
type: "basic", type: "basic",
iconUrl: browser.runtime.getURL("/icon/128.png"), iconUrl: browser.runtime.getURL("/icon/128.png"),
title, title,
message message
}) })
console.log("Notification sent")
} }
} catch (error) { } catch (error) {
console.error(error) console.error(error)

View File

@ -89,7 +89,6 @@ export class PageAssistHtmlLoader
await urlRewriteRuntime(this.url, "web") await urlRewriteRuntime(this.url, "web")
let text = ""; let text = "";
if (isWikipedia(this.url)) { if (isWikipedia(this.url)) {
console.log("Wikipedia URL detected")
const fetchHTML = await fetch(this.url) const fetchHTML = await fetch(this.url)
text = parseWikipedia(await fetchHTML.text()) text = parseWikipedia(await fetchHTML.text())
} else { } else {

View File

@ -108,7 +108,7 @@ export class ChatChromeAI extends SimpleChatModel<ChromeAICallOptions> {
*/ */
destroy() { destroy() {
if (!this.session) { if (!this.session) {
return console.log("No session found. Returning.") return console.error("No session found. Returning.")
} }
this.session.destroy() this.session.destroy()
} }

View File

@ -88,7 +88,6 @@ export const pageAssistModel = async ({
} }
}) as any }) as any
} }
console.log('useMlock', useMlock)
return new ChatOllama({ return new ChatOllama({
baseUrl, baseUrl,
keepAlive, keepAlive,

View File

@ -184,7 +184,6 @@ export const fetchChatModels = async ({
try { try {
const models = await getAllModels({ returnEmpty }) const models = await getAllModels({ returnEmpty })
const chatModels = models const chatModels = models
?.filter((model) => { ?.filter((model) => {
return ( return (

View File

@ -6,7 +6,7 @@ const storage2 = new Storage({
}) })
const TOTAL_SEARCH_RESULTS = 2 const TOTAL_SEARCH_RESULTS = 2
const DEFAULT_PROVIDER = "google" const DEFAULT_PROVIDER = "duckduckgo"
const AVAILABLE_PROVIDERS = ["google", "duckduckgo"] as const const AVAILABLE_PROVIDERS = ["google", "duckduckgo"] as const

View File

@ -3,6 +3,7 @@ import { Storage } from "@plasmohq/storage"
import { getOllamaURL } from "./ollama" import { getOllamaURL } from "./ollama"
import { cleanUrl } from "@/libs/clean-url" import { cleanUrl } from "@/libs/clean-url"
import { HumanMessage } from "langchain/schema" import { HumanMessage } from "langchain/schema"
import { removeReasoning } from "@/libs/reasoning"
const storage = new Storage() const storage = new Storage()
// this prompt is copied from the OpenWebUI codebase // this prompt is copied from the OpenWebUI codebase
@ -64,9 +65,9 @@ export const generateTitle = async (model: string, query: string, fallBackTitle:
}) })
]) ])
return title.content.toString() return removeReasoning(title.content.toString())
} catch (error) { } catch (error) {
console.log(`Error generating title: ${error}`) console.error(`Error generating title: ${error}`)
return fallBackTitle return fallBackTitle
} }
} }
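
Reasoning models wrap their chain of thought in tags, so without the new removeReasoning call a generated title could leak the entire thought block into the chat list. For example (illustrative input):

import { removeReasoning } from "@/libs/reasoning"

removeReasoning("<think>The user asked about durations.</think>Formatting durations with dayjs")
// => "Formatting durations with dayjs"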

View File

@ -17,6 +17,7 @@ export type Message = {
sources: any[] sources: any[]
images?: string[] images?: string[]
search?: WebSearch search?: WebSearch
reasoning_time_taken?: number
id?: string id?: string
messageType?: string messageType?: string
} }

View File

@ -17,4 +17,5 @@ export type Message = {
messageType?: string messageType?: string
id?: string id?: string
generationInfo?: any generationInfo?: any
reasoning_time_taken?: number
} }

View File

@ -0,0 +1,30 @@
import dayjs from 'dayjs'
import duration from 'dayjs/plugin/duration'
dayjs.extend(duration)
export const humanizeMilliseconds = (milliseconds: number): string => {
try {
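// Note: this local constant shadows the imported dayjs plugin name; the plugin
// itself was already registered by dayjs.extend(duration) above.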
const duration = dayjs.duration(milliseconds)
if (milliseconds < 1000) {
return `${milliseconds}ms`
}
if (milliseconds < 60000) {
return `${Math.floor(duration.asSeconds())}s`
}
if (milliseconds < 3600000) {
return `${Math.floor(duration.asMinutes())}m`
}
if (milliseconds < 86400000) {
return `${Math.floor(duration.asHours())}h`
}
return `${Math.floor(duration.asDays())}d`
} catch (e) {
return `${milliseconds}ms`
}
}
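
The helper floors to the largest whole unit, so boundary values behave as follows (worked examples, not part of the source):

humanizeMilliseconds(950)         // => "950ms"
humanizeMilliseconds(42_000)      // => "42s"
humanizeMilliseconds(90_000)      // => "1m"  (1.5 minutes, floored)
humanizeMilliseconds(5_400_000)   // => "1h"  (1.5 hours, floored)
humanizeMilliseconds(172_800_000) // => "2d"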

View File

@ -34,7 +34,6 @@ export const rerankDocs = async ({
} }
}) })
console.log("similarity", similarity)
const sortedDocs = similarity const sortedDocs = similarity
.sort((a, b) => b.similarity - a.similarity) .sort((a, b) => b.similarity - a.similarity)
.filter((sim) => sim.similarity > 0.5) .filter((sim) => sim.similarity > 0.5)
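
So rerankDocs orders the scored documents by descending similarity and drops anything at or below 0.5. Condensed, with types simplified (the scoring itself happens in the hunk above):

import { Document } from "@langchain/core/documents"

type Scored = { document: Document; similarity: number }

const keepRelevant = (scored: Scored[]) =>
  scored
    .sort((a, b) => b.similarity - a.similarity)
    .filter((sim) => sim.similarity > 0.5) // hard relevance cutoff
    .map((sim) => sim.document)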

View File

@ -17,7 +17,6 @@ export const getPageAssistTextSplitter = async () => {
switch (splittingStrategy) { switch (splittingStrategy) {
case "CharacterTextSplitter": case "CharacterTextSplitter":
console.log("Using CharacterTextSplitter")
const splittingSeparator = await defaultSsplttingSeparator() const splittingSeparator = await defaultSsplttingSeparator()
const processedSeparator = splittingSeparator const processedSeparator = splittingSeparator
.replace(/\\n/g, "\n") .replace(/\\n/g, "\n")
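
The separator comes out of settings storage as an escaped string, so a user-entered "\n" is the two characters backslash-n until this normalization turns it into a real newline for CharacterTextSplitter:

const splittingSeparator = "\\n\\n" // as stored: four characters, two escaped sequences
const processedSeparator = splittingSeparator.replace(/\\n/g, "\n")
// processedSeparator is now "\n\n" — a blank-line separator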

View File

@ -42,7 +42,6 @@ export const localBraveSearch = async (query: string) => {
return { title, link, content } return { title, link, content }
}).filter((result) => result.link && result.title && result.content) }).filter((result) => result.link && result.title && result.content)
console.log(searchResults)
return searchResults return searchResults
} }

View File

@ -28,12 +28,23 @@ export const localGoogleSearch = async (query: string) => {
const htmlString = await fetch( const htmlString = await fetch(
`https://www.${baseGoogleDomain}/search?hl=en&q=` + query, `https://www.${baseGoogleDomain}/search?hl=en&q=` + query,
{ {
signal: abortController.signal signal: abortController.signal,
headers: {
"User-Agent": navigator.userAgent,
"Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8",
"Accept-Language": "en-US,en;q=0.5",
"Accept-Encoding": "gzip, deflate, br",
"DNT": "1",
"Connection": "keep-alive",
"Upgrade-Insecure-Requests": "1",
"Sec-Fetch-Dest": "document",
"Sec-Fetch-Mode": "navigate",
"Sec-Fetch-Site": "none",
"Sec-Fetch-User": "?1"
}
} }
) ).then((response) => response.text())
.then((response) => response.text())
.catch() .catch()
const parser = new DOMParser() const parser = new DOMParser()
const doc = parser.parseFromString(htmlString, "text/html") const doc = parser.parseFromString(htmlString, "text/html")
@ -89,9 +100,9 @@ export const webGoogleSearch = async (query: string) => {
baseUrl: cleanUrl(ollamaUrl) baseUrl: cleanUrl(ollamaUrl)
}) })
const textSplitter = await getPageAssistTextSplitter() const textSplitter = await getPageAssistTextSplitter()
const chunks = await textSplitter.splitDocuments(docs) const chunks = await textSplitter.splitDocuments(docs)
const store = new MemoryVectorStore(ollamaEmbedding) const store = new MemoryVectorStore(ollamaEmbedding)
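
The tail of webGoogleSearch is a standard retrieval pipeline: split the fetched pages into chunks, embed them into an in-memory store, and query for the most relevant pieces. A sketch of the remaining steps, assuming LangChain's MemoryVectorStore API and the chunks/embedding from the hunk above:

import { MemoryVectorStore } from "langchain/vectorstores/memory"

const store = new MemoryVectorStore(ollamaEmbedding)
await store.addDocuments(chunks)
// k = 2 here mirrors the TOTAL_SEARCH_RESULTS default shown earlier.
const relevant = await store.similaritySearch(query, 2)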

View File

@ -51,7 +51,7 @@ export default defineConfig({
outDir: "build", outDir: "build",
manifest: { manifest: {
version: "1.4.3", version: "1.4.4",
name: name:
process.env.TARGET === "firefox" process.env.TARGET === "firefox"
? "Page Assist - A Web UI for Local AI Models" ? "Page Assist - A Web UI for Local AI Models"
@ -91,7 +91,7 @@ export default defineConfig({
{ {
extension_pages: extension_pages:
"script-src 'self' 'wasm-unsafe-eval'; object-src 'self';" "script-src 'self' 'wasm-unsafe-eval'; object-src 'self';"
} : undefined, } : "script-src 'self' 'wasm-unsafe-eval' blob:; object-src 'self'; worker-src 'self' blob:;",
permissions: permissions:
process.env.TARGET === "firefox" process.env.TARGET === "firefox"
? firefoxMV2Permissions ? firefoxMV2Permissions
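
The CSP change above swaps undefined for an explicit policy on Chromium builds, adding blob: to script-src plus a worker-src directive — what blob-spawned web workers (as used by libraries such as tesseract.js) require. The resulting manifest field, reconstructed from the hunk; the ternary condition is inferred from the surrounding config, not shown in the diff:

content_security_policy:
  process.env.TARGET === "firefox"
    ? {
        extension_pages:
          "script-src 'self' 'wasm-unsafe-eval'; object-src 'self';"
      }
    : "script-src 'self' 'wasm-unsafe-eval' blob:; object-src 'self'; worker-src 'self' blob:;",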