feat: Add reasoning UI

This commit is contained in:
n4ze3m 2025-01-24 22:29:18 +05:30
parent b39d60fc3c
commit 97daaf9dc2
31 changed files with 461 additions and 72 deletions

BIN
bun.lockb

Binary file not shown.

View File

@ -58,6 +58,7 @@
"remark-math": "5.1.1",
"tesseract.js": "^5.1.1",
"turndown": "^7.1.3",
"unist-util-visit": "^5.0.0",
"yt-transcript": "^0.0.2",
"zustand": "^4.5.0"
},

View File

@ -125,5 +125,9 @@
"pin": "تثبيت",
"unpin": "إلغاء التثبيت",
"generationInfo": "معلومات التوليد",
"sidebarChat": "دردشة الشريط الجانبي"
"sidebarChat": "دردشة الشريط الجانبي",
"reasoning": {
"thinking": "جاري التفكير....",
"thought": "فكر لمدة {{time}}"
}
}

View File

@ -118,5 +118,9 @@
"pin": "Fastgør",
"unpin": "Frigør",
"generationInfo": "Genererings Info",
"sidebarChat": "Sidepanel Chat"
"sidebarChat": "Sidepanel Chat",
"reasoning": {
"thinking": "Tænker....",
"thought": "Tænkte i {{time}}"
}
}

View File

@ -118,5 +118,9 @@
"pin": "Anheften",
"unpin": "Losheften",
"generationInfo": "Generierungsinformationen",
"sidebarChat": "Seitenleisten-Chat"
"sidebarChat": "Seitenleisten-Chat",
"reasoning": {
"thinking": "Denke nach....",
"thought": "Gedanke für {{time}}"
}
}

View File

@ -152,5 +152,9 @@
"pin": "Pin",
"unpin": "Unpin",
"generationInfo": "Generation Info",
"sidebarChat": "Sidebar Chat"
"sidebarChat": "Sidebar Chat",
"reasoning": {
"thinking": "Thinking....",
"thought": "Thought for {{time}}"
}
}

View File

@ -117,5 +117,9 @@
"pin": "Fijar",
"unpin": "Desfijar",
"generationInfo": "Información de Generación",
"sidebarChat": "Chat lateral"
"sidebarChat": "Chat lateral",
"reasoning": {
"thinking": "Pensando....",
"thought": "Pensamiento por {{time}}"
}
}

View File

@ -111,5 +111,9 @@
"pin": "پین کردن",
"unpin": "حذف پین",
"generationInfo": "اطلاعات تولید",
"sidebarChat": "چت کناری"
"sidebarChat": "چت کناری",
"reasoning": {
"thinking": "در حال فکر کردن....",
"thought": "فکر کردن برای {{time}}"
}
}

View File

@ -117,5 +117,9 @@
"pin": "Épingler",
"unpin": "Désépingler",
"generationInfo": "Informations de génération",
"sidebarChat": "Chat latéral"
"sidebarChat": "Chat latéral",
"reasoning": {
"thinking": "Réflexion....",
"thought": "Réflexion pendant {{time}}"
}
}

View File

@ -117,5 +117,9 @@
"pin": "Fissa",
"unpin": "Rimuovi",
"generationInfo": "Informazioni sulla Generazione",
"sidebarChat": "Chat Laterale"
"sidebarChat": "Chat Laterale",
"reasoning": {
"thinking": "Pensando....",
"thought": "Pensato per {{time}}"
}
}

View File

@ -117,5 +117,9 @@
"pin": "固定",
"unpin": "固定解除",
"generationInfo": "生成情報",
"sidebarChat": "サイドバーチャット"
"sidebarChat": "サイドバーチャット",
"reasoning": {
"thinking": "考え中....",
"thought": "{{time}}の思考"
}
}

View File

@ -117,5 +117,9 @@
"pin": "고정",
"unpin": "고정 해제",
"generationInfo": "생성 정보",
"sidebarChat": "사이드바 채팅"
"sidebarChat": "사이드바 채팅",
"reasoning": {
"thinking": "생각 중....",
"thought": "{{time}} 동안 생각함"
}
}

View File

@ -116,6 +116,9 @@
"pin": "പിൻ ചെയ്യുക",
"unpin": "അൺപിൻ ചെയ്യുക",
"generationInfo": "ജനറേഷൻ വിവരങ്ങൾ",
"sidebarChat": "സൈഡ്ബാർ ചാറ്റ്"
"sidebarChat": "സൈഡ്ബാർ ചാറ്റ്",
"reasoning": {
"thinking": "ചിന്തിക്കുന്നു....",
"thought": "{{time}} നേരത്തെ ചിന്ത"
}
}

View File

@ -118,5 +118,9 @@
"pin": "Fest",
"unpin": "Løsne",
"generationInfo": "Generasjonsinformasjon",
"sidebarChat": "Sidepanel-chat"
"sidebarChat": "Sidepanel-chat",
"reasoning": {
"thinking": "Tenker....",
"thought": "Tenkte i {{time}}"
}
}

View File

@ -117,5 +117,9 @@
"pin": "Fixar",
"unpin": "Desafixar",
"generationInfo": "Informações de Geração",
"sidebarChat": "Chat Lateral"
"sidebarChat": "Chat Lateral",
"reasoning": {
"thinking": "Pensando....",
"thought": "Pensou por {{time}}"
}
}

View File

@ -117,5 +117,9 @@
"pin": "Закрепить",
"unpin": "Открепить",
"generationInfo": "Информация о генерации",
"sidebarChat": "Боковой чат"
"sidebarChat": "Боковой чат",
"reasoning": {
"thinking": "Размышляю...",
"thought": "Размышлял {{time}}"
}
}

View File

@ -122,5 +122,9 @@
"pin": "Fäst",
"unpin": "Ta bort fäst",
"generationInfo": "Generationsinformation",
"sidebarChat": "Sidofältschatt"
"sidebarChat": "Sidofältschatt",
"reasoning": {
"thinking": "Tänker....",
"thought": "Tänkte i {{time}}"
}
}

View File

@ -122,5 +122,9 @@
"pin": "Прикріпити",
"unpin": "Відкріпити",
"generationInfo": "Інформація про генерацію",
"sidebarChat": "Бічний чат"
"sidebarChat": "Бічний чат",
"reasoning": {
"thinking": "Думаю....",
"thought": "Думав протягом {{time}}"
}
}

View File

@ -117,5 +117,9 @@
"pin": "置顶",
"unpin": "取消置顶",
"generationInfo": "生成信息",
"sidebarChat": "侧边栏聊天"
"sidebarChat": "侧边栏聊天",
"reasoning": {
"thinking": "思考中....",
"thought": "思考了 {{time}}"
}
}

View File

@ -18,7 +18,8 @@ import { useTTS } from "@/hooks/useTTS"
import { tagColors } from "@/utils/color"
import { removeModelSuffix } from "@/db/models"
import { GenerationInfo } from "./GenerationInfo"
import { parseReasoning } from "@/libs/reasoning"
import { humanizeMilliseconds } from "@/utils/humanize-miliseconds"
type Props = {
message: string
message_type?: string
@ -40,6 +41,8 @@ type Props = {
onSourceClick?: (source: any) => void
isTTSEnabled?: boolean
generationInfo?: any
isStreaming: boolean
reasoningTimeTaken?: number
}
export const PlaygroundMessage = (props: Props) => {
@ -48,7 +51,6 @@ export const PlaygroundMessage = (props: Props) => {
const { t } = useTranslation("common")
const { cancel, isSpeaking, speak } = useTTS()
return (
<div className="group w-full text-gray-800 dark:text-gray-100">
<div className="text-base md:max-w-2xl lg:max-w-xl xl:max-w-3xl flex lg:px-0 m-auto w-full">
@ -94,7 +96,40 @@ export const PlaygroundMessage = (props: Props) => {
<div className="flex flex-grow flex-col">
{!editMode ? (
props.isBot ? (
<Markdown message={props.message} />
<>
{parseReasoning(props.message).map((e, i) => {
if (e.type === "reasoning") {
return (
<Collapse
key={i}
className="border-none !mb-3"
items={[
{
key: "reasoning",
label:
props.isStreaming && e?.reasoning_running ? (
<div className="flex items-center gap-2">
<span className="italic">
{t("reasoning.thinking")}
</span>
</div>
) : (
t("reasoning.thought", {
time: humanizeMilliseconds(
props.reasoningTimeTaken
)
})
),
children: <Markdown message={e.content} />
}
]}
/>
)
}
return <Markdown key={i} message={e.content} />
})}
</>
) : (
<p
className={`prose dark:prose-invert whitespace-pre-line prose-p:leading-relaxed prose-pre:p-0 dark:prose-dark ${
@ -220,8 +255,8 @@ export const PlaygroundMessage = (props: Props) => {
}
title={t("generationInfo")}>
<button
aria-label={t("generationInfo")}
className="flex items-center justify-center w-6 h-6 rounded-full bg-gray-100 dark:bg-gray-800 hover:bg-gray-200 dark:hover:bg-gray-700 transition-colors duration-200 focus:outline-none focus:ring-2 focus:ring-offset-2 focus:ring-gray-500">
aria-label={t("generationInfo")}
className="flex items-center justify-center w-6 h-6 rounded-full bg-gray-100 dark:bg-gray-800 hover:bg-gray-200 dark:hover:bg-gray-700 transition-colors duration-200 focus:outline-none focus:ring-2 focus:ring-offset-2 focus:ring-gray-500">
<InfoIcon className="w-3 h-3 text-gray-400 group-hover:text-gray-500" />
</button>
</Popover>
@ -231,7 +266,7 @@ export const PlaygroundMessage = (props: Props) => {
props.currentMessageIndex === props.totalMessages - 1 && (
<Tooltip title={t("regenerate")}>
<button
aria-label={t("regenerate")}
aria-label={t("regenerate")}
onClick={props.onRengerate}
className="flex items-center justify-center w-6 h-6 rounded-full bg-gray-100 dark:bg-gray-800 hover:bg-gray-200 dark:hover:bg-gray-700 transition-colors duration-200 focus:outline-none focus:ring-2 focus:ring-offset-2 focus:ring-gray-500">
<RotateCcw className="w-3 h-3 text-gray-400 group-hover:text-gray-500" />

View File

@ -55,6 +55,8 @@ export const PlaygroundChat = () => {
}}
isTTSEnabled={ttsEnabled}
generationInfo={message?.generationInfo}
isStreaming={streaming}
reasoningTimeTaken={message?.reasoning_time_taken}
/>
))}
{messages.length > 0 && (

View File

@ -48,6 +48,8 @@ export const SidePanelBody = () => {
}}
isTTSEnabled={ttsEnabled}
generationInfo={message?.generationInfo}
isStreaming={streaming}
reasoningTimeTaken={message?.reasoning_time_taken}
/>
))}
<div className="w-full h-48 flex-shrink-0"></div>

View File

@ -32,6 +32,7 @@ type Message = {
sources?: string[]
search?: WebSearch
createdAt: number
reasoning_time_taken?: number
messageType?: string
generationInfo?: any
}
@ -256,7 +257,8 @@ export const saveMessage = async (
source?: any[],
time?: number,
message_type?: string,
generationInfo?: any
generationInfo?: any,
reasoning_time_taken?: number
) => {
const id = generateID()
let createdAt = Date.now()
@ -273,7 +275,8 @@ export const saveMessage = async (
createdAt,
sources: source,
messageType: message_type,
generationInfo: generationInfo
generationInfo: generationInfo,
reasoning_time_taken
}
const db = new PageAssitDatabase()
await db.addMessage(message)
@ -303,6 +306,7 @@ export const formatToMessage = (messages: MessageHistory): MessageType[] => {
sources: message?.sources || [],
images: message.images || [],
generationInfo: message?.generationInfo,
reasoning_time_taken: message?.reasoning_time_taken
}
})
}

View File

@ -130,7 +130,8 @@ export const saveMessageOnSuccess = async ({
message_source = "web-ui",
message_type, generationInfo,
prompt_id,
prompt_content
prompt_content,
reasoning_time_taken = 0
}: {
historyId: string | null
setHistoryId: (historyId: string) => void
@ -145,6 +146,7 @@ export const saveMessageOnSuccess = async ({
generationInfo?: any
prompt_id?: string
prompt_content?: string
reasoning_time_taken?: number
}) => {
if (historyId) {
if (!isRegenerate) {
@ -157,7 +159,8 @@ export const saveMessageOnSuccess = async ({
[],
1,
message_type,
generationInfo
generationInfo,
reasoning_time_taken
)
}
await saveMessage(
@ -169,7 +172,8 @@ export const saveMessageOnSuccess = async ({
source,
2,
message_type,
generationInfo
generationInfo,
reasoning_time_taken
)
await setLastUsedChatModel(historyId, selectedModel!)
if (prompt_id || prompt_content) {
@ -187,7 +191,8 @@ export const saveMessageOnSuccess = async ({
[],
1,
message_type,
generationInfo
generationInfo,
reasoning_time_taken
)
await saveMessage(
newHistoryId.id,
@ -198,7 +203,8 @@ export const saveMessageOnSuccess = async ({
source,
2,
message_type,
generationInfo
generationInfo,
reasoning_time_taken
)
setHistoryId(newHistoryId.id)
await setLastUsedChatModel(newHistoryId.id, selectedModel!)

View File

@ -36,6 +36,7 @@ import { humanMessageFormatter } from "@/utils/human-message"
import { pageAssistEmbeddingModel } from "@/models/embedding"
import { PAMemoryVectorStore } from "@/libs/PAMemoryVectorStore"
import { getScreenshotFromCurrentTab } from "@/libs/get-screenshot"
import { isReasoningEnded, isReasoningStarted, removeReasoning } from "@/libs/reasoning"
export const useMessage = () => {
const {
@ -55,14 +56,9 @@ export const useMessage = () => {
setWebSearch,
isSearchingInternet
} = useStoreMessageOption()
const [defaultInternetSearchOn, ] = useStorage(
"defaultInternetSearchOn",
false
)
const [defaultInternetSearchOn] = useStorage("defaultInternetSearchOn", false)
const [
defaultChatWithWebsite,
] = useStorage("defaultChatWithWebsite", false)
const [defaultChatWithWebsite] = useStorage("defaultChatWithWebsite", false)
const [chatWithWebsiteEmbedding] = useStorage(
"chatWithWebsiteEmbedding",
@ -115,10 +111,10 @@ export const useMessage = () => {
setIsProcessing(false)
setStreaming(false)
currentChatModelSettings.reset()
if(defaultInternetSearchOn) {
if (defaultInternetSearchOn) {
setWebSearch(true)
}
if(defaultChatWithWebsite) {
if (defaultChatWithWebsite) {
setChatMode("rag")
}
}
@ -329,6 +325,7 @@ export const useMessage = () => {
})
const response = await questionOllama.invoke(promptForQuestion)
query = response.content.toString()
query = removeReasoning(query)
}
let context: string = ""
@ -413,18 +410,36 @@ export const useMessage = () => {
}
)
let count = 0
let reasoningStartTime: Date | null = null
let reasoningEndTime: Date | null = null
let timetaken = 0
for await (const chunk of chunks) {
contentToSave += chunk?.content
fullText += chunk?.content
if (count === 0) {
setIsProcessing(true)
}
if (isReasoningStarted(fullText) && !reasoningStartTime) {
reasoningStartTime = new Date()
}
if (
reasoningStartTime &&
!reasoningEndTime &&
isReasoningEnded(fullText)
) {
reasoningEndTime = new Date()
const reasoningTime =
reasoningEndTime.getTime() - reasoningStartTime.getTime()
timetaken = reasoningTime
}
setMessages((prev) => {
return prev.map((message) => {
if (message.id === generateMessageId) {
return {
...message,
message: fullText + "▋"
message: fullText + "▋",
reasoning_time_taken: timetaken
}
}
return message
@ -440,7 +455,8 @@ export const useMessage = () => {
...message,
message: fullText,
sources: source,
generationInfo
generationInfo,
reasoning_time_taken: timetaken
}
}
return message
@ -470,7 +486,8 @@ export const useMessage = () => {
fullText,
source,
message_source: "copilot",
generationInfo
generationInfo,
reasoning_time_taken: timetaken
})
setIsProcessing(false)
@ -664,18 +681,36 @@ export const useMessage = () => {
}
)
let count = 0
let reasoningStartTime: Date | undefined = undefined
let reasoningEndTime: Date | undefined = undefined
let timetaken = 0
for await (const chunk of chunks) {
contentToSave += chunk?.content
fullText += chunk?.content
if (count === 0) {
setIsProcessing(true)
}
if (isReasoningStarted(fullText) && !reasoningStartTime) {
reasoningStartTime = new Date()
}
if (
reasoningStartTime &&
!reasoningEndTime &&
isReasoningEnded(fullText)
) {
reasoningEndTime = new Date()
const reasoningTime =
reasoningEndTime.getTime() - reasoningStartTime.getTime()
timetaken = reasoningTime
}
setMessages((prev) => {
return prev.map((message) => {
if (message.id === generateMessageId) {
return {
...message,
message: fullText + "▋"
message: fullText + "▋",
reasoning_time_taken: timetaken
}
}
return message
@ -689,7 +724,8 @@ export const useMessage = () => {
return {
...message,
message: fullText,
generationInfo
generationInfo,
reasoning_time_taken: timetaken
}
}
return message
@ -718,7 +754,8 @@ export const useMessage = () => {
fullText,
source: [],
message_source: "copilot",
generationInfo
generationInfo,
reasoning_time_taken: timetaken
})
setIsProcessing(false)
@ -914,18 +951,37 @@ export const useMessage = () => {
}
)
let count = 0
let reasoningStartTime: Date | null = null
let reasoningEndTime: Date | null = null
let timetaken = 0
for await (const chunk of chunks) {
contentToSave += chunk?.content
fullText += chunk?.content
if (count === 0) {
setIsProcessing(true)
}
if (isReasoningStarted(fullText) && !reasoningStartTime) {
reasoningStartTime = new Date()
}
if (
reasoningStartTime &&
!reasoningEndTime &&
isReasoningEnded(fullText)
) {
reasoningEndTime = new Date()
const reasoningTime =
reasoningEndTime.getTime() - reasoningStartTime.getTime()
timetaken = reasoningTime
}
setMessages((prev) => {
return prev.map((message) => {
if (message.id === generateMessageId) {
return {
...message,
message: fullText + "▋"
message: fullText + "▋",
reasoning_time_taken: timetaken
}
}
return message
@ -940,7 +996,8 @@ export const useMessage = () => {
return {
...message,
message: fullText,
generationInfo
generationInfo,
reasoning_time_taken: timetaken
}
}
return message
@ -970,7 +1027,8 @@ export const useMessage = () => {
fullText,
source: [],
message_source: "copilot",
generationInfo
generationInfo,
reasoning_time_taken: timetaken
})
setIsProcessing(false)
@ -1158,6 +1216,7 @@ export const useMessage = () => {
})
const response = await questionOllama.invoke(promptForQuestion)
query = response.content.toString()
query = removeReasoning(query)
}
const { prompt, source } = await getSystemPromptForWeb(query)
@ -1221,18 +1280,37 @@ export const useMessage = () => {
}
)
let count = 0
let timetaken = 0
let reasoningStartTime: Date | undefined = undefined
let reasoningEndTime: Date | undefined = undefined
for await (const chunk of chunks) {
contentToSave += chunk?.content
fullText += chunk?.content
if (count === 0) {
setIsProcessing(true)
}
if (isReasoningStarted(fullText) && !reasoningStartTime) {
reasoningStartTime = new Date()
}
if (
reasoningStartTime &&
!reasoningEndTime &&
isReasoningEnded(fullText)
) {
reasoningEndTime = new Date()
const reasoningTime =
reasoningEndTime.getTime() - reasoningStartTime.getTime()
timetaken = reasoningTime
}
setMessages((prev) => {
return prev.map((message) => {
if (message.id === generateMessageId) {
return {
...message,
message: fullText + "▋"
message: fullText + "▋",
reasoning_time_taken: timetaken
}
}
return message
@ -1248,7 +1326,8 @@ export const useMessage = () => {
...message,
message: fullText,
sources: source,
generationInfo
generationInfo,
reasoning_time_taken: timetaken
}
}
return message
@ -1277,7 +1356,8 @@ export const useMessage = () => {
image,
fullText,
source,
generationInfo
generationInfo,
reasoning_time_taken: timetaken
})
setIsProcessing(false)
@ -1448,18 +1528,36 @@ export const useMessage = () => {
]
})
let count = 0
let reasoningStartTime: Date | null = null
let reasoningEndTime: Date | null = null
let timetaken = 0
for await (const chunk of chunks) {
contentToSave += chunk?.content
fullText += chunk?.content
if (count === 0) {
setIsProcessing(true)
}
if (isReasoningStarted(fullText) && !reasoningStartTime) {
reasoningStartTime = new Date()
}
if (
reasoningStartTime &&
!reasoningEndTime &&
isReasoningEnded(fullText)
) {
reasoningEndTime = new Date()
const reasoningTime =
reasoningEndTime.getTime() - reasoningStartTime.getTime()
timetaken = reasoningTime
}
setMessages((prev) => {
return prev.map((message) => {
if (message.id === generateMessageId) {
return {
...message,
message: fullText + "▋"
message: fullText + "▋",
reasoning_time_taken: timetaken
}
}
return message
@ -1474,7 +1572,8 @@ export const useMessage = () => {
return {
...message,
message: fullText,
generationInfo
generationInfo,
reasoning_time_taken: timetaken
}
}
return message
@ -1506,7 +1605,8 @@ export const useMessage = () => {
source: [],
message_source: "copilot",
message_type: messageType,
generationInfo
generationInfo,
reasoning_time_taken: timetaken
})
setIsProcessing(false)

View File

@ -37,6 +37,11 @@ import { pageAssistModel } from "@/models"
import { getNoOfRetrievedDocs } from "@/services/app"
import { humanMessageFormatter } from "@/utils/human-message"
import { pageAssistEmbeddingModel } from "@/models/embedding"
import {
isReasoningEnded,
isReasoningStarted,
removeReasoning
} from "@/libs/reasoning"
export const useMessageOption = () => {
const {
@ -76,10 +81,7 @@ export const useMessageOption = () => {
} = useStoreMessageOption()
const currentChatModelSettings = useStoreChatModelSettings()
const [selectedModel, setSelectedModel] = useStorage("selectedModel")
const [defaultInternetSearchOn, ] = useStorage(
"defaultInternetSearchOn",
false
)
const [defaultInternetSearchOn] = useStorage("defaultInternetSearchOn", false)
const [speechToTextLanguage, setSpeechToTextLanguage] = useStorage(
"speechToTextLanguage",
"en-US"
@ -102,7 +104,7 @@ export const useMessageOption = () => {
setStreaming(false)
currentChatModelSettings.reset()
textareaRef?.current?.focus()
if(defaultInternetSearchOn) {
if (defaultInternetSearchOn) {
setWebSearch(true)
}
}
@ -195,6 +197,7 @@ export const useMessageOption = () => {
setMessages(newMessage)
let fullText = ""
let contentToSave = ""
let timetaken = 0
try {
setIsSearchingInternet(true)
@ -261,6 +264,7 @@ export const useMessageOption = () => {
})
const response = await questionOllama.invoke(promptForQuestion)
query = response.content.toString()
query = removeReasoning(query)
}
const { prompt, source } = await getSystemPromptForWeb(query)
@ -325,18 +329,35 @@ export const useMessageOption = () => {
}
)
let count = 0
let reasoningStartTime: Date | undefined = undefined
let reasoningEndTime: Date | undefined = undefined
for await (const chunk of chunks) {
contentToSave += chunk?.content
fullText += chunk?.content
if (count === 0) {
setIsProcessing(true)
}
if (isReasoningStarted(fullText) && !reasoningStartTime) {
reasoningStartTime = new Date()
}
if (
reasoningStartTime &&
!reasoningEndTime &&
isReasoningEnded(fullText)
) {
reasoningEndTime = new Date()
const reasoningTime =
reasoningEndTime.getTime() - reasoningStartTime.getTime()
timetaken = reasoningTime
}
setMessages((prev) => {
return prev.map((message) => {
if (message.id === generateMessageId) {
return {
...message,
message: fullText + "▋"
message: fullText + "▋",
reasoning_time_taken: timetaken
}
}
return message
@ -352,7 +373,8 @@ export const useMessageOption = () => {
...message,
message: fullText,
sources: source,
generationInfo
generationInfo,
reasoning_time_taken: timetaken
}
}
return message
@ -381,7 +403,8 @@ export const useMessageOption = () => {
image,
fullText,
source,
generationInfo
generationInfo,
reasoning_time_taken: timetaken
})
setIsProcessing(false)
@ -537,6 +560,7 @@ export const useMessageOption = () => {
setMessages(newMessage)
let fullText = ""
let contentToSave = ""
let timetaken = 0
try {
const prompt = await systemPromptForNonRagOption()
@ -622,9 +646,28 @@ export const useMessageOption = () => {
)
let count = 0
let reasoningStartTime: Date | null = null
let reasoningEndTime: Date | null = null
for await (const chunk of chunks) {
contentToSave += chunk?.content
fullText += chunk?.content
if (isReasoningStarted(fullText) && !reasoningStartTime) {
reasoningStartTime = new Date()
}
if (
reasoningStartTime &&
!reasoningEndTime &&
isReasoningEnded(fullText)
) {
reasoningEndTime = new Date()
const reasoningTime =
reasoningEndTime.getTime() - reasoningStartTime.getTime()
timetaken = reasoningTime
}
if (count === 0) {
setIsProcessing(true)
}
@ -633,7 +676,8 @@ export const useMessageOption = () => {
if (message.id === generateMessageId) {
return {
...message,
message: fullText + "▋"
message: fullText + "▋",
reasoning_time_taken: timetaken
}
}
return message
@ -648,7 +692,8 @@ export const useMessageOption = () => {
return {
...message,
message: fullText,
generationInfo
generationInfo,
reasoning_time_taken: timetaken
}
}
return message
@ -679,7 +724,8 @@ export const useMessageOption = () => {
source: [],
generationInfo,
prompt_content: promptContent,
prompt_id: promptId
prompt_id: promptId,
reasoning_time_taken: timetaken
})
setIsProcessing(false)
@ -818,7 +864,7 @@ export const useMessageOption = () => {
knownledge_id: selectedKnowledge.id
}
)
let timetaken = 0
try {
let query = message
const { ragPrompt: systemPrompt, ragQuestionPrompt: questionPrompt } =
@ -882,6 +928,7 @@ export const useMessageOption = () => {
})
const response = await questionOllama.invoke(promptForQuestion)
query = response.content.toString()
query = removeReasoning(query)
}
const docSize = await getNoOfRetrievedDocs()
@ -933,18 +980,36 @@ export const useMessageOption = () => {
}
)
let count = 0
let reasoningStartTime: Date | undefined = undefined
let reasoningEndTime: Date | undefined = undefined
for await (const chunk of chunks) {
contentToSave += chunk?.content
fullText += chunk?.content
if (count === 0) {
setIsProcessing(true)
}
if (isReasoningStarted(fullText) && !reasoningStartTime) {
reasoningStartTime = new Date()
}
if (
reasoningStartTime &&
!reasoningEndTime &&
isReasoningEnded(fullText)
) {
reasoningEndTime = new Date()
const reasoningTime =
reasoningEndTime.getTime() - reasoningStartTime.getTime()
timetaken = reasoningTime
}
setMessages((prev) => {
return prev.map((message) => {
if (message.id === generateMessageId) {
return {
...message,
message: fullText + "▋"
message: fullText + "▋",
reasoning_time_taken: timetaken
}
}
return message
@ -960,7 +1025,8 @@ export const useMessageOption = () => {
...message,
message: fullText,
sources: source,
generationInfo
generationInfo,
reasoning_time_taken: timetaken
}
}
return message
@ -989,7 +1055,8 @@ export const useMessageOption = () => {
image,
fullText,
source,
generationInfo
generationInfo,
reasoning_time_taken: timetaken
})
setIsProcessing(false)
@ -1206,6 +1273,6 @@ export const useMessageOption = () => {
setTemporaryChat,
useOCR,
setUseOCR,
defaultInternetSearchOn,
defaultInternetSearchOn
}
}

73
src/libs/reasoning.ts Normal file
View File

@ -0,0 +1,73 @@
const tags = ["think", "reason", "reasoning", "thought"];
export function parseReasoning(text: string): { type: 'reasoning' | 'text', content: string, reasoning_running?: boolean }[] {
try {
const result: { type: 'reasoning' | 'text', content: string, reasoning_running?: boolean }[] = []
const tagPattern = new RegExp(`<(${tags.join('|')})>`, 'i')
const closeTagPattern = new RegExp(`</(${tags.join('|')})>`, 'i')
let currentIndex = 0
let isReasoning = false
while (currentIndex < text.length) {
const openTagMatch = text.slice(currentIndex).match(tagPattern)
const closeTagMatch = text.slice(currentIndex).match(closeTagPattern)
if (!isReasoning && openTagMatch) {
const beforeText = text.slice(currentIndex, currentIndex + openTagMatch.index)
if (beforeText.trim()) {
result.push({ type: 'text', content: beforeText.trim() })
}
isReasoning = true
currentIndex += openTagMatch.index! + openTagMatch[0].length
continue
}
if (isReasoning && closeTagMatch) {
const reasoningContent = text.slice(currentIndex, currentIndex + closeTagMatch.index)
if (reasoningContent.trim()) {
result.push({ type: 'reasoning', content: reasoningContent.trim() })
}
isReasoning = false
currentIndex += closeTagMatch.index! + closeTagMatch[0].length
continue
}
if (currentIndex < text.length) {
const remainingText = text.slice(currentIndex)
result.push({
type: isReasoning ? 'reasoning' : 'text',
content: remainingText.trim(),
reasoning_running: isReasoning
})
break
}
}
return result
} catch (e) {
console.log(`Error parsing reasoning: ${e}`)
return [
{
type: 'text',
content: text
}
]
}
}
export function isReasoningStarted(text: string): boolean {
const tagPattern = new RegExp(`<(${tags.join('|')})>`, 'i')
return tagPattern.test(text)
}
export function isReasoningEnded(text: string): boolean {
const closeTagPattern = new RegExp(`</(${tags.join('|')})>`, 'i')
return closeTagPattern.test(text)
}
export function removeReasoning(text: string): string {
const tagPattern = new RegExp(`<(${tags.join('|')})>.*?</(${tags.join('|')})>`, 'gis')
return text.replace(tagPattern, '').trim()
}

View File

@ -17,6 +17,7 @@ export type Message = {
sources: any[]
images?: string[]
search?: WebSearch
reasoning_time_taken?: number
id?: string
messageType?: string
}

View File

@ -17,4 +17,5 @@ export type Message = {
messageType?: string
id?: string
generationInfo?: any
reasoning_time_taken?: number
}

View File

@ -0,0 +1,30 @@
import dayjs from 'dayjs'
import duration from 'dayjs/plugin/duration'
dayjs.extend(duration)
/**
 * Render a millisecond duration as a short human-readable string using the
 * largest whole unit: "500ms", "12s", "3m", "2h", "1d".
 *
 * Uses plain integer arithmetic instead of the previous dayjs.duration
 * round-trip, which only ever divided by a fixed unit anyway — and whose
 * local `const duration` shadowed the imported dayjs `duration` plugin.
 * Non-finite input (e.g. an undefined reasoning time reaching here at
 * runtime) falls back to the "ms" form, mirroring the old catch branch.
 */
export const humanizeMilliseconds = (milliseconds: number): string => {
  const MS_PER_SECOND = 1000
  const MS_PER_MINUTE = 60 * MS_PER_SECOND
  const MS_PER_HOUR = 60 * MS_PER_MINUTE
  const MS_PER_DAY = 24 * MS_PER_HOUR

  if (!Number.isFinite(milliseconds)) {
    return `${milliseconds}ms`
  }
  if (milliseconds < MS_PER_SECOND) {
    return `${milliseconds}ms`
  }
  if (milliseconds < MS_PER_MINUTE) {
    return `${Math.floor(milliseconds / MS_PER_SECOND)}s`
  }
  if (milliseconds < MS_PER_HOUR) {
    return `${Math.floor(milliseconds / MS_PER_MINUTE)}m`
  }
  if (milliseconds < MS_PER_DAY) {
    return `${Math.floor(milliseconds / MS_PER_HOUR)}h`
  }
  return `${Math.floor(milliseconds / MS_PER_DAY)}d`
}

View File

@ -51,7 +51,7 @@ export default defineConfig({
outDir: "build",
manifest: {
version: "1.4.3",
version: "1.4.4",
name:
process.env.TARGET === "firefox"
? "Page Assist - A Web UI for Local AI Models"