Merge pull request #119 from n4ze3m/next

v1.1.12
Muhammed Nazeem 2024-06-13 00:30:02 +05:30 committed by GitHub
commit d198b87edd
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
20 changed files with 459 additions and 122 deletions

bun.lockb

Binary file not shown.

View File

@@ -44,10 +44,12 @@
    "react": "18.2.0",
    "react-dom": "18.2.0",
    "react-i18next": "^14.1.0",
+   "react-icons": "^5.2.1",
    "react-markdown": "8.0.0",
    "react-router-dom": "6.10.0",
    "react-syntax-highlighter": "^15.5.0",
    "react-toastify": "^10.0.4",
+   "rehype-katex": "6.0.3",
    "rehype-mathjax": "4.0.3",
    "remark-gfm": "3.0.1",
    "remark-math": "5.1.1",

src/assets/fonts/Arimo.ttf Normal file

Binary file not shown.

Binary file not shown.

Binary file not shown.

View File

@@ -1,70 +1,26 @@
@font-face {
-  font-family: "Inter";
-  src: url("inter.ttf") format("truetype");
+  font-family: "Arimo";
+  src: url("fonts/Arimo.ttf");
+  font-display: swap;
}
-@font-face {
-  font-family: "Onest";
-  src: url("onest.ttf") format("truetype");
-}
-.inter {
-  font-family: "Inter", sans-serif !important;
-}
-.onest {
-  font-family: "Onest", sans-serif !important;
+.arimo {
+  font-family: "Arimo", sans-serif;
+  font-weight: 500;
+  font-style: normal;
}
@tailwind base;
@tailwind components;
@tailwind utilities;
.ant-select-selection-search-input {
  border: none !important;
  box-shadow: none !important;
}
-.gradient-border {
-  --borderWidth: 3px;
-  position: relative;
-  border-radius: var(--borderWidth);
-}
-.gradient-border:after {
-  content: "";
-  position: absolute;
-  top: calc(-1 * var(--borderWidth));
-  left: calc(-1 * var(--borderWidth));
-  height: calc(100% + var(--borderWidth) * 2);
-  width: calc(100% + var(--borderWidth) * 2);
-  background: linear-gradient(
-    60deg,
-    #f79533,
-    #f37055,
-    #ef4e7b,
-    #a166ab,
-    #5073b8,
-    #1098ad,
-    #07b39b,
-    #6fba82
-  );
-  border-radius: calc(2 * var(--borderWidth));
-  z-index: -1;
-  animation: animatedgradient 3s ease alternate infinite;
-  background-size: 300% 300%;
-}
-@keyframes animatedgradient {
-  0% {
-    background-position: 0% 50%;
-  }
-  50% {
-    background-position: 100% 50%;
-  }
-  100% {
-    background-position: 0% 50%;
-  }
-}
/* Hide scrollbar for Chrome, Safari and Opera */
.no-scrollbar::-webkit-scrollbar {
  display: none;
@@ -75,3 +31,37 @@
  -ms-overflow-style: none; /* IE and Edge */
  scrollbar-width: none; /* Firefox */
}
+@keyframes gradient-border {
+  0% {
+    border-image-source: linear-gradient(
+      45deg,
+      #f79533,
+      #f37055,
+      #ef4e7b,
+      #a166ab
+    );
+  }
+  50% {
+    border-image-source: linear-gradient(45deg, #ef4e7b, #a166ab);
+  }
+  74% {
+    border-image-source: linear-gradient(60deg, #5073b8, #1098ad);
+  }
+  100% {
+    border-image-source: linear-gradient(
+      45deg,
+      #f79533,
+      #f37055,
+      #ef4e7b,
+      #a166ab
+    );
+  }
+}
+.animated-gradient-border {
+  border: 3px solid;
+  border-image-slice: 1;
+  animation: gradient-border 3s infinite;
+  border-radius: 10px;
+}

View File

@@ -1,13 +1,12 @@
import remarkGfm from "remark-gfm"
import remarkMath from "remark-math"
import ReactMarkdown from "react-markdown"
import "property-information"
import React from "react"
import { CodeBlock } from "./CodeBlock"
export default function Markdown({ message }: { message: string }) {
  return (
    <React.Fragment>
      <ReactMarkdown
@@ -17,10 +16,10 @@ export default function Markdown({ message }: { message: string }) {
          code({ node, inline, className, children, ...props }) {
            const match = /language-(\w+)/.exec(className || "")
            return !inline ? (
              <CodeBlock
                language={match ? match[1] : ""}
                value={String(children).replace(/\n$/, "")}
              />
            ) : (
              <code className={`${className} font-semibold`} {...props}>
                {children}

View File

@@ -4,7 +4,7 @@ import { useTranslation } from "react-i18next"
export const WebSearch = () => {
  const {t} = useTranslation('common')
  return (
-    <div className="gradient-border mt-4 flex w-56 items-center gap-4 rounded-lg bg-neutral-100 p-1ccc text-slate-900 dark:bg-neutral-800 dark:text-slate-50">
+    <div className="animated-gradient-border mt-4 flex w-56 items-center gap-4 !rounded-lg bg-neutral-100 p-1 text-slate-900 dark:bg-neutral-800 dark:text-slate-50">
      <div className="rounded p-1">
        <Globe className="w-6 h-6" />
      </div>

View File

@@ -4,7 +4,7 @@ import React from "react"
import useDynamicTextareaSize from "~/hooks/useDynamicTextareaSize"
import { toBase64 } from "~/libs/to-base64"
import { useMessageOption } from "~/hooks/useMessageOption"
-import { Checkbox, Dropdown, Select, Switch, Tooltip } from "antd"
+import { Checkbox, Dropdown, Switch, Tooltip } from "antd"
import { Image } from "antd"
import { useWebUI } from "~/store/webui"
import { defaultEmbeddingModelForRag } from "~/services/ollama"
@@ -13,6 +13,7 @@ import { getVariable } from "~/utils/select-varaible"
import { useTranslation } from "react-i18next"
import { KnowledgeSelect } from "../Knowledge/KnowledgeSelect"
import { useSpeechRecognition } from "@/hooks/useSpeechRecognition"
+import { PiGlobe } from "react-icons/pi"
type Props = {
  dropedFile: File | undefined
@@ -250,19 +251,7 @@ export const PlaygroundForm = ({ dropedFile }: Props) => {
          {!selectedKnowledge && (
            <Tooltip title={t("tooltip.searchInternet")}>
              <div className="inline-flex items-center gap-2">
-                <svg
-                  xmlns="http://www.w3.org/2000/svg"
-                  fill="none"
-                  viewBox="0 0 24 24"
-                  strokeWidth={1.5}
-                  stroke="currentColor"
-                  className="w-5 h-5 dark:text-gray-300">
-                  <path
-                    strokeLinecap="round"
-                    strokeLinejoin="round"
-                    d="M12 21a9.004 9.004 0 0 0 8.716-6.747M12 21a9.004 9.004 0 0 1-8.716-6.747M12 21c2.485 0 4.5-4.03 4.5-9S14.485 3 12 3m0 18c-2.485 0-4.5-4.03-4.5-9S9.515 3 12 3m0 0a8.997 8.997 0 0 1 7.843 4.582M12 3a8.997 8.997 0 0 0-7.843 4.582m15.686 0A11.953 11.953 0 0 1 12 10.5c-2.998 0-5.74-1.1-7.843-2.918m15.686 0A8.959 8.959 0 0 1 21 12c0 .778-.099 1.533-.284 2.253m0 0A17.919 17.919 0 0 1 12 16.5c-3.162 0-6.133-.815-8.716-2.247m0 0A9.015 9.015 0 0 1 3 12c0-1.605.42-3.113 1.157-4.418"
-                  />
-                </svg>
+                <PiGlobe className="h-5 w-5 dark:text-gray-300" />
                <Switch
                  value={webSearch}
                  onChange={(e) => setWebSearch(e)}

View File

@@ -3,11 +3,19 @@ import { PlaygroundMessage } from "~/components/Common/Playground/Message"
import { useMessage } from "~/hooks/useMessage"
import { EmptySidePanel } from "../Chat/empty"
import { useWebUI } from "@/store/webui"
+import { MessageSourcePopup } from "@/components/Common/Playground/MessageSourcePopup"
export const SidePanelBody = () => {
-  const { messages, streaming, regenerateLastMessage, editMessage } =
-    useMessage()
+  const {
+    messages,
+    streaming,
+    regenerateLastMessage,
+    editMessage,
+    isSearchingInternet
+  } = useMessage()
  const divRef = React.useRef<HTMLDivElement>(null)
+  const [isSourceOpen, setIsSourceOpen] = React.useState(false)
+  const [source, setSource] = React.useState<any>(null)
  const { ttsEnabled } = useWebUI()
  React.useEffect(() => {
    if (divRef.current) {
@@ -27,19 +35,26 @@ export const SidePanelBody = () => {
            currentMessageIndex={index}
            totalMessages={messages.length}
            onRengerate={regenerateLastMessage}
+            isProcessing={streaming}
+            isSearchingInternet={isSearchingInternet}
+            sources={message.sources}
            onEditFormSubmit={(value) => {
              editMessage(index, value, !message.isBot)
            }}
-            isProcessing={streaming}
+            onSourceClick={(data) => {
+              setSource(data)
+              setIsSourceOpen(true)
+            }}
            isTTSEnabled={ttsEnabled}
          />
        ))}
-      {import.meta.env.BROWSER === "chrome" ? (
-        <div className="w-full h-32 md:h-48 flex-shrink-0"></div>
-      ) : (
-        <div className="w-full h-48 flex-shrink-0"></div>
-      )}
+      <div className="w-full h-48 flex-shrink-0"></div>
      <div ref={divRef} />
+      <MessageSourcePopup
+        open={isSourceOpen}
+        setOpen={setIsSourceOpen}
+        source={source}
+      />
    </div>
  )
}
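
Based on how MessageSourcePopup is wired above, its expected props can be inferred from the call site; the actual component definition is not part of this diff, so the following is only a sketch:

// Sketch, not part of the diff: prop shape inferred from the call site above.
type MessageSourcePopupProps = {
  open: boolean
  setOpen: (open: boolean) => void
  source: any // whatever object onSourceClick hands over from message.sources
}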

View File

@@ -4,13 +4,14 @@ import React from "react"
import useDynamicTextareaSize from "~/hooks/useDynamicTextareaSize"
import { useMessage } from "~/hooks/useMessage"
import { toBase64 } from "~/libs/to-base64"
-import { Checkbox, Dropdown, Image, Tooltip } from "antd"
+import { Checkbox, Dropdown, Image, Switch, Tooltip } from "antd"
import { useWebUI } from "~/store/webui"
import { defaultEmbeddingModelForRag } from "~/services/ollama"
import { ImageIcon, MicIcon, StopCircleIcon, X } from "lucide-react"
import { useTranslation } from "react-i18next"
import { ModelSelect } from "@/components/Common/ModelSelect"
import { useSpeechRecognition } from "@/hooks/useSpeechRecognition"
+import { PiGlobeX, PiGlobe } from "react-icons/pi"
type Props = {
  dropedFile: File | undefined
@@ -88,6 +89,13 @@ export const SidepanelForm = ({ dropedFile }: Props) => {
        return
      }
    }
+    if (webSearch) {
+      const defaultEM = await defaultEmbeddingModelForRag()
+      if (!defaultEM) {
+        form.setFieldError("message", t("formError.noEmbeddingModel"))
+        return
+      }
+    }
    form.reset()
    textAreaFocus()
    await sendMessage({
@@ -111,7 +119,9 @@ export const SidepanelForm = ({ dropedFile }: Props) => {
    speechToTextLanguage,
    stopStreamingRequest,
    streaming,
-    setChatMode
+    setChatMode,
+    webSearch,
+    setWebSearch
  } = useMessage()
  React.useEffect(() => {
@@ -137,6 +147,30 @@ export const SidepanelForm = ({ dropedFile }: Props) => {
    }
  })
+  React.useEffect(() => {
+    const handleDrop = (e: DragEvent) => {
+      e.preventDefault()
+      if (e.dataTransfer?.items) {
+        for (let i = 0; i < e.dataTransfer.items.length; i++) {
+          if (e.dataTransfer.items[i].type === "text/plain") {
+            e.dataTransfer.items[i].getAsString((text) => {
+              form.setFieldValue("message", text)
+            })
+          }
+        }
+      }
+    }
+    const handleDragOver = (e: DragEvent) => {
+      e.preventDefault()
+    }
+    textareaRef.current?.addEventListener("drop", handleDrop)
+    textareaRef.current?.addEventListener("dragover", handleDragOver)
+    return () => {
+      textareaRef.current?.removeEventListener("drop", handleDrop)
+      textareaRef.current?.removeEventListener("dragover", handleDragOver)
+    }
+  }, [])
  return (
    <div className="px-3 pt-3 md:px-6 md:pt-6 bg-gray-50 dark:bg-[#262626] border rounded-t-xl border-black/10 dark:border-gray-600">
      <div
@@ -175,6 +209,13 @@ export const SidepanelForm = ({ dropedFile }: Props) => {
            return
          }
        }
+        if (webSearch) {
+          const defaultEM = await defaultEmbeddingModelForRag()
+          if (!defaultEM) {
+            form.setFieldError("message", t("formError.noEmbeddingModel"))
+            return
+          }
+        }
        await stopListening()
        form.reset()
        textAreaFocus()
@@ -210,6 +251,20 @@ export const SidepanelForm = ({ dropedFile }: Props) => {
            {...form.getInputProps("message")}
          />
          <div className="flex mt-4 justify-end gap-3">
+            <Tooltip title={t("tooltip.searchInternet")}>
+              <button
+                type="button"
+                onClick={() => setWebSearch(!webSearch)}
+                className={`inline-flex items-center gap-2 ${
+                  chatMode === "rag" ? "hidden" : "block"
+                }`}>
+                {webSearch ? (
+                  <PiGlobe className="h-5 w-5 dark:text-gray-300" />
+                ) : (
+                  <PiGlobeX className="h-5 w-5 text-gray-600 dark:text-gray-400" />
+                )}
+              </button>
+            </Tooltip>
            <ModelSelect />
            {browserSupportsSpeechRecognition && (
              <Tooltip title={t("tooltip.speechToText")}>

View File

@@ -19,7 +19,7 @@ function IndexOption() {
        algorithm:
          mode === "dark" ? theme.darkAlgorithm : theme.defaultAlgorithm,
        token: {
-          fontFamily: i18n.language === "ru" ? "Onest" : "Inter"
+          fontFamily: "Arimo"
        }
      }}
      renderEmpty={() => (

View File

@@ -2,7 +2,7 @@
<html>
  <head>
    <title>Page Assist - A Web UI for Local AI Models</title>
-    <meta name="viewport" content="width=device-width, initial-scale=1.0" />
+    <meta name="viewport" content="width=device-width, initial-scale=1">
    <meta name="manifest.type" content="browser_action" />
    <link href="~/assets/tailwind.css" rel="stylesheet" />
    <meta charset="utf-8" />

View File

@@ -20,7 +20,7 @@ function IndexSidepanel() {
        algorithm:
          mode === "dark" ? theme.darkAlgorithm : theme.defaultAlgorithm,
        token: {
-          fontFamily: i18n.language === "ru" ? "Onest" : "Inter"
+          fontFamily: "Arimo"
        }
      }}
      renderEmpty={() => (

View File

@@ -2,7 +2,7 @@
<html>
  <head>
    <title>Page Assist - A Web UI for Local AI Models</title>
-    <meta name="viewport" content="width=device-width, initial-scale=1.0" />
+    <meta name="viewport" content="width=device-width, initial-scale=1">
    <meta name="manifest.type" content="browser_action" />
    <meta name="manifest.open_at_install" content="false" />
    <meta name="manifest.browser_style" content="false" />

View File

@@ -2,11 +2,12 @@ import React from "react"
import { cleanUrl } from "~/libs/clean-url"
import {
  defaultEmbeddingModelForRag,
+  geWebSearchFollowUpPrompt,
  getOllamaURL,
  promptForRag,
  systemPromptForNonRag
} from "~/services/ollama"
-import { type Message } from "~/store/option"
+import { useStoreMessageOption, type Message } from "~/store/option"
import { useStoreMessage } from "~/store"
import { HumanMessage, SystemMessage } from "@langchain/core/messages"
import { getDataFromCurrentTab } from "~/libs/get-html"
@@ -29,6 +30,7 @@ import { useStorage } from "@plasmohq/storage/hook"
import { useStoreChatModelSettings } from "@/store/model"
import { ChatOllama } from "@/models/ChatOllama"
import { getAllDefaultModelSettings } from "@/services/model-settings"
+import { getSystemPromptForWeb } from "@/web/web"
export const useMessage = () => {
  const {
@@ -42,6 +44,9 @@ export const useMessage = () => {
  const { t } = useTranslation("option")
  const [selectedModel, setSelectedModel] = useStorage("selectedModel")
  const currentChatModelSettings = useStoreChatModelSettings()
+  const { setIsSearchingInternet, webSearch, setWebSearch, isSearchingInternet } =
+    useStoreMessageOption()
  const {
    history,
    setHistory,
@@ -571,6 +576,249 @@
    }
  }
+  const searchChatMode = async (
+    message: string,
+    image: string,
+    isRegenerate: boolean,
+    messages: Message[],
+    history: ChatHistory,
+    signal: AbortSignal
+  ) => {
+    const url = await getOllamaURL()
+    setStreaming(true)
+    const userDefaultModelSettings = await getAllDefaultModelSettings()
+    if (image.length > 0) {
+      image = `data:image/jpeg;base64,${image.split(",")[1]}`
+    }
+    const ollama = new ChatOllama({
+      model: selectedModel!,
+      baseUrl: cleanUrl(url),
+      keepAlive:
+        currentChatModelSettings?.keepAlive ??
+        userDefaultModelSettings?.keepAlive,
+      temperature:
+        currentChatModelSettings?.temperature ??
+        userDefaultModelSettings?.temperature,
+      topK: currentChatModelSettings?.topK ?? userDefaultModelSettings?.topK,
+      topP: currentChatModelSettings?.topP ?? userDefaultModelSettings?.topP,
+      numCtx:
+        currentChatModelSettings?.numCtx ?? userDefaultModelSettings?.numCtx,
+      seed: currentChatModelSettings?.seed
+    })
+    let newMessage: Message[] = []
+    let generateMessageId = generateID()
+    if (!isRegenerate) {
+      newMessage = [
+        ...messages,
+        {
+          isBot: false,
+          name: "You",
+          message,
+          sources: [],
+          images: [image]
+        },
+        {
+          isBot: true,
+          name: selectedModel,
+          message: "▋",
+          sources: [],
+          id: generateMessageId
+        }
+      ]
+    } else {
+      newMessage = [
+        ...messages,
+        {
+          isBot: true,
+          name: selectedModel,
+          message: "▋",
+          sources: [],
+          id: generateMessageId
+        }
+      ]
+    }
+    setMessages(newMessage)
+    let fullText = ""
+    let contentToSave = ""
+    try {
+      setIsSearchingInternet(true)
+      let query = message
+      if (newMessage.length > 2) {
+        let questionPrompt = await geWebSearchFollowUpPrompt()
+        const lastTenMessages = newMessage.slice(-10)
+        lastTenMessages.pop()
+        const chat_history = lastTenMessages
+          .map((message) => {
+            return `${message.isBot ? "Assistant: " : "Human: "}${message.message}`
+          })
+          .join("\n")
+        const promptForQuestion = questionPrompt
+          .replaceAll("{chat_history}", chat_history)
+          .replaceAll("{question}", message)
+        const questionOllama = new ChatOllama({
+          model: selectedModel!,
+          baseUrl: cleanUrl(url),
+          keepAlive:
+            currentChatModelSettings?.keepAlive ??
+            userDefaultModelSettings?.keepAlive,
+          temperature:
+            currentChatModelSettings?.temperature ??
+            userDefaultModelSettings?.temperature,
+          topK:
+            currentChatModelSettings?.topK ?? userDefaultModelSettings?.topK,
+          topP:
+            currentChatModelSettings?.topP ?? userDefaultModelSettings?.topP,
+          numCtx:
+            currentChatModelSettings?.numCtx ??
+            userDefaultModelSettings?.numCtx,
+          seed: currentChatModelSettings?.seed
+        })
+        const response = await questionOllama.invoke(promptForQuestion)
+        query = response.content.toString()
+      }
+      const { prompt, source } = await getSystemPromptForWeb(query)
+      setIsSearchingInternet(false)
+      // message = message.trim().replaceAll("\n", " ")
+      let humanMessage = new HumanMessage({
+        content: [
+          {
+            text: message,
+            type: "text"
+          }
+        ]
+      })
+      if (image.length > 0) {
+        humanMessage = new HumanMessage({
+          content: [
+            {
+              text: message,
+              type: "text"
+            },
+            {
+              image_url: image,
+              type: "image_url"
+            }
+          ]
+        })
+      }
+      const applicationChatHistory = generateHistory(history)
+      if (prompt) {
+        applicationChatHistory.unshift(
+          new SystemMessage({
+            content: [
+              {
+                text: prompt,
+                type: "text"
+              }
+            ]
+          })
+        )
+      }
+      const chunks = await ollama.stream(
+        [...applicationChatHistory, humanMessage],
+        {
+          signal: signal
+        }
+      )
+      let count = 0
+      for await (const chunk of chunks) {
+        contentToSave += chunk.content
+        fullText += chunk.content
+        if (count === 0) {
+          setIsProcessing(true)
+        }
+        setMessages((prev) => {
+          return prev.map((message) => {
+            if (message.id === generateMessageId) {
+              return {
+                ...message,
+                message: fullText + "▋"
+              }
+            }
+            return message
+          })
+        })
+        count++
+      }
+      // update the message with the full text
+      setMessages((prev) => {
+        return prev.map((message) => {
+          if (message.id === generateMessageId) {
+            return {
+              ...message,
+              message: fullText,
+              sources: source
+            }
+          }
+          return message
+        })
+      })
+      setHistory([
+        ...history,
+        {
+          role: "user",
+          content: message,
+          image
+        },
+        {
+          role: "assistant",
+          content: fullText
+        }
+      ])
+      await saveMessageOnSuccess({
+        historyId,
+        setHistoryId,
+        isRegenerate,
+        selectedModel: selectedModel,
+        message,
+        image,
+        fullText,
+        source
+      })
+      setIsProcessing(false)
+      setStreaming(false)
+    } catch (e) {
+      const errorSave = await saveMessageOnError({
+        e,
+        botMessage: fullText,
+        history,
+        historyId,
+        image,
+        selectedModel,
+        setHistory,
+        setHistoryId,
+        userMessage: message,
+        isRegenerating: isRegenerate
+      })
+      if (!errorSave) {
+        notification.error({
+          message: t("error"),
+          description: e?.message || t("somethingWentWrong")
+        })
+      }
+      setIsProcessing(false)
+      setStreaming(false)
+    } finally {
+      setAbortController(null)
+    }
+  }
  const onSubmit = async ({
    message,
    image,
@@ -597,14 +845,25 @@
    }
    if (chatMode === "normal") {
-      await normalChatMode(
-        message,
-        image,
-        isRegenerate,
-        chatHistory || messages,
-        memory || history,
-        signal
-      )
+      if (webSearch) {
+        await searchChatMode(
+          message,
+          image,
+          isRegenerate || false,
+          messages,
+          memory || history,
+          signal
+        )
+      } else {
+        await normalChatMode(
+          message,
+          image,
+          isRegenerate,
+          chatHistory || messages,
+          memory || history,
+          signal
+        )
+      }
    } else {
      const newEmbeddingController = new AbortController()
      let embeddingSignal = newEmbeddingController.signal
@@ -714,6 +973,9 @@
    isEmbedding,
    speechToTextLanguage,
    setSpeechToTextLanguage,
-    regenerateLastMessage
+    regenerateLastMessage,
+    webSearch,
+    setWebSearch,
+    isSearchingInternet,
  }
}
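
useMessage now pulls its web-search state from useStoreMessageOption. Assuming that store is a Zustand-style slice (an assumption; only the destructured names above are confirmed by this diff), the added fields would look roughly like this sketch:

import { create } from "zustand"

// Sketch, not part of the diff: only the web-search fields read by useMessage.
type WebSearchSlice = {
  webSearch: boolean
  setWebSearch: (value: boolean) => void
  isSearchingInternet: boolean
  setIsSearchingInternet: (value: boolean) => void
}

export const useWebSearchSliceSketch = create<WebSearchSlice>((set) => ({
  webSearch: false,
  setWebSearch: (webSearch) => set({ webSearch }),
  isSearchingInternet: false,
  setIsSearchingInternet: (isSearchingInternet) => set({ isSearchingInternet })
}))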

View File

@@ -10,16 +10,13 @@ export const OptionRouting = () => {
  const { i18n } = useTranslation()
  return (
-    <div
-      className={`${mode === "dark" ? "dark" : "light"} ${
-        i18n.language === "ru" ? "onest" : "inter"
-      }`}>
+    <div className={`${mode === "dark" ? "dark" : "light"} arimo`}>
      <Suspense fallback={<PageAssistLoader />}>
        {import.meta.env.BROWSER === "chrome" ? (
          <OptionRoutingChrome />
        ) : (
          <OptionRoutingFirefox />
        )}
      </Suspense>
    </div>
  )
@@ -30,16 +27,13 @@ export const SidepanelRouting = () => {
  const { i18n } = useTranslation()
  return (
-    <div
-      className={`${mode === "dark" ? "dark" : "light"} ${
-        i18n.language === "ru" ? "onest" : "inter"
-      }`}>
+    <div className={`${mode === "dark" ? "dark" : "light"} arimo`}>
      <Suspense fallback={<PageAssistLoader />}>
        {import.meta.env.BROWSER === "chrome" ? (
          <SidepanelRoutingChrome />
        ) : (
          <SidepanelRoutingFirefox />
        )}
      </Suspense>
    </div>
  )

View File

@@ -13,11 +13,42 @@ const DEFAULT_RAG_QUESTION_PROMPT =
const DEFAUTL_RAG_SYSTEM_PROMPT = `You are a helpful AI assistant. Use the following pieces of context to answer the question at the end. If you don't know the answer, just say you don't know. DO NOT try to make up an answer. If the question is not related to the context, politely respond that you are tuned to only answer questions that are related to the context. {context} Question: {question} Helpful answer:`
-const DEFAULT_WEBSEARCH_PROMP = `You are a helpful assistant that can answer any questions. You can use the following search results in case you want to answer questions about anything in real-time. The current date and time are {current_date_time}.
-Search results:
-{search_results}`
+const DEFAULT_WEBSEARCH_PROMP = `You are an AI model who is expert at searching the web and answering user's queries.
+Generate a response that is informative and relevant to the user's query based on provided search results. the current date and time are {current_date_time}.
+\`search-results\` block provides knowledge from the web search results. You can use this information to generate a meaningful response.
+<search-results>
+{search_results}
+</search-results>
+`
+const DEFAULT_WEBSEARCH_FOLLOWUP_PROMPT = `You will give a follow-up question. You need to rephrase the follow-up question if needed so it is a standalone question that can be used by the AI model to search the internet.
+Example:
+Follow-up question: What are the symptoms of a heart attack?
+Rephrased question: Symptoms of a heart attack.
+Follow-up question: Where is the upcoming Olympics being held?
+Rephrased question: Location of the upcoming Olympics.
+Follow-up question: Taylor Swift's latest album?
+Rephrased question: Name of Taylor Swift's latest album.
+Previous Conversation:
+{chat_history}
+Follow-up question: {question}
+Rephrased question:
+`
export const getOllamaURL = async () => {
  const ollamaURL = await storage.get("ollamaURL")
@@ -289,7 +320,7 @@ export const setWebSearchPrompt = async (prompt: string) => {
export const geWebSearchFollowUpPrompt = async () => {
  const prompt = await storage.get("webSearchFollowUpPrompt")
  if (!prompt || prompt.length === 0) {
-    return DEFAULT_RAG_QUESTION_PROMPT
+    return DEFAULT_WEBSEARCH_FOLLOWUP_PROMPT
  }
  return prompt
}
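
The follow-up prompt is consumed in searchChatMode above by plain placeholder substitution. A worked example of what the rephrasing call receives (the conversation text here is invented for illustration):

// Sketch, not part of the diff: filling the template before the rephrasing call,
// exactly as searchChatMode does with replaceAll.
const chatHistory = "Human: Who won the 2022 World Cup?\nAssistant: Argentina."
const followUp = "How many goals did they score in the final?"

const rephrasePrompt = DEFAULT_WEBSEARCH_FOLLOWUP_PROMPT
  .replaceAll("{chat_history}", chatHistory)
  .replaceAll("{question}", followUp)
// The model is then expected to return a standalone query such as
// "Goals scored by Argentina in the 2022 World Cup final".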

View File

@@ -3,5 +3,5 @@ module.exports = {
  mode: "jit",
  darkMode: "class",
  content: ["./src/**/*.tsx"],
-  plugins: [require("@tailwindcss/forms"), require("@tailwindcss/typography"),]
+  plugins: [require("@tailwindcss/forms"), require("@tailwindcss/typography")]
}

View File

@@ -48,7 +48,7 @@ export default defineConfig({
    outDir: "build",
    manifest: {
-      version: "1.1.11",
+      version: "1.1.12",
      name:
        process.env.TARGET === "firefox"
          ? "Page Assist - A Web UI for Local AI Models"