commit d198b87edd
@@ -44,10 +44,12 @@
    "react": "18.2.0",
    "react-dom": "18.2.0",
    "react-i18next": "^14.1.0",
    "react-icons": "^5.2.1",
    "react-markdown": "8.0.0",
    "react-router-dom": "6.10.0",
    "react-syntax-highlighter": "^15.5.0",
    "react-toastify": "^10.0.4",
    "rehype-katex": "6.0.3",
    "rehype-mathjax": "4.0.3",
    "remark-gfm": "3.0.1",
    "remark-math": "5.1.1",
BIN  src/assets/fonts/Arimo.ttf  (new file, binary not shown)
@@ -1,70 +1,26 @@
@font-face {
-  font-family: "Inter";
-  src: url("inter.ttf") format("truetype");
+  font-family: "Arimo";
+  src: url("fonts/Arimo.ttf");
  font-display: swap;
}

-@font-face {
-  font-family: "Onest";
-  src: url("onest.ttf") format("truetype");
-}
-
-.inter {
-  font-family: "Inter", sans-serif !important;
-}
-
-.onest {
-  font-family: "Onest", sans-serif !important;
+.arimo {
+  font-family: "Arimo", sans-serif;
+  font-weight: 500;
+  font-style: normal;
}

@tailwind base;
@tailwind components;
@tailwind utilities;



.ant-select-selection-search-input {
  border: none !important;
  box-shadow: none !important;
}

-.gradient-border {
-  --borderWidth: 3px;
-  position: relative;
-  border-radius: var(--borderWidth);
-}
-.gradient-border:after {
-  content: "";
-  position: absolute;
-  top: calc(-1 * var(--borderWidth));
-  left: calc(-1 * var(--borderWidth));
-  height: calc(100% + var(--borderWidth) * 2);
-  width: calc(100% + var(--borderWidth) * 2);
-  background: linear-gradient(
-    60deg,
-    #f79533,
-    #f37055,
-    #ef4e7b,
-    #a166ab,
-    #5073b8,
-    #1098ad,
-    #07b39b,
-    #6fba82
-  );
-  border-radius: calc(2 * var(--borderWidth));
-  z-index: -1;
-  animation: animatedgradient 3s ease alternate infinite;
-  background-size: 300% 300%;
-}
-
-@keyframes animatedgradient {
-  0% {
-    background-position: 0% 50%;
-  }
-  50% {
-    background-position: 100% 50%;
-  }
-  100% {
-    background-position: 0% 50%;
-  }
-}
/* Hide scrollbar for Chrome, Safari and Opera */
.no-scrollbar::-webkit-scrollbar {
  display: none;
@@ -75,3 +31,37 @@
  -ms-overflow-style: none; /* IE and Edge */
  scrollbar-width: none; /* Firefox */
}
+
+@keyframes gradient-border {
+  0% {
+    border-image-source: linear-gradient(
+      45deg,
+      #f79533,
+      #f37055,
+      #ef4e7b,
+      #a166ab
+    );
+  }
+  50% {
+    border-image-source: linear-gradient(45deg, #ef4e7b, #a166ab);
+  }
+  74% {
+    border-image-source: linear-gradient(60deg, #5073b8, #1098ad);
+  }
+  100% {
+    border-image-source: linear-gradient(
+      45deg,
+      #f79533,
+      #f37055,
+      #ef4e7b,
+      #a166ab
+    );
+  }
+}
+
+.animated-gradient-border {
+  border: 3px solid;
+  border-image-slice: 1;
+  animation: gradient-border 3s infinite;
+  border-radius: 10px;
+}
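Note: the new "arimo" and "animated-gradient-border" rules above are plain global CSS classes, so any element can opt in via className. A minimal usage sketch; the Badge component and its markup are illustrative only and not part of this commit (the commit itself applies the classes in the routing and WebSearch components further down):

    import React from "react"

    // Hypothetical example: "arimo" switches the font, "animated-gradient-border"
    // adds the keyframe-animated border defined in the stylesheet above.
    export const Badge = ({ label }: { label: string }) => (
      <div className="arimo animated-gradient-border rounded-lg p-1">{label}</div>
    )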
@@ -1,13 +1,12 @@
import remarkGfm from "remark-gfm"
import remarkMath from "remark-math"
import ReactMarkdown from "react-markdown"

import "property-information"
import React from "react"
import { CodeBlock } from "./CodeBlock"


export default function Markdown({ message }: { message: string }) {

  return (
    <React.Fragment>
      <ReactMarkdown
@@ -4,7 +4,7 @@ import { useTranslation } from "react-i18next"
export const WebSearch = () => {
  const {t} = useTranslation('common')
  return (
-    <div className="gradient-border mt-4 flex w-56 items-center gap-4 rounded-lg bg-neutral-100 p-1ccc text-slate-900 dark:bg-neutral-800 dark:text-slate-50">
+    <div className="animated-gradient-border mt-4 flex w-56 items-center gap-4 !rounded-lg bg-neutral-100 p-1 text-slate-900 dark:bg-neutral-800 dark:text-slate-50">
      <div className="rounded p-1">
        <Globe className="w-6 h-6" />
      </div>
@@ -4,7 +4,7 @@ import React from "react"
import useDynamicTextareaSize from "~/hooks/useDynamicTextareaSize"
import { toBase64 } from "~/libs/to-base64"
import { useMessageOption } from "~/hooks/useMessageOption"
-import { Checkbox, Dropdown, Select, Switch, Tooltip } from "antd"
+import { Checkbox, Dropdown, Switch, Tooltip } from "antd"
import { Image } from "antd"
import { useWebUI } from "~/store/webui"
import { defaultEmbeddingModelForRag } from "~/services/ollama"
@@ -13,6 +13,7 @@ import { getVariable } from "~/utils/select-varaible"
import { useTranslation } from "react-i18next"
import { KnowledgeSelect } from "../Knowledge/KnowledgeSelect"
import { useSpeechRecognition } from "@/hooks/useSpeechRecognition"
+import { PiGlobe } from "react-icons/pi"

type Props = {
  dropedFile: File | undefined
@@ -250,19 +251,7 @@ export const PlaygroundForm = ({ dropedFile }: Props) => {
            {!selectedKnowledge && (
              <Tooltip title={t("tooltip.searchInternet")}>
                <div className="inline-flex items-center gap-2">
-                  <svg
-                    xmlns="http://www.w3.org/2000/svg"
-                    fill="none"
-                    viewBox="0 0 24 24"
-                    strokeWidth={1.5}
-                    stroke="currentColor"
-                    className="w-5 h-5 dark:text-gray-300">
-                    <path
-                      strokeLinecap="round"
-                      strokeLinejoin="round"
-                      d="M12 21a9.004 9.004 0 0 0 8.716-6.747M12 21a9.004 9.004 0 0 1-8.716-6.747M12 21c2.485 0 4.5-4.03 4.5-9S14.485 3 12 3m0 18c-2.485 0-4.5-4.03-4.5-9S9.515 3 12 3m0 0a8.997 8.997 0 0 1 7.843 4.582M12 3a8.997 8.997 0 0 0-7.843 4.582m15.686 0A11.953 11.953 0 0 1 12 10.5c-2.998 0-5.74-1.1-7.843-2.918m15.686 0A8.959 8.959 0 0 1 21 12c0 .778-.099 1.533-.284 2.253m0 0A17.919 17.919 0 0 1 12 16.5c-3.162 0-6.133-.815-8.716-2.247m0 0A9.015 9.015 0 0 1 3 12c0-1.605.42-3.113 1.157-4.418"
-                    />
-                  </svg>
+                  <PiGlobe className="h-5 w-5 dark:text-gray-300" />
                  <Switch
                    value={webSearch}
                    onChange={(e) => setWebSearch(e)}
@@ -3,11 +3,19 @@ import { PlaygroundMessage } from "~/components/Common/Playground/Message"
import { useMessage } from "~/hooks/useMessage"
import { EmptySidePanel } from "../Chat/empty"
import { useWebUI } from "@/store/webui"
import { MessageSourcePopup } from "@/components/Common/Playground/MessageSourcePopup"

export const SidePanelBody = () => {
-  const { messages, streaming, regenerateLastMessage, editMessage } =
-    useMessage()
+  const {
+    messages,
+    streaming,
+    regenerateLastMessage,
+    editMessage,
+    isSearchingInternet
+  } = useMessage()
  const divRef = React.useRef<HTMLDivElement>(null)
  const [isSourceOpen, setIsSourceOpen] = React.useState(false)
  const [source, setSource] = React.useState<any>(null)
  const { ttsEnabled } = useWebUI()
  React.useEffect(() => {
    if (divRef.current) {
@@ -27,19 +35,26 @@ export const SidePanelBody = () => {
          currentMessageIndex={index}
          totalMessages={messages.length}
          onRengerate={regenerateLastMessage}
-          isProcessing={streaming}
+          isSearchingInternet={isSearchingInternet}
          sources={message.sources}
          onEditFormSubmit={(value) => {
            editMessage(index, value, !message.isBot)
          }}
+          isProcessing={streaming}
          onSourceClick={(data) => {
            setSource(data)
            setIsSourceOpen(true)
          }}
          isTTSEnabled={ttsEnabled}
        />
      ))}
      {import.meta.env.BROWSER === "chrome" ? (
        <div className="w-full h-32 md:h-48 flex-shrink-0"></div>
      ) : (
        <div className="w-full h-48 flex-shrink-0"></div>
      )}
      <div ref={divRef} />
      <MessageSourcePopup
        open={isSourceOpen}
        setOpen={setIsSourceOpen}
        source={source}
      />
    </div>
  )
}
@@ -4,13 +4,14 @@ import React from "react"
import useDynamicTextareaSize from "~/hooks/useDynamicTextareaSize"
import { useMessage } from "~/hooks/useMessage"
import { toBase64 } from "~/libs/to-base64"
-import { Checkbox, Dropdown, Image, Tooltip } from "antd"
+import { Checkbox, Dropdown, Image, Switch, Tooltip } from "antd"
import { useWebUI } from "~/store/webui"
import { defaultEmbeddingModelForRag } from "~/services/ollama"
import { ImageIcon, MicIcon, StopCircleIcon, X } from "lucide-react"
import { useTranslation } from "react-i18next"
import { ModelSelect } from "@/components/Common/ModelSelect"
import { useSpeechRecognition } from "@/hooks/useSpeechRecognition"
+import { PiGlobeX, PiGlobe } from "react-icons/pi"

type Props = {
  dropedFile: File | undefined
@@ -88,6 +89,13 @@ export const SidepanelForm = ({ dropedFile }: Props) => {
          return
        }
      }
+      if (webSearch) {
+        const defaultEM = await defaultEmbeddingModelForRag()
+        if (!defaultEM) {
+          form.setFieldError("message", t("formError.noEmbeddingModel"))
+          return
+        }
+      }
      form.reset()
      textAreaFocus()
      await sendMessage({
@@ -111,7 +119,9 @@ export const SidepanelForm = ({ dropedFile }: Props) => {
    speechToTextLanguage,
    stopStreamingRequest,
    streaming,
-    setChatMode
+    setChatMode,
+    webSearch,
+    setWebSearch
  } = useMessage()

  React.useEffect(() => {
@@ -137,6 +147,30 @@ export const SidepanelForm = ({ dropedFile }: Props) => {
    }
  })

+  React.useEffect(() => {
+    const handleDrop = (e: DragEvent) => {
+      e.preventDefault()
+      if (e.dataTransfer?.items) {
+        for (let i = 0; i < e.dataTransfer.items.length; i++) {
+          if (e.dataTransfer.items[i].type === "text/plain") {
+            e.dataTransfer.items[i].getAsString((text) => {
+              form.setFieldValue("message", text)
+            })
+          }
+        }
+      }
+    }
+    const handleDragOver = (e: DragEvent) => {
+      e.preventDefault()
+    }
+    textareaRef.current?.addEventListener("drop", handleDrop)
+    textareaRef.current?.addEventListener("dragover", handleDragOver)
+    return () => {
+      textareaRef.current?.removeEventListener("drop", handleDrop)
+      textareaRef.current?.removeEventListener("dragover", handleDragOver)
+    }
+  }, [])
+
  return (
    <div className="px-3 pt-3 md:px-6 md:pt-6 bg-gray-50 dark:bg-[#262626] border rounded-t-xl border-black/10 dark:border-gray-600">
      <div
@@ -175,6 +209,13 @@ export const SidepanelForm = ({ dropedFile }: Props) => {
          return
        }
      }
+      if (webSearch) {
+        const defaultEM = await defaultEmbeddingModelForRag()
+        if (!defaultEM) {
+          form.setFieldError("message", t("formError.noEmbeddingModel"))
+          return
+        }
+      }
      await stopListening()
      form.reset()
      textAreaFocus()
@@ -210,6 +251,20 @@ export const SidepanelForm = ({ dropedFile }: Props) => {
            {...form.getInputProps("message")}
          />
          <div className="flex mt-4 justify-end gap-3">
+            <Tooltip title={t("tooltip.searchInternet")}>
+              <button
+                type="button"
+                onClick={() => setWebSearch(!webSearch)}
+                className={`inline-flex items-center gap-2 ${
+                  chatMode === "rag" ? "hidden" : "block"
+                }`}>
+                {webSearch ? (
+                  <PiGlobe className="h-5 w-5 dark:text-gray-300" />
+                ) : (
+                  <PiGlobeX className="h-5 w-5 text-gray-600 dark:text-gray-400" />
+                )}
+              </button>
+            </Tooltip>
            <ModelSelect />
            {browserSupportsSpeechRecognition && (
              <Tooltip title={t("tooltip.speechToText")}>
@@ -19,7 +19,7 @@ function IndexOption() {
        algorithm:
          mode === "dark" ? theme.darkAlgorithm : theme.defaultAlgorithm,
        token: {
-          fontFamily: i18n.language === "ru" ? "Onest" : "Inter"
+          fontFamily: "Arimo"
        }
      }}
      renderEmpty={() => (
@@ -2,7 +2,7 @@
<html>
  <head>
    <title>Page Assist - A Web UI for Local AI Models</title>
-    <meta name="viewport" content="width=device-width, initial-scale=1.0" />
+    <meta name="viewport" content="width=device-width, initial-scale=1">
    <meta name="manifest.type" content="browser_action" />
    <link href="~/assets/tailwind.css" rel="stylesheet" />
    <meta charset="utf-8" />
@@ -20,7 +20,7 @@ function IndexSidepanel() {
        algorithm:
          mode === "dark" ? theme.darkAlgorithm : theme.defaultAlgorithm,
        token: {
-          fontFamily: i18n.language === "ru" ? "Onest" : "Inter"
+          fontFamily: "Arimo"
        }
      }}
      renderEmpty={() => (
@@ -2,7 +2,7 @@
<html>
  <head>
    <title>Page Assist - A Web UI for Local AI Models</title>
-    <meta name="viewport" content="width=device-width, initial-scale=1.0" />
+    <meta name="viewport" content="width=device-width, initial-scale=1">
    <meta name="manifest.type" content="browser_action" />
    <meta name="manifest.open_at_install" content="false" />
    <meta name="manifest.browser_style" content="false" />
@@ -2,11 +2,12 @@ import React from "react"
import { cleanUrl } from "~/libs/clean-url"
import {
  defaultEmbeddingModelForRag,
+  geWebSearchFollowUpPrompt,
  getOllamaURL,
  promptForRag,
  systemPromptForNonRag
} from "~/services/ollama"
-import { type Message } from "~/store/option"
+import { useStoreMessageOption, type Message } from "~/store/option"
import { useStoreMessage } from "~/store"
import { HumanMessage, SystemMessage } from "@langchain/core/messages"
import { getDataFromCurrentTab } from "~/libs/get-html"
@@ -29,6 +30,7 @@ import { useStorage } from "@plasmohq/storage/hook"
import { useStoreChatModelSettings } from "@/store/model"
import { ChatOllama } from "@/models/ChatOllama"
import { getAllDefaultModelSettings } from "@/services/model-settings"
+import { getSystemPromptForWeb } from "@/web/web"

export const useMessage = () => {
  const {
@@ -42,6 +44,9 @@ export const useMessage = () => {
  const { t } = useTranslation("option")
  const [selectedModel, setSelectedModel] = useStorage("selectedModel")
  const currentChatModelSettings = useStoreChatModelSettings()
+  const { setIsSearchingInternet, webSearch, setWebSearch, isSearchingInternet } =
+    useStoreMessageOption()
+
  const {
    history,
    setHistory,
@@ -571,6 +576,249 @@ export const useMessage = () => {
    }
  }

+  const searchChatMode = async (
+    message: string,
+    image: string,
+    isRegenerate: boolean,
+    messages: Message[],
+    history: ChatHistory,
+    signal: AbortSignal
+  ) => {
+    const url = await getOllamaURL()
+    setStreaming(true)
+    const userDefaultModelSettings = await getAllDefaultModelSettings()
+    if (image.length > 0) {
+      image = `data:image/jpeg;base64,${image.split(",")[1]}`
+    }
+
+    const ollama = new ChatOllama({
+      model: selectedModel!,
+      baseUrl: cleanUrl(url),
+      keepAlive:
+        currentChatModelSettings?.keepAlive ??
+        userDefaultModelSettings?.keepAlive,
+      temperature:
+        currentChatModelSettings?.temperature ??
+        userDefaultModelSettings?.temperature,
+      topK: currentChatModelSettings?.topK ?? userDefaultModelSettings?.topK,
+      topP: currentChatModelSettings?.topP ?? userDefaultModelSettings?.topP,
+      numCtx:
+        currentChatModelSettings?.numCtx ?? userDefaultModelSettings?.numCtx,
+      seed: currentChatModelSettings?.seed
+    })
+
+    let newMessage: Message[] = []
+    let generateMessageId = generateID()
+
+    if (!isRegenerate) {
+      newMessage = [
+        ...messages,
+        {
+          isBot: false,
+          name: "You",
+          message,
+          sources: [],
+          images: [image]
+        },
+        {
+          isBot: true,
+          name: selectedModel,
+          message: "▋",
+          sources: [],
+          id: generateMessageId
+        }
+      ]
+    } else {
+      newMessage = [
+        ...messages,
+        {
+          isBot: true,
+          name: selectedModel,
+          message: "▋",
+          sources: [],
+          id: generateMessageId
+        }
+      ]
+    }
+    setMessages(newMessage)
+    let fullText = ""
+    let contentToSave = ""
+
+    try {
+      setIsSearchingInternet(true)
+
+      let query = message
+
+      if (newMessage.length > 2) {
+        let questionPrompt = await geWebSearchFollowUpPrompt()
+        const lastTenMessages = newMessage.slice(-10)
+        lastTenMessages.pop()
+        const chat_history = lastTenMessages
+          .map((message) => {
+            return `${message.isBot ? "Assistant: " : "Human: "}${message.message}`
+          })
+          .join("\n")
+        const promptForQuestion = questionPrompt
+          .replaceAll("{chat_history}", chat_history)
+          .replaceAll("{question}", message)
+        const questionOllama = new ChatOllama({
+          model: selectedModel!,
+          baseUrl: cleanUrl(url),
+          keepAlive:
+            currentChatModelSettings?.keepAlive ??
+            userDefaultModelSettings?.keepAlive,
+          temperature:
+            currentChatModelSettings?.temperature ??
+            userDefaultModelSettings?.temperature,
+          topK:
+            currentChatModelSettings?.topK ?? userDefaultModelSettings?.topK,
+          topP:
+            currentChatModelSettings?.topP ?? userDefaultModelSettings?.topP,
+          numCtx:
+            currentChatModelSettings?.numCtx ??
+            userDefaultModelSettings?.numCtx,
+          seed: currentChatModelSettings?.seed
+        })
+        const response = await questionOllama.invoke(promptForQuestion)
+        query = response.content.toString()
+      }
+
+      const { prompt, source } = await getSystemPromptForWeb(query)
+      setIsSearchingInternet(false)
+
+      // message = message.trim().replaceAll("\n", " ")
+
+      let humanMessage = new HumanMessage({
+        content: [
+          {
+            text: message,
+            type: "text"
+          }
+        ]
+      })
+      if (image.length > 0) {
+        humanMessage = new HumanMessage({
+          content: [
+            {
+              text: message,
+              type: "text"
+            },
+            {
+              image_url: image,
+              type: "image_url"
+            }
+          ]
+        })
+      }
+
+      const applicationChatHistory = generateHistory(history)
+
+      if (prompt) {
+        applicationChatHistory.unshift(
+          new SystemMessage({
+            content: [
+              {
+                text: prompt,
+                type: "text"
+              }
+            ]
+          })
+        )
+      }
+
+      const chunks = await ollama.stream(
+        [...applicationChatHistory, humanMessage],
+        {
+          signal: signal
+        }
+      )
+      let count = 0
+      for await (const chunk of chunks) {
+        contentToSave += chunk.content
+        fullText += chunk.content
+        if (count === 0) {
+          setIsProcessing(true)
+        }
+        setMessages((prev) => {
+          return prev.map((message) => {
+            if (message.id === generateMessageId) {
+              return {
+                ...message,
+                message: fullText + "▋"
+              }
+            }
+            return message
+          })
+        })
+        count++
+      }
+      // update the message with the full text
+      setMessages((prev) => {
+        return prev.map((message) => {
+          if (message.id === generateMessageId) {
+            return {
+              ...message,
+              message: fullText,
+              sources: source
+            }
+          }
+          return message
+        })
+      })
+
+      setHistory([
+        ...history,
+        {
+          role: "user",
+          content: message,
+          image
+        },
+        {
+          role: "assistant",
+          content: fullText
+        }
+      ])
+
+      await saveMessageOnSuccess({
+        historyId,
+        setHistoryId,
+        isRegenerate,
+        selectedModel: selectedModel,
+        message,
+        image,
+        fullText,
+        source
+      })
+
+      setIsProcessing(false)
+      setStreaming(false)
+    } catch (e) {
+      const errorSave = await saveMessageOnError({
+        e,
+        botMessage: fullText,
+        history,
+        historyId,
+        image,
+        selectedModel,
+        setHistory,
+        setHistoryId,
+        userMessage: message,
+        isRegenerating: isRegenerate
+      })
+
+      if (!errorSave) {
+        notification.error({
+          message: t("error"),
+          description: e?.message || t("somethingWentWrong")
+        })
+      }
+      setIsProcessing(false)
+      setStreaming(false)
+    } finally {
+      setAbortController(null)
+    }
+  }
+
  const onSubmit = async ({
    message,
    image,
@@ -597,6 +845,16 @@ export const useMessage = () => {
    }

    if (chatMode === "normal") {
+      if (webSearch) {
+        await searchChatMode(
+          message,
+          image,
+          isRegenerate || false,
+          messages,
+          memory || history,
+          signal
+        )
+      } else {
        await normalChatMode(
          message,
          image,
@@ -605,6 +863,7 @@ export const useMessage = () => {
          memory || history,
          signal
        )
+      }
    } else {
      const newEmbeddingController = new AbortController()
      let embeddingSignal = newEmbeddingController.signal
@@ -714,6 +973,9 @@ export const useMessage = () => {
    isEmbedding,
    speechToTextLanguage,
    setSpeechToTextLanguage,
-    regenerateLastMessage
+    regenerateLastMessage,
+    webSearch,
+    setWebSearch,
+    isSearchingInternet,
  }
}
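Note: the new searchChatMode path above leans on getSystemPromptForWeb from "@/web/web", which is not part of this diff. Judging only from the call site, it takes the (possibly rephrased) query and returns a filled-in system prompt plus the list of web sources that later ends up on the bot message. A rough sketch of that assumed contract; the field names come from the destructuring at the call site, while the WebSearchSource shape is purely illustrative:

    // Assumed shape, inferred from:
    //   const { prompt, source } = await getSystemPromptForWeb(query)
    type WebSearchSource = { url?: string; title?: string; content?: string }

    declare function getSystemPromptForWeb(query: string): Promise<{
      prompt: string            // system prompt with {search_results} already substituted
      source: WebSearchSource[] // attached to the message as `sources`
    }>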
@@ -10,10 +10,7 @@ export const OptionRouting = () => {
  const { i18n } = useTranslation()

  return (
-    <div
-      className={`${mode === "dark" ? "dark" : "light"} ${
-        i18n.language === "ru" ? "onest" : "inter"
-      }`}>
+    <div className={`${mode === "dark" ? "dark" : "light"} arimo`}>
      <Suspense fallback={<PageAssistLoader />}>
        {import.meta.env.BROWSER === "chrome" ? (
          <OptionRoutingChrome />
@@ -30,10 +27,7 @@ export const SidepanelRouting = () => {
  const { i18n } = useTranslation()

  return (
-    <div
-      className={`${mode === "dark" ? "dark" : "light"} ${
-        i18n.language === "ru" ? "onest" : "inter"
-      }`}>
+    <div className={`${mode === "dark" ? "dark" : "light"} arimo`}>
      <Suspense fallback={<PageAssistLoader />}>
        {import.meta.env.BROWSER === "chrome" ? (
          <SidepanelRoutingChrome />
@@ -13,11 +13,42 @@ const DEFAULT_RAG_QUESTION_PROMPT =

const DEFAUTL_RAG_SYSTEM_PROMPT = `You are a helpful AI assistant. Use the following pieces of context to answer the question at the end. If you don't know the answer, just say you don't know. DO NOT try to make up an answer. If the question is not related to the context, politely respond that you are tuned to only answer questions that are related to the context. {context} Question: {question} Helpful answer:`

-const DEFAULT_WEBSEARCH_PROMP = `You are a helpful assistant that can answer any questions. You can use the following search results in case you want to answer questions about anything in real-time. The current date and time are {current_date_time}.
+const DEFAULT_WEBSEARCH_PROMP = `You are an AI model who is expert at searching the web and answering user's queries.

-Search results:
+Generate a response that is informative and relevant to the user's query based on provided search results. the current date and time are {current_date_time}.

-{search_results}`
+\`search-results\` block provides knowledge from the web search results. You can use this information to generate a meaningful response.
+
+<search-results>
+{search_results}
+</search-results>
+`
+
+const DEFAULT_WEBSEARCH_FOLLOWUP_PROMPT = `You will give a follow-up question. You need to rephrase the follow-up question if needed so it is a standalone question that can be used by the AI model to search the internet.
+
+Example:
+
+Follow-up question: What are the symptoms of a heart attack?
+
+Rephrased question: Symptoms of a heart attack.
+
+Follow-up question: Where is the upcoming Olympics being held?
+
+Rephrased question: Location of the upcoming Olympics.
+
+Follow-up question: Taylor Swift's latest album?
+
+Rephrased question: Name of Taylor Swift's latest album.
+
+
+Previous Conversation:
+
+{chat_history}
+
+Follow-up question: {question}
+
+Rephrased question:
+`

export const getOllamaURL = async () => {
  const ollamaURL = await storage.get("ollamaURL")
@@ -289,7 +320,7 @@ export const setWebSearchPrompt = async (prompt: string) => {
export const geWebSearchFollowUpPrompt = async () => {
  const prompt = await storage.get("webSearchFollowUpPrompt")
  if (!prompt || prompt.length === 0) {
-    return DEFAULT_RAG_QUESTION_PROMPT
+    return DEFAULT_WEBSEARCH_FOLLOWUP_PROMPT
  }
  return prompt
}
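Note: the placeholders in these templates ({current_date_time}, {search_results}, {chat_history}, {question}) are plain string markers. The hook code above fills the follow-up template with String.prototype.replaceAll, and presumably the same substitution style applies to the system prompt inside getSystemPromptForWeb (not shown in this diff). A minimal sketch of that substitution; the helper name fillWebSearchPrompt is illustrative and not part of the codebase:

    // Illustrative helper: fill the web-search system prompt the same way the
    // follow-up prompt is filled in useMessage (via replaceAll on the template).
    const fillWebSearchPrompt = (template: string, searchResults: string): string =>
      template
        .replaceAll("{current_date_time}", new Date().toLocaleString())
        .replaceAll("{search_results}", searchResults)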
@@ -3,5 +3,5 @@ module.exports = {
  mode: "jit",
  darkMode: "class",
  content: ["./src/**/*.tsx"],
-  plugins: [require("@tailwindcss/forms"), require("@tailwindcss/typography"),]
+  plugins: [require("@tailwindcss/forms"), require("@tailwindcss/typography")]
}
@@ -48,7 +48,7 @@ export default defineConfig({
  outDir: "build",

  manifest: {
-    version: "1.1.11",
+    version: "1.1.12",
    name:
      process.env.TARGET === "firefox"
        ? "Page Assist - A Web UI for Local AI Models"