refactor: Update version to 1.1.12, fix selectedModel spacing, and update meta viewport tags

n4ze3m 2024-06-08 23:58:40 +05:30
parent 4a5713d6e4
commit 68bd3e651b
9 changed files with 374 additions and 66 deletions

View File

@@ -25,46 +25,6 @@
box-shadow: none !important;
}
.gradient-border {
--borderWidth: 3px;
position: relative;
border-radius: var(--borderWidth);
}
.gradient-border:after {
content: "";
position: absolute;
top: calc(-1 * var(--borderWidth));
left: calc(-1 * var(--borderWidth));
height: calc(100% + var(--borderWidth) * 2);
width: calc(100% + var(--borderWidth) * 2);
background: linear-gradient(
60deg,
#f79533,
#f37055,
#ef4e7b,
#a166ab,
#5073b8,
#1098ad,
#07b39b,
#6fba82
);
border-radius: calc(2 * var(--borderWidth));
z-index: -1;
animation: animatedgradient 3s ease alternate infinite;
background-size: 300% 300%;
}
@keyframes animatedgradient {
0% {
background-position: 0% 50%;
}
50% {
background-position: 100% 50%;
}
100% {
background-position: 0% 50%;
}
}
/* Hide scrollbar for Chrome, Safari and Opera */
.no-scrollbar::-webkit-scrollbar {
display: none;
@@ -75,3 +35,37 @@
-ms-overflow-style: none; /* IE and Edge */
scrollbar-width: none; /* Firefox */
}
@keyframes gradient-border {
0% {
border-image-source: linear-gradient(
45deg,
#f79533,
#f37055,
#ef4e7b,
#a166ab
);
}
50% {
border-image-source: linear-gradient(45deg, #ef4e7b, #a166ab);
}
74% {
border-image-source: linear-gradient(60deg, #5073b8, #1098ad);
}
100% {
border-image-source: linear-gradient(
45deg,
#f79533,
#f37055,
#ef4e7b,
#a166ab
);
}
}
.animated-gradient-border {
border: 4px solid;
border-image-slice: 1;
animation: gradient-border 3s infinite;
border-radius: 10px;
}
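
The new animated-gradient-border class replaces the removed gradient-border pseudo-element approach: instead of layering an animated gradient behind the element, it animates border-image-source through the gradient-border keyframes above. A minimal usage sketch in TSX (hypothetical element; the real call site is the WebSearch component in the next file):

export const Sketch = () => (
  <div className="animated-gradient-border rounded-lg bg-neutral-100 p-1">
    Searching the web…
  </div>
)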

View File

@@ -4,7 +4,7 @@ import { useTranslation } from "react-i18next"
export const WebSearch = () => {
const {t} = useTranslation('common')
return (
<div className="gradient-border mt-4 flex w-56 items-center gap-4 rounded-lg bg-neutral-100 p-1ccc text-slate-900 dark:bg-neutral-800 dark:text-slate-50">
<div className="animated-gradient-border mt-4 flex w-56 items-center gap-4 !rounded-lg bg-neutral-100 p-1 text-slate-900 dark:bg-neutral-800 dark:text-slate-50">
<div className="rounded p-1">
<Globe className="w-6 h-6" />
</div>

View File

@@ -3,11 +3,19 @@ import { PlaygroundMessage } from "~/components/Common/Playground/Message"
import { useMessage } from "~/hooks/useMessage"
import { EmptySidePanel } from "../Chat/empty"
import { useWebUI } from "@/store/webui"
import { MessageSourcePopup } from "@/components/Common/Playground/MessageSourcePopup"
export const SidePanelBody = () => {
const { messages, streaming, regenerateLastMessage, editMessage } =
useMessage()
const {
messages,
streaming,
regenerateLastMessage,
editMessage,
isSearchingInternet
} = useMessage()
const divRef = React.useRef<HTMLDivElement>(null)
const [isSourceOpen, setIsSourceOpen] = React.useState(false)
const [source, setSource] = React.useState<any>(null)
const { ttsEnabled } = useWebUI()
React.useEffect(() => {
if (divRef.current) {
@@ -27,19 +35,26 @@ export const SidePanelBody = () => {
currentMessageIndex={index}
totalMessages={messages.length}
onRengerate={regenerateLastMessage}
isProcessing={streaming}
isSearchingInternet={isSearchingInternet}
sources={message.sources}
onEditFormSubmit={(value) => {
editMessage(index, value, !message.isBot)
}}
isProcessing={streaming}
onSourceClick={(data) => {
setSource(data)
setIsSourceOpen(true)
}}
isTTSEnabled={ttsEnabled}
/>
))}
{import.meta.env.BROWSER === "chrome" ? (
<div className="w-full h-32 md:h-48 flex-shrink-0"></div>
) : (
<div className="w-full h-48 flex-shrink-0"></div>
)}
<div ref={divRef} />
<MessageSourcePopup
open={isSourceOpen}
setOpen={setIsSourceOpen}
source={source}
/>
</div>
)
}
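
MessageSourcePopup itself is not part of this diff; judging only from the call site above, its props are assumed to look roughly like this sketch:

// Hypothetical prop shape inferred from usage; the real component lives in
// @/components/Common/Playground/MessageSourcePopup and may differ.
type MessageSourcePopupProps = {
  open: boolean
  setOpen: (open: boolean) => void
  source: any // the entry passed up from onSourceClick
}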

View File

@@ -4,10 +4,17 @@ import React from "react"
import useDynamicTextareaSize from "~/hooks/useDynamicTextareaSize"
import { useMessage } from "~/hooks/useMessage"
import { toBase64 } from "~/libs/to-base64"
import { Checkbox, Dropdown, Image, Tooltip } from "antd"
import { Checkbox, Dropdown, Image, Switch, Tooltip } from "antd"
import { useWebUI } from "~/store/webui"
import { defaultEmbeddingModelForRag } from "~/services/ollama"
import { ImageIcon, MicIcon, StopCircleIcon, X } from "lucide-react"
import {
ImageIcon,
MicIcon,
StopCircleIcon,
X,
Wifi,
WifiOff
} from "lucide-react"
import { useTranslation } from "react-i18next"
import { ModelSelect } from "@/components/Common/ModelSelect"
import { useSpeechRecognition } from "@/hooks/useSpeechRecognition"
@@ -88,6 +95,13 @@ export const SidepanelForm = ({ dropedFile }: Props) => {
return
}
}
if (webSearch) {
const defaultEM = await defaultEmbeddingModelForRag()
if (!defaultEM) {
form.setFieldError("message", t("formError.noEmbeddingModel"))
return
}
}
form.reset()
textAreaFocus()
await sendMessage({
@@ -111,7 +125,9 @@ export const SidepanelForm = ({ dropedFile }: Props) => {
speechToTextLanguage,
stopStreamingRequest,
streaming,
setChatMode
setChatMode,
webSearch,
setWebSearch
} = useMessage()
React.useEffect(() => {
@@ -175,6 +191,13 @@ export const SidepanelForm = ({ dropedFile }: Props) => {
return
}
}
if (webSearch) {
const defaultEM = await defaultEmbeddingModelForRag()
if (!defaultEM) {
form.setFieldError("message", t("formError.noEmbeddingModel"))
return
}
}
await stopListening()
form.reset()
textAreaFocus()
@@ -211,6 +234,20 @@ export const SidepanelForm = ({ dropedFile }: Props) => {
/>
<div className="flex mt-4 justify-end gap-3">
<ModelSelect />
<Tooltip title={t("tooltip.searchInternet")}>
<button
type="button"
onClick={() => setWebSearch(!webSearch)}
className={`inline-flex items-center gap-2 ${
chatMode === "rag" ? "hidden" : "block"
}`}>
{webSearch ? (
<Wifi className="h-5 w-5 text-gray-900 dark:text-gray-300" />
) : (
<WifiOff className="h-5 w-5 text-gray-600 dark:text-gray-400" />
)}
</button>
</Tooltip>
{browserSupportsSpeechRecognition && (
<Tooltip title={t("tooltip.speechToText")}>
<button

View File

@@ -2,7 +2,7 @@
<html>
<head>
<title>Page Assist - A Web UI for Local AI Models</title>
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
<meta name="viewport" content="width=device-width, initial-scale=1">
<meta name="manifest.type" content="browser_action" />
<link href="~/assets/tailwind.css" rel="stylesheet" />
<meta charset="utf-8" />

View File

@@ -2,7 +2,7 @@
<html>
<head>
<title>Page Assist - A Web UI for Local AI Models</title>
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
<meta name="viewport" content="width=device-width, initial-scale=1">
<meta name="manifest.type" content="browser_action" />
<meta name="manifest.open_at_install" content="false" />
<meta name="manifest.browser_style" content="false" />

View File

@@ -2,11 +2,12 @@ import React from "react"
import { cleanUrl } from "~/libs/clean-url"
import {
defaultEmbeddingModelForRag,
geWebSearchFollowUpPrompt,
getOllamaURL,
promptForRag,
systemPromptForNonRag
} from "~/services/ollama"
import { type Message } from "~/store/option"
import { useStoreMessageOption, type Message } from "~/store/option"
import { useStoreMessage } from "~/store"
import { HumanMessage, SystemMessage } from "@langchain/core/messages"
import { getDataFromCurrentTab } from "~/libs/get-html"
@@ -29,6 +30,7 @@ import { useStorage } from "@plasmohq/storage/hook"
import { useStoreChatModelSettings } from "@/store/model"
import { ChatOllama } from "@/models/ChatOllama"
import { getAllDefaultModelSettings } from "@/services/model-settings"
import { getSystemPromptForWeb } from "@/web/web"
export const useMessage = () => {
const {
@@ -42,6 +44,9 @@ export const useMessage = () => {
const { t } = useTranslation("option")
const [selectedModel, setSelectedModel] = useStorage("selectedModel")
const currentChatModelSettings = useStoreChatModelSettings()
const { setIsSearchingInternet, webSearch, setWebSearch, isSearchingInternet } =
useStoreMessageOption()
const {
history,
setHistory,
@@ -571,6 +576,249 @@ export const useMessage = () => {
}
}
const searchChatMode = async (
message: string,
image: string,
isRegenerate: boolean,
messages: Message[],
history: ChatHistory,
signal: AbortSignal
) => {
const url = await getOllamaURL()
setStreaming(true)
const userDefaultModelSettings = await getAllDefaultModelSettings()
if (image.length > 0) {
image = `data:image/jpeg;base64,${image.split(",")[1]}`
}
const ollama = new ChatOllama({
model: selectedModel!,
baseUrl: cleanUrl(url),
keepAlive:
currentChatModelSettings?.keepAlive ??
userDefaultModelSettings?.keepAlive,
temperature:
currentChatModelSettings?.temperature ??
userDefaultModelSettings?.temperature,
topK: currentChatModelSettings?.topK ?? userDefaultModelSettings?.topK,
topP: currentChatModelSettings?.topP ?? userDefaultModelSettings?.topP,
numCtx:
currentChatModelSettings?.numCtx ?? userDefaultModelSettings?.numCtx,
seed: currentChatModelSettings?.seed
})
let newMessage: Message[] = []
let generateMessageId = generateID()
if (!isRegenerate) {
newMessage = [
...messages,
{
isBot: false,
name: "You",
message,
sources: [],
images: [image]
},
{
isBot: true,
name: selectedModel,
message: "▋",
sources: [],
id: generateMessageId
}
]
} else {
newMessage = [
...messages,
{
isBot: true,
name: selectedModel,
message: "▋",
sources: [],
id: generateMessageId
}
]
}
setMessages(newMessage)
let fullText = ""
let contentToSave = ""
try {
setIsSearchingInternet(true)
let query = message
if (newMessage.length > 2) {
let questionPrompt = await geWebSearchFollowUpPrompt()
const lastTenMessages = newMessage.slice(-10)
lastTenMessages.pop()
const chat_history = lastTenMessages
.map((message) => {
return `${message.isBot ? "Assistant: " : "Human: "}${message.message}`
})
.join("\n")
const promptForQuestion = questionPrompt
.replaceAll("{chat_history}", chat_history)
.replaceAll("{question}", message)
const questionOllama = new ChatOllama({
model: selectedModel!,
baseUrl: cleanUrl(url),
keepAlive:
currentChatModelSettings?.keepAlive ??
userDefaultModelSettings?.keepAlive,
temperature:
currentChatModelSettings?.temperature ??
userDefaultModelSettings?.temperature,
topK:
currentChatModelSettings?.topK ?? userDefaultModelSettings?.topK,
topP:
currentChatModelSettings?.topP ?? userDefaultModelSettings?.topP,
numCtx:
currentChatModelSettings?.numCtx ??
userDefaultModelSettings?.numCtx,
seed: currentChatModelSettings?.seed
})
const response = await questionOllama.invoke(promptForQuestion)
query = response.content.toString()
}
const { prompt, source } = await getSystemPromptForWeb(query)
setIsSearchingInternet(false)
// message = message.trim().replaceAll("\n", " ")
let humanMessage = new HumanMessage({
content: [
{
text: message,
type: "text"
}
]
})
if (image.length > 0) {
humanMessage = new HumanMessage({
content: [
{
text: message,
type: "text"
},
{
image_url: image,
type: "image_url"
}
]
})
}
const applicationChatHistory = generateHistory(history)
if (prompt) {
applicationChatHistory.unshift(
new SystemMessage({
content: [
{
text: prompt,
type: "text"
}
]
})
)
}
const chunks = await ollama.stream(
[...applicationChatHistory, humanMessage],
{
signal: signal
}
)
let count = 0
for await (const chunk of chunks) {
contentToSave += chunk.content
fullText += chunk.content
if (count === 0) {
setIsProcessing(true)
}
setMessages((prev) => {
return prev.map((message) => {
if (message.id === generateMessageId) {
return {
...message,
message: fullText + "▋"
}
}
return message
})
})
count++
}
// update the message with the full text
setMessages((prev) => {
return prev.map((message) => {
if (message.id === generateMessageId) {
return {
...message,
message: fullText,
sources: source
}
}
return message
})
})
setHistory([
...history,
{
role: "user",
content: message,
image
},
{
role: "assistant",
content: fullText
}
])
await saveMessageOnSuccess({
historyId,
setHistoryId,
isRegenerate,
selectedModel: selectedModel,
message,
image,
fullText,
source
})
setIsProcessing(false)
setStreaming(false)
} catch (e) {
const errorSave = await saveMessageOnError({
e,
botMessage: fullText,
history,
historyId,
image,
selectedModel,
setHistory,
setHistoryId,
userMessage: message,
isRegenerating: isRegenerate
})
if (!errorSave) {
notification.error({
message: t("error"),
description: e?.message || t("somethingWentWrong")
})
}
setIsProcessing(false)
setStreaming(false)
} finally {
setAbortController(null)
}
}
const onSubmit = async ({
message,
image,
@@ -597,6 +845,16 @@ export const useMessage = () => {
}
if (chatMode === "normal") {
if (webSearch) {
await searchChatMode(
message,
image,
isRegenerate || false,
messages,
memory || history,
signal
)
} else {
await normalChatMode(
message,
image,
@@ -605,6 +863,7 @@ export const useMessage = () => {
memory || history,
signal
)
}
} else {
const newEmbeddingController = new AbortController()
let embeddingSignal = newEmbeddingController.signal
@@ -714,6 +973,9 @@ export const useMessage = () => {
isEmbedding,
speechToTextLanguage,
setSpeechToTextLanguage,
regenerateLastMessage
regenerateLastMessage,
webSearch,
setWebSearch,
isSearchingInternet,
}
}
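
getSystemPromptForWeb is imported from @/web/web, which is outside this diff. From its use in searchChatMode above, its contract is assumed to be roughly the following:

// Hypothetical signature inferred from the call site; the actual module may differ.
declare function getSystemPromptForWeb(
  query: string
): Promise<{ prompt: string; source: any[] }>

The returned prompt is prepended to the chat history as a SystemMessage, and source is attached to the completed bot message.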

View File

@@ -48,7 +48,7 @@ export default defineConfig({
outDir: "build",
manifest: {
version: "1.1.11",
version: "1.1.12",
name:
process.env.TARGET === "firefox"
? "Page Assist - A Web UI for Local AI Models"