Merge pull request #270 from n4ze3m/next

v1.3.7
Muhammed Nazeem, 2024-12-08 23:38:10 +05:30 (committed by GitHub)
commit a0275201d3
7 changed files with 188 additions and 75 deletions

View File

@@ -6,14 +6,24 @@ import { webUIResumeLastChat } from "@/services/app"
import {
formatToChatHistory,
formatToMessage,
getPromptById,
getRecentChatFromWebUI
} from "@/db"
import { getLastUsedChatSystemPrompt } from "@/services/model-settings"
import { useStoreChatModelSettings } from "@/store/model"
export const Playground = () => {
const drop = React.useRef<HTMLDivElement>(null)
const [dropedFile, setDropedFile] = React.useState<File | undefined>()
const { selectedKnowledge, messages, setHistoryId, setHistory, setMessages } =
useMessageOption()
const {
selectedKnowledge,
messages,
setHistoryId,
setHistory,
setMessages,
setSelectedSystemPrompt
} = useMessageOption()
const { setSystemPrompt } = useStoreChatModelSettings()
const [dropState, setDropState] = React.useState<
"idle" | "dragging" | "error"
@@ -90,6 +100,19 @@ export const Playground = () => {
setHistoryId(recentChat.history.id)
setHistory(formatToChatHistory(recentChat.messages))
setMessages(formatToMessage(recentChat.messages))
const lastUsedPrompt = await getLastUsedChatSystemPrompt(
recentChat.history.id
)
if (lastUsedPrompt) {
if (lastUsedPrompt.prompt_id) {
const prompt = await getPromptById(lastUsedPrompt.prompt_id)
if (prompt) {
setSelectedSystemPrompt(lastUsedPrompt.prompt_id)
}
}
setSystemPrompt(lastUsedPrompt.prompt_content)
}
}
}
}
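Note: the restore block added above reappears almost verbatim in the Sidebar change below. A minimal sketch (not part of this commit) of a shared helper both call sites could use; the helper name and parameter shapes are assumptions, while the imports mirror the ones in this diff:

import { getPromptById } from "@/db"
import { getLastUsedChatSystemPrompt } from "@/services/model-settings"

// Hypothetical shared helper: look up the system prompt last used with a chat
// history and hand it to the store setters supplied by the caller.
export const restoreLastUsedSystemPrompt = async (
  historyId: string,
  setSelectedSystemPrompt: (promptId: string) => void,
  setSystemPrompt: (prompt?: string) => void
): Promise<void> => {
  const lastUsedPrompt = await getLastUsedChatSystemPrompt(historyId)
  if (!lastUsedPrompt) return
  if (lastUsedPrompt.prompt_id) {
    // Only re-select the saved prompt if it still exists in the local database.
    const prompt = await getPromptById(lastUsedPrompt.prompt_id)
    if (prompt) {
      setSelectedSystemPrompt(lastUsedPrompt.prompt_id)
    }
  }
  setSystemPrompt(lastUsedPrompt.prompt_content)
}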

View File

@@ -5,7 +5,8 @@ import {
formatToMessage,
deleteByHistoryId,
updateHistory,
pinHistory
pinHistory,
getPromptById
} from "@/db"
import { Empty, Skeleton, Dropdown, Menu } from "antd"
import { useMessageOption } from "~/hooks/useMessageOption"
@@ -20,8 +21,10 @@ import { useNavigate } from "react-router-dom"
import { useTranslation } from "react-i18next"
import {
getLastUsedChatModel,
getLastUsedChatSystemPrompt,
lastUsedChatModelEnabled
} from "@/services/model-settings"
import { useStoreChatModelSettings } from "@/store/model"
type Props = {
onClose: () => void
@@ -35,8 +38,12 @@ export const Sidebar = ({ onClose }: Props) => {
historyId,
clearChat,
setSelectedModel,
temporaryChat
temporaryChat,
setSelectedSystemPrompt
} = useMessageOption()
const { setSystemPrompt } = useStoreChatModelSettings()
const { t } = useTranslation(["option", "common"])
const client = useQueryClient()
const navigate = useNavigate()
@@ -127,7 +134,8 @@ export const Sidebar = ({ onClose }: Props) => {
})
return (
<div className={`overflow-y-auto z-99 ${temporaryChat ? 'pointer-events-none opacity-50' : ''}`}>
<div
className={`overflow-y-auto z-99 ${temporaryChat ? "pointer-events-none opacity-50" : ""}`}>
{status === "success" && chatHistories.length === 0 && (
<div className="flex justify-center items-center mt-20 overflow-hidden">
<Empty description={t("common:noHistory")} />
@@ -173,6 +181,19 @@ export const Sidebar = ({ onClose }: Props) => {
setSelectedModel(currentChatModel)
}
}
const lastUsedPrompt =
await getLastUsedChatSystemPrompt(chat.id)
if (lastUsedPrompt) {
if (lastUsedPrompt.prompt_id) {
const prompt = await getPromptById(
lastUsedPrompt.prompt_id
)
if (prompt) {
setSelectedSystemPrompt(lastUsedPrompt.prompt_id)
}
}
setSystemPrompt(lastUsedPrompt.prompt_content)
}
navigate("/")
onClose()
}}>

View File

@@ -2,7 +2,7 @@ import { cleanUrl } from "@/libs/clean-url"
import { useStorage } from "@plasmohq/storage/hook"
import { useQuery, useQueryClient } from "@tanstack/react-query"
import { Select } from "antd"
import { RotateCcw } from "lucide-react"
import { Loader2, RotateCcw } from "lucide-react"
import { useEffect, useState } from "react"
import { Trans, useTranslation } from "react-i18next"
import { useMessage } from "~/hooks/useMessage"
@@ -50,72 +50,8 @@ export const EmptySidePanel = () => {
const { setSelectedModel, selectedModel, chatMode, setChatMode } =
useMessage()
const renderSection = () => {
return (
<div className="mx-auto sm:max-w-md px-4 mt-10">
<div className="rounded-lg justify-center items-center flex flex-col border dark:border-gray-700 p-8 bg-white dark:bg-[#262626] shadow-sm">
{(ollamaStatus === "pending" || isRefetching) && (
<div className="inline-flex items-center space-x-2">
<div className="w-3 h-3 bg-blue-500 rounded-full animate-bounce"></div>
<p className="dark:text-gray-400 text-gray-900">
{t("ollamaState.searching")}
</p>
</div>
)}
{!isRefetching && ollamaStatus === "success" && checkOllamaStatus ? (
ollamaInfo.isOk ? (
<div className="inline-flex items-center space-x-2">
<div className="w-3 h-3 bg-green-500 rounded-full"></div>
<p className="dark:text-gray-400 text-gray-900">
{t("ollamaState.running")}
</p>
</div>
) : (
<div className="flex flex-col space-y-2 justify-center items-center">
<div className="inline-flex space-x-2">
<div className="w-3 h-3 bg-red-500 rounded-full"></div>
<p className="dark:text-gray-400 text-gray-900">
{t("ollamaState.notRunning")}
</p>
</div>
<input
className="bg-gray-100 dark:bg-black dark:text-gray-100 rounded-md px-4 py-2 mt-2 w-full"
type="url"
value={ollamaURL}
onChange={(e) => setOllamaURL(e.target.value)}
/>
<button
onClick={() => {
saveOllamaURL(ollamaURL)
refetch()
}}
className="inline-flex mt-4 items-center rounded-md border border-transparent bg-black px-2 py-2 text-sm font-medium leading-4 text-white shadow-sm hover:bg-indigo-700 focus:outline-none focus:ring-2 focus:ring-indigo-500 focus:ring-offset-2 dark:bg-white dark:text-gray-800 dark:hover:bg-gray-100 dark:focus:ring-gray-500 dark:focus:ring-offset-gray-100 disabled:opacity-50 ">
<RotateCcw className="h-4 w-4 mr-3" />
{t("common:retry")}
</button>
{ollamaURL &&
cleanUrl(ollamaURL) !== "http://127.0.0.1:11434" && (
<p className="text-xs text-gray-700 dark:text-gray-400 mb-4 text-center">
<Trans
i18nKey="playground:ollamaState.connectionError"
components={{
anchor: (
<a
href="https://github.com/n4ze3m/page-assist/blob/main/docs/connection-issue.md"
target="__blank"
className="text-blue-600 dark:text-blue-400"></a>
)
}}
/>
</p>
)}
</div>
)
) : null}
{ollamaStatus === "success" && ollamaInfo.isOk && (
<div className="mt-4">
<Select
onChange={(e) => {
@@ -175,7 +111,93 @@ export const EmptySidePanel = () => {
</div>
</div>
</div>
)
}
if (!checkOllamaStatus) {
return (
<div className="mx-auto sm:max-w-md px-4 mt-10">
<div className="rounded-lg justify-center items-center flex flex-col border dark:border-gray-700 p-8 bg-white dark:bg-[#262626] shadow-sm">
<div className="inline-flex items-center space-x-2">
<p className="dark:text-gray-400 text-gray-900">
<span>👋</span>
{t("welcome")}
</p>
</div>
{ollamaStatus === "pending" && (
<Loader2 className="mr-2 h-4 w-4 animate-spin" />
)}
{ollamaStatus === "success" && ollamaInfo.isOk && renderSection()}
</div>
</div>
)
}
return (
<div className="mx-auto sm:max-w-md px-4 mt-10">
<div className="rounded-lg justify-center items-center flex flex-col border dark:border-gray-700 p-8 bg-white dark:bg-[#262626] shadow-sm">
{(ollamaStatus === "pending" || isRefetching) && (
<div className="inline-flex items-center space-x-2">
<div className="w-3 h-3 bg-blue-500 rounded-full animate-bounce"></div>
<p className="dark:text-gray-400 text-gray-900">
{t("ollamaState.searching")}
</p>
</div>
)}
{!isRefetching && ollamaStatus === "success" ? (
ollamaInfo.isOk ? (
<div className="inline-flex items-center space-x-2">
<div className="w-3 h-3 bg-green-500 rounded-full"></div>
<p className="dark:text-gray-400 text-gray-900">
{t("ollamaState.running")}
</p>
</div>
) : (
<div className="flex flex-col space-y-2 justify-center items-center">
<div className="inline-flex space-x-2">
<div className="w-3 h-3 bg-red-500 rounded-full"></div>
<p className="dark:text-gray-400 text-gray-900">
{t("ollamaState.notRunning")}
</p>
</div>
<input
className="bg-gray-100 dark:bg-black dark:text-gray-100 rounded-md px-4 py-2 mt-2 w-full"
type="url"
value={ollamaURL}
onChange={(e) => setOllamaURL(e.target.value)}
/>
<button
onClick={() => {
saveOllamaURL(ollamaURL)
refetch()
}}
className="inline-flex mt-4 items-center rounded-md border border-transparent bg-black px-2 py-2 text-sm font-medium leading-4 text-white shadow-sm hover:bg-indigo-700 focus:outline-none focus:ring-2 focus:ring-indigo-500 focus:ring-offset-2 dark:bg-white dark:text-gray-800 dark:hover:bg-gray-100 dark:focus:ring-gray-500 dark:focus:ring-offset-gray-100 disabled:opacity-50 ">
<RotateCcw className="h-4 w-4 mr-3" />
{t("common:retry")}
</button>
{ollamaURL &&
cleanUrl(ollamaURL) !== "http://127.0.0.1:11434" && (
<p className="text-xs text-gray-700 dark:text-gray-400 mb-4 text-center">
<Trans
i18nKey="playground:ollamaState.connectionError"
components={{
anchor: (
<a
href="https://github.com/n4ze3m/page-assist/blob/main/docs/connection-issue.md"
target="__blank"
className="text-blue-600 dark:text-blue-400"></a>
)
}}
/>
</p>
)}
</div>
)
) : null}
{ollamaStatus === "success" && ollamaInfo.isOk && renderSection()}
</div>
</div>
)
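Note: the reworked side panel now returns early with a short welcome view when the user has disabled the Ollama status check, and only otherwise falls through to the full status panel. A stripped-down sketch of that top-level branching (not part of this commit; component and prop names here are made up for illustration):

import React from "react"
import { Loader2 } from "lucide-react"

// Hypothetical skeleton of the restructured render logic. `checkOllamaStatus`
// is the user setting; when it is off, only a welcome note and (once Ollama
// responds) the model selector section are shown.
const SidePanelSkeleton = ({
  checkOllamaStatus,
  ollamaStatus,
  isOk,
  renderSection
}: {
  checkOllamaStatus: boolean
  ollamaStatus: "pending" | "success" | "error"
  isOk: boolean
  renderSection: () => React.ReactNode
}) => {
  if (!checkOllamaStatus) {
    return (
      <div>
        <p>👋 welcome</p>
        {ollamaStatus === "pending" && <Loader2 className="h-4 w-4 animate-spin" />}
        {ollamaStatus === "success" && isOk && renderSection()}
      </div>
    )
  }
  // Status checks enabled: show searching / running / not-running states and
  // the model selector section once Ollama is reachable.
  return (
    <div>
      {ollamaStatus === "pending" && <p>searching…</p>}
      {ollamaStatus === "success" &&
        (isOk ? <p>running</p> : <p>not running, update the Ollama URL and retry</p>)}
      {ollamaStatus === "success" && isOk && renderSection()}
    </div>
  )
}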

View File

@@ -1,5 +1,5 @@
import { saveHistory, saveMessage } from "@/db"
import { setLastUsedChatModel } from "@/services/model-settings"
import { setLastUsedChatModel, setLastUsedChatSystemPrompt } from "@/services/model-settings"
import { generateTitle } from "@/services/title"
import { ChatHistory } from "@/store/option"
@@ -15,7 +15,9 @@ export const saveMessageOnError = async ({
setHistoryId,
isRegenerating,
message_source = "web-ui",
message_type
message_type,
prompt_content,
prompt_id
}: {
e: any
setHistory: (history: ChatHistory) => void
@@ -29,6 +31,8 @@ export const saveMessageOnError = async ({
isRegenerating: boolean
message_source?: "copilot" | "web-ui"
message_type?: string
prompt_id?: string
prompt_content?: string
}) => {
if (
e?.name === "AbortError" ||
@@ -73,6 +77,9 @@ export const saveMessageOnError = async ({
message_type
)
await setLastUsedChatModel(historyId, selectedModel)
if (prompt_id || prompt_content) {
await setLastUsedChatSystemPrompt(historyId, { prompt_content, prompt_id })
}
} else {
const title = await generateTitle(selectedModel, userMessage, userMessage)
const newHistoryId = await saveHistory(title, false, message_source)
@@ -100,6 +107,9 @@ export const saveMessageOnError = async ({
)
setHistoryId(newHistoryId.id)
await setLastUsedChatModel(newHistoryId.id, selectedModel)
if (prompt_id || prompt_content) {
await setLastUsedChatSystemPrompt(newHistoryId.id, { prompt_content, prompt_id })
}
}
return true
@@ -118,7 +128,9 @@ export const saveMessageOnSuccess = async ({
fullText,
source,
message_source = "web-ui",
message_type, generationInfo
message_type, generationInfo,
prompt_id,
prompt_content
}: {
historyId: string | null
setHistoryId: (historyId: string) => void
@@ -131,6 +143,8 @@ export const saveMessageOnSuccess = async ({
message_source?: "copilot" | "web-ui",
message_type?: string
generationInfo?: any
prompt_id?: string
prompt_content?: string
}) => {
if (historyId) {
if (!isRegenerate) {
@@ -158,6 +172,9 @@ export const saveMessageOnSuccess = async ({
generationInfo
)
await setLastUsedChatModel(historyId, selectedModel!)
if (prompt_id || prompt_content) {
await setLastUsedChatSystemPrompt(historyId, { prompt_content, prompt_id })
}
} else {
const title = await generateTitle(selectedModel, message, message)
const newHistoryId = await saveHistory(title, false, message_source)
@@ -185,5 +202,8 @@ export const saveMessageOnSuccess = async ({
)
setHistoryId(newHistoryId.id)
await setLastUsedChatModel(newHistoryId.id, selectedModel!)
if (prompt_id || prompt_content) {
await setLastUsedChatSystemPrompt(newHistoryId.id, { prompt_content, prompt_id })
}
}
}
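Note: the optional pair { prompt_id?: string; prompt_content?: string } now recurs in the parameter lists above and in the storage helpers later in this commit. A hypothetical shared alias (not part of this commit), should the repetition be worth naming:

// Hypothetical shared shape for the per-chat system prompt record that
// saveMessageOnSuccess / saveMessageOnError persist and the UI later restores.
export type LastUsedSystemPrompt = {
  prompt_id?: string      // id of a saved prompt, when one was selected
  prompt_content?: string // the prompt text that was actually sent
}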

View File

@@ -411,6 +411,8 @@ export const useMessageOption = () => {
) => {
const url = await getOllamaURL()
const userDefaultModelSettings = await getAllDefaultModelSettings()
let promptId: string | undefined = selectedSystemPrompt
let promptContent: string | undefined = undefined
if (image.length > 0) {
image = `data:image/jpeg;base64,${image.split(",")[1]}`
@@ -525,6 +527,7 @@ export const useMessageOption = () => {
content: selectedPrompt.content
})
)
promptContent = selectedPrompt.content
}
if (isTempSystemprompt) {
@@ -533,6 +536,7 @@ export const useMessageOption = () => {
content: currentChatModelSettings.systemPrompt
})
)
promptContent = currentChatModelSettings.systemPrompt
}
let generationInfo: any | undefined = undefined
@@ -611,7 +615,9 @@ export const useMessageOption = () => {
image,
fullText,
source: [],
generationInfo
generationInfo,
prompt_content: promptContent,
prompt_id: promptId
})
setIsProcessing(false)
@@ -629,7 +635,9 @@ export const useMessageOption = () => {
setHistory,
setHistoryId,
userMessage: message,
isRegenerating: isRegenerate
isRegenerating: isRegenerate,
prompt_content: promptContent,
prompt_id: promptId
})
if (!errorSave) {
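Note: this hunk makes the submit path record which system prompt was actually used so it can be handed to the save helpers above. A condensed sketch of that bookkeeping (not part of this commit; the function name and the temporary-prompt parameter are assumptions based on the surrounding code):

// Hypothetical condensation of the bookkeeping added here: keep the id of the
// selected saved prompt plus the text that was injected into the conversation,
// whether it came from a saved prompt or from a temporary chat-level prompt.
const resolvePromptForSave = (
  selectedSystemPrompt: string | undefined,
  selectedPromptContent: string | undefined,
  isTempSystemprompt: boolean,
  temporarySystemPrompt: string | undefined
): { prompt_id?: string; prompt_content?: string } => {
  let prompt_content = selectedPromptContent
  if (isTempSystemprompt) {
    prompt_content = temporarySystemPrompt
  }
  return { prompt_id: selectedSystemPrompt, prompt_content }
}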

View File

@@ -125,4 +125,23 @@ export const setLastUsedChatModel = async (
await storage.set(`lastUsedChatModel-${historyId}`, model)
}
export const getLastUsedChatSystemPrompt = async (
historyId: string
): Promise<{ prompt_id?: string; prompt_content?: string } | undefined> => {
return await storage.get<{ prompt_id?: string; prompt_content?: string } | undefined>(
`lastUsedChatSystemPrompt-${historyId}`
)
}
export const setLastUsedChatSystemPrompt = async (
historyId: string,
prompt: {
prompt_id?: string
prompt_content?: string
}
): Promise<void> => {
await storage.set(`lastUsedChatSystemPrompt-${historyId}`, prompt)
}
export { getAllModelSettings, setModelSetting }
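Note: a quick usage sketch of the two new helpers (not part of this commit; the ids below are placeholders):

import {
  getLastUsedChatSystemPrompt,
  setLastUsedChatSystemPrompt
} from "@/services/model-settings"

// Persist the prompt that was active for a chat, then read it back.
// The record keeps both the prompt id (for prompts stored locally)
// and the raw prompt text (for ad-hoc or temporary system prompts).
const example = async () => {
  const historyId = "history-123" // placeholder id

  await setLastUsedChatSystemPrompt(historyId, {
    prompt_id: "prompt-abc", // optional: a saved prompt
    prompt_content: "You are a helpful assistant."
  })

  const last = await getLastUsedChatSystemPrompt(historyId)
  if (last?.prompt_content) {
    console.log(last.prompt_content)
  }
}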

View File

@@ -50,7 +50,7 @@ export default defineConfig({
outDir: "build",
manifest: {
version: "1.3.6",
version: "1.3.7",
name:
process.env.TARGET === "firefox"
? "Page Assist - A Web UI for Local AI Models"