feat: Add support for saving and restoring last used system prompt
commit bd08526935
parent d45b279348

@@ -6,14 +6,24 @@ import { webUIResumeLastChat } from "@/services/app"
 import {
   formatToChatHistory,
   formatToMessage,
+  getPromptById,
   getRecentChatFromWebUI
 } from "@/db"
+import { getLastUsedChatSystemPrompt } from "@/services/model-settings"
+import { useStoreChatModelSettings } from "@/store/model"
 
 export const Playground = () => {
   const drop = React.useRef<HTMLDivElement>(null)
   const [dropedFile, setDropedFile] = React.useState<File | undefined>()
-  const { selectedKnowledge, messages, setHistoryId, setHistory, setMessages } =
-    useMessageOption()
+  const {
+    selectedKnowledge,
+    messages,
+    setHistoryId,
+    setHistory,
+    setMessages,
+    setSelectedSystemPrompt
+  } = useMessageOption()
+  const { setSystemPrompt } = useStoreChatModelSettings()
 
   const [dropState, setDropState] = React.useState<
     "idle" | "dragging" | "error"
@@ -90,6 +100,19 @@ export const Playground = () => {
       setHistoryId(recentChat.history.id)
       setHistory(formatToChatHistory(recentChat.messages))
       setMessages(formatToMessage(recentChat.messages))
+
+      const lastUsedPrompt = await getLastUsedChatSystemPrompt(
+        recentChat.history.id
+      )
+      if (lastUsedPrompt) {
+        if (lastUsedPrompt.prompt_id) {
+          const prompt = await getPromptById(lastUsedPrompt.prompt_id)
+          if (prompt) {
+            setSelectedSystemPrompt(lastUsedPrompt.prompt_id)
+          }
+        }
+        setSystemPrompt(lastUsedPrompt.prompt_content)
+      }
     }
   }
 }
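
The lastUsedPrompt record that the Playground reads back above is the same { prompt_id?, prompt_content? } object that the save path writes later in this commit. A minimal sketch of its shape, with an illustrative type alias and placeholder values that are not part of the commit:

// Illustrative only: the alias name and the literal values are placeholders;
// the field shape matches getLastUsedChatSystemPrompt's return type.
type LastUsedSystemPrompt = {
  prompt_id?: string      // set when a saved prompt from the DB was selected
  prompt_content?: string // the system prompt text that was actually applied
}

const example: LastUsedSystemPrompt = {
  prompt_id: "prompt-123",
  prompt_content: "You are a helpful assistant."
}
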
@@ -5,7 +5,8 @@ import {
   formatToMessage,
   deleteByHistoryId,
   updateHistory,
-  pinHistory
+  pinHistory,
+  getPromptById
 } from "@/db"
 import { Empty, Skeleton, Dropdown, Menu } from "antd"
 import { useMessageOption } from "~/hooks/useMessageOption"
@@ -20,8 +21,10 @@ import { useNavigate } from "react-router-dom"
 import { useTranslation } from "react-i18next"
 import {
   getLastUsedChatModel,
+  getLastUsedChatSystemPrompt,
   lastUsedChatModelEnabled
 } from "@/services/model-settings"
+import { useStoreChatModelSettings } from "@/store/model"
 
 type Props = {
   onClose: () => void
@@ -35,8 +38,12 @@ export const Sidebar = ({ onClose }: Props) => {
     historyId,
     clearChat,
     setSelectedModel,
-    temporaryChat
+    temporaryChat,
+    setSelectedSystemPrompt
   } = useMessageOption()
+
+  const { setSystemPrompt } = useStoreChatModelSettings()
+
   const { t } = useTranslation(["option", "common"])
   const client = useQueryClient()
   const navigate = useNavigate()
@@ -127,7 +134,8 @@ export const Sidebar = ({ onClose }: Props) => {
   })
 
   return (
-    <div className={`overflow-y-auto z-99 ${temporaryChat ? 'pointer-events-none opacity-50' : ''}`}>
+    <div
+      className={`overflow-y-auto z-99 ${temporaryChat ? "pointer-events-none opacity-50" : ""}`}>
       {status === "success" && chatHistories.length === 0 && (
         <div className="flex justify-center items-center mt-20 overflow-hidden">
           <Empty description={t("common:noHistory")} />
@@ -173,6 +181,19 @@ export const Sidebar = ({ onClose }: Props) => {
                       setSelectedModel(currentChatModel)
                     }
                   }
+                  const lastUsedPrompt =
+                    await getLastUsedChatSystemPrompt(chat.id)
+                  if (lastUsedPrompt) {
+                    if (lastUsedPrompt.prompt_id) {
+                      const prompt = await getPromptById(
+                        lastUsedPrompt.prompt_id
+                      )
+                      if (prompt) {
+                        setSelectedSystemPrompt(lastUsedPrompt.prompt_id)
+                      }
+                    }
+                    setSystemPrompt(lastUsedPrompt.prompt_content)
+                  }
                   navigate("/")
                   onClose()
                 }}>
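
The restore block added to the Sidebar here is identical to the one added to the Playground above. If the duplication ever needs factoring out, it could live next to the storage helpers; a minimal sketch under the assumption that getPromptById, getLastUsedChatSystemPrompt and the two setters keep the signatures used in this diff (the helper name restoreLastUsedSystemPrompt is hypothetical, not part of the commit):

import { getPromptById } from "@/db"
import { getLastUsedChatSystemPrompt } from "@/services/model-settings"

// Hypothetical shared helper mirroring the two blocks added in this commit.
export const restoreLastUsedSystemPrompt = async (
  historyId: string,
  setSelectedSystemPrompt: (id: string) => void,
  setSystemPrompt: (content?: string) => void
): Promise<void> => {
  const lastUsedPrompt = await getLastUsedChatSystemPrompt(historyId)
  if (!lastUsedPrompt) return

  if (lastUsedPrompt.prompt_id) {
    // Re-select the saved prompt only if it still exists in the database.
    const prompt = await getPromptById(lastUsedPrompt.prompt_id)
    if (prompt) {
      setSelectedSystemPrompt(lastUsedPrompt.prompt_id)
    }
  }
  // Reapply the raw prompt text even if the saved prompt was deleted.
  setSystemPrompt(lastUsedPrompt.prompt_content)
}
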
@@ -1,5 +1,5 @@
 import { saveHistory, saveMessage } from "@/db"
-import { setLastUsedChatModel } from "@/services/model-settings"
+import { setLastUsedChatModel, setLastUsedChatSystemPrompt } from "@/services/model-settings"
 import { generateTitle } from "@/services/title"
 import { ChatHistory } from "@/store/option"
 
@@ -15,7 +15,9 @@ export const saveMessageOnError = async ({
   setHistoryId,
   isRegenerating,
   message_source = "web-ui",
-  message_type
+  message_type,
+  prompt_content,
+  prompt_id
 }: {
   e: any
   setHistory: (history: ChatHistory) => void
@@ -29,6 +31,8 @@ export const saveMessageOnError = async ({
   isRegenerating: boolean
   message_source?: "copilot" | "web-ui"
   message_type?: string
+  prompt_id?: string
+  prompt_content?: string
 }) => {
   if (
     e?.name === "AbortError" ||
@@ -73,6 +77,9 @@ export const saveMessageOnError = async ({
       message_type
     )
     await setLastUsedChatModel(historyId, selectedModel)
+    if (prompt_id || prompt_content) {
+      await setLastUsedChatSystemPrompt(historyId, { prompt_content, prompt_id })
+    }
   } else {
     const title = await generateTitle(selectedModel, userMessage, userMessage)
     const newHistoryId = await saveHistory(title, false, message_source)
@@ -100,6 +107,9 @@ export const saveMessageOnError = async ({
     )
     setHistoryId(newHistoryId.id)
     await setLastUsedChatModel(newHistoryId.id, selectedModel)
+    if (prompt_id || prompt_content) {
+      await setLastUsedChatSystemPrompt(newHistoryId.id, { prompt_content, prompt_id })
+    }
   }
 
   return true
@@ -118,7 +128,9 @@ export const saveMessageOnSuccess = async ({
   fullText,
   source,
   message_source = "web-ui",
-  message_type, generationInfo
+  message_type, generationInfo,
+  prompt_id,
+  prompt_content
 }: {
   historyId: string | null
   setHistoryId: (historyId: string) => void
@@ -131,6 +143,8 @@ export const saveMessageOnSuccess = async ({
   message_source?: "copilot" | "web-ui",
   message_type?: string
   generationInfo?: any
+  prompt_id?: string
+  prompt_content?: string
 }) => {
   if (historyId) {
     if (!isRegenerate) {
@@ -158,6 +172,9 @@ export const saveMessageOnSuccess = async ({
       generationInfo
     )
     await setLastUsedChatModel(historyId, selectedModel!)
+    if (prompt_id || prompt_content) {
+      await setLastUsedChatSystemPrompt(historyId, { prompt_content, prompt_id })
+    }
   } else {
     const title = await generateTitle(selectedModel, message, message)
     const newHistoryId = await saveHistory(title, false, message_source)
@@ -185,5 +202,8 @@ export const saveMessageOnSuccess = async ({
     )
     setHistoryId(newHistoryId.id)
     await setLastUsedChatModel(newHistoryId.id, selectedModel!)
+    if (prompt_id || prompt_content) {
+      await setLastUsedChatSystemPrompt(newHistoryId.id, { prompt_content, prompt_id })
+    }
   }
 }
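
With these changes, each chat history can end up with two per-history records in extension storage: the last used model (already written before this commit) and the new last used system prompt, which is only written when a prompt id or prompt content was actually in play. A minimal sketch of the two writes, with placeholder ids and values:

import {
  setLastUsedChatModel,
  setLastUsedChatSystemPrompt
} from "@/services/model-settings"

// Placeholder values for illustration only.
const persistLastUsed = async () => {
  const historyId = "history-abc"

  await setLastUsedChatModel(historyId, "some-model")
  // Guarded in the save helpers by: if (prompt_id || prompt_content) { ... }
  await setLastUsedChatSystemPrompt(historyId, {
    prompt_id: "prompt-123",
    prompt_content: "You are a helpful assistant."
  })
}
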
@@ -411,6 +411,8 @@ export const useMessageOption = () => {
   ) => {
     const url = await getOllamaURL()
     const userDefaultModelSettings = await getAllDefaultModelSettings()
+    let promptId: string | undefined = selectedSystemPrompt
+    let promptContent: string | undefined = undefined
 
     if (image.length > 0) {
       image = `data:image/jpeg;base64,${image.split(",")[1]}`
@@ -525,6 +527,7 @@ export const useMessageOption = () => {
             content: selectedPrompt.content
           })
         )
+        promptContent = selectedPrompt.content
       }
 
       if (isTempSystemprompt) {
@@ -533,6 +536,7 @@ export const useMessageOption = () => {
             content: currentChatModelSettings.systemPrompt
          })
         )
+        promptContent = currentChatModelSettings.systemPrompt
       }
 
       let generationInfo: any | undefined = undefined
@@ -611,7 +615,9 @@ export const useMessageOption = () => {
         image,
         fullText,
         source: [],
-        generationInfo
+        generationInfo,
+        prompt_content: promptContent,
+        prompt_id: promptId
       })
 
       setIsProcessing(false)
@@ -629,7 +635,9 @@ export const useMessageOption = () => {
         setHistory,
         setHistoryId,
         userMessage: message,
-        isRegenerating: isRegenerate
+        isRegenerating: isRegenerate,
+        prompt_content: promptContent,
+        prompt_id: promptId
       })
 
       if (!errorSave) {
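
In the submit path above, promptId always mirrors selectedSystemPrompt, while promptContent is filled by whichever system prompt branch ran: the saved prompt's content, or the temporary chat-level system prompt, whose assignment comes last in the hunks shown. A distilled sketch of that resolution, with placeholder parameter names standing in for the hook state:

// Placeholder names; only promptId / promptContent correspond to the new
// variables introduced in this commit.
const resolvePromptForSave = (
  selectedSystemPrompt: string | undefined,
  selectedPromptContent: string | undefined,
  temporarySystemPrompt: string | undefined
) => {
  let promptId: string | undefined = selectedSystemPrompt
  let promptContent: string | undefined = undefined

  if (selectedPromptContent) {
    promptContent = selectedPromptContent
  }
  if (temporarySystemPrompt) {
    // Assigned after the saved prompt's content, as in the hunks above.
    promptContent = temporarySystemPrompt
  }

  return { promptId, promptContent }
}
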
@@ -125,4 +125,23 @@ export const setLastUsedChatModel = async (
   await storage.set(`lastUsedChatModel-${historyId}`, model)
 }
 
+
+export const getLastUsedChatSystemPrompt = async (
+  historyId: string
+): Promise<{ prompt_id?: string; prompt_content?: string } | undefined> => {
+  return await storage.get<{ prompt_id?: string; prompt_content?: string } | undefined>(
+    `lastUsedChatSystemPrompt-${historyId}`
+  )
+}
+
+export const setLastUsedChatSystemPrompt = async (
+  historyId: string,
+  prompt: {
+    prompt_id?: string
+    prompt_content?: string
+  }
+): Promise<void> => {
+  await storage.set(`lastUsedChatSystemPrompt-${historyId}`, prompt)
+}
+
 export { getAllModelSettings, setModelSetting }
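
Both helpers key the record off the history id and go through the same storage instance already used by setLastUsedChatModel, so saving and restoring is a plain round trip. A minimal usage sketch with a placeholder id:

import {
  getLastUsedChatSystemPrompt,
  setLastUsedChatSystemPrompt
} from "@/services/model-settings"

const demo = async () => {
  const historyId = "history-abc" // placeholder

  await setLastUsedChatSystemPrompt(historyId, {
    prompt_content: "Answer in French."
  })

  const restored = await getLastUsedChatSystemPrompt(historyId)
  // restored?.prompt_content === "Answer in French."
  // restored?.prompt_id is undefined because none was stored.
  console.log(restored)
}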