feat: Add temporary system prompt
Adds a setting that lets users define a temporary system prompt for the current chat. When set, it overrides the selected system prompt. The setting is available in the "Current Chat Model Settings" modal and makes it easy to experiment with different system prompts without changing the default.
parent 2e97f6470d
commit 0e44a7ad4b
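At a glance, the precedence introduced here is: a non-empty temporary system prompt set for the current chat wins over the globally selected system prompt. A minimal sketch of that rule (illustrative only; the helper name is hypothetical, the behaviour mirrors the useMessageOption change further below):

// Hypothetical helper illustrating the override rule added by this commit:
// a non-empty temporary prompt (stored per chat) takes priority over the
// prompt selected in the global settings.
function resolveSystemPrompt(
  temporarySystemPrompt: string | undefined,
  selectedSystemPrompt: string | undefined
): string | undefined {
  if (temporarySystemPrompt && temporarySystemPrompt.trim().length > 0) {
    return temporarySystemPrompt
  }
  return selectedSystemPrompt
}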

German locale strings:

@@ -85,6 +85,11 @@
    "numGpu": {
      "label": "Anzahl GPUs",
      "placeholder": "Geben Sie die Anzahl der Ebenen ein, die an GPU(s) gesendet werden sollen"
    },
    "systemPrompt": {
      "label": "Temporärer System-Prompt",
      "placeholder": "System-Prompt eingeben",
      "help": "Dies ist eine schnelle Möglichkeit, den System-Prompt im aktuellen Chat festzulegen, der den ausgewählten System-Prompt überschreibt, falls vorhanden."
    }
  },
  "advanced": "Weitere Modell-Einstellungen"

English locale strings:

@@ -85,6 +85,11 @@
    "numGpu": {
      "label": "Num GPU",
      "placeholder": "Enter number of layers to send to GPU(s)"
    },
    "systemPrompt": {
      "label": "Temporary System Prompt",
      "placeholder": "Enter System Prompt",
      "help": "This is a quick way to set the system prompt in the current chat, which will override the selected system prompt if it exists."
    }
  },
  "advanced": "More Model Settings"

Spanish locale strings:

@@ -85,6 +85,11 @@
    "numGpu": {
      "label": "Num GPU",
      "placeholder": "Ingrese el número de capas para enviar a la(s) GPU(s)"
    },
    "systemPrompt": {
      "label": "Prompt de Sistema Temporal",
      "placeholder": "Ingrese el Prompt de Sistema",
      "help": "Esta es una forma rápida de establecer el prompt de sistema en el chat actual, que anulará el prompt de sistema seleccionado si existe."
    }
  },
  "advanced": "Más Configuraciones del Modelo"

Persian locale strings:

@@ -85,6 +85,11 @@
    "numGpu": {
      "label": "Num GPU",
      "placeholder": "تعداد لایههایی که به GPU(ها) ارسال میشود را وارد کنید"
    },
    "systemPrompt": {
      "label": "پرامپت سیستم موقت",
      "placeholder": "پرامپت سیستم را وارد کنید",
      "help": "این یک روش سریع برای تنظیم پرامپت سیستم در گفتگوی فعلی است که در صورت وجود، پرامپت سیستم انتخاب شده را لغو خواهد کرد."
    }
  },
  "advanced": "تنظیمات بیشتر مدل"

French locale strings:

@@ -85,6 +85,11 @@
    "numGpu": {
      "label": "Num GPU",
      "placeholder": "Entrez le nombre de couches à envoyer au(x) GPU(s)"
    },
    "systemPrompt": {
      "label": "Invite système temporaire",
      "placeholder": "Entrez l'invite système",
      "help": "C'est un moyen rapide de définir l'invite système dans le chat actuel, qui remplacera l'invite système sélectionnée si elle existe."
    }
  },
  "advanced": "Plus de paramètres du modèle"

Italian locale strings:

@@ -84,7 +84,13 @@
  },
    "numGpu": {
      "label": "Num GPU",
      "placeholder": "Inserisci il numero di layer da inviare alla/e GPU" }
      "placeholder": "Inserisci il numero di layer da inviare alla/e GPU"
    },
    "systemPrompt": {
      "label": "Prompt di Sistema Temporaneo",
      "placeholder": "Inserisci il Prompt di Sistema",
      "help": "Questo è un modo rapido per impostare il prompt di sistema nella chat corrente, che sovrascriverà il prompt di sistema selezionato se esiste."
    }
  },
  "advanced": "Altre Impostazioni del Modello"
},

Japanese locale strings:

@@ -85,6 +85,11 @@
    "numGpu": {
      "label": "Num GPU",
      "placeholder": "GPU(s)に送信するレイヤー数を入力してください"
    },
    "systemPrompt": {
      "label": "一時的なシステムプロンプト",
      "placeholder": "システムプロンプトを入力",
      "help": "これは現在のチャットでシステムプロンプトを素早く設定する方法で、選択されたシステムプロンプトが存在する場合はそれを上書きします。"
    }
  },
  "advanced": "その他のモデル設定"

Malayalam locale strings:

@@ -84,6 +84,11 @@
    "numGpu": {
      "label": "ജിപിയു എണ്ണം",
      "placeholder": "ജിപിയു(കൾ)ക്ക് അയക്കേണ്ട ലേയറുകളുടെ എണ്ണം നൽകുക"
    },
    "systemPrompt": {
      "label": "താൽക്കാലിക സിസ്റ്റം പ്രോംപ്റ്റ്",
      "placeholder": "സിസ്റ്റം പ്രോംപ്റ്റ് നൽകുക",
      "help": "നിലവിലുള്ള ചാറ്റിൽ സിസ്റ്റം പ്രോംപ്റ്റ് സെറ്റ് ചെയ്യാനുള്ള വേഗത്തിലുള്ള മാർഗമാണിത്, ഇത് തിരഞ്ഞെടുത്ത സിസ്റ്റം പ്രോംപ്റ്റ് നിലവിലുണ്ടെങ്കിൽ അതിനെ മറികടക്കും."
    }
  },
  "advanced": "കൂടുതൽ മോഡൽ ക്രമീകരണങ്ങൾ"

Portuguese locale strings:

@@ -85,6 +85,11 @@
    "numGpu": {
      "label": "Num GPUs",
      "placeholder": "Digite o número de camadas para enviar para a(s) GPU(s)"
    },
    "systemPrompt": {
      "label": "Prompt do Sistema Temporário",
      "placeholder": "Digite o Prompt do Sistema",
      "help": "Esta é uma maneira rápida de definir o prompt do sistema no chat atual, que substituirá o prompt do sistema selecionado, se existir."
    }
  },
  "advanced": "Mais Configurações do Modelo"

Russian locale strings:

@@ -84,7 +84,13 @@
  },
    "numGpu": {
      "label": "Num GPU",
      "placeholder": "Введите количество слоев для отправки на GPU" }
      "placeholder": "Введите количество слоев для отправки на GPU"
    },
    "systemPrompt": {
      "label": "Временный системный запрос",
      "placeholder": "Введите системный запрос",
      "help": "Это быстрый способ установить системный запрос в текущем чате, который переопределит выбранный системный запрос, если он существует."
    }
  },
  "advanced": "Больше настроек модели"
},

Chinese locale strings:

@@ -85,6 +85,11 @@
    "numGpu": {
      "label": "Num GPU",
      "placeholder": "输入要发送到 GPU 的层数"
    },
    "systemPrompt": {
      "label": "临时系统提示",
      "placeholder": "输入系统提示",
      "help": "这是一种在当前聊天中快速设置系统提示的方法,如果存在已选择的系统提示,它将覆盖该提示。"
    }
  },
  "advanced": "更多模型设置"

CurrentChatModelSettings component:

@@ -1,16 +1,29 @@
import { getAllModelSettings } from "@/services/model-settings"
import { useStoreChatModelSettings } from "@/store/model"
import { useQuery } from "@tanstack/react-query"
import { Collapse, Form, Input, InputNumber, Modal, Skeleton } from "antd"
import {
  Collapse,
  Drawer,
  Form,
  Input,
  InputNumber,
  Modal,
  Skeleton
} from "antd"
import React from "react"
import { useTranslation } from "react-i18next"

type Props = {
  open: boolean
  setOpen: (open: boolean) => void
  useDrawer?: boolean
}

export const CurrentChatModelSettings = ({ open, setOpen }: Props) => {
export const CurrentChatModelSettings = ({
  open,
  setOpen,
  useDrawer
}: Props) => {
  const { t } = useTranslation("common")
  const [form] = Form.useForm()
  const cUserSettings = useStoreChatModelSettings()

@@ -26,12 +39,151 @@ export const CurrentChatModelSettings = ({ open, setOpen }: Props) => {
        numCtx: cUserSettings.numCtx ?? data.numCtx,
        seed: cUserSettings.seed,
        numGpu: cUserSettings.numGpu ?? data.numGpu,
        systemPrompt: cUserSettings.systemPrompt ?? ""
      })
      return data
    },
    enabled: open,
    refetchOnMount: true
  })

  const renderBody = () => {
    return (
      <>
        {!isLoading ? (
          <Form
            onFinish={(values: {
              keepAlive: string
              temperature: number
              topK: number
              topP: number
            }) => {
              Object.entries(values).forEach(([key, value]) => {
                cUserSettings.setX(key, value)
                setOpen(false)
              })
            }}
            form={form}
            layout="vertical">
            {useDrawer && (
              <>
                <Form.Item
                  name="systemPrompt"
                  help={t("modelSettings.form.systemPrompt.help")}
                  label={t("modelSettings.form.systemPrompt.label")}>
                  <Input.TextArea
                    rows={4}
                    placeholder={t(
                      "modelSettings.form.systemPrompt.placeholder"
                    )}
                  />
                </Form.Item>
              </>
            )}
            <Form.Item
              name="keepAlive"
              help={t("modelSettings.form.keepAlive.help")}
              label={t("modelSettings.form.keepAlive.label")}>
              <Input
                placeholder={t("modelSettings.form.keepAlive.placeholder")}
              />
            </Form.Item>

            <Form.Item
              name="temperature"
              label={t("modelSettings.form.temperature.label")}>
              <InputNumber
                style={{ width: "100%" }}
                placeholder={t("modelSettings.form.temperature.placeholder")}
              />
            </Form.Item>
            <Form.Item
              name="seed"
              help={t("modelSettings.form.seed.help")}
              label={t("modelSettings.form.seed.label")}>
              <InputNumber
                style={{ width: "100%" }}
                placeholder={t("modelSettings.form.seed.placeholder")}
              />
            </Form.Item>
            <Form.Item
              name="numCtx"
              label={t("modelSettings.form.numCtx.label")}>
              <InputNumber
                style={{ width: "100%" }}
                placeholder={t("modelSettings.form.numCtx.placeholder")}
              />
            </Form.Item>

            <Collapse
              ghost
              className="border-none bg-transparent"
              items={[
                {
                  key: "1",
                  label: t("modelSettings.advanced"),
                  children: (
                    <React.Fragment>
                      <Form.Item
                        name="topK"
                        label={t("modelSettings.form.topK.label")}>
                        <InputNumber
                          style={{ width: "100%" }}
                          placeholder={t("modelSettings.form.topK.placeholder")}
                        />
                      </Form.Item>

                      <Form.Item
                        name="topP"
                        label={t("modelSettings.form.topP.label")}>
                        <InputNumber
                          style={{ width: "100%" }}
                          placeholder={t("modelSettings.form.topP.placeholder")}
                        />
                      </Form.Item>

                      <Form.Item
                        name="numGpu"
                        label={t("modelSettings.form.numGpu.label")}>
                        <InputNumber
                          style={{ width: "100%" }}
                          placeholder={t(
                            "modelSettings.form.numGpu.placeholder"
                          )}
                        />
                      </Form.Item>
                    </React.Fragment>
                  )
                }
              ]}
            />

            <button
              type="submit"
              className="inline-flex justify-center w-full text-center mt-3 items-center rounded-md border border-transparent bg-black px-2 py-2 text-sm font-medium leading-4 text-white shadow-sm hover:bg-gray-700 focus:outline-none focus:ring-2 focus:ring-indigo-500 focus:ring-offset-2 dark:bg-white dark:text-gray-800 dark:hover:bg-gray-100 dark:focus:ring-gray-500 dark:focus:ring-offset-gray-100 disabled:opacity-50 ">
              {t("save")}
            </button>
          </Form>
        ) : (
          <Skeleton active />
        )}
      </>
    )
  }

  if (useDrawer) {
    return (
      <Drawer
        placement="right"
        open={open}
        onClose={() => setOpen(false)}
        width={500}
        title={t("currentChatModelSettings")}>
        {renderBody()}
      </Drawer>
    )
  }

  return (
    <Modal
      title={t("currentChatModelSettings")}

@@ -39,111 +191,7 @@ export const CurrentChatModelSettings = ({ open, setOpen }: Props) => {
      onOk={() => setOpen(false)}
      onCancel={() => setOpen(false)}
      footer={null}>
      {!isLoading ? (
        <Form
          onFinish={(values: {
            keepAlive: string
            temperature: number
            topK: number
            topP: number
          }) => {
            Object.entries(values).forEach(([key, value]) => {
              cUserSettings.setX(key, value)
              setOpen(false)
            })
          }}
          form={form}
          layout="vertical">
          <Form.Item
            name="keepAlive"
            help={t("modelSettings.form.keepAlive.help")}
            label={t("modelSettings.form.keepAlive.label")}>
            <Input
              size="large"
              placeholder={t("modelSettings.form.keepAlive.placeholder")}
            />
          </Form.Item>

          <Form.Item
            name="temperature"
            label={t("modelSettings.form.temperature.label")}>
            <InputNumber
              size="large"
              style={{ width: "100%" }}
              placeholder={t("modelSettings.form.temperature.placeholder")}
            />
          </Form.Item>
          <Form.Item
            name="seed"
            help={t("modelSettings.form.seed.help")}
            label={t("modelSettings.form.seed.label")}>
            <InputNumber
              size="large"
              style={{ width: "100%" }}
              placeholder={t("modelSettings.form.seed.placeholder")}
            />
          </Form.Item>
          <Form.Item name="numCtx" label={t("modelSettings.form.numCtx.label")}>
            <InputNumber
              style={{ width: "100%" }}
              placeholder={t("modelSettings.form.numCtx.placeholder")}
              size="large"
            />
          </Form.Item>

          <Collapse
            ghost
            className="border-none bg-transparent"
            items={[
              {
                key: "1",
                label: t("modelSettings.advanced"),
                children: (
                  <React.Fragment>
                    <Form.Item
                      name="topK"
                      label={t("modelSettings.form.topK.label")}>
                      <InputNumber
                        style={{ width: "100%" }}
                        placeholder={t("modelSettings.form.topK.placeholder")}
                        size="large"
                      />
                    </Form.Item>

                    <Form.Item
                      name="topP"
                      label={t("modelSettings.form.topP.label")}>
                      <InputNumber
                        style={{ width: "100%" }}
                        size="large"
                        placeholder={t("modelSettings.form.topP.placeholder")}
                      />
                    </Form.Item>

                    <Form.Item
                      name="numGpu"
                      label={t("modelSettings.form.numGpu.label")}>
                      <InputNumber
                        style={{ width: "100%" }}
                        size="large"
                        placeholder={t("modelSettings.form.numGpu.placeholder")}
                      />
                    </Form.Item>
                  </React.Fragment>
                )
              }
            ]}
          />

          <button
            type="submit"
            className="inline-flex justify-center w-full text-center mt-4 items-center rounded-md border border-transparent bg-black px-2 py-2 text-sm font-medium leading-4 text-white shadow-sm hover:bg-gray-700 focus:outline-none focus:ring-2 focus:ring-indigo-500 focus:ring-offset-2 dark:bg-white dark:text-gray-800 dark:hover:bg-gray-100 dark:focus:ring-gray-500 dark:focus:ring-offset-gray-100 disabled:opacity-50 ">
            {t("save")}
          </button>
        </Form>
      ) : (
        <Skeleton active />
      )}
      {renderBody()}
    </Modal>
  )
}
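
Note how the new field reaches the chat: the Form.Item above is named systemPrompt, so submitting the form writes its value into the per-chat settings store via setX, where useMessageOption (further below) picks it up. A simplified, self-contained sketch of that hand-off (store shape reduced to what the example needs; function name hypothetical):

// Simplified flow: a submitted "systemPrompt" form value is persisted into the
// per-chat settings store via setX, mirroring the onFinish handler above.
type ChatSettingsStore = { setX: (key: string, value: unknown) => void }

function persistChatSettings(
  store: ChatSettingsStore,
  values: Record<string, unknown>,
  close: () => void
) {
  Object.entries(values).forEach(([key, value]) => {
    store.setX(key, value) // e.g. setX("systemPrompt", "Answer briefly.")
  })
  close()
}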

OptionLayout usage:

@@ -76,6 +76,7 @@ export default function OptionLayout({
      <CurrentChatModelSettings
        open={openModelSettings}
        setOpen={setOpenModelSettings}
        useDrawer
      />
    </>
  )

useMessageOption hook:

@@ -68,7 +68,7 @@ export const useMessageOption = () => {
  } = useStoreMessageOption()
  const currentChatModelSettings = useStoreChatModelSettings()
  const [selectedModel, setSelectedModel] = useStorage("selectedModel")
  const [ speechToTextLanguage, setSpeechToTextLanguage ] = useStorage(
  const [speechToTextLanguage, setSpeechToTextLanguage] = useStorage(
    "speechToTextLanguage",
    "en-US"
  )

@@ -450,7 +450,11 @@ export const useMessageOption = () => {
      )
    }

    if (selectedPrompt) {
    const isTempSystemprompt =
      currentChatModelSettings.systemPrompt &&
      currentChatModelSettings.systemPrompt?.trim().length > 0

    if (!isTempSystemprompt && selectedPrompt) {
      applicationChatHistory.unshift(
        new SystemMessage({
          content: [

@@ -463,6 +467,19 @@ export const useMessageOption = () => {
      )
    }

    if (isTempSystemprompt) {
      applicationChatHistory.unshift(
        new SystemMessage({
          content: [
            {
              text: currentChatModelSettings.systemPrompt,
              type: "text"
            }
          ]
        })
      )
    }

    const chunks = await ollama.stream(
      [...applicationChatHistory, humanMessage],
      {
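
For clarity, the temporary prompt enters the conversation as a SystemMessage placed at the front of the history before streaming. A self-contained sketch of that step (the langchain import path is assumed; names mirror the hunk above):

import { HumanMessage, SystemMessage } from "@langchain/core/messages" // path assumed

// Prepend the temporary system prompt (when non-empty) to the history that is
// streamed to the model, mirroring the unshift in the hunk above.
function withTemporarySystemPrompt(
  history: Array<SystemMessage | HumanMessage>,
  temporarySystemPrompt?: string
): Array<SystemMessage | HumanMessage> {
  if (!temporarySystemPrompt || temporarySystemPrompt.trim().length === 0) {
    return history
  }
  return [
    new SystemMessage({
      content: [{ text: temporarySystemPrompt, type: "text" }]
    }),
    ...history
  ]
}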

Chat model settings store:

@@ -63,6 +63,8 @@ type CurrentChatModelSettings = {

  setX: (key: string, value: any) => void
  reset: () => void
  systemPrompt?: string
  setSystemPrompt: (systemPrompt: string) => void
}

export const useStoreChatModelSettings = create<CurrentChatModelSettings>(

@@ -100,6 +102,8 @@ export const useStoreChatModelSettings = create<CurrentChatModelSettings>(
    setVocabOnly: (vocabOnly: boolean) => set({ vocabOnly }),
    seetSeed: (seed: number) => set({ seed }),
    setX: (key: string, value: any) => set({ [key]: value }),
    systemPrompt: undefined,
    setSystemPrompt: (systemPrompt: string) => set({ systemPrompt }),
    reset: () =>
      set({
        f16KV: undefined,

@@ -130,7 +134,8 @@ export const useStoreChatModelSettings = create<CurrentChatModelSettings>(
        useMLock: undefined,
        useMMap: undefined,
        vocabOnly: undefined,
        seed: undefined
        seed: undefined,
        systemPrompt: undefined
      })
  })
)
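
Finally, the temporary prompt lives in the per-chat zustand store; a minimal illustration of the new fields (API names taken from the diff above, component name hypothetical):

import React from "react"
import { useStoreChatModelSettings } from "@/store/model"

// Minimal illustration of the new store fields added above.
export const TempPromptExample = () => {
  const settings = useStoreChatModelSettings()

  return (
    <div>
      {/* systemPrompt is undefined until a temporary prompt is set for this chat */}
      <p>Temporary prompt: {settings.systemPrompt ?? "none"}</p>
      <button onClick={() => settings.setSystemPrompt("Answer briefly.")}>
        Set temporary prompt
      </button>
      {/* reset() clears it together with the other per-chat overrides */}
      <button onClick={() => settings.reset()}>Reset chat settings</button>
    </div>
  )
}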