feat: Add temporary system prompt

Adds a new setting that allows users to set a temporary system prompt for the current chat.
If set, this prompt overrides the currently selected system prompt for that chat.
The new setting is available in the "Current Chat Model Settings" modal.
This feature provides a way to quickly experiment with different system prompts without having to change the default setting.
This commit is contained in:
n4ze3m 2024-10-02 12:30:52 +05:30
parent 2e97f6470d
commit 0e44a7ad4b
15 changed files with 240 additions and 112 deletions

View File

@@ -85,6 +85,11 @@
"numGpu": {
"label": "Anzahl GPUs",
"placeholder": "Geben Sie die Anzahl der Ebenen ein, die an GPU(s) gesendet werden sollen"
},
"systemPrompt": {
"label": "Temporärer System-Prompt",
"placeholder": "System-Prompt eingeben",
"help": "Dies ist eine schnelle Möglichkeit, den System-Prompt im aktuellen Chat festzulegen, der den ausgewählten System-Prompt überschreibt, falls vorhanden."
}
},
"advanced": "Weitere Modell-Einstellungen"

View File

@@ -85,6 +85,11 @@
"numGpu": {
"label": "Num GPU",
"placeholder": "Enter number of layers to send to GPU(s)"
},
"systemPrompt": {
"label": "Temporary System Prompt",
"placeholder": "Enter System Prompt",
"help": "This is a quick way to set the system prompt in the current chat, which will override the selected system prompt if it exists."
}
},
"advanced": "More Model Settings"

View File

@@ -85,6 +85,11 @@
"numGpu": {
"label": "Num GPU",
"placeholder": "Ingrese el número de capas para enviar a la(s) GPU(s)"
},
"systemPrompt": {
"label": "Prompt de Sistema Temporal",
"placeholder": "Ingrese el Prompt de Sistema",
"help": "Esta es una forma rápida de establecer el prompt de sistema en el chat actual, que anulará el prompt de sistema seleccionado si existe."
}
},
"advanced": "Más Configuraciones del Modelo"

View File

@@ -85,6 +85,11 @@
"numGpu": {
"label": "Num GPU",
"placeholder": "تعداد لایه‌هایی که به GPU(ها) ارسال می‌شود را وارد کنید"
},
"systemPrompt": {
"label": "پرامپت سیستم موقت",
"placeholder": "پرامپت سیستم را وارد کنید",
"help": "این یک روش سریع برای تنظیم پرامپت سیستم در گفتگوی فعلی است که در صورت وجود، پرامپت سیستم انتخاب شده را لغو خواهد کرد."
}
},
"advanced": "تنظیمات بیشتر مدل"

View File

@@ -85,6 +85,11 @@
"numGpu": {
"label": "Num GPU",
"placeholder": "Entrez le nombre de couches à envoyer au(x) GPU(s)"
},
"systemPrompt": {
"label": "Invite système temporaire",
"placeholder": "Entrez l'invite système",
"help": "C'est un moyen rapide de définir l'invite système dans le chat actuel, qui remplacera l'invite système sélectionnée si elle existe."
}
},
"advanced": "Plus de paramètres du modèle"

View File

@@ -84,7 +84,13 @@
},
"numGpu": {
"label": "Num GPU",
"placeholder": "Inserisci il numero di layer da inviare alla/e GPU" }
"placeholder": "Inserisci il numero di layer da inviare alla/e GPU"
},
"systemPrompt": {
"label": "Prompt di Sistema Temporaneo",
"placeholder": "Inserisci il Prompt di Sistema",
"help": "Questo è un modo rapido per impostare il prompt di sistema nella chat corrente, che sovrascriverà il prompt di sistema selezionato se esiste."
}
},
"advanced": "Altre Impostazioni del Modello"
},

View File

@@ -85,6 +85,11 @@
"numGpu": {
"label": "Num GPU",
"placeholder": "GPU(s)に送信するレイヤー数を入力してください"
},
"systemPrompt": {
"label": "一時的なシステムプロンプト",
"placeholder": "システムプロンプトを入力",
"help": "これは現在のチャットでシステムプロンプトを素早く設定する方法で、選択されたシステムプロンプトが存在する場合はそれを上書きします。"
}
},
"advanced": "その他のモデル設定"

View File

@@ -84,6 +84,11 @@
"numGpu": {
"label": "ജിപിയു എണ്ണം",
"placeholder": "ജിപിയു(കൾ)ക്ക് അയക്കേണ്ട ലേയറുകളുടെ എണ്ണം നൽകുക"
},
"systemPrompt": {
"label": "താൽക്കാലിക സിസ്റ്റം പ്രോംപ്റ്റ്",
"placeholder": "സിസ്റ്റം പ്രോംപ്റ്റ് നൽകുക",
"help": "നിലവിലുള്ള ചാറ്റിൽ സിസ്റ്റം പ്രോംപ്റ്റ് സെറ്റ് ചെയ്യാനുള്ള വേഗത്തിലുള്ള മാർഗമാണിത്, ഇത് തിരഞ്ഞെടുത്ത സിസ്റ്റം പ്രോംപ്റ്റ് നിലവിലുണ്ടെങ്കിൽ അതിനെ മറികടക്കും."
}
},
"advanced": "കൂടുതൽ മോഡൽ ക്രമീകരണങ്ങൾ"

View File

@@ -85,6 +85,11 @@
"numGpu": {
"label": "Num GPUs",
"placeholder": "Digite o número de camadas para enviar para a(s) GPU(s)"
},
"systemPrompt": {
"label": "Prompt do Sistema Temporário",
"placeholder": "Digite o Prompt do Sistema",
"help": "Esta é uma maneira rápida de definir o prompt do sistema no chat atual, que substituirá o prompt do sistema selecionado, se existir."
}
},
"advanced": "Mais Configurações do Modelo"

View File

@@ -84,7 +84,13 @@
},
"numGpu": {
"label": "Num GPU",
"placeholder": "Введите количество слоев для отправки на GPU" }
"placeholder": "Введите количество слоев для отправки на GPU"
},
"systemPrompt": {
"label": "Временный системный запрос",
"placeholder": "Введите системный запрос",
"help": "Это быстрый способ установить системный запрос в текущем чате, который переопределит выбранный системный запрос, если он существует."
}
},
"advanced": "Больше настроек модели"
},

View File

@@ -85,6 +85,11 @@
"numGpu": {
"label": "Num GPU",
"placeholder": "输入要发送到 GPU 的层数"
},
"systemPrompt": {
"label": "临时系统提示",
"placeholder": "输入系统提示",
"help": "这是一种在当前聊天中快速设置系统提示的方法,如果存在已选择的系统提示,它将覆盖该提示。"
}
},
"advanced": "更多模型设置"

View File

@@ -1,16 +1,29 @@
import { getAllModelSettings } from "@/services/model-settings"
import { useStoreChatModelSettings } from "@/store/model"
import { useQuery } from "@tanstack/react-query"
import { Collapse, Form, Input, InputNumber, Modal, Skeleton } from "antd"
import {
Collapse,
Drawer,
Form,
Input,
InputNumber,
Modal,
Skeleton
} from "antd"
import React from "react"
import { useTranslation } from "react-i18next"
type Props = {
open: boolean
setOpen: (open: boolean) => void
useDrawer?: boolean
}
export const CurrentChatModelSettings = ({ open, setOpen }: Props) => {
export const CurrentChatModelSettings = ({
open,
setOpen,
useDrawer
}: Props) => {
const { t } = useTranslation("common")
const [form] = Form.useForm()
const cUserSettings = useStoreChatModelSettings()
@@ -26,19 +39,17 @@ export const CurrentChatModelSettings = ({ open, setOpen }: Props) => {
numCtx: cUserSettings.numCtx ?? data.numCtx,
seed: cUserSettings.seed,
numGpu: cUserSettings.numGpu ?? data.numGpu,
systemPrompt: cUserSettings.systemPrompt ?? ""
})
return data
},
enabled: open,
refetchOnMount: true
})
const renderBody = () => {
return (
<Modal
title={t("currentChatModelSettings")}
open={open}
onOk={() => setOpen(false)}
onCancel={() => setOpen(false)}
footer={null}>
<>
{!isLoading ? (
<Form
onFinish={(values: {
@@ -54,12 +65,26 @@ export const CurrentChatModelSettings = ({ open, setOpen }: Props) => {
}}
form={form}
layout="vertical">
{useDrawer && (
<>
<Form.Item
name="systemPrompt"
help={t("modelSettings.form.systemPrompt.help")}
label={t("modelSettings.form.systemPrompt.label")}>
<Input.TextArea
rows={4}
placeholder={t(
"modelSettings.form.systemPrompt.placeholder"
)}
/>
</Form.Item>
</>
)}
<Form.Item
name="keepAlive"
help={t("modelSettings.form.keepAlive.help")}
label={t("modelSettings.form.keepAlive.label")}>
<Input
size="large"
placeholder={t("modelSettings.form.keepAlive.placeholder")}
/>
</Form.Item>
@@ -68,7 +93,6 @@ export const CurrentChatModelSettings = ({ open, setOpen }: Props) => {
name="temperature"
label={t("modelSettings.form.temperature.label")}>
<InputNumber
size="large"
style={{ width: "100%" }}
placeholder={t("modelSettings.form.temperature.placeholder")}
/>
@@ -78,16 +102,16 @@ export const CurrentChatModelSettings = ({ open, setOpen }: Props) => {
help={t("modelSettings.form.seed.help")}
label={t("modelSettings.form.seed.label")}>
<InputNumber
size="large"
style={{ width: "100%" }}
placeholder={t("modelSettings.form.seed.placeholder")}
/>
</Form.Item>
<Form.Item name="numCtx" label={t("modelSettings.form.numCtx.label")}>
<Form.Item
name="numCtx"
label={t("modelSettings.form.numCtx.label")}>
<InputNumber
style={{ width: "100%" }}
placeholder={t("modelSettings.form.numCtx.placeholder")}
size="large"
/>
</Form.Item>
@@ -106,7 +130,6 @@ export const CurrentChatModelSettings = ({ open, setOpen }: Props) => {
<InputNumber
style={{ width: "100%" }}
placeholder={t("modelSettings.form.topK.placeholder")}
size="large"
/>
</Form.Item>
@@ -115,7 +138,6 @@ export const CurrentChatModelSettings = ({ open, setOpen }: Props) => {
label={t("modelSettings.form.topP.label")}>
<InputNumber
style={{ width: "100%" }}
size="large"
placeholder={t("modelSettings.form.topP.placeholder")}
/>
</Form.Item>
@@ -125,8 +147,9 @@ export const CurrentChatModelSettings = ({ open, setOpen }: Props) => {
label={t("modelSettings.form.numGpu.label")}>
<InputNumber
style={{ width: "100%" }}
size="large"
placeholder={t("modelSettings.form.numGpu.placeholder")}
placeholder={t(
"modelSettings.form.numGpu.placeholder"
)}
/>
</Form.Item>
</React.Fragment>
@@ -137,13 +160,38 @@ export const CurrentChatModelSettings = ({ open, setOpen }: Props) => {
<button
type="submit"
className="inline-flex justify-center w-full text-center mt-4 items-center rounded-md border border-transparent bg-black px-2 py-2 text-sm font-medium leading-4 text-white shadow-sm hover:bg-gray-700 focus:outline-none focus:ring-2 focus:ring-indigo-500 focus:ring-offset-2 dark:bg-white dark:text-gray-800 dark:hover:bg-gray-100 dark:focus:ring-gray-500 dark:focus:ring-offset-gray-100 disabled:opacity-50 ">
className="inline-flex justify-center w-full text-center mt-3 items-center rounded-md border border-transparent bg-black px-2 py-2 text-sm font-medium leading-4 text-white shadow-sm hover:bg-gray-700 focus:outline-none focus:ring-2 focus:ring-indigo-500 focus:ring-offset-2 dark:bg-white dark:text-gray-800 dark:hover:bg-gray-100 dark:focus:ring-gray-500 dark:focus:ring-offset-gray-100 disabled:opacity-50 ">
{t("save")}
</button>
</Form>
) : (
<Skeleton active />
)}
</>
)
}
if (useDrawer) {
return (
<Drawer
placement="right"
open={open}
onClose={() => setOpen(false)}
width={500}
title={t("currentChatModelSettings")}>
{renderBody()}
</Drawer>
)
}
return (
<Modal
title={t("currentChatModelSettings")}
open={open}
onOk={() => setOpen(false)}
onCancel={() => setOpen(false)}
footer={null}>
{renderBody()}
</Modal>
)
}

View File

@@ -76,6 +76,7 @@ export default function OptionLayout({
<CurrentChatModelSettings
open={openModelSettings}
setOpen={setOpenModelSettings}
useDrawer
/>
</>
)

View File

@@ -68,7 +68,7 @@ export const useMessageOption = () => {
} = useStoreMessageOption()
const currentChatModelSettings = useStoreChatModelSettings()
const [selectedModel, setSelectedModel] = useStorage("selectedModel")
const [ speechToTextLanguage, setSpeechToTextLanguage ] = useStorage(
const [speechToTextLanguage, setSpeechToTextLanguage] = useStorage(
"speechToTextLanguage",
"en-US"
)
@@ -450,7 +450,11 @@ export const useMessageOption = () => {
)
}
if (selectedPrompt) {
const isTempSystemprompt =
currentChatModelSettings.systemPrompt &&
currentChatModelSettings.systemPrompt?.trim().length > 0
if (!isTempSystemprompt && selectedPrompt) {
applicationChatHistory.unshift(
new SystemMessage({
content: [
@@ -463,6 +467,19 @@
)
}
if (isTempSystemprompt) {
applicationChatHistory.unshift(
new SystemMessage({
content: [
{
text: currentChatModelSettings.systemPrompt,
type: "text"
}
]
})
)
}
const chunks = await ollama.stream(
[...applicationChatHistory, humanMessage],
{

View File

@@ -63,6 +63,8 @@ type CurrentChatModelSettings = {
setX: (key: string, value: any) => void
reset: () => void
systemPrompt?: string
setSystemPrompt: (systemPrompt: string) => void
}
export const useStoreChatModelSettings = create<CurrentChatModelSettings>(
@@ -100,6 +102,8 @@ export const useStoreChatModelSettings = create<CurrentChatModelSettings>(
setVocabOnly: (vocabOnly: boolean) => set({ vocabOnly }),
seetSeed: (seed: number) => set({ seed }),
setX: (key: string, value: any) => set({ [key]: value }),
systemPrompt: undefined,
setSystemPrompt: (systemPrompt: string) => set({ systemPrompt }),
reset: () =>
set({
f16KV: undefined,
@@ -130,7 +134,8 @@ export const useStoreChatModelSettings = create<CurrentChatModelSettings>(
useMLock: undefined,
useMMap: undefined,
vocabOnly: undefined,
seed: undefined
seed: undefined,
systemPrompt: undefined
})
})
)