commit 2a2309d44c
@@ -85,6 +85,11 @@
      "numGpu": {
        "label": "Anzahl GPUs",
        "placeholder": "Geben Sie die Anzahl der Ebenen ein, die an GPU(s) gesendet werden sollen"
      },
      "systemPrompt": {
        "label": "Temporärer System-Prompt",
        "placeholder": "System-Prompt eingeben",
        "help": "Dies ist eine schnelle Möglichkeit, den System-Prompt im aktuellen Chat festzulegen, der den ausgewählten System-Prompt überschreibt, falls vorhanden."
      }
    },
    "advanced": "Weitere Modell-Einstellungen"

@@ -85,6 +85,11 @@
      "numGpu": {
        "label": "Num GPU",
        "placeholder": "Enter number of layers to send to GPU(s)"
      },
      "systemPrompt": {
        "label": "Temporary System Prompt",
        "placeholder": "Enter System Prompt",
        "help": "This is a quick way to set the system prompt in the current chat, which will override the selected system prompt if it exists."
      }
    },
    "advanced": "More Model Settings"

@@ -85,6 +85,11 @@
      "numGpu": {
        "label": "Num GPU",
        "placeholder": "Ingrese el número de capas para enviar a la(s) GPU(s)"
      },
      "systemPrompt": {
        "label": "Prompt de Sistema Temporal",
        "placeholder": "Ingrese el Prompt de Sistema",
        "help": "Esta es una forma rápida de establecer el prompt de sistema en el chat actual, que anulará el prompt de sistema seleccionado si existe."
      }
    },
    "advanced": "Más Configuraciones del Modelo"

@@ -85,6 +85,11 @@
      "numGpu": {
        "label": "Num GPU",
        "placeholder": "تعداد لایههایی که به GPU(ها) ارسال میشود را وارد کنید"
      },
      "systemPrompt": {
        "label": "پرامپت سیستم موقت",
        "placeholder": "پرامپت سیستم را وارد کنید",
        "help": "این یک روش سریع برای تنظیم پرامپت سیستم در گفتگوی فعلی است که در صورت وجود، پرامپت سیستم انتخاب شده را لغو خواهد کرد."
      }
    },
    "advanced": "تنظیمات بیشتر مدل"

@@ -85,6 +85,11 @@
      "numGpu": {
        "label": "Num GPU",
        "placeholder": "Entrez le nombre de couches à envoyer au(x) GPU(s)"
      },
      "systemPrompt": {
        "label": "Invite système temporaire",
        "placeholder": "Entrez l'invite système",
        "help": "C'est un moyen rapide de définir l'invite système dans le chat actuel, qui remplacera l'invite système sélectionnée si elle existe."
      }
    },
    "advanced": "Plus de paramètres du modèle"

@@ -84,7 +84,13 @@
      },
      "numGpu": {
        "label": "Num GPU",
        "placeholder": "Inserisci il numero di layer da inviare alla/e GPU" }
        "placeholder": "Inserisci il numero di layer da inviare alla/e GPU"
      },
      "systemPrompt": {
        "label": "Prompt di Sistema Temporaneo",
        "placeholder": "Inserisci il Prompt di Sistema",
        "help": "Questo è un modo rapido per impostare il prompt di sistema nella chat corrente, che sovrascriverà il prompt di sistema selezionato se esiste."
      }
    },
    "advanced": "Altre Impostazioni del Modello"
  },

@@ -85,6 +85,11 @@
      "numGpu": {
        "label": "Num GPU",
        "placeholder": "GPU(s)に送信するレイヤー数を入力してください"
      },
      "systemPrompt": {
        "label": "一時的なシステムプロンプト",
        "placeholder": "システムプロンプトを入力",
        "help": "これは現在のチャットでシステムプロンプトを素早く設定する方法で、選択されたシステムプロンプトが存在する場合はそれを上書きします。"
      }
    },
    "advanced": "その他のモデル設定"

@@ -84,6 +84,11 @@
      "numGpu": {
        "label": "ജിപിയു എണ്ണം",
        "placeholder": "ജിപിയു(കൾ)ക്ക് അയക്കേണ്ട ലേയറുകളുടെ എണ്ണം നൽകുക"
      },
      "systemPrompt": {
        "label": "താൽക്കാലിക സിസ്റ്റം പ്രോംപ്റ്റ്",
        "placeholder": "സിസ്റ്റം പ്രോംപ്റ്റ് നൽകുക",
        "help": "നിലവിലുള്ള ചാറ്റിൽ സിസ്റ്റം പ്രോംപ്റ്റ് സെറ്റ് ചെയ്യാനുള്ള വേഗത്തിലുള്ള മാർഗമാണിത്, ഇത് തിരഞ്ഞെടുത്ത സിസ്റ്റം പ്രോംപ്റ്റ് നിലവിലുണ്ടെങ്കിൽ അതിനെ മറികടക്കും."
      }
    },
    "advanced": "കൂടുതൽ മോഡൽ ക്രമീകരണങ്ങൾ"

@@ -85,6 +85,11 @@
      "numGpu": {
        "label": "Num GPUs",
        "placeholder": "Digite o número de camadas para enviar para a(s) GPU(s)"
      },
      "systemPrompt": {
        "label": "Prompt do Sistema Temporário",
        "placeholder": "Digite o Prompt do Sistema",
        "help": "Esta é uma maneira rápida de definir o prompt do sistema no chat atual, que substituirá o prompt do sistema selecionado, se existir."
      }
    },
    "advanced": "Mais Configurações do Modelo"

@@ -84,7 +84,13 @@
      },
      "numGpu": {
        "label": "Num GPU",
        "placeholder": "Введите количество слоев для отправки на GPU" }
        "placeholder": "Введите количество слоев для отправки на GPU"
      },
      "systemPrompt": {
        "label": "Временный системный запрос",
        "placeholder": "Введите системный запрос",
        "help": "Это быстрый способ установить системный запрос в текущем чате, который переопределит выбранный системный запрос, если он существует."
      }
    },
    "advanced": "Больше настроек модели"
  },

@@ -85,6 +85,11 @@
      "numGpu": {
        "label": "Num GPU",
        "placeholder": "输入要发送到 GPU 的层数"
      },
      "systemPrompt": {
        "label": "临时系统提示",
        "placeholder": "输入系统提示",
        "help": "这是一种在当前聊天中快速设置系统提示的方法,如果存在已选择的系统提示,它将覆盖该提示。"
      }
    },
    "advanced": "更多模型设置"

@@ -1,16 +1,29 @@
import { getAllModelSettings } from "@/services/model-settings"
import { useStoreChatModelSettings } from "@/store/model"
import { useQuery } from "@tanstack/react-query"
import { Collapse, Form, Input, InputNumber, Modal, Skeleton } from "antd"
import {
  Collapse,
  Drawer,
  Form,
  Input,
  InputNumber,
  Modal,
  Skeleton
} from "antd"
import React from "react"
import { useTranslation } from "react-i18next"

type Props = {
  open: boolean
  setOpen: (open: boolean) => void
  useDrawer?: boolean
}

export const CurrentChatModelSettings = ({ open, setOpen }: Props) => {
export const CurrentChatModelSettings = ({
  open,
  setOpen,
  useDrawer
}: Props) => {
  const { t } = useTranslation("common")
  const [form] = Form.useForm()
  const cUserSettings = useStoreChatModelSettings()
@@ -26,12 +39,151 @@ export const CurrentChatModelSettings = ({ open, setOpen }: Props) => {
        numCtx: cUserSettings.numCtx ?? data.numCtx,
        seed: cUserSettings.seed,
        numGpu: cUserSettings.numGpu ?? data.numGpu,
        systemPrompt: cUserSettings.systemPrompt ?? ""
      })
      return data
    },
    enabled: open,
    refetchOnMount: true
  })

  const renderBody = () => {
    return (
      <>
        {!isLoading ? (
          <Form
            onFinish={(values: {
              keepAlive: string
              temperature: number
              topK: number
              topP: number
            }) => {
              Object.entries(values).forEach(([key, value]) => {
                cUserSettings.setX(key, value)
                setOpen(false)
              })
            }}
            form={form}
            layout="vertical">
            {useDrawer && (
              <>
                <Form.Item
                  name="systemPrompt"
                  help={t("modelSettings.form.systemPrompt.help")}
                  label={t("modelSettings.form.systemPrompt.label")}>
                  <Input.TextArea
                    rows={4}
                    placeholder={t(
                      "modelSettings.form.systemPrompt.placeholder"
                    )}
                  />
                </Form.Item>
              </>
            )}
            <Form.Item
              name="keepAlive"
              help={t("modelSettings.form.keepAlive.help")}
              label={t("modelSettings.form.keepAlive.label")}>
              <Input
                placeholder={t("modelSettings.form.keepAlive.placeholder")}
              />
            </Form.Item>

            <Form.Item
              name="temperature"
              label={t("modelSettings.form.temperature.label")}>
              <InputNumber
                style={{ width: "100%" }}
                placeholder={t("modelSettings.form.temperature.placeholder")}
              />
            </Form.Item>
            <Form.Item
              name="seed"
              help={t("modelSettings.form.seed.help")}
              label={t("modelSettings.form.seed.label")}>
              <InputNumber
                style={{ width: "100%" }}
                placeholder={t("modelSettings.form.seed.placeholder")}
              />
            </Form.Item>
            <Form.Item
              name="numCtx"
              label={t("modelSettings.form.numCtx.label")}>
              <InputNumber
                style={{ width: "100%" }}
                placeholder={t("modelSettings.form.numCtx.placeholder")}
              />
            </Form.Item>

            <Collapse
              ghost
              className="border-none bg-transparent"
              items={[
                {
                  key: "1",
                  label: t("modelSettings.advanced"),
                  children: (
                    <React.Fragment>
                      <Form.Item
                        name="topK"
                        label={t("modelSettings.form.topK.label")}>
                        <InputNumber
                          style={{ width: "100%" }}
                          placeholder={t("modelSettings.form.topK.placeholder")}
                        />
                      </Form.Item>

                      <Form.Item
                        name="topP"
                        label={t("modelSettings.form.topP.label")}>
                        <InputNumber
                          style={{ width: "100%" }}
                          placeholder={t("modelSettings.form.topP.placeholder")}
                        />
                      </Form.Item>

                      <Form.Item
                        name="numGpu"
                        label={t("modelSettings.form.numGpu.label")}>
                        <InputNumber
                          style={{ width: "100%" }}
                          placeholder={t(
                            "modelSettings.form.numGpu.placeholder"
                          )}
                        />
                      </Form.Item>
                    </React.Fragment>
                  )
                }
              ]}
            />

            <button
              type="submit"
              className="inline-flex justify-center w-full text-center mt-3 items-center rounded-md border border-transparent bg-black px-2 py-2 text-sm font-medium leading-4 text-white shadow-sm hover:bg-gray-700 focus:outline-none focus:ring-2 focus:ring-indigo-500 focus:ring-offset-2 dark:bg-white dark:text-gray-800 dark:hover:bg-gray-100 dark:focus:ring-gray-500 dark:focus:ring-offset-gray-100 disabled:opacity-50 ">
              {t("save")}
            </button>
          </Form>
        ) : (
          <Skeleton active />
        )}
      </>
    )
  }

  if (useDrawer) {
    return (
      <Drawer
        placement="right"
        open={open}
        onClose={() => setOpen(false)}
        width={500}
        title={t("currentChatModelSettings")}>
        {renderBody()}
      </Drawer>
    )
  }

  return (
    <Modal
      title={t("currentChatModelSettings")}
@@ -39,111 +191,7 @@ export const CurrentChatModelSettings = ({ open, setOpen }: Props) => {
      onOk={() => setOpen(false)}
      onCancel={() => setOpen(false)}
      footer={null}>
      {!isLoading ? (
        <Form
          onFinish={(values: {
            keepAlive: string
            temperature: number
            topK: number
            topP: number
          }) => {
            Object.entries(values).forEach(([key, value]) => {
              cUserSettings.setX(key, value)
              setOpen(false)
            })
          }}
          form={form}
          layout="vertical">
          <Form.Item
            name="keepAlive"
            help={t("modelSettings.form.keepAlive.help")}
            label={t("modelSettings.form.keepAlive.label")}>
            <Input
              size="large"
              placeholder={t("modelSettings.form.keepAlive.placeholder")}
            />
          </Form.Item>

          <Form.Item
            name="temperature"
            label={t("modelSettings.form.temperature.label")}>
            <InputNumber
              size="large"
              style={{ width: "100%" }}
              placeholder={t("modelSettings.form.temperature.placeholder")}
            />
          </Form.Item>
          <Form.Item
            name="seed"
            help={t("modelSettings.form.seed.help")}
            label={t("modelSettings.form.seed.label")}>
            <InputNumber
              size="large"
              style={{ width: "100%" }}
              placeholder={t("modelSettings.form.seed.placeholder")}
            />
          </Form.Item>
          <Form.Item name="numCtx" label={t("modelSettings.form.numCtx.label")}>
            <InputNumber
              style={{ width: "100%" }}
              placeholder={t("modelSettings.form.numCtx.placeholder")}
              size="large"
            />
          </Form.Item>

          <Collapse
            ghost
            className="border-none bg-transparent"
            items={[
              {
                key: "1",
                label: t("modelSettings.advanced"),
                children: (
                  <React.Fragment>
                    <Form.Item
                      name="topK"
                      label={t("modelSettings.form.topK.label")}>
                      <InputNumber
                        style={{ width: "100%" }}
                        placeholder={t("modelSettings.form.topK.placeholder")}
                        size="large"
                      />
                    </Form.Item>

                    <Form.Item
                      name="topP"
                      label={t("modelSettings.form.topP.label")}>
                      <InputNumber
                        style={{ width: "100%" }}
                        size="large"
                        placeholder={t("modelSettings.form.topP.placeholder")}
                      />
                    </Form.Item>

                    <Form.Item
                      name="numGpu"
                      label={t("modelSettings.form.numGpu.label")}>
                      <InputNumber
                        style={{ width: "100%" }}
                        size="large"
                        placeholder={t("modelSettings.form.numGpu.placeholder")}
                      />
                    </Form.Item>
                  </React.Fragment>
                )
              }
            ]}
          />

          <button
            type="submit"
            className="inline-flex justify-center w-full text-center mt-4 items-center rounded-md border border-transparent bg-black px-2 py-2 text-sm font-medium leading-4 text-white shadow-sm hover:bg-gray-700 focus:outline-none focus:ring-2 focus:ring-indigo-500 focus:ring-offset-2 dark:bg-white dark:text-gray-800 dark:hover:bg-gray-100 dark:focus:ring-gray-500 dark:focus:ring-offset-gray-100 disabled:opacity-50 ">
            {t("save")}
          </button>
        </Form>
      ) : (
        <Skeleton active />
      )}
      {renderBody()}
    </Modal>
  )
}

@@ -7,12 +7,13 @@ import React from "react"
import { useMutation } from "@tanstack/react-query"
import { getPageShareUrl } from "~/services/ollama"
import { cleanUrl } from "~/libs/clean-url"
import { getUserId, saveWebshare } from "@/db"
import { getTitleById, getUserId, saveWebshare } from "@/db"
import { useTranslation } from "react-i18next"
import fetcher from "@/libs/fetcher"

type Props = {
  messages: Message[]
  historyId: string
}

const reformatMessages = (messages: Message[], username: string) => {
@@ -76,7 +77,7 @@ export const PlaygroundMessage = (
  )
}

export const ShareBtn: React.FC<Props> = ({ messages }) => {
export const ShareBtn: React.FC<Props> = ({ messages, historyId }) => {
  const { t } = useTranslation("common")
  const [open, setOpen] = useState(false)
  const [form] = Form.useForm()
@@ -84,11 +85,13 @@ export const ShareBtn: React.FC<Props> = ({ messages }) => {

  React.useEffect(() => {
    if (messages.length > 0) {
      form.setFieldsValue({
        title: messages[0].message
      getTitleById(historyId).then((title) => {
        form.setFieldsValue({
          title
        })
      })
    }
  }, [messages])
  }, [messages, historyId])

  const onSubmit = async (values: { title: string; name: string }) => {
    const owner_id = await getUserId()

@@ -46,6 +46,7 @@ export const Header: React.FC<Props> = ({
    setSelectedSystemPrompt,
    messages,
    streaming,
    historyId
  } = useMessageOption()
  const {
    data: models,
@@ -205,7 +206,9 @@ export const Header: React.FC<Props> = ({
        {pathname === "/" &&
          messages.length > 0 &&
          !streaming &&
          shareModeEnabled && <ShareBtn messages={messages} />}
          shareModeEnabled && <ShareBtn
            historyId={historyId}
            messages={messages} />}
        <Tooltip title={t("githubRepository")}>
          <a
            href="https://github.com/n4ze3m/page-assist"

@@ -1,12 +1,16 @@
import React, { useState } from "react"

import { Sidebar } from "../Option/Sidebar"
import { Drawer } from "antd"
import { Drawer, Tooltip } from "antd"

import { useTranslation } from "react-i18next"

import { CurrentChatModelSettings } from "../Common/Settings/CurrentChatModelSettings"
import { Header } from "./Header"
import { EraserIcon } from "lucide-react"
import { PageAssitDatabase } from "@/db"
import { useMessageOption } from "@/hooks/useMessageOption"
import { useQueryClient } from "@tanstack/react-query"

export default function OptionLayout({
  children
@@ -14,8 +18,10 @@ export default function OptionLayout({
  children: React.ReactNode
}) {
  const [sidebarOpen, setSidebarOpen] = useState(false)
  const { t } = useTranslation(["option", "common"])
  const { t } = useTranslation(["option", "common", "settings"])
  const [openModelSettings, setOpenModelSettings] = useState(false)
  const { clearChat } = useMessageOption()
  const queryClient = useQueryClient()

  return (
    <>
@@ -28,7 +34,38 @@ export default function OptionLayout({
      </div>

      <Drawer
        title={t("sidebarTitle")}
        title={
          <div className="flex items-center justify-between">
            {t("sidebarTitle")}

            <Tooltip
              title={t(
                "settings:generalSettings.system.deleteChatHistory.label"
              )}
              placement="right">
              <button
                onClick={async () => {
                  const confirm = window.confirm(
                    t(
                      "settings:generalSettings.system.deleteChatHistory.confirm"
                    )
                  )

                  if (confirm) {
                    const db = new PageAssitDatabase()
                    await db.deleteAllChatHistory()
                    queryClient.invalidateQueries({
                      queryKey: ["fetchChatHistory"]
                    })
                    clearChat()
                  }
                }}
                className="text-gray-600 hover:text-gray-800 dark:text-gray-300 dark:hover:text-gray-100">
                <EraserIcon className="size-5" />
              </button>
            </Tooltip>
          </div>
        }
        placement="left"
        closeIcon={null}
        onClose={() => setSidebarOpen(false)}
@@ -39,6 +76,7 @@ export default function OptionLayout({
      <CurrentChatModelSettings
        open={openModelSettings}
        setOpen={setOpenModelSettings}
        useDrawer
      />
    </>
  )

@@ -81,6 +81,12 @@ export class PageAssitDatabase {
    })
  }

  async getChatHistoryTitleById(id: string): Promise<string> {
    const chatHistories = await this.getChatHistories()
    const chatHistory = chatHistories.find((history) => history.id === id)
    return chatHistory?.title || ""
  }

  async addChatHistory(history: HistoryInfo) {
    const chatHistories = await this.getChatHistories()
    const newChatHistories = [history, ...chatHistories]
@@ -483,3 +489,10 @@ export const getRecentChatFromCopilot = async () => {

  return { history, messages }
}


export const getTitleById = async (id: string) => {
  const db = new PageAssitDatabase()
  const title = await db.getChatHistoryTitleById(id)
  return title
}

@@ -68,7 +68,7 @@ export const useMessageOption = () => {
  } = useStoreMessageOption()
  const currentChatModelSettings = useStoreChatModelSettings()
  const [selectedModel, setSelectedModel] = useStorage("selectedModel")
  const [ speechToTextLanguage, setSpeechToTextLanguage ] = useStorage(
  const [speechToTextLanguage, setSpeechToTextLanguage] = useStorage(
    "speechToTextLanguage",
    "en-US"
  )
@@ -450,7 +450,11 @@ export const useMessageOption = () => {
        )
      }

      if (selectedPrompt) {
      const isTempSystemprompt =
        currentChatModelSettings.systemPrompt &&
        currentChatModelSettings.systemPrompt?.trim().length > 0

      if (!isTempSystemprompt && selectedPrompt) {
        applicationChatHistory.unshift(
          new SystemMessage({
            content: [
@@ -463,6 +467,19 @@ export const useMessageOption = () => {
        )
      }

      if (isTempSystemprompt) {
        applicationChatHistory.unshift(
          new SystemMessage({
            content: [
              {
                text: currentChatModelSettings.systemPrompt,
                type: "text"
              }
            ]
          })
        )
      }

      const chunks = await ollama.stream(
        [...applicationChatHistory, humanMessage],
        {

@@ -63,6 +63,8 @@ type CurrentChatModelSettings = {

  setX: (key: string, value: any) => void
  reset: () => void
  systemPrompt?: string
  setSystemPrompt: (systemPrompt: string) => void
}

export const useStoreChatModelSettings = create<CurrentChatModelSettings>(
@@ -100,6 +102,8 @@ export const useStoreChatModelSettings = create<CurrentChatModelSettings>(
    setVocabOnly: (vocabOnly: boolean) => set({ vocabOnly }),
    seetSeed: (seed: number) => set({ seed }),
    setX: (key: string, value: any) => set({ [key]: value }),
    systemPrompt: undefined,
    setSystemPrompt: (systemPrompt: string) => set({ systemPrompt }),
    reset: () =>
      set({
        f16KV: undefined,
@@ -130,7 +134,8 @@ export const useStoreChatModelSettings = create<CurrentChatModelSettings>(
        useMLock: undefined,
        useMMap: undefined,
        vocabOnly: undefined,
        seed: undefined
        seed: undefined,
        systemPrompt: undefined
      })
  })
)

@@ -50,7 +50,7 @@ export default defineConfig({
  outDir: "build",

  manifest: {
    version: "1.2.3",
    version: "1.2.4",
    name:
      process.env.TARGET === "firefox"
        ? "Page Assist - A Web UI for Local AI Models"