Merge pull request #285 from n4ze3m/next

v1.4.1
This commit is contained in:
Muhammed Nazeem 2025-01-05 16:43:02 +05:30 committed by GitHub
commit d980b8da85
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
43 changed files with 588 additions and 121 deletions

View File

@ -334,6 +334,14 @@
"label": "عدد المستندات المسترجعة",
"placeholder": "أدخل عدد المستندات المسترجعة",
"required": "الرجاء إدخال عدد المستندات المسترجعة"
},
"splittingSeparator": {
"label": "الفاصل",
"placeholder": "أدخل الفاصل (مثال: \\n\\n)",
"required": "الرجاء إدخال الفاصل"
},
"splittingStrategy": {
"label": "مقسم النص"
}
},
"prompt": {
@ -355,4 +363,5 @@
},
"chromeAiSettings": {
"title": "إعدادات Chrome AI"
}}
}
}

View File

@ -331,6 +331,14 @@
"label": "Antal Hentede Dokumenter",
"placeholder": "Indtast antal hentede dokumenter",
"required": "Indtast venligst antallet af hentede dokumenter"
},
"splittingSeparator": {
"label": "Separator",
"placeholder": "Indtast Separator (f.eks. \\n\\n)",
"required": "Indtast venligst en separator"
},
"splittingStrategy": {
"label": "Tekst Splitter"
}
},
"prompt": {

View File

@ -331,6 +331,14 @@
"label": "Anzahl der abgerufenen Dokumente",
"placeholder": "Anzahl der abgerufenen Dokumente eingeben",
"required": "Bitte geben Sie die Anzahl der abgerufenen Dokumente ein"
},
"splittingSeparator": {
"label": "Separator",
"placeholder": "Separator eingeben (z.B. \\n\\n)",
"required": "Bitte geben Sie einen Separator ein"
},
"splittingStrategy": {
"label": "Text-Splitter"
}
},
"prompt": {

View File

@ -90,6 +90,21 @@
"useMMap": {
"label": "useMmap"
},
"tfsZ": {
"label": "TFS-Z",
"placeholder": "e.g. 1.0, 1.1"
},
"numKeep": {
"label": "Num Keep",
"placeholder": "e.g. 256, 512"
},
"numThread": {
"label": "Num Thread",
"placeholder": "e.g. 8, 16"
},
"useMlock": {
"label": "useMlock"
},
"minP": {
"label": "Min P",
"placeholder": "e.g. 0.05"

View File

@ -72,7 +72,7 @@
}
},
"braveApi": {
"label": "Brave API Key",
"label": "Brave API Key",
"placeholder": "Enter your Brave API key"
},
"googleDomain": {
@ -337,6 +337,14 @@
"label": "Number of Retrieved Documents",
"placeholder": "Enter Number of Retrieved Documents",
"required": "Please enter the number of retrieved documents"
},
"splittingSeparator": {
"label": "Separator",
"placeholder": "Enter Separator (e.g., \\n\\n)",
"required": "Please enter a separator"
},
"splittingStrategy": {
"label": "Text Splitter"
}
},
"prompt": {

View File

@ -331,6 +331,14 @@
"label": "Número de Documentos Recuperados",
"placeholder": "Ingrese el Número de Documentos Recuperados",
"required": "Por favor, ingrese el número de documentos recuperados"
},
"splittingSeparator": {
"label": "Separador",
"placeholder": "Ingrese el separador (ej., \\n\\n)",
"required": "Por favor, ingrese un separador"
},
"splittingStrategy": {
"label": "Divisor de Texto"
}
},
"prompt": {

View File

@ -327,6 +327,14 @@
"label": "تعداد اسناد بازیابی شده",
"placeholder": "تعداد اسناد بازیابی شده را وارد کنید",
"required": "لطفاً تعداد اسناد بازیابی شده را وارد کنید"
},
"splittingSeparator": {
"label": "جداکننده",
"placeholder": "جداکننده را وارد کنید (مثلاً \\n\\n)",
"required": "لطفاً یک جداکننده وارد کنید"
},
"splittingStrategy": {
"label": "تقسیم‌کننده متن"
}
},
"prompt": {

View File

@ -331,6 +331,14 @@
"label": "Nombre de documents récupérés",
"placeholder": "Entrez le nombre de documents récupérés",
"required": "Veuillez saisir le nombre de documents récupérés"
},
"splittingSeparator": {
"label": "Séparateur",
"placeholder": "Entrez le séparateur (par exemple, \\n\\n)",
"required": "Veuillez saisir un séparateur"
},
"splittingStrategy": {
"label": "Diviseur de texte"
}
},
"prompt": {

View File

@ -331,6 +331,14 @@
"label": "Numero di Documenti Recuperati",
"placeholder": "Inserisci il Numero di Documenti Recuperati",
"required": "Inserisci il numero di documenti recuperati"
},
"splittingSeparator": {
"label": "Separatore",
"placeholder": "Inserisci il Separatore (es. \\n\\n)",
"required": "Inserisci un separatore"
},
"splittingStrategy": {
"label": "Divisore di Testo"
}
},
"prompt": {

View File

@ -334,6 +334,14 @@
"label": "取得ドキュメント数",
"placeholder": "取得ドキュメント数を入力",
"required": "取得ドキュメント数を入力してください"
},
"splittingSeparator": {
"label": "セパレーター",
"placeholder": "セパレーターを入力(例:\\n\\n)",
"required": "セパレーターを入力してください"
},
"splittingStrategy": {
"label": "テキスト分割方式"
}
},
"prompt": {

View File

@ -334,6 +334,14 @@
"label": "검색 문서 수",
"placeholder": "검색 문서 수 입력",
"required": "검색 문서 수를 입력해주세요"
},
"splittingSeparator": {
"label": "구분자",
"placeholder": "구분자 입력 (예: \\n\\n)",
"required": "구분자를 입력해주세요"
},
"splittingStrategy": {
"label": "텍스트 분할기"
}
},
"prompt": {

View File

@ -334,6 +334,14 @@
"label": "വീണ്ടെടുത്ത രേഖകളുടെ എണ്ണം",
"placeholder": "വീണ്ടെടുത്ത രേഖകളുടെ എണ്ണം നൽകുക",
"required": "ദയവായി വീണ്ടെടുത്ത രേഖകളുടെ എണ്ണം നൽകുക"
},
"splittingSeparator": {
"label": "വിഭജന ചിഹ്നം",
"placeholder": "വിഭജന ചിഹ്നം നൽകുക (ഉദാ: \\n\\n)",
"required": "ദയവായി ഒരു വിഭജന ചിഹ്നം നൽകുക"
},
"splittingStrategy": {
"label": "ടെക്സ്റ്റ് സ്പ്ലിറ്റർ"
}
},
"prompt": {

View File

@ -331,6 +331,14 @@
"label": "Antall hentede dokumenter",
"placeholder": "Skriv inn antall hentede dokumenter",
"required": "Vennligst skriv inn antall hentede dokumenter"
},
"splittingSeparator": {
"label": "Separator",
"placeholder": "Skriv inn separator (f.eks. \\n\\n)",
"required": "Vennligst skriv inn en separator"
},
"splittingStrategy": {
"label": "Tekstdeler"
}
},
"prompt": {

View File

@ -331,6 +331,14 @@
"label": "Número de Documentos Recuperados",
"placeholder": "Digite o Número de Documentos Recuperados",
"required": "Por favor, insira o número de documentos recuperados"
},
"splittingSeparator": {
"label": "Separador",
"placeholder": "Digite o Separador (ex: \\n\\n)",
"required": "Por favor, insira um separador"
},
"splittingStrategy": {
"label": "Divisor de Texto"
}
},
"prompt": {

View File

@ -333,6 +333,14 @@
"label": "Количество извлеченных документов",
"placeholder": "Введите количество извлеченных документов",
"required": "Пожалуйста, введите количество извлеченных документов"
},
"splittingSeparator": {
"label": "Разделитель",
"placeholder": "Введите разделитель (например, \\n\\n)",
"required": "Пожалуйста, введите разделитель"
},
"splittingStrategy": {
"label": "Разделитель текста"
}
},
"prompt": {

View File

@ -331,6 +331,14 @@
"label": "Antal hämtade dokument",
"placeholder": "Ange antal hämtade dokument",
"required": "Vänligen ange antal hämtade dokument"
},
"splittingSeparator": {
"label": "Separator",
"placeholder": "Ange separator (t.ex. \\n\\n)",
"required": "Vänligen ange en separator"
},
"splittingStrategy": {
"label": "Textdelare"
}
},
"prompt": {

View File

@ -331,6 +331,14 @@
"label": "Кількість отриманих документів",
"placeholder": "Ввести кількість отриманих документів",
"required": "Будь ласка, введіть кількість документів"
},
"splittingSeparator": {
"label": "Роздільник",
"placeholder": "Введіть роздільник (напр., \\n\\n)",
"required": "Будь ласка, введіть роздільник"
},
"splittingStrategy": {
"label": "Розділювач тексту"
}
},
"prompt": {

View File

@ -336,6 +336,14 @@
"label": "检索文档数量",
"placeholder": "输入检索文档数量",
"required": "请输入检索文档数量"
},
"splittingSeparator": {
"label": "分隔符",
"placeholder": "输入分隔符(例如:\\n\\n)",
"required": "请输入分隔符"
},
"splittingStrategy": {
"label": "文本分割器"
}
},
"prompt": {

View File

@ -8,6 +8,7 @@ import { TogtherMonoIcon } from "../Icons/Togther"
import { OpenRouterIcon } from "../Icons/OpenRouter"
import { LLamaFile } from "../Icons/Llamafile"
import { GeminiIcon } from "../Icons/GeminiIcon"
import { MistarlIcon } from "../Icons/Mistral"
export const ProviderIcons = ({
provider,
@ -37,6 +38,8 @@ export const ProviderIcons = ({
return <LLamaFile className={className} />
case "gemini":
return <GeminiIcon className={className} />
case "mistral":
return <MistarlIcon className={className} />
default:
return <OllamaIcon className={className} />
}

View File

@ -13,9 +13,8 @@ import {
Modal,
Skeleton,
Switch,
Button
} from "antd"
import React, { useState, useCallback } from "react"
import React, { useCallback } from "react"
import { useTranslation } from "react-i18next"
import { SaveButton } from "../SaveButton"
@ -79,7 +78,11 @@ export const CurrentChatModelSettings = ({
useMMap: cUserSettings.useMMap ?? data.useMMap,
minP: cUserSettings.minP ?? data.minP,
repeatLastN: cUserSettings.repeatLastN ?? data.repeatLastN,
repeatPenalty: cUserSettings.repeatPenalty ?? data.repeatPenalty
repeatPenalty: cUserSettings.repeatPenalty ?? data.repeatPenalty,
useMlock: cUserSettings.useMlock ?? data.useMlock,
tfsZ: cUserSettings.tfsZ ?? data.tfsZ,
numKeep: cUserSettings.numKeep ?? data.numKeep,
numThread: cUserSettings.numThread ?? data.numThread
})
return data
},
@ -230,11 +233,44 @@ export const CurrentChatModelSettings = ({
)}
/>
</Form.Item>
<Form.Item
name="tfsZ"
label={t("modelSettings.form.tfsZ.label")}>
<InputNumber
style={{ width: "100%" }}
placeholder={t("modelSettings.form.tfsZ.placeholder")}
/>
</Form.Item>
<Form.Item
name="numKeep"
label={t("modelSettings.form.numKeep.label")}>
<InputNumber
style={{ width: "100%" }}
placeholder={t(
"modelSettings.form.numKeep.placeholder"
)}
/>
</Form.Item>
<Form.Item
name="numThread"
label={t("modelSettings.form.numThread.label")}>
<InputNumber
style={{ width: "100%" }}
placeholder={t(
"modelSettings.form.numThread.placeholder"
)}
/>
</Form.Item>
<Form.Item
name="useMMap"
label={t("modelSettings.form.useMMap.label")}>
<Switch />
</Form.Item>
<Form.Item
name="useMlock"
label={t("modelSettings.form.useMlock.label")}>
<Switch />
</Form.Item>
</React.Fragment>
)
}

View File

@ -0,0 +1,32 @@
import React from "react"
// Mistral AI brand icon, rendered as an inline SVG so it can be tinted and
// sized via the usual SVG props. The ref is forwarded to the root <svg>
// element so callers can measure or focus it.
// NOTE(review): the component name is misspelled ("Mistarl" vs "Mistral").
// It is imported under this exact name in ProviderIcons, so renaming it is a
// cross-file change — confirm all import sites before cleaning up.
export const MistarlIcon = React.forwardRef<
SVGSVGElement,
React.SVGProps<SVGSVGElement>
>((props, ref) => {
return (
<svg
{...props}
fill="currentColor"
fillRule="evenodd"
ref={ref}
style={{ flex: "none", lineHeight: 1, ...props.style }}
viewBox="0 0 24 24"
xmlns="http://www.w3.org/2000/svg">
{/* Brand artwork: stacked color bands of the Mistral logo. */}
<g fill="none" fillRule="nonzero">
<path
d="M15 6v4h-2V6h2zm4-4v4h-2V2h2zM3 2H1h2zM1 2h2v20H1V2zm8 12h2v4H9v-4zm8 0h2v8h-2v-8z"
fill="#000"
/>
<path d="M19 2h4v4h-4V2zM3 2h4v4H3V2z" fill="#F7D046" />
<path d="M15 10V6h8v4h-8zM3 10V6h8v4H3z" fill="#F2A73B" />
<path d="M3 14v-4h20v4z" fill="#EE792F" />
<path
d="M11 14h4v4h-4v-4zm8 0h4v4h-4v-4zM3 14h4v4H3v-4z"
fill="#EB5829"
/>
<path d="M19 18h4v4h-4v-4zM3 18h4v4H3v-4z" fill="#EA3326" />
</g>
</svg>
)
})

View File

@ -150,11 +150,44 @@ export const ModelSettings = () => {
)}
/>
</Form.Item>
<Form.Item
name="tfsZ"
label={t("modelSettings.form.tfsZ.label")}>
<InputNumber
style={{ width: "100%" }}
placeholder={t("modelSettings.form.tfsZ.placeholder")}
/>
</Form.Item>
<Form.Item
name="numKeep"
label={t("modelSettings.form.numKeep.label")}>
<InputNumber
style={{ width: "100%" }}
placeholder={t(
"modelSettings.form.numKeep.placeholder"
)}
/>
</Form.Item>
<Form.Item
name="numThread"
label={t("modelSettings.form.numThread.label")}>
<InputNumber
style={{ width: "100%" }}
placeholder={t(
"modelSettings.form.numThread.placeholder"
)}
/>
</Form.Item>
<Form.Item
name="useMMap"
label={t("modelSettings.form.useMMap.label")}>
<Switch />
</Form.Item>
<Form.Item
name="useMlock"
label={t("modelSettings.form.useMlock.label")}>
<Switch />
</Form.Item>
</React.Fragment>
)
}

View File

@ -1,10 +1,12 @@
import { useMutation, useQuery, useQueryClient } from "@tanstack/react-query"
import { Form, InputNumber, Select, Skeleton } from "antd"
import { Form, Input, InputNumber, Select, Skeleton } from "antd"
import { SaveButton } from "~/components/Common/SaveButton"
import {
defaultEmbeddingChunkOverlap,
defaultEmbeddingChunkSize,
defaultEmbeddingModelForRag,
defaultSplittingStrategy,
defaultSsplttingSeparator,
getEmbeddingModels,
saveForRag
} from "~/services/ollama"
@ -16,7 +18,8 @@ import { ProviderIcons } from "@/components/Common/ProviderIcon"
export const RagSettings = () => {
const { t } = useTranslation("settings")
const [form] = Form.useForm()
const splittingStrategy = Form.useWatch("splittingStrategy", form)
const queryClient = useQueryClient()
const { data: ollamaInfo, status } = useQuery({
@ -28,14 +31,18 @@ export const RagSettings = () => {
chunkSize,
defaultEM,
totalFilePerKB,
noOfRetrievedDocs
noOfRetrievedDocs,
splittingStrategy,
splittingSeparator
] = await Promise.all([
getEmbeddingModels({ returnEmpty: true }),
defaultEmbeddingChunkOverlap(),
defaultEmbeddingChunkSize(),
defaultEmbeddingModelForRag(),
getTotalFilePerKB(),
getNoOfRetrievedDocs()
getNoOfRetrievedDocs(),
defaultSplittingStrategy(),
defaultSsplttingSeparator()
])
return {
models: allModels,
@ -43,7 +50,9 @@ export const RagSettings = () => {
chunkSize,
defaultEM,
totalFilePerKB,
noOfRetrievedDocs
noOfRetrievedDocs,
splittingStrategy,
splittingSeparator
}
}
})
@ -55,13 +64,17 @@ export const RagSettings = () => {
overlap: number
totalFilePerKB: number
noOfRetrievedDocs: number
strategy: string
separator: string
}) => {
await saveForRag(
data.model,
data.chunkSize,
data.overlap,
data.totalFilePerKB,
data.noOfRetrievedDocs
data.noOfRetrievedDocs,
data.strategy,
data.separator
)
return true
},
@ -85,6 +98,7 @@ export const RagSettings = () => {
<div className="border border-b border-gray-200 dark:border-gray-600 mt-3 mb-6"></div>
</div>
<Form
form={form}
layout="vertical"
onFinish={(data) => {
saveRAG({
@ -92,7 +106,9 @@ export const RagSettings = () => {
chunkSize: data.chunkSize,
overlap: data.chunkOverlap,
totalFilePerKB: data.totalFilePerKB,
noOfRetrievedDocs: data.noOfRetrievedDocs
noOfRetrievedDocs: data.noOfRetrievedDocs,
separator: data.splittingSeparator,
strategy: data.splittingStrategy
})
}}
initialValues={{
@ -100,7 +116,9 @@ export const RagSettings = () => {
chunkOverlap: ollamaInfo?.chunkOverlap,
defaultEM: ollamaInfo?.defaultEM,
totalFilePerKB: ollamaInfo?.totalFilePerKB,
noOfRetrievedDocs: ollamaInfo?.noOfRetrievedDocs
noOfRetrievedDocs: ollamaInfo?.noOfRetrievedDocs,
splittingStrategy: ollamaInfo?.splittingStrategy,
splittingSeparator: ollamaInfo?.splittingSeparator
}}>
<Form.Item
name="defaultEM"
@ -140,6 +158,50 @@ export const RagSettings = () => {
/>
</Form.Item>
<Form.Item
name="splittingStrategy"
label={t("rag.ragSettings.splittingStrategy.label")}
rules={[
{
required: true,
message: t("rag.ragSettings.model.required")
}
]}>
<Select
size="large"
showSearch
style={{ width: "100%" }}
className="mt-4"
options={[
"RecursiveCharacterTextSplitter",
"CharacterTextSplitter"
].map((e) => ({
label: e,
value: e
}))}
/>
</Form.Item>
{splittingStrategy !== "RecursiveCharacterTextSplitter" && (
<Form.Item
name="splittingSeparator"
label={t("rag.ragSettings.splittingSeparator.label")}
rules={[
{
required: true,
message: t("rag.ragSettings.splittingSeparator.required")
}
]}>
<Input
size="large"
style={{ width: "100%" }}
placeholder={t(
"rag.ragSettings.splittingSeparator.placeholder"
)}
/>
</Form.Item>
)}
<Form.Item
name="chunkSize"
label={t("rag.ragSettings.chunkSize.label")}

View File

@ -150,7 +150,15 @@ export const useMessage = () => {
userDefaultModelSettings?.repeatLastN,
repeatPenalty:
currentChatModelSettings?.repeatPenalty ??
userDefaultModelSettings?.repeatPenalty
userDefaultModelSettings?.repeatPenalty,
tfsZ: currentChatModelSettings?.tfsZ ?? userDefaultModelSettings?.tfsZ,
numKeep:
currentChatModelSettings?.numKeep ?? userDefaultModelSettings?.numKeep,
numThread:
currentChatModelSettings?.numThread ??
userDefaultModelSettings?.numThread,
useMlock:
currentChatModelSettings?.useMlock ?? userDefaultModelSettings?.useMlock
})
let newMessage: Message[] = []
@ -293,7 +301,18 @@ export const useMessage = () => {
userDefaultModelSettings?.repeatLastN,
repeatPenalty:
currentChatModelSettings?.repeatPenalty ??
userDefaultModelSettings?.repeatPenalty
userDefaultModelSettings?.repeatPenalty,
tfsZ:
currentChatModelSettings?.tfsZ ?? userDefaultModelSettings?.tfsZ,
numKeep:
currentChatModelSettings?.numKeep ??
userDefaultModelSettings?.numKeep,
numThread:
currentChatModelSettings?.numThread ??
userDefaultModelSettings?.numThread,
useMlock:
currentChatModelSettings?.useMlock ??
userDefaultModelSettings?.useMlock
})
const response = await questionOllama.invoke(promptForQuestion)
query = response.content.toString()
@ -514,7 +533,15 @@ export const useMessage = () => {
userDefaultModelSettings?.repeatLastN,
repeatPenalty:
currentChatModelSettings?.repeatPenalty ??
userDefaultModelSettings?.repeatPenalty
userDefaultModelSettings?.repeatPenalty,
tfsZ: currentChatModelSettings?.tfsZ ?? userDefaultModelSettings?.tfsZ,
numKeep:
currentChatModelSettings?.numKeep ?? userDefaultModelSettings?.numKeep,
numThread:
currentChatModelSettings?.numThread ??
userDefaultModelSettings?.numThread,
useMlock:
currentChatModelSettings?.useMlock ?? userDefaultModelSettings?.useMlock
})
let newMessage: Message[] = []
@ -758,7 +785,15 @@ export const useMessage = () => {
userDefaultModelSettings?.repeatLastN,
repeatPenalty:
currentChatModelSettings?.repeatPenalty ??
userDefaultModelSettings?.repeatPenalty
userDefaultModelSettings?.repeatPenalty,
tfsZ: currentChatModelSettings?.tfsZ ?? userDefaultModelSettings?.tfsZ,
numKeep:
currentChatModelSettings?.numKeep ?? userDefaultModelSettings?.numKeep,
numThread:
currentChatModelSettings?.numThread ??
userDefaultModelSettings?.numThread,
useMlock:
currentChatModelSettings?.useMlock ?? userDefaultModelSettings?.useMlock
})
let newMessage: Message[] = []
@ -997,7 +1032,15 @@ export const useMessage = () => {
userDefaultModelSettings?.repeatLastN,
repeatPenalty:
currentChatModelSettings?.repeatPenalty ??
userDefaultModelSettings?.repeatPenalty
userDefaultModelSettings?.repeatPenalty,
tfsZ: currentChatModelSettings?.tfsZ ?? userDefaultModelSettings?.tfsZ,
numKeep:
currentChatModelSettings?.numKeep ?? userDefaultModelSettings?.numKeep,
numThread:
currentChatModelSettings?.numThread ??
userDefaultModelSettings?.numThread,
useMlock:
currentChatModelSettings?.useMlock ?? userDefaultModelSettings?.useMlock
})
let newMessage: Message[] = []
@ -1087,7 +1130,18 @@ export const useMessage = () => {
userDefaultModelSettings?.repeatLastN,
repeatPenalty:
currentChatModelSettings?.repeatPenalty ??
userDefaultModelSettings?.repeatPenalty
userDefaultModelSettings?.repeatPenalty,
tfsZ:
currentChatModelSettings?.tfsZ ?? userDefaultModelSettings?.tfsZ,
numKeep:
currentChatModelSettings?.numKeep ??
userDefaultModelSettings?.numKeep,
numThread:
currentChatModelSettings?.numThread ??
userDefaultModelSettings?.numThread,
useMlock:
currentChatModelSettings?.useMlock ??
userDefaultModelSettings?.useMlock
})
const response = await questionOllama.invoke(promptForQuestion)
query = response.content.toString()
@ -1286,7 +1340,15 @@ export const useMessage = () => {
userDefaultModelSettings?.repeatLastN,
repeatPenalty:
currentChatModelSettings?.repeatPenalty ??
userDefaultModelSettings?.repeatPenalty
userDefaultModelSettings?.repeatPenalty,
tfsZ: currentChatModelSettings?.tfsZ ?? userDefaultModelSettings?.tfsZ,
numKeep:
currentChatModelSettings?.numKeep ?? userDefaultModelSettings?.numKeep,
numThread:
currentChatModelSettings?.numThread ??
userDefaultModelSettings?.numThread,
useMlock:
currentChatModelSettings?.useMlock ?? userDefaultModelSettings?.useMlock
})
let newMessage: Message[] = []

View File

@ -141,7 +141,15 @@ export const useMessageOption = () => {
userDefaultModelSettings?.repeatLastN,
repeatPenalty:
currentChatModelSettings?.repeatPenalty ??
userDefaultModelSettings?.repeatPenalty
userDefaultModelSettings?.repeatPenalty,
tfsZ: currentChatModelSettings?.tfsZ ?? userDefaultModelSettings?.tfsZ,
numKeep:
currentChatModelSettings?.numKeep ?? userDefaultModelSettings?.numKeep,
numThread:
currentChatModelSettings?.numThread ??
userDefaultModelSettings?.numThread,
useMlock:
currentChatModelSettings?.useMlock ?? userDefaultModelSettings?.useMlock
})
let newMessage: Message[] = []
@ -231,7 +239,18 @@ export const useMessageOption = () => {
userDefaultModelSettings?.repeatLastN,
repeatPenalty:
currentChatModelSettings?.repeatPenalty ??
userDefaultModelSettings?.repeatPenalty
userDefaultModelSettings?.repeatPenalty,
tfsZ:
currentChatModelSettings?.tfsZ ?? userDefaultModelSettings?.tfsZ,
numKeep:
currentChatModelSettings?.numKeep ??
userDefaultModelSettings?.numKeep,
numThread:
currentChatModelSettings?.numThread ??
userDefaultModelSettings?.numThread,
useMlock:
currentChatModelSettings?.useMlock ??
userDefaultModelSettings?.useMlock
})
const response = await questionOllama.invoke(promptForQuestion)
query = response.content.toString()
@ -464,7 +483,15 @@ export const useMessageOption = () => {
userDefaultModelSettings?.repeatLastN,
repeatPenalty:
currentChatModelSettings?.repeatPenalty ??
userDefaultModelSettings?.repeatPenalty
userDefaultModelSettings?.repeatPenalty,
tfsZ: currentChatModelSettings?.tfsZ ?? userDefaultModelSettings?.tfsZ,
numKeep:
currentChatModelSettings?.numKeep ?? userDefaultModelSettings?.numKeep,
numThread:
currentChatModelSettings?.numThread ??
userDefaultModelSettings?.numThread,
useMlock:
currentChatModelSettings?.useMlock ?? userDefaultModelSettings?.useMlock
})
let newMessage: Message[] = []
@ -719,7 +746,15 @@ export const useMessageOption = () => {
userDefaultModelSettings?.repeatLastN,
repeatPenalty:
currentChatModelSettings?.repeatPenalty ??
userDefaultModelSettings?.repeatPenalty
userDefaultModelSettings?.repeatPenalty,
tfsZ: currentChatModelSettings?.tfsZ ?? userDefaultModelSettings?.tfsZ,
numKeep:
currentChatModelSettings?.numKeep ?? userDefaultModelSettings?.numKeep,
numThread:
currentChatModelSettings?.numThread ??
userDefaultModelSettings?.numThread,
useMlock:
currentChatModelSettings?.useMlock ?? userDefaultModelSettings?.useMlock
})
let newMessage: Message[] = []
@ -825,7 +860,18 @@ export const useMessageOption = () => {
userDefaultModelSettings?.repeatLastN,
repeatPenalty:
currentChatModelSettings?.repeatPenalty ??
userDefaultModelSettings?.repeatPenalty
userDefaultModelSettings?.repeatPenalty,
tfsZ:
currentChatModelSettings?.tfsZ ?? userDefaultModelSettings?.tfsZ,
numKeep:
currentChatModelSettings?.numKeep ??
userDefaultModelSettings?.numKeep,
numThread:
currentChatModelSettings?.numThread ??
userDefaultModelSettings?.numThread,
useMlock:
currentChatModelSettings?.useMlock ??
userDefaultModelSettings?.useMlock
})
const response = await questionOllama.invoke(promptForQuestion)
query = response.content.toString()

View File

@ -1,11 +1,6 @@
import { getKnowledgeById, updateKnowledgeStatus } from "@/db/knowledge"
import { PageAssistPDFUrlLoader } from "@/loader/pdf-url"
import {
defaultEmbeddingChunkOverlap,
defaultEmbeddingChunkSize,
getOllamaURL
} from "@/services/ollama"
import { RecursiveCharacterTextSplitter } from "langchain/text_splitter"
import { getOllamaURL } from "@/services/ollama"
import { PageAssistVectorStore } from "./PageAssistVectorStore"
import { PageAssisCSVUrlLoader } from "@/loader/csv"
import { PageAssisTXTUrlLoader } from "@/loader/txt"
@ -13,7 +8,7 @@ import { PageAssistDocxLoader } from "@/loader/docx"
import { cleanUrl } from "./clean-url"
import { sendEmbeddingCompleteNotification } from "./send-notification"
import { pageAssistEmbeddingModel } from "@/models/embedding"
import { getPageAssistTextSplitter } from "@/utils/text-splitter"
export const processKnowledge = async (msg: any, id: string): Promise<void> => {
console.log(`Processing knowledge with id: ${id}`)
@ -32,12 +27,8 @@ export const processKnowledge = async (msg: any, id: string): Promise<void> => {
baseUrl: cleanUrl(ollamaUrl),
model: knowledge.embedding_model
})
const chunkSize = await defaultEmbeddingChunkSize()
const chunkOverlap = await defaultEmbeddingChunkOverlap()
const textSplitter = new RecursiveCharacterTextSplitter({
chunkSize,
chunkOverlap
})
const textSplitter = await getPageAssistTextSplitter()
for (const doc of knowledge.source) {
if (doc.type === "pdf" || doc.type === "application/pdf") {
@ -65,13 +56,15 @@ export const processKnowledge = async (msg: any, id: string): Promise<void> => {
knownledge_id: knowledge.id,
file_id: doc.source_id
})
} else if (doc.type === "docx" || doc.type === "application/vnd.openxmlformats-officedocument.wordprocessingml.document") {
} else if (
doc.type === "docx" ||
doc.type ===
"application/vnd.openxmlformats-officedocument.wordprocessingml.document"
) {
try {
const loader = new PageAssistDocxLoader({
fileName: doc.filename,
buffer: await toArrayBufferFromBase64(
doc.content
)
buffer: await toArrayBufferFromBase64(doc.content)
})
let docs = await loader.load()

View File

@ -103,6 +103,8 @@ export class ChatOllama
useMMap?: boolean;
useMlock?: boolean;
vocabOnly?: boolean;
seed?: number;
@ -148,6 +150,7 @@ export class ChatOllama
this.typicalP = fields.typicalP;
this.useMLock = fields.useMLock;
this.useMMap = fields.useMMap;
this.useMlock = fields.useMlock;
this.vocabOnly = fields.vocabOnly;
this.format = fields.format;
this.seed = fields.seed;
@ -210,7 +213,7 @@ export class ChatOllama
top_p: this.topP,
min_p: this.minP,
typical_p: this.typicalP,
use_mlock: this.useMLock,
use_mlock: this.useMlock,
use_mmap: this.useMMap,
vocab_only: this.vocabOnly,
seed: this.seed,

View File

@ -20,7 +20,11 @@ export const pageAssistModel = async ({
useMMap,
minP,
repeatLastN,
repeatPenalty
repeatPenalty,
tfsZ,
numKeep,
numThread,
useMlock,
}: {
model: string
baseUrl: string
@ -36,6 +40,10 @@ export const pageAssistModel = async ({
minP?: number
repeatPenalty?: number
repeatLastN?: number
tfsZ?: number,
numKeep?: number,
numThread?: number,
useMlock?: boolean,
}) => {
if (model === "chrome::gemini-nano::page-assist") {
return new ChatChromeAI({
@ -80,7 +88,7 @@ export const pageAssistModel = async ({
}
}) as any
}
console.log('useMlock', useMlock)
return new ChatOllama({
baseUrl,
keepAlive,
@ -96,5 +104,9 @@ export const pageAssistModel = async ({
minP: minP,
repeatPenalty: repeatPenalty,
repeatLastN: repeatLastN,
tfsZ,
numKeep,
numThread,
useMlock
})
}

View File

@ -40,6 +40,7 @@ export interface OllamaInput {
useMLock?: boolean
useMMap?: boolean
vocabOnly?: boolean
useMlock?: boolean
seed?: number
format?: StringWithAutocomplete<"json">
}

View File

@ -33,6 +33,7 @@ type ModelSettings = {
useMMap?: boolean
vocabOnly?: boolean
minP?: number
useMlock?: boolean
}
const keys = [
@ -65,6 +66,7 @@ const keys = [
"useMMap",
"vocabOnly",
"minP",
"useMlock"
]
export const getAllModelSettings = async () => {

View File

@ -8,6 +8,9 @@ import { ollamaFormatAllCustomModels } from "@/db/models"
const storage = new Storage()
const storage2 = new Storage({
area: "local"
})
const DEFAULT_OLLAMA_URL = "http://127.0.0.1:11434"
const DEFAULT_ASK_FOR_MODEL_SELECTION_EVERY_TIME = true
@ -310,6 +313,22 @@ export const defaultEmbeddingChunkSize = async () => {
return parseInt(embeddingChunkSize)
}
// Returns the persisted text-splitting strategy name, falling back to the
// recursive splitter when nothing (or an empty string) has been stored yet.
export const defaultSplittingStrategy = async () => {
  const stored = await storage.get("defaultSplittingStrategy")
  return stored?.length ? stored : "RecursiveCharacterTextSplitter"
}
// Returns the persisted splitting separator in its escaped form
// (e.g. the literal two-character sequences "\\n\\n"); consumers unescape it.
// NOTE(review): the function name is misspelled ("Spltting"); it is exported
// and imported elsewhere under this name, so renaming is a breaking change.
export const defaultSsplttingSeparator = async () => {
  const stored = await storage.get("defaultSplittingSeparator")
  return stored?.length ? stored : "\\n\\n"
}
export const defaultEmbeddingChunkOverlap = async () => {
const embeddingChunkOverlap = await storage.get(
"defaultEmbeddingChunkOverlap"
@ -320,6 +339,14 @@ export const defaultEmbeddingChunkOverlap = async () => {
return parseInt(embeddingChunkOverlap)
}
// Persists the text-splitting strategy name used when embedding documents.
export const setDefaultSplittingStrategy = async (strategy: string) => {
  await storage.set("defaultSplittingStrategy", strategy)
}
// Persists the splitting separator (stored in escaped form, e.g. "\\n\\n").
export const setDefaultSplittingSeparator = async (separator: string) => {
  await storage.set("defaultSplittingSeparator", separator)
}
export const setDefaultEmbeddingModelForRag = async (model: string) => {
await storage.set("defaultEmbeddingModel", model)
}
@ -337,7 +364,9 @@ export const saveForRag = async (
chunkSize: number,
overlap: number,
totalFilePerKB: number,
noOfRetrievedDocs?: number
noOfRetrievedDocs?: number,
strategy?: string,
separator?: string
) => {
await setDefaultEmbeddingModelForRag(model)
await setDefaultEmbeddingChunkSize(chunkSize)
@ -346,6 +375,12 @@ export const saveForRag = async (
if (noOfRetrievedDocs) {
await setNoOfRetrievedDocs(noOfRetrievedDocs)
}
if (strategy) {
await setDefaultSplittingStrategy(strategy)
}
if (separator) {
await setDefaultSplittingSeparator(separator)
}
}
export const getWebSearchPrompt = async () => {

View File

@ -66,6 +66,8 @@ type CurrentChatModelSettings = {
reset: () => void
systemPrompt?: string
setSystemPrompt: (systemPrompt: string) => void
useMlock?: boolean
setUseMlock: (useMlock: boolean) => void
setMinP: (minP: number) => void
}
@ -108,6 +110,7 @@ export const useStoreChatModelSettings = create<CurrentChatModelSettings>(
systemPrompt: undefined,
setMinP: (minP: number) => set({ minP }),
setSystemPrompt: (systemPrompt: string) => set({ systemPrompt }),
setUseMlock: (useMlock: boolean) => set({ useMlock }),
reset: () =>
set({
f16KV: undefined,
@ -141,6 +144,7 @@ export const useStoreChatModelSettings = create<CurrentChatModelSettings>(
seed: undefined,
systemPrompt: undefined,
minP: undefined,
useMlock: undefined,
})
})
)

View File

@ -1,12 +1,8 @@
import { PageAssistHtmlLoader } from "~/loader/html"
import { RecursiveCharacterTextSplitter } from "langchain/text_splitter"
import {
defaultEmbeddingChunkOverlap,
defaultEmbeddingChunkSize
} from "@/services/ollama"
import { PageAssistPDFLoader } from "@/loader/pdf"
import { PAMemoryVectorStore } from "@/libs/PAMemoryVectorStore"
import { getPageAssistTextSplitter } from "./text-splitter"
export const getLoader = ({
html,
@ -54,12 +50,7 @@ export const memoryEmbedding = async ({
setIsEmbedding(true)
const loader = getLoader({ html, pdf, type, url })
const docs = await loader.load()
const chunkSize = await defaultEmbeddingChunkSize()
const chunkOverlap = await defaultEmbeddingChunkOverlap()
const textSplitter = new RecursiveCharacterTextSplitter({
chunkSize,
chunkOverlap
})
const textSplitter = await getPageAssistTextSplitter()
const chunks = await textSplitter.splitDocuments(docs)

View File

@ -48,5 +48,10 @@ export const OAI_API_PROVIDERS = [
label: "Google AI",
value: "gemini",
baseUrl: "https://generativelanguage.googleapis.com/v1beta/openai"
},
{
label: "Mistral",
value: "mistral",
baseUrl: "https://api.mistral.ai/v1"
}
]

View File

@ -0,0 +1,37 @@
import {
RecursiveCharacterTextSplitter,
CharacterTextSplitter
} from "langchain/text_splitter"
import {
defaultEmbeddingChunkOverlap,
defaultEmbeddingChunkSize,
defaultSsplttingSeparator,
defaultSplittingStrategy
} from "@/services/ollama"
/**
 * Builds the text splitter used for document embedding, honouring the
 * user-configured strategy, chunk size and chunk overlap.
 *
 * The stored separator is in escaped form (e.g. "\\n\\n"), so escape
 * sequences are converted to real control characters before being handed
 * to the CharacterTextSplitter.
 *
 * @returns a CharacterTextSplitter when that strategy is selected,
 *          otherwise a RecursiveCharacterTextSplitter (the default).
 */
export const getPageAssistTextSplitter = async () => {
  const chunkSize = await defaultEmbeddingChunkSize()
  const chunkOverlap = await defaultEmbeddingChunkOverlap()
  const splittingStrategy = await defaultSplittingStrategy()

  switch (splittingStrategy) {
    case "CharacterTextSplitter": {
      // Block scope keeps these lexical declarations from leaking across
      // cases (avoids the no-case-declarations hazard); debug logging removed.
      const splittingSeparator = await defaultSsplttingSeparator()
      const processedSeparator = splittingSeparator
        .replace(/\\n/g, "\n")
        .replace(/\\t/g, "\t")
        .replace(/\\r/g, "\r")
      return new CharacterTextSplitter({
        chunkSize,
        chunkOverlap,
        separator: processedSeparator
      })
    }
    default:
      return new RecursiveCharacterTextSplitter({
        chunkSize,
        chunkOverlap
      })
  }
}

View File

@ -2,15 +2,13 @@ import { cleanUrl } from "~/libs/clean-url"
import { getIsSimpleInternetSearch, totalSearchResults, getBraveApiKey } from "@/services/search"
import { pageAssistEmbeddingModel } from "@/models/embedding"
import type { Document } from "@langchain/core/documents"
import { RecursiveCharacterTextSplitter } from "langchain/text_splitter"
import { MemoryVectorStore } from "langchain/vectorstores/memory"
import { PageAssistHtmlLoader } from "~/loader/html"
import {
defaultEmbeddingChunkOverlap,
defaultEmbeddingChunkSize,
defaultEmbeddingModelForRag,
getOllamaURL
} from "~/services/ollama"
import { getPageAssistTextSplitter } from "@/utils/text-splitter"
interface BraveAPIResult {
title: string
@ -70,12 +68,7 @@ export const braveAPISearch = async (query: string) => {
baseUrl: cleanUrl(ollamaUrl)
})
const chunkSize = await defaultEmbeddingChunkSize()
const chunkOverlap = await defaultEmbeddingChunkOverlap()
const textSplitter = new RecursiveCharacterTextSplitter({
chunkSize,
chunkOverlap
})
const textSplitter = await getPageAssistTextSplitter()
const chunks = await textSplitter.splitDocuments(docs)
const store = new MemoryVectorStore(ollamaEmbedding)

View File

@ -3,8 +3,6 @@ import { urlRewriteRuntime } from "@/libs/runtime"
import { PageAssistHtmlLoader } from "@/loader/html"
import { pageAssistEmbeddingModel } from "@/models/embedding"
import {
defaultEmbeddingChunkOverlap,
defaultEmbeddingChunkSize,
defaultEmbeddingModelForRag,
getOllamaURL
} from "@/services/ollama"
@ -12,10 +10,10 @@ import {
getIsSimpleInternetSearch,
totalSearchResults
} from "@/services/search"
import { getPageAssistTextSplitter } from "@/utils/text-splitter"
import type { Document } from "@langchain/core/documents"
import * as cheerio from "cheerio"
import { RecursiveCharacterTextSplitter } from "langchain/text_splitter"
import { MemoryVectorStore } from "langchain/vectorstores/memory"
export const localBraveSearch = async (query: string) => {
@ -87,12 +85,8 @@ export const webBraveSearch = async (query: string) => {
baseUrl: cleanUrl(ollamaUrl)
})
const chunkSize = await defaultEmbeddingChunkSize()
const chunkOverlap = await defaultEmbeddingChunkOverlap()
const textSplitter = new RecursiveCharacterTextSplitter({
chunkSize,
chunkOverlap
})
const textSplitter = await getPageAssistTextSplitter();
const chunks = await textSplitter.splitDocuments(docs)

View File

@ -3,8 +3,6 @@ import { urlRewriteRuntime } from "@/libs/runtime"
import { PageAssistHtmlLoader } from "@/loader/html"
import { pageAssistEmbeddingModel } from "@/models/embedding"
import {
defaultEmbeddingChunkOverlap,
defaultEmbeddingChunkSize,
defaultEmbeddingModelForRag,
getOllamaURL
} from "@/services/ollama"
@ -12,9 +10,9 @@ import {
getIsSimpleInternetSearch,
totalSearchResults
} from "@/services/search"
import { getPageAssistTextSplitter } from "@/utils/text-splitter"
import type { Document } from "@langchain/core/documents"
import * as cheerio from "cheerio"
import { RecursiveCharacterTextSplitter } from "langchain/text_splitter"
import { MemoryVectorStore } from "langchain/vectorstores/memory"
export const localDuckDuckGoSearch = async (query: string) => {
@ -90,12 +88,7 @@ export const webDuckDuckGoSearch = async (query: string) => {
baseUrl: cleanUrl(ollamaUrl)
})
const chunkSize = await defaultEmbeddingChunkSize()
const chunkOverlap = await defaultEmbeddingChunkOverlap()
const textSplitter = new RecursiveCharacterTextSplitter({
chunkSize,
chunkOverlap
})
const textSplitter = await getPageAssistTextSplitter()
const chunks = await textSplitter.splitDocuments(docs)

View File

@ -4,15 +4,13 @@ import {
getIsSimpleInternetSearch,
totalSearchResults
} from "@/services/search"
import { getPageAssistTextSplitter } from "@/utils/text-splitter"
import type { Document } from "@langchain/core/documents"
import { RecursiveCharacterTextSplitter } from "langchain/text_splitter"
import { MemoryVectorStore } from "langchain/vectorstores/memory"
import { cleanUrl } from "~/libs/clean-url"
import { urlRewriteRuntime } from "~/libs/runtime"
import { PageAssistHtmlLoader } from "~/loader/html"
import {
defaultEmbeddingChunkOverlap,
defaultEmbeddingChunkSize,
defaultEmbeddingModelForRag,
getOllamaURL
} from "~/services/ollama"
@ -91,13 +89,9 @@ export const webGoogleSearch = async (query: string) => {
baseUrl: cleanUrl(ollamaUrl)
})
const chunkSize = await defaultEmbeddingChunkSize()
const chunkOverlap = await defaultEmbeddingChunkOverlap()
const textSplitter = new RecursiveCharacterTextSplitter({
chunkSize,
chunkOverlap
})
const textSplitter = await getPageAssistTextSplitter()
const chunks = await textSplitter.splitDocuments(docs)
const store = new MemoryVectorStore(ollamaEmbedding)

View File

@ -3,15 +3,13 @@ import { cleanUrl } from "~/libs/clean-url"
import { getSearxngURL, isSearxngJSONMode, getIsSimpleInternetSearch, totalSearchResults } from "@/services/search"
import { pageAssistEmbeddingModel } from "@/models/embedding"
import type { Document } from "@langchain/core/documents"
import { RecursiveCharacterTextSplitter } from "langchain/text_splitter"
import { MemoryVectorStore } from "langchain/vectorstores/memory"
import { PageAssistHtmlLoader } from "~/loader/html"
import {
defaultEmbeddingChunkOverlap,
defaultEmbeddingChunkSize,
defaultEmbeddingModelForRag,
getOllamaURL
} from "~/services/ollama"
import { getPageAssistTextSplitter } from "@/utils/text-splitter"
interface SearxNGJSONResult {
title: string
@ -73,13 +71,9 @@ export const searxngSearch = async (query: string) => {
baseUrl: cleanUrl(ollamaUrl)
})
const chunkSize = await defaultEmbeddingChunkSize()
const chunkOverlap = await defaultEmbeddingChunkOverlap()
const textSplitter = new RecursiveCharacterTextSplitter({
chunkSize,
chunkOverlap
})
const textSplitter = await getPageAssistTextSplitter();
const chunks = await textSplitter.splitDocuments(docs)
const store = new MemoryVectorStore(ollamaEmbedding)
await store.addDocuments(chunks)

View File

@ -3,8 +3,6 @@ import { urlRewriteRuntime } from "@/libs/runtime"
import { PageAssistHtmlLoader } from "@/loader/html"
import { pageAssistEmbeddingModel } from "@/models/embedding"
import {
defaultEmbeddingChunkOverlap,
defaultEmbeddingChunkSize,
defaultEmbeddingModelForRag,
getOllamaURL
} from "@/services/ollama"
@ -12,9 +10,9 @@ import {
getIsSimpleInternetSearch,
totalSearchResults
} from "@/services/search"
import { getPageAssistTextSplitter } from "@/utils/text-splitter"
import type { Document } from "@langchain/core/documents"
import * as cheerio from "cheerio"
import { RecursiveCharacterTextSplitter } from "langchain/text_splitter"
import { MemoryVectorStore } from "langchain/vectorstores/memory"
const getCorrectTargeUrl = async (url: string) => {
if (!url) return ""
@ -104,12 +102,7 @@ export const webSogouSearch = async (query: string) => {
baseUrl: cleanUrl(ollamaUrl)
})
const chunkSize = await defaultEmbeddingChunkSize()
const chunkOverlap = await defaultEmbeddingChunkOverlap()
const textSplitter = new RecursiveCharacterTextSplitter({
chunkSize,
chunkOverlap
})
const textSplitter = await getPageAssistTextSplitter()
const chunks = await textSplitter.splitDocuments(docs)

View File

@ -1,8 +1,9 @@
import { cleanUrl } from "@/libs/clean-url"
import { PageAssistHtmlLoader } from "@/loader/html"
import { pageAssistEmbeddingModel } from "@/models/embedding"
import { defaultEmbeddingChunkOverlap, defaultEmbeddingChunkSize, defaultEmbeddingModelForRag, getOllamaURL } from "@/services/ollama"
import { RecursiveCharacterTextSplitter } from "langchain/text_splitter"
import { defaultEmbeddingModelForRag, getOllamaURL } from "@/services/ollama"
import { getPageAssistTextSplitter } from "@/utils/text-splitter"
import { MemoryVectorStore } from "langchain/vectorstores/memory"
export const processSingleWebsite = async (url: string, query: string) => {
@ -20,12 +21,8 @@ export const processSingleWebsite = async (url: string, query: string) => {
baseUrl: cleanUrl(ollamaUrl)
})
const chunkSize = await defaultEmbeddingChunkSize()
const chunkOverlap = await defaultEmbeddingChunkOverlap()
const textSplitter = new RecursiveCharacterTextSplitter({
chunkSize,
chunkOverlap
})
const textSplitter = await getPageAssistTextSplitter()
const chunks = await textSplitter.splitDocuments(docs)

View File

@ -51,7 +51,7 @@ export default defineConfig({
outDir: "build",
manifest: {
version: "1.4.0",
version: "1.4.1",
name:
process.env.TARGET === "firefox"
? "Page Assist - A Web UI for Local AI Models"