commit bd5b3f91db

@@ -21,6 +21,9 @@
"copilotResumeLastChat": {
"label": "Genoptag den sidste chat, når du åbner SidePanel (copilot)"
},
"webUIResumeLastChat": {
"label": "Genoptag den sidste chat, når du åbner Web UI'en"
},
"hideCurrentChatModelSettings": {
"label": "Skjul nuværende chat model indstillinger"
},
@@ -62,6 +65,11 @@
},
"visitSpecificWebsite": {
"label": "Besøg websitet nævnt i samtalen"
},
"searxng": {
"url": {
"label": "SearXNG URL"
}
}
},
"system": {

@@ -21,6 +21,9 @@
"copilotResumeLastChat": {
"label": "Letzten Chat beim Öffnen des Seitenpanels fortsetzen (Copilot)"
},
"webUIResumeLastChat": {
"label": "Letzten Chat beim Öffnen der Web-UI fortsetzen"
},
"hideCurrentChatModelSettings": {
"label": "Aktuelle Chat-Modell-Einstellungen ausblenden"
},
@@ -62,6 +65,11 @@
},
"visitSpecificWebsite": {
"label": "Die in der Nachricht erwähnte Website besuchen"
},
"searxng": {
"url": {
"label": "SearXNG-URL"
}
}
},
"system": {

@@ -87,6 +87,9 @@
"label": "Top P",
"placeholder": "Enter Top P value (e.g. 0.9, 0.95)"
},
"useMMap": {
"label": "useMmap"
},
"numGpu": {
"label": "Num GPU",
"placeholder": "Enter number of layers to send to GPU(s)"

@@ -21,6 +21,9 @@
"copilotResumeLastChat": {
"label": "Resume the last chat when opening the SidePanel (Copilot)"
},
"webUIResumeLastChat": {
"label": "Resume the last chat when opening the Web UI"
},
"hideCurrentChatModelSettings": {
"label": "Hide the current Chat Model Settings"
},
@@ -33,8 +36,8 @@
"generateTitle": {
"label": "Generate Title using AI"
},
"ollamaStatus" :{
"label":"Enable or disable Ollama connection status check"
"ollamaStatus": {
"label": "Enable or disable Ollama connection status check"
}
},
"sidepanelRag": {
@@ -62,6 +65,11 @@
},
"visitSpecificWebsite": {
"label": "Visit the website mentioned in the message"
},
"searxng": {
"url": {
"label": "SearXNG URL"
}
}
},
"system": {

@@ -21,6 +21,9 @@
"copilotResumeLastChat": {
"label": "Retomar el último chat al abrir el Panel Lateral (Copilot)"
},
"webUIResumeLastChat": {
"label": "Retomar el último chat al abrir la Interfaz Web"
},
"hideCurrentChatModelSettings": {
"label": "Ocultar Configuraciones del Modelo de Chat Actual"
},
@@ -62,6 +65,11 @@
},
"visitSpecificWebsite": {
"label": "Visita el sitio web mencionado en el mensaje"
},
"searxng": {
"url": {
"label": "URL de SearXNG"
}
}
},
"system": {
@@ -62,6 +62,11 @@
},
"visitSpecificWebsite": {
"label": "مراجعه به وب سایت ذکر شده در پیام"
},
"searxng": {
"url": {
"label": "آدرس SearXNG"
}
}
},
"system": {

@@ -21,6 +21,9 @@
"copilotResumeLastChat": {
"label": "Reprendre la dernière conversation lors de l'ouverture du sidepanel (Copilot)"
},
"webUIResumeLastChat": {
"label": "Reprendre la dernière conversation lors de l'ouverture de l'interface Web"
},
"hideCurrentChatModelSettings": {
"label": "Masquer les paramètres actuels du modèle de chat"
},
@@ -62,6 +65,11 @@
},
"visitSpecificWebsite": {
"label": "Visitez le site web mentionné dans le message"
},
"searxng": {
"url": {
"label": "URL SearXNG"
}
}
},
"system": {

@@ -21,6 +21,9 @@
"copilotResumeLastChat": {
"label": "Riprendi l'ultima chat quando apri il Pannello Laterale (Copilot)"
},
"webUIResumeLastChat": {
"label": "Riprendi l'ultima chat quando apri l'interfaccia Web"
},
"hideCurrentChatModelSettings": {
"label": "Nascondi le impostazioni correnti del modello Chat"
},
@@ -62,6 +65,11 @@
},
"visitSpecificWebsite": {
"label": "Visita il sito web menzionato nel messaggio"
},
"searxng": {
"url": {
"label": "URL SearXNG"
}
}
},
"system": {

@@ -24,6 +24,9 @@
"copilotResumeLastChat": {
"label": "サイドパネルを開いたときに最後のチャットを再開 (Copilot)"
},
"webUIResumeLastChat": {
"label": "Web UIを開いたときに最後のチャットを再開"
},
"hideCurrentChatModelSettings": {
"label": "現在のチャットモデル設定を非表示"
},
@@ -65,6 +68,11 @@
},
"visitSpecificWebsite": {
"label": "メッセージに記載されたウェブサイトを訪問してください"
},
"searxng": {
"url": {
"label": "SearXNG URL"
}
}
},
"system": {

@@ -24,6 +24,9 @@
"copilotResumeLastChat": {
"label": "사이드 패널을 열 때 마지막 채팅 재개 (Copilot)"
},
"webUIResumeLastChat": {
"label": "웹 UI를 열 때 마지막 채팅 재개"
},
"hideCurrentChatModelSettings": {
"label": "현재 채팅 모델 설정 숨기기"
},
@@ -65,6 +68,11 @@
},
"visitSpecificWebsite": {
"label": "메시지에 언급된 웹사이트 방문"
},
"searxng": {
"url": {
"label": "SearXNG URL"
}
}
},
"system": {

@@ -24,6 +24,9 @@
"copilotResumeLastChat": {
"label": "സൈഡ്പാനൽ തുറക്കുമ്പോൾ അവസാനത്തെ ചാറ്റ് പുനരാരംഭിക്കുക (Copilot)"
},
"webUIResumeLastChat": {
"label": "വെബ് UI തുറക്കുമ്പോൾ അവസാനത്തെ ചാറ്റ് പുനരാരംഭിക്കുക"
},
"hideCurrentChatModelSettings": {
"label": "നിലവിലുള്ള ചാറ്റ് മോഡൽ ക്രമീകരണങ്ങൾ മറയ്ക്കുക"
},
@@ -65,6 +68,11 @@
},
"visitSpecificWebsite": {
"label": "സന്ദേശത്തിൽ പറയുന്ന വെബ്സൈറ്റ് സന്ദർശിക്കുക."
},
"searxng": {
"url": {
"label": "SearXNG URL"
}
}
},
"system": {
@@ -21,6 +21,9 @@
"copilotResumeLastChat": {
"label": "Gjenoppta siste chat ved åpning av SidePanel (copilot)"
},
"webUIResumeLastChat": {
"label": "Gjenoppta siste chat når Web UI åpnes"
},
"hideCurrentChatModelSettings": {
"label": "Skjul gjeldende chat modell innstillinger"
},
@@ -62,6 +65,11 @@
},
"visitSpecificWebsite": {
"label": "Besøk nettstedet nevnt i samtalen"
},
"searxng": {
"url": {
"label": "SearXNG URL"
}
}
},
"system": {
@@ -241,25 +249,104 @@
},
"ollamaSettings": {
"title": "Ollama Innstillinger",
"heading": "Konfigurer Ollama Plugin",
"enabled": {
"label": "Aktiver Ollama Plugin"
},
"provider": {
"label": "Ollama Tilbyder",
"placeholder": "Velg en Ollama tilbyder"
},
"apiKey": {
"label": "Ollama API Nøkkel",
"placeholder": "Skriv inn Ollama API Nøkkel",
"required": "API Nøkkel er påkrevd"
},
"saveBtn": {
"save": "Lagre"
},
"notification": {
"saveSuccess": "Ollama Plugin lagret vellykket",
"someError": "Noe gikk galt. Vennligst prøv igjen senere"
"heading": "Konfigurer Ollama",
"settings": {
"ollamaUrl": {
"label": "Ollama URL",
"placeholder": "Skriv inn Ollama URL"
},
"advanced": {
"label": "Avansert Ollama URL-konfigurasjon",
"urlRewriteEnabled": {
"label": "Aktiver eller deaktiver tilpasset opprinnelses-URL"
},
"rewriteUrl": {
"label": "Tilpasset opprinnelses-URL",
"placeholder": "Skriv inn tilpasset opprinnelses-URL"
},
"headers": {
"label": "Tilpass Headers",
"LeggTil": "Legg til Header",
"key": {
"label": "Header Nøkkel",
"placeholder": "Autorisasjon"
},
"value": {
"label": "Header Verdi",
"placeholder": "Bearer token"
}
},
"help": "Hvis du har forbindelsesproblemer med Ollama på Page Assist, kan du konfigurere en brukerdefinert opprinnelses-URL. For mer informasjon om konfigurasjonen, <anchor>klikk her</anchor>."
}
}
},
"manageSearch": {
"title": "Administrer Web Søk",
"heading": "Konfigurer Web Søk"
},
"about": {
"title": "Om",
"heading": "Om",
"chromeVersion": "Page Assist Versjon",
"ollamaVersion": "Ollama Versjon",
"support": "Du kan støtte Page Assist-prosjektet ved å donere eller sponse via følgende plattformer:",
"koFi": "Støtt på Ko-fi",
"githubSponsor": "Spons på GitHub",
"githubRepo": "GitHub Repository"
},
"manageKnowledge": {
"title": "Administrer Kunnskap",
"heading": "Konfigurer Kunnskapsbase"
},
"rag": {
"title": "RAG Innstillinger",
"ragSettings": {
"label": "RAG Innstillinger",
"model": {
"label": "Embedding Modell",
"required": "Vennligst velg en modell",
"help": "Det anbefales sterkt å bruke embeddingsmodeller som `nomic-embed-text`.",
"placeholder": "Velg en modell"
},
"chunkSize": {
"label": "Delstørrelse",
"placeholder": "Skriv inn delstørrelse",
"required": "Vennligst skriv inn en delstørrelse"
},
"chunkOverlap": {
"label": "Deloverlapp",
"placeholder": "Skriv inn deloverlapp",
"required": "Vennligst skriv inn deloverlapp"
},
"totalFilePerKB": {
"label": "Kunnskapsbase standard filopplastingsgrense",
"placeholder": "Skriv inn standard filopplastingsgrense (f.eks. 10)",
"required": "Vennligst skriv inn standard filopplastingsgrense"
},
"noOfRetrievedDocs": {
"label": "Antall hentede dokumenter",
"placeholder": "Skriv inn antall hentede dokumenter",
"required": "Vennligst skriv inn antall hentede dokumenter"
}
},
"prompt": {
"label": "Konfigurer RAG Prompt",
"option1": "Normal",
"option2": "Web",
"alert": "Konfigurering av systemprompt her er foreldet. Vennligst bruk Administrer Prompts-seksjonen for å legge til eller endre prompts. Denne seksjonen vil bli fjernet i fremtidige versjoner.",
"systemPrompt": "Systemprompt",
"systemPromptPlaceholder": "Skriv inn systemprompt",
"webSearchPrompt": "Websøke-prompt",
"webSearchPromptHelp": "Ikke fjern `{search_results}` fra prompten.",
"webSearchPromptError": "Vennligst skriv inn en websøke-prompt",
"webSearchPromptPlaceholder": "Skriv inn websøke-prompt",
"webSearchFollowUpPrompt": "Oppfølgingsprompt for websøking",
"webSearchFollowUpPromptHelp": "Ikke fjern `{chat_history}` og `{question}` fra prompten.",
"webSearchFollowUpPromptError": "Vennligst skriv inn din oppfølgingsprompt for websøking!",
"webSearchFollowUpPromptPlaceholder": "Din oppfølgingsprompt for websøking"
}
},
"chromeAiSettings": {
"title": "Chrome AI Innstillinger"
}
}
@@ -21,6 +21,9 @@
"copilotResumeLastChat": {
"label": "Retomar o último chat ao abrir o Painel Lateral (Copilot)"
},
"webUIResumeLastChat": {
"label": "Retomar o último chat ao abrir a Interface Web"
},
"hideCurrentChatModelSettings": {
"label": "Ocultar as Configurações Atuais do Modelo de Chat"
},
@@ -62,6 +65,11 @@
},
"visitSpecificWebsite": {
"label": "Visitar o site mencionado na mensagem"
},
"searxng": {
"url": {
"label": "URL do SearXNG"
}
}
},
"system": {

@@ -21,6 +21,9 @@
"copilotResumeLastChat": {
"label": "Возобновить последний чат при открытии боковой панели (Copilot)"
},
"webUIResumeLastChat": {
"label": "Возобновить последний чат при открытии веб-интерфейса"
},
"hideCurrentChatModelSettings": {
"label": "Скрыть текущие настройки модели чата"
},
@@ -63,6 +66,11 @@
},
"visitSpecificWebsite": {
"label": "Посетите веб-сайт, указанный в сообщении."
},
"searxng": {
"url": {
"label": "URL-адрес SearXNG"
}
}
},
"system": {

@@ -21,6 +21,9 @@
"copilotResumeLastChat": {
"label": "Återuppta den senaste chatten när du öppnar sidopanelen (Copilot)"
},
"webUIResumeLastChat": {
"label": "Återuppta den senaste chatten när du öppnar webbgränssnittet"
},
"hideCurrentChatModelSettings": {
"label": "Göm de nuvarande chattmodellinställningarna"
},
@@ -62,6 +65,11 @@
},
"visitSpecificWebsite": {
"label": "Besök webbplatsen som nämns i meddelandet"
},
"searxng": {
"url": {
"label": "SearXNG URL"
}
}
},
"system": {

@@ -21,6 +21,9 @@
"copilotResumeLastChat": {
"label": "Поновити останню розмову при відкритті бічної панелі (Copilot)"
},
"webUIResumeLastChat": {
"label": "Поновити останню розмову при відкритті веб-інтерфейсу"
},
"hideCurrentChatModelSettings": {
"label": "Приховати налаштування поточної моделі чату"
},
@@ -62,6 +65,11 @@
},
"visitSpecificWebsite": {
"label": "Відвідати веб-сайт, згаданий у повідомленні"
},
"searxng": {
"url": {
"label": "SearXNG URL-адреса"
}
}
},
"system": {

@@ -24,6 +24,9 @@
"copilotResumeLastChat": {
"label": "打开侧边栏时恢复上次聊天(Copilot)"
},
"webUIResumeLastChat": {
"label": "打开Web UI时恢复上次聊天"
},
"hideCurrentChatModelSettings": {
"label": "隐藏当前聊天模型设置"
},
@@ -65,6 +68,11 @@
},
"visitSpecificWebsite": {
"label": "访问消息中提到的网站。"
},
"searxng": {
"url": {
"label": "SearXNG 网址"
}
}
},
"system": {
@@ -5,15 +5,19 @@ import { useStoreChatModelSettings } from "@/store/model"
import { useQuery } from "@tanstack/react-query"
import {
Collapse,
Divider,
Drawer,
Form,
Input,
InputNumber,
Modal,
Skeleton
Skeleton,
Switch,
Button
} from "antd"
import React from "react"
import React, { useState, useCallback } from "react"
import { useTranslation } from "react-i18next"
import { SaveButton } from "../SaveButton"

type Props = {
open: boolean
@@ -30,12 +34,31 @@ export const CurrentChatModelSettings = ({
const [form] = Form.useForm()
const cUserSettings = useStoreChatModelSettings()
const { selectedSystemPrompt } = useMessageOption()

const savePrompt = useCallback(
(value: string) => {
cUserSettings.setX("systemPrompt", value)
},
[cUserSettings]
)

const saveSettings = useCallback(
(values: any) => {
Object.entries(values).forEach(([key, value]) => {
if (key !== "systemPrompt") {
cUserSettings.setX(key, value)
}
})
},
[cUserSettings]
)

const { isPending: isLoading } = useQuery({
queryKey: ["fetchModelConfig2", open],
queryFn: async () => {
const data = await getAllModelSettings()

let tempSystemPrompt = "";
let tempSystemPrompt = ""

// i hate this method but i need this feature so badly that i need to do this
if (selectedSystemPrompt) {
@@ -52,7 +75,8 @@ export const CurrentChatModelSettings = ({
seed: cUserSettings.seed,
numGpu: cUserSettings.numGpu ?? data.numGpu,
numPredict: cUserSettings.numPredict ?? data.numPredict,
systemPrompt: cUserSettings.systemPrompt ?? tempSystemPrompt
systemPrompt: cUserSettings.systemPrompt ?? tempSystemPrompt,
useMMap: cUserSettings.useMMap ?? data.useMMap
})
return data
},
@@ -61,25 +85,17 @@ export const CurrentChatModelSettings = ({
refetchOnWindowFocus: false
})

const renderBody = () => {
return (
<>
{!isLoading ? (
<Form
onFinish={(values: {
keepAlive: string
temperature: number
topK: number
topP: number
}) => {
Object.entries(values).forEach(([key, value]) => {
cUserSettings.setX(key, value)
setOpen(false)
})
}}
form={form}
layout="vertical">
layout="vertical"
onFinish={(values) => {
saveSettings(values)
setOpen(false)
}}>
{useDrawer && (
<>
<Form.Item
@@ -91,8 +107,10 @@ export const CurrentChatModelSettings = ({
placeholder={t(
"modelSettings.form.systemPrompt.placeholder"
)}
onChange={(e) => savePrompt(e.target.value)}
/>
</Form.Item>
<Divider />
</>
)}
<Form.Item
@@ -112,6 +130,7 @@ export const CurrentChatModelSettings = ({
placeholder={t("modelSettings.form.temperature.placeholder")}
/>
</Form.Item>

<Form.Item
name="seed"
help={t("modelSettings.form.seed.help")}
@@ -121,6 +140,7 @@ export const CurrentChatModelSettings = ({
placeholder={t("modelSettings.form.seed.placeholder")}
/>
</Form.Item>

<Form.Item
name="numCtx"
label={t("modelSettings.form.numCtx.label")}>
@@ -139,6 +159,8 @@ export const CurrentChatModelSettings = ({
/>
</Form.Item>

<Divider />

<Collapse
ghost
className="border-none bg-transparent"
@@ -176,17 +198,18 @@ export const CurrentChatModelSettings = ({
)}
/>
</Form.Item>

<Form.Item
name="useMMap"
label={t("modelSettings.form.useMMap.label")}>
<Switch />
</Form.Item>
</React.Fragment>
)
}
]}
/>

<button
type="submit"
className="inline-flex justify-center w-full text-center mt-3 items-center rounded-md border border-transparent bg-black px-2 py-2 text-sm font-medium leading-4 text-white shadow-sm hover:bg-gray-700 focus:outline-none focus:ring-2 focus:ring-indigo-500 focus:ring-offset-2 dark:bg-white dark:text-gray-800 dark:hover:bg-gray-100 dark:focus:ring-gray-500 dark:focus:ring-offset-gray-100 disabled:opacity-50 ">
{t("save")}
</button>
<SaveButton className="w-full text-center inline-flex items-center justify-center" btnType="submit" />
</Form>
) : (
<Skeleton active />
@@ -2,11 +2,18 @@ import React from "react"
import { PlaygroundForm } from "./PlaygroundForm"
import { PlaygroundChat } from "./PlaygroundChat"
import { useMessageOption } from "@/hooks/useMessageOption"
import { webUIResumeLastChat } from "@/services/app"
import {
formatToChatHistory,
formatToMessage,
getRecentChatFromWebUI
} from "@/db"

export const Playground = () => {
const drop = React.useRef<HTMLDivElement>(null)
const [dropedFile, setDropedFile] = React.useState<File | undefined>()
const { selectedKnowledge } = useMessageOption()
const { selectedKnowledge, messages, setHistoryId, setHistory, setMessages } =
useMessageOption()

const [dropState, setDropState] = React.useState<
"idle" | "dragging" | "error"
@@ -71,6 +78,26 @@ export const Playground = () => {
}
}
}, [selectedKnowledge])

const setRecentMessagesOnLoad = async () => {
const isEnabled = await webUIResumeLastChat()
if (!isEnabled) {
return
}
if (messages.length === 0) {
const recentChat = await getRecentChatFromWebUI()
if (recentChat) {
setHistoryId(recentChat.history.id)
setHistory(formatToChatHistory(recentChat.messages))
setMessages(formatToMessage(recentChat.messages))
}
}
}

React.useEffect(() => {
setRecentMessagesOnLoad()
}, [])

return (
<div
ref={drop}
@@ -78,7 +105,7 @@ export const Playground = () => {
dropState === "dragging" ? "bg-gray-100 dark:bg-gray-800 z-10" : ""
} bg-white dark:bg-[#171717]`}>
<PlaygroundChat />

<div className="flex flex-col items-center">
<div className="flex-grow">
<div className="w-full flex justify-center">

@@ -39,6 +39,13 @@ export const PlaygroundEmpty = () => {
enabled: checkOllamaStatus
})

useEffect(() => {
if (ollamaInfo?.ollamaURL) {
setOllamaURL(ollamaInfo.ollamaURL)
}
}, [ollamaInfo])

if (!checkOllamaStatus) {
return (
<div className="mx-auto sm:max-w-xl px-4 mt-10">

@@ -27,6 +27,11 @@ export const GeneralSettings = () => {
false
)

const [webUIResumeLastChat, setWebUIResumeLastChat] = useStorage(
"webUIResumeLastChat",
false
)

const [restoreLastChatModel, setRestoreLastChatModel] = useStorage(
"restoreLastChatModel",
false
@@ -113,6 +118,17 @@ export const GeneralSettings = () => {
onChange={(checked) => setCopilotResumeLastChat(checked)}
/>
</div>
<div className="flex flex-row justify-between">
<div className="inline-flex items-center gap-2">
<span className="text-gray-700 dark:text-neutral-50">
{t("generalSettings.settings.webUIResumeLastChat.label")}
</span>
</div>
<Switch
checked={webUIResumeLastChat}
onChange={(checked) => setWebUIResumeLastChat(checked)}
/>
</div>
<div className="flex flex-row justify-between">
<div className="inline-flex items-center gap-2">
<span className="text-gray-700 dark:text-neutral-50">
@@ -2,7 +2,7 @@ import { BetaTag } from "@/components/Common/Beta"
import { SaveButton } from "@/components/Common/SaveButton"
import { getAllModelSettings, setModelSetting } from "@/services/model-settings"
import { useQuery, useQueryClient } from "@tanstack/react-query"
import { Form, Skeleton, Input, InputNumber, Collapse } from "antd"
import { Form, Skeleton, Input, InputNumber, Collapse, Switch } from "antd"
import React from "react"
import { useTranslation } from "react-i18next"

@@ -78,13 +78,13 @@ export const ModelSettings = () => {
/>
</Form.Item>
<Form.Item
name="numPredict"
label={t("modelSettings.form.numPredict.label")}>
<InputNumber
style={{ width: "100%" }}
placeholder={t("modelSettings.form.numPredict.placeholder")}
/>
</Form.Item>
name="numPredict"
label={t("modelSettings.form.numPredict.label")}>
<InputNumber
style={{ width: "100%" }}
placeholder={t("modelSettings.form.numPredict.placeholder")}
/>
</Form.Item>
<Collapse
ghost
className="border-none bg-transparent"
@@ -119,11 +119,14 @@ export const ModelSettings = () => {
<InputNumber
style={{ width: "100%" }}
size="large"
placeholder={t(
"modelSettings.form.numGpu.placeholder"
)}
placeholder={t("modelSettings.form.numGpu.placeholder")}
/>
</Form.Item>
<Form.Item
name="useMMap"
label={t("modelSettings.form.useMMap.label")}>
<Switch />
</Form.Item>
</React.Fragment>
)
}

@@ -3,7 +3,7 @@ import { getSearchSettings, setSearchSettings } from "@/services/search"
import { SUPPORTED_SERACH_PROVIDERS } from "@/utils/search-provider"
import { useForm } from "@mantine/form"
import { useQuery, useQueryClient } from "@tanstack/react-query"
import { Select, Skeleton, Switch, InputNumber } from "antd"
import { Select, Skeleton, Switch, InputNumber, Input } from "antd"
import { useTranslation } from "react-i18next"

export const SearchModeSettings = () => {
@@ -14,7 +14,9 @@ export const SearchModeSettings = () => {
isSimpleInternetSearch: false,
searchProvider: "",
totalSearchResults: 0,
visitSpecificWebsite: false
visitSpecificWebsite: false,
searxngURL: "",
searxngJSONMode: false
}
})

@@ -62,6 +64,23 @@ export const SearchModeSettings = () => {
/>
</div>
</div>
{form.values.searchProvider === "searxng" && (
<>
<div className="flex sm:flex-row flex-col space-y-4 sm:space-y-0 sm:justify-between">
<span className="text-gray-700 dark:text-neutral-50">
{t("generalSettings.webSearch.searxng.url.label")}
</span>
<div>
<Input
placeholder="https://searxng.example.com"
className="w-full mt-4 sm:mt-0 sm:w-[200px]"
required
{...form.getInputProps("searxngURL")}
/>
</div>
</div>
</>
)}
<div className="flex sm:flex-row flex-col space-y-4 sm:space-y-0 sm:justify-between">
<span className="text-gray-700 dark:text-neutral-50 ">
{t("generalSettings.webSearch.searchMode.label")}
@@ -356,11 +356,11 @@ export const updateMessageByIndex = async (
message: string
) => {
try {
const db = new PageAssitDatabase()
const chatHistory = (await db.getChatHistory(history_id)).reverse()
chatHistory[index].content = message
await db.db.set({ [history_id]: chatHistory.reverse() })
} catch(e) {
const db = new PageAssitDatabase()
const chatHistory = (await db.getChatHistory(history_id)).reverse()
chatHistory[index].content = message
await db.db.set({ [history_id]: chatHistory.reverse() })
} catch (e) {
// temp chat will break
}
}
@@ -515,6 +515,20 @@ export const getRecentChatFromCopilot = async () => {
return { history, messages }
}

export const getRecentChatFromWebUI = async () => {
const db = new PageAssitDatabase()
const chatHistories = await db.getChatHistories()
if (chatHistories.length === 0) return null
const history = chatHistories.find(
(history) => history.message_source === "web-ui"
)
if (!history) return null

const messages = await db.getChatHistory(history.id)

return { history, messages }
}

export const getTitleById = async (id: string) => {
const db = new PageAssitDatabase()
const title = await db.getChatHistoryTitleById(id)
@@ -139,7 +139,9 @@ export const useMessage = () => {
currentChatModelSettings?.numGpu ?? userDefaultModelSettings?.numGpu,
numPredict:
currentChatModelSettings?.numPredict ??
userDefaultModelSettings?.numPredict
userDefaultModelSettings?.numPredict,
useMMap:
currentChatModelSettings?.useMMap ?? userDefaultModelSettings?.useMMap
})

let newMessage: Message[] = []
@@ -271,7 +273,10 @@ export const useMessage = () => {
userDefaultModelSettings?.numGpu,
numPredict:
currentChatModelSettings?.numPredict ??
userDefaultModelSettings?.numPredict
userDefaultModelSettings?.numPredict,
useMMap:
currentChatModelSettings?.useMMap ??
userDefaultModelSettings?.useMMap
})
const response = await questionOllama.invoke(promptForQuestion)
query = response.content.toString()
@@ -482,7 +487,9 @@ export const useMessage = () => {
currentChatModelSettings?.numGpu ?? userDefaultModelSettings?.numGpu,
numPredict:
currentChatModelSettings?.numPredict ??
userDefaultModelSettings?.numPredict
userDefaultModelSettings?.numPredict,
useMMap:
currentChatModelSettings?.useMMap ?? userDefaultModelSettings?.useMMap
})

let newMessage: Message[] = []
@@ -526,7 +533,7 @@ export const useMessage = () => {
const prompt = await systemPromptForNonRag()
const selectedPrompt = await getPromptById(selectedSystemPrompt)

const applicationChatHistory = generateHistory(history, selectedModel)
const applicationChatHistory = []

const data = await getScreenshotFromCurrentTab()
console.log(
@@ -716,7 +723,9 @@ export const useMessage = () => {
currentChatModelSettings?.numGpu ?? userDefaultModelSettings?.numGpu,
numPredict:
currentChatModelSettings?.numPredict ??
userDefaultModelSettings?.numPredict
userDefaultModelSettings?.numPredict,
useMMap:
currentChatModelSettings?.useMMap ?? userDefaultModelSettings?.useMMap
})

let newMessage: Message[] = []
@@ -944,7 +953,9 @@ export const useMessage = () => {
currentChatModelSettings?.numGpu ?? userDefaultModelSettings?.numGpu,
numPredict:
currentChatModelSettings?.numPredict ??
userDefaultModelSettings?.numPredict
userDefaultModelSettings?.numPredict,
useMMap:
currentChatModelSettings?.useMMap ?? userDefaultModelSettings?.useMMap
})

let newMessage: Message[] = []
@@ -1023,7 +1034,10 @@ export const useMessage = () => {
userDefaultModelSettings?.numGpu,
numPredict:
currentChatModelSettings?.numPredict ??
userDefaultModelSettings?.numPredict
userDefaultModelSettings?.numPredict,
useMMap:
currentChatModelSettings?.useMMap ??
userDefaultModelSettings?.useMMap
})
const response = await questionOllama.invoke(promptForQuestion)
query = response.content.toString()
@@ -1211,7 +1225,9 @@ export const useMessage = () => {
currentChatModelSettings?.numGpu ?? userDefaultModelSettings?.numGpu,
numPredict:
currentChatModelSettings?.numPredict ??
userDefaultModelSettings?.numPredict
userDefaultModelSettings?.numPredict,
useMMap:
currentChatModelSettings?.useMMap ?? userDefaultModelSettings?.useMMap
})

let newMessage: Message[] = []
@@ -130,7 +130,9 @@ export const useMessageOption = () => {
currentChatModelSettings?.numGpu ?? userDefaultModelSettings?.numGpu,
numPredict:
currentChatModelSettings?.numPredict ??
userDefaultModelSettings?.numPredict
userDefaultModelSettings?.numPredict,
useMMap:
currentChatModelSettings?.useMMap ?? userDefaultModelSettings?.useMMap
})

let newMessage: Message[] = []
@@ -209,7 +211,10 @@ export const useMessageOption = () => {
userDefaultModelSettings?.numGpu,
numPredict:
currentChatModelSettings?.numPredict ??
userDefaultModelSettings?.numPredict
userDefaultModelSettings?.numPredict,
useMMap:
currentChatModelSettings?.useMMap ??
userDefaultModelSettings?.useMMap
})
const response = await questionOllama.invoke(promptForQuestion)
query = response.content.toString()
@@ -429,7 +434,9 @@ export const useMessageOption = () => {
currentChatModelSettings?.numGpu ?? userDefaultModelSettings?.numGpu,
numPredict:
currentChatModelSettings?.numPredict ??
userDefaultModelSettings?.numPredict
userDefaultModelSettings?.numPredict,
useMMap:
currentChatModelSettings?.useMMap ?? userDefaultModelSettings?.useMMap
})

let newMessage: Message[] = []
@@ -667,7 +674,9 @@ export const useMessageOption = () => {
currentChatModelSettings?.numGpu ?? userDefaultModelSettings?.numGpu,
numPredict:
currentChatModelSettings?.numPredict ??
userDefaultModelSettings?.numPredict
userDefaultModelSettings?.numPredict,
useMMap:
currentChatModelSettings?.useMMap ?? userDefaultModelSettings?.useMMap
})

let newMessage: Message[] = []
@@ -762,7 +771,10 @@ export const useMessageOption = () => {
userDefaultModelSettings?.numGpu,
numPredict:
currentChatModelSettings?.numPredict ??
userDefaultModelSettings?.numPredict
userDefaultModelSettings?.numPredict,
useMMap:
currentChatModelSettings?.useMMap ??
userDefaultModelSettings?.useMMap
})
const response = await questionOllama.invoke(promptForQuestion)
query = response.content.toString()
@@ -16,6 +16,7 @@ export const pageAssistModel = async ({
seed,
numGpu,
numPredict,
useMMap
}: {
model: string
baseUrl: string
@@ -27,6 +28,7 @@ export const pageAssistModel = async ({
seed?: number
numGpu?: number
numPredict?: number
useMMap?: boolean
}) => {

if (model === "chrome::gemini-nano::page-assist") {
@@ -73,7 +75,8 @@ export const pageAssistModel = async ({
seed,
model,
numGpu,
numPredict
numPredict,
useMMap,
})
@@ -39,6 +39,10 @@ export const copilotResumeLastChat = async () => {
return await storage.get<boolean>("copilotResumeLastChat")
}

export const webUIResumeLastChat = async () => {
return await storage.get<boolean>("webUIResumeLastChat")
}

export const defaultSidebarOpen = async () => {
const sidebarOpen = await storage.get("sidebarOpen")
if (!sidebarOpen || sidebarOpen === "") {
@@ -62,20 +62,45 @@ export const setTotalSearchResults = async (totalSearchResults: number) => {
await storage.set("totalSearchResults", totalSearchResults.toString())
}

export const getSearxngURL = async () => {
const searxngURL = await storage.get("searxngURL")
return searxngURL || ""
}

export const isSearxngJSONMode = async () => {
const searxngJSONMode = await storage.get<boolean>("searxngJSONMode")
return searxngJSONMode ?? false
}

export const setSearxngJSONMode = async (searxngJSONMode: boolean) => {
await storage.set("searxngJSONMode", searxngJSONMode)
}

export const setSearxngURL = async (searxngURL: string) => {
await storage.set("searxngURL", searxngURL)
}

export const getSearchSettings = async () => {
const [isSimpleInternetSearch, searchProvider, totalSearchResult, visitSpecificWebsite] =
const [isSimpleInternetSearch, searchProvider, totalSearchResult, visitSpecificWebsite,
searxngURL,
searxngJSONMode
] =
await Promise.all([
getIsSimpleInternetSearch(),
getSearchProvider(),
totalSearchResults(),
getIsVisitSpecificWebsite()
getIsVisitSpecificWebsite(),
getSearxngURL(),
isSearxngJSONMode()
])

return {
isSimpleInternetSearch,
searchProvider,
totalSearchResults: totalSearchResult,
visitSpecificWebsite
visitSpecificWebsite,
searxngURL,
searxngJSONMode
}
}

@@ -83,17 +108,23 @@ export const setSearchSettings = async ({
isSimpleInternetSearch,
searchProvider,
totalSearchResults,
visitSpecificWebsite
visitSpecificWebsite,
searxngJSONMode,
searxngURL
}: {
isSimpleInternetSearch: boolean
searchProvider: string
totalSearchResults: number
visitSpecificWebsite: boolean
searxngURL: string
searxngJSONMode: boolean
}) => {
await Promise.all([
setIsSimpleInternetSearch(isSimpleInternetSearch),
setSearchProvider(searchProvider),
setTotalSearchResults(totalSearchResults),
setIsVisitSpecificWebsite(visitSpecificWebsite)
setIsVisitSpecificWebsite(visitSpecificWebsite),
setSearxngJSONMode(searxngJSONMode),
setSearxngURL(searxngURL)
])
}
@@ -14,5 +14,9 @@ export const SUPPORTED_SERACH_PROVIDERS = [
{
label: "Brave",
value: "brave"
},
{
label: "Searxng",
value: "searxng"
}
]

src/web/search-engines/searxng.ts (new file, 158 lines)
@@ -0,0 +1,158 @@
import { urlRewriteRuntime } from "~/libs/runtime"
import { cleanUrl } from "~/libs/clean-url"
import { getSearxngURL, isSearxngJSONMode, getIsSimpleInternetSearch, totalSearchResults } from "@/services/search"
import { pageAssistEmbeddingModel } from "@/models/embedding"
import type { Document } from "@langchain/core/documents"
import { RecursiveCharacterTextSplitter } from "langchain/text_splitter"
import { MemoryVectorStore } from "langchain/vectorstores/memory"
import { PageAssistHtmlLoader } from "~/loader/html"
import {
defaultEmbeddingChunkOverlap,
defaultEmbeddingChunkSize,
defaultEmbeddingModelForRag,
getOllamaURL
} from "~/services/ollama"

interface SearxNGJSONResult {
title: string
url: string
content: string
}

interface SearxNGJSONResponse {
results: SearxNGJSONResult[]
}

export const searxngSearch = async (query: string) => {
const searxngURL = await getSearxngURL()
if (!searxngURL) {
throw new Error("SearXNG URL not configured")
}

const isJSONMode = await isSearxngJSONMode()
const results = isJSONMode
? await searxngJSONSearch(searxngURL, query)
: await searxngWebSearch(searxngURL, query)

const TOTAL_SEARCH_RESULTS = await totalSearchResults()
const searchResults = results.slice(0, TOTAL_SEARCH_RESULTS)

const isSimpleMode = await getIsSimpleInternetSearch()

if (isSimpleMode) {
await getOllamaURL()
return searchResults.map((result) => {
return {
url: result.link,
content: result.content
}
})
}

const docs: Document<Record<string, any>>[] = []
try {
for (const result of searchResults) {
const loader = new PageAssistHtmlLoader({
html: "",
url: result.link
})

const documents = await loader.loadByURL()
documents.forEach((doc) => {
docs.push(doc)
})
}
} catch (error) {
console.error(error)
}

const ollamaUrl = await getOllamaURL()
const embeddingModel = await defaultEmbeddingModelForRag()
const ollamaEmbedding = await pageAssistEmbeddingModel({
model: embeddingModel || "",
baseUrl: cleanUrl(ollamaUrl)
})

const chunkSize = await defaultEmbeddingChunkSize()
const chunkOverlap = await defaultEmbeddingChunkOverlap()
const textSplitter = new RecursiveCharacterTextSplitter({
chunkSize,
chunkOverlap
})

const chunks = await textSplitter.splitDocuments(docs)
const store = new MemoryVectorStore(ollamaEmbedding)
await store.addDocuments(chunks)

const resultsWithEmbeddings = await store.similaritySearch(query, 3)

const searchResult = resultsWithEmbeddings.map((result) => {
return {
url: result.metadata.url,
content: result.pageContent
}
})

return searchResult
}

const searxngJSONSearch = async (baseURL: string, query: string) => {
const searchURL = `${cleanUrl(baseURL)}?q=${encodeURIComponent(query)}&format=json`

const abortController = new AbortController()
setTimeout(() => abortController.abort(), 20000)

try {
const response = await fetch(searchURL, {
signal: abortController.signal,
headers: {
'Accept': 'application/json'
}
})

if (!response.ok) {
throw new Error(`SearXNG search failed: ${response.statusText}`)
}

const data = await response.json() as SearxNGJSONResponse

return data.results.map(result => ({
title: result.title,
link: result.url,
content: result.content
}))
} catch (error) {
console.error('SearXNG JSON search failed:', error)
return []
}
}

const searxngWebSearch = async (baseURL: string, query: string) => {
const searchURL = `${cleanUrl(baseURL)}?q=${encodeURIComponent(query)}`

await urlRewriteRuntime(cleanUrl(searchURL), "searxng")

const abortController = new AbortController()
setTimeout(() => abortController.abort(), 10000)

try {
const htmlString = await fetch(searchURL, {
signal: abortController.signal
}).then(response => response.text())

const parser = new DOMParser()
const doc = parser.parseFromString(htmlString, "text/html")

const searchResults = Array.from(doc.querySelectorAll("article.result")).map(result => {
const title = result.querySelector("h3")?.textContent?.trim()
const link = result.querySelector("a.url_header")?.getAttribute("href")
const content = result.querySelector("p.content")?.textContent?.trim()
return { title, link, content }
}).filter(result => result.title && result.link && result.content)

return searchResults
} catch (error) {
console.error('SearXNG web search failed:', error)
return []
}
}
@@ -5,6 +5,7 @@ import { getIsVisitSpecificWebsite, getSearchProvider } from "@/services/search"
import { webSogouSearch } from "./search-engines/sogou"
import { webBraveSearch } from "./search-engines/brave"
import { getWebsiteFromQuery, processSingleWebsite } from "./website"
import { searxngSearch } from "./search-engines/searxng"

const getHostName = (url: string) => {
try {
@@ -23,6 +24,8 @@ const searchWeb = (provider: string, query: string) => {
return webSogouSearch(query)
case "brave":
return webBraveSearch(query)
case "searxng":
return searxngSearch(query)
default:
return webGoogleSearch(query)
}
@@ -51,7 +54,6 @@ export const getSystemPromptForWeb = async (query: string) => {
}

const search_results = search
.map(
(result, idx) =>
@@ -50,7 +50,7 @@ export default defineConfig({
outDir: "build",

manifest: {
version: "1.3.5",
version: "1.3.6",
name:
process.env.TARGET === "firefox"
? "Page Assist - A Web UI for Local AI Models"