From e2e3655c4720261997c27d8a63266c9d32c6bc79 Mon Sep 17 00:00:00 2001 From: n4ze3m Date: Sat, 28 Sep 2024 16:08:02 +0530 Subject: [PATCH 01/33] feat: OpenAI settings page Add a new settings page for OpenAI configuration, including a dedicated tab in the settings layout, translations, and routing. --- src/assets/locale/en/openai.json | 38 +++ .../Layouts/SettingsOptionLayout.tsx | 20 +- src/components/Option/Settings/openai.tsx | 218 ++++++++++++++++++ src/db/openai.ts | 146 ++++++++++++ src/i18n/lang/en.ts | 4 +- src/routes/chrome.tsx | 2 + src/routes/firefox.tsx | 2 + src/routes/option-settings-openai.tsx | 15 ++ 8 files changed, 436 insertions(+), 9 deletions(-) create mode 100644 src/assets/locale/en/openai.json create mode 100644 src/components/Option/Settings/openai.tsx create mode 100644 src/db/openai.ts create mode 100644 src/routes/option-settings-openai.tsx diff --git a/src/assets/locale/en/openai.json b/src/assets/locale/en/openai.json new file mode 100644 index 0000000..e9babc3 --- /dev/null +++ b/src/assets/locale/en/openai.json @@ -0,0 +1,38 @@ +{ + "settings": "OpenAI API Settings", + "heading": "OpenAI API Settings", + "subheading": "Manage and configure your OpenAI API Compatible providers here.", + "addBtn": "Add Provider", + "table": { + "name": "Provider Name", + "baseUrl": "Base URL", + "actions": "Action" + }, + "modal": { + "titleAdd": "Add New Provider", + "name": { + "label": "Provider Name", + "required": "Provider name is required.", + "placeholder": "Enter provider name" + }, + "baseUrl": { + "label": "Base URL", + "help": "The base URL of the OpenAI API provider. eg (http://loocalhost:8080/v1)", + "required": "Base URL is required.", + "placeholder": "Enter base URL" + }, + "apiKey": { + "label": "API Key", + "required": "API Key is required.", + "placeholder": "Enter API Key" + }, + "submit": "Submit", + "update": "Update", + "deleteConfirm": "Are you sure you want to delete this provider?" 
+ }, + "addSuccess": "Provider added successfully.", + "deleteSuccess": "Provider deleted successfully.", + "updateSuccess": "Provider updated successfully.", + "delete": "Delete", + "edit": "Edit" +} \ No newline at end of file diff --git a/src/components/Layouts/SettingsOptionLayout.tsx b/src/components/Layouts/SettingsOptionLayout.tsx index ce96a52..6365381 100644 --- a/src/components/Layouts/SettingsOptionLayout.tsx +++ b/src/components/Layouts/SettingsOptionLayout.tsx @@ -6,12 +6,12 @@ import { BlocksIcon, InfoIcon, CombineIcon, - ChromeIcon + ChromeIcon, + CloudCogIcon } from "lucide-react" import { useTranslation } from "react-i18next" import { Link, useLocation } from "react-router-dom" import { OllamaIcon } from "../Icons/Ollama" -import { Tag } from "antd" import { BetaTag } from "../Common/Beta" function classNames(...classes: string[]) { @@ -22,12 +22,11 @@ const LinkComponent = (item: { href: string name: string | JSX.Element icon: any - current: string, + current: string beta?: boolean }) => { return (
  • - {item.name} - { - item.beta && - } + {item.beta && }
  • ) } export const SettingsLayout = ({ children }: { children: React.ReactNode }) => { const location = useLocation() - const { t } = useTranslation(["settings", "common"]) + const { t } = useTranslation(["settings", "common", "openai"]) return ( <> @@ -93,6 +90,13 @@ export const SettingsLayout = ({ children }: { children: React.ReactNode }) => { beta /> )} + { + const { t } = useTranslation("openai") + const [open, setOpen] = useState(false) + const [editingConfig, setEditingConfig] = useState(null) + const queryClient = useQueryClient() + const [form] = Form.useForm() + + const { data: configs, isLoading } = useQuery({ + queryKey: ["openAIConfigs"], + queryFn: getAllOpenAIConfig + }) + + const addMutation = useMutation({ + mutationFn: addOpenAICofig, + onSuccess: () => { + queryClient.invalidateQueries({ + queryKey: ["openAIConfigs"] + }) + setOpen(false) + message.success(t("addSuccess")) + } + }) + + const updateMutation = useMutation({ + mutationFn: updateOpenAIConfig, + onSuccess: () => { + queryClient.invalidateQueries({ + queryKey: ["openAIConfigs"] + }) + setOpen(false) + message.success(t("updateSuccess")) + } + }) + + const deleteMutation = useMutation({ + mutationFn: deleteOpenAIConfig, + onSuccess: () => { + queryClient.invalidateQueries({ + queryKey: ["openAIConfigs"] + }) + message.success(t("deleteSuccess")) + } + }) + + const handleSubmit = (values: { + id?: string + name: string + baseUrl: string + apiKey: string + }) => { + if (editingConfig) { + updateMutation.mutate({ id: editingConfig.id, ...values }) + } else { + addMutation.mutate(values) + } + } + + const handleEdit = (record: any) => { + setEditingConfig(record) + setOpen(true) + form.setFieldsValue(record) + } + + const handleDelete = (id: string) => { + deleteMutation.mutate(id) + } + + return ( +
    +
    +
    +

    + {t("heading")} +

    +

    + {t("subheading")} +

    +
    +
    +
    +
    +
    + +
    +
    +
    + + ( +
    + + + + + + +
    + ) + } + ]} + dataSource={configs} + loading={isLoading} + rowKey="id" + /> + + { + setOpen(false) + setEditingConfig(null) + form.resetFields() + }} + footer={null}> +
    + + + + + + + + + + + + + + +
    + + + ) +} diff --git a/src/db/openai.ts b/src/db/openai.ts new file mode 100644 index 0000000..501ecfd --- /dev/null +++ b/src/db/openai.ts @@ -0,0 +1,146 @@ +type OpenAIModelConfig = { + id: string + name: string + baseUrl: string + apiKey?: string + createdAt: number +} +export const generateID = () => { + return "openai-xxxx-xxx-xxxx".replace(/[x]/g, () => { + const r = Math.floor(Math.random() * 16) + return r.toString(16) + }) +} + +export class OpenAIModelDb { + db: chrome.storage.StorageArea + + + constructor() { + this.db = chrome.storage.local + } + + + getAll = async (): Promise => { + return new Promise((resolve, reject) => { + this.db.get(null, (result) => { + if (chrome.runtime.lastError) { + reject(chrome.runtime.lastError) + } else { + const data = Object.keys(result).map((key) => result[key]) + resolve(data) + } + }) + }) + } + + + create = async (config: OpenAIModelConfig): Promise => { + return new Promise((resolve, reject) => { + this.db.set({ [config.id]: config }, () => { + if (chrome.runtime.lastError) { + reject(chrome.runtime.lastError) + } else { + resolve() + } + }) + }) + } + + + getById = async (id: string): Promise => { + return new Promise((resolve, reject) => { + this.db.get(id, (result) => { + if (chrome.runtime.lastError) { + reject(chrome.runtime.lastError) + } else { + resolve(result[id]) + } + }) + }) + } + + + update = async (config: OpenAIModelConfig): Promise => { + return new Promise((resolve, reject) => { + this.db.set({ [config.id]: config }, () => { + if (chrome.runtime.lastError) { + reject(chrome.runtime.lastError) + } else { + resolve() + } + }) + }) + } + + + delete = async (id: string): Promise => { + return new Promise((resolve, reject) => { + this.db.remove(id, () => { + if (chrome.runtime.lastError) { + reject(chrome.runtime.lastError) + } else { + resolve() + } + }) + }) + } + +} + + +export const addOpenAICofig = async ({ name, baseUrl, apiKey }: { name: string, baseUrl: string, apiKey: string }) => { + 
const openaiDb = new OpenAIModelDb() + const id = generateID() + const config: OpenAIModelConfig = { + id, + name, + baseUrl, + apiKey, + createdAt: Date.now() + } + await openaiDb.create(config) + return id +} + + +export const getAllOpenAIConfig = async () => { + const openaiDb = new OpenAIModelDb() + const configs = await openaiDb.getAll() + return configs +} + +export const updateOpenAIConfig = async ({ id, name, baseUrl, apiKey }: { id: string, name: string, baseUrl: string, apiKey: string }) => { + const openaiDb = new OpenAIModelDb() + const config: OpenAIModelConfig = { + id, + name, + baseUrl, + apiKey, + createdAt: Date.now() + } + + await openaiDb.update(config) + + return config +} + + +export const deleteOpenAIConfig = async (id: string) => { + const openaiDb = new OpenAIModelDb() + await openaiDb.delete(id) +} + + +export const updateOpenAIConfigApiKey = async (id: string, { name, baseUrl, apiKey }: { name: string, baseUrl: string, apiKey: string }) => { + const openaiDb = new OpenAIModelDb() + const config: OpenAIModelConfig = { + id, + name, + baseUrl, + apiKey, + createdAt: Date.now() + } + + await openaiDb.update(config) +} \ No newline at end of file diff --git a/src/i18n/lang/en.ts b/src/i18n/lang/en.ts index dcb0dc6..0eb1b8a 100644 --- a/src/i18n/lang/en.ts +++ b/src/i18n/lang/en.ts @@ -5,6 +5,7 @@ import sidepanel from "@/assets/locale/en/sidepanel.json"; import settings from "@/assets/locale/en/settings.json"; import knowledge from "@/assets/locale/en/knowledge.json"; import chrome from "@/assets/locale/en/chrome.json"; +import openai from "@/assets/locale/en/openai.json"; export const en = { option, @@ -13,5 +14,6 @@ export const en = { sidepanel, settings, knowledge, - chrome + chrome, + openai } \ No newline at end of file diff --git a/src/routes/chrome.tsx b/src/routes/chrome.tsx index 4e78ba0..647bb33 100644 --- a/src/routes/chrome.tsx +++ b/src/routes/chrome.tsx @@ -11,6 +11,7 @@ import SidepanelChat from "./sidepanel-chat" import 
SidepanelSettings from "./sidepanel-settings" import OptionRagSettings from "./option-rag" import OptionChrome from "./option-settings-chrome" +import OptionOpenAI from "./option-settings-openai" export const OptionRoutingChrome = () => { return ( @@ -21,6 +22,7 @@ export const OptionRoutingChrome = () => { } /> } /> } /> + } /> } /> } /> } /> diff --git a/src/routes/firefox.tsx b/src/routes/firefox.tsx index 40264f4..901e584 100644 --- a/src/routes/firefox.tsx +++ b/src/routes/firefox.tsx @@ -14,6 +14,7 @@ const OptionShare = lazy(() => import("./option-settings-share")) const OptionKnowledgeBase = lazy(() => import("./option-settings-knowledge")) const OptionAbout = lazy(() => import("./option-settings-about")) const OptionRagSettings = lazy(() => import("./option-rag")) +const OptionOpenAI = lazy(() => import("./option-settings-openai")) export const OptionRoutingFirefox = () => { return ( @@ -23,6 +24,7 @@ export const OptionRoutingFirefox = () => { } /> } /> } /> + } /> } /> } /> } /> diff --git a/src/routes/option-settings-openai.tsx b/src/routes/option-settings-openai.tsx new file mode 100644 index 0000000..3ddbc4f --- /dev/null +++ b/src/routes/option-settings-openai.tsx @@ -0,0 +1,15 @@ +import { SettingsLayout } from "~/components/Layouts/SettingsOptionLayout" +import OptionLayout from "~/components/Layouts/Layout" +import { OpenAIApp } from "@/components/Option/Settings/openai" + +const OptionOpenAI = () => { + return ( + + + + + + ) +} + +export default OptionOpenAI From 2a2610afb8643bfefc1751ec6fa931cd381a7e27 Mon Sep 17 00:00:00 2001 From: n4ze3m Date: Sun, 29 Sep 2024 19:12:19 +0530 Subject: [PATCH 02/33] feat: add model management UI This commit introduces a new UI for managing models within the OpenAI integration. This UI allows users to view, add, and delete OpenAI models associated with their OpenAI providers. It includes functionality to fetch and refresh model lists, as well as to search for specific models. 
These changes enhance the user experience by offering greater control over their OpenAI model interactions. This commit also includes improvements to the existing OpenAI configuration UI, enabling users to seamlessly manage multiple OpenAI providers and associated models. --- src/assets/locale/en/common.json | 6 +- src/assets/locale/en/openai.json | 30 ++- .../Option/Models/CustomModelsTable.tsx | 85 ++++++++ .../Option/Models/OllamaModelsTable.tsx | 199 ++++++++++++++++++ src/components/Option/Models/index.tsx | 176 +++------------- .../Option/Settings/openai-fetch-model.tsx | 132 ++++++++++++ src/components/Option/Settings/openai.tsx | 34 ++- src/db/models.ts | 176 ++++++++++++++++ src/db/openai.ts | 27 ++- src/libs/openai.ts | 25 +++ 10 files changed, 729 insertions(+), 161 deletions(-) create mode 100644 src/components/Option/Models/CustomModelsTable.tsx create mode 100644 src/components/Option/Models/OllamaModelsTable.tsx create mode 100644 src/components/Option/Settings/openai-fetch-model.tsx create mode 100644 src/db/models.ts create mode 100644 src/libs/openai.ts diff --git a/src/assets/locale/en/common.json b/src/assets/locale/en/common.json index fe6fae2..70fde08 100644 --- a/src/assets/locale/en/common.json +++ b/src/assets/locale/en/common.json @@ -96,5 +96,9 @@ "translate": "Translate", "custom": "Custom" }, - "citations": "Citations" + "citations": "Citations", + "segmented": { + "ollama": "Ollama Models", + "custom": "Custom Models" + } } \ No newline at end of file diff --git a/src/assets/locale/en/openai.json b/src/assets/locale/en/openai.json index e9babc3..48f0430 100644 --- a/src/assets/locale/en/openai.json +++ b/src/assets/locale/en/openai.json @@ -26,13 +26,37 @@ "required": "API Key is required.", "placeholder": "Enter API Key" }, - "submit": "Submit", + "submit": "Save", "update": "Update", - "deleteConfirm": "Are you sure you want to delete this provider?" 
+ "deleteConfirm": "Are you sure you want to delete this provider?", + "model": { + "title": "Model List", + "subheading": "Please select the models you want to use with this provider.", + "success": "Successfully added new models." + } }, "addSuccess": "Provider added successfully.", "deleteSuccess": "Provider deleted successfully.", "updateSuccess": "Provider updated successfully.", "delete": "Delete", - "edit": "Edit" + "edit": "Edit", + "refetch": "Refech Model List", + "searchModel": "Search Model", + "selectAll": "Select All", + "save": "Save", + "saving": "Saving...", + "manageModels": { + "columns": { + "name": "Model Name", + "model_id": "Model ID", + "provider": "Provider Name", + "actions": "Action" + }, + "tooltip": { + "delete": "Delete" + }, + "confirm": { + "delete": "Are you sure you want to delete this model?" + } + } } \ No newline at end of file diff --git a/src/components/Option/Models/CustomModelsTable.tsx b/src/components/Option/Models/CustomModelsTable.tsx new file mode 100644 index 0000000..4bc57b9 --- /dev/null +++ b/src/components/Option/Models/CustomModelsTable.tsx @@ -0,0 +1,85 @@ +import { getAllCustomModels, deleteModel } from "@/db/models" +import { useStorage } from "@plasmohq/storage/hook" +import { useQuery, useQueryClient, useMutation } from "@tanstack/react-query" +import { Skeleton, Table, Tooltip } from "antd" +import { Trash2 } from "lucide-react" +import { useTranslation } from "react-i18next" + +export const CustomModelsTable = () => { + const [selectedModel, setSelectedModel] = useStorage("selectedModel") + + const { t } = useTranslation(["openai", "common"]) + + + const queryClient = useQueryClient() + + const { data, status } = useQuery({ + queryKey: ["fetchCustomModels"], + queryFn: () => getAllCustomModels() + }) + + const { mutate: deleteCustomModel } = useMutation({ + mutationFn: deleteModel, + onSuccess: () => { + queryClient.invalidateQueries({ + queryKey: ["fetchCustomModels"] + }) + } + }) + + + return ( +
    +
    + {status === "pending" && } + + {status === "success" && ( +
    +
    record.provider.name + }, + { + title: t("manageModels.columns.actions"), + render: (_, record) => ( + + + + ) + } + ]} + bordered + dataSource={data} + /> + + )} + + + ) +} diff --git a/src/components/Option/Models/OllamaModelsTable.tsx b/src/components/Option/Models/OllamaModelsTable.tsx new file mode 100644 index 0000000..72335fa --- /dev/null +++ b/src/components/Option/Models/OllamaModelsTable.tsx @@ -0,0 +1,199 @@ +import { useMutation, useQuery, useQueryClient } from "@tanstack/react-query" +import { Skeleton, Table, Tag, Tooltip, notification, Modal, Input } from "antd" +import { bytePerSecondFormatter } from "~/libs/byte-formater" +import { deleteModel, getAllModels } from "~/services/ollama" +import dayjs from "dayjs" +import relativeTime from "dayjs/plugin/relativeTime" +import { useForm } from "@mantine/form" +import { RotateCcw, Trash2 } from "lucide-react" +import { useTranslation } from "react-i18next" +import { useStorage } from "@plasmohq/storage/hook" + +dayjs.extend(relativeTime) + +export const OllamaModelsTable = () => { + const queryClient = useQueryClient() + const { t } = useTranslation(["settings", "common"]) + const [selectedModel, setSelectedModel] = useStorage("selectedModel") + + const form = useForm({ + initialValues: { + model: "" + } + }) + + const { data, status } = useQuery({ + queryKey: ["fetchAllModels"], + queryFn: () => getAllModels({ returnEmpty: true }) + }) + + const { mutate: deleteOllamaModel } = useMutation({ + mutationFn: deleteModel, + onSuccess: () => { + queryClient.invalidateQueries({ + queryKey: ["fetchAllModels"] + }) + notification.success({ + message: t("manageModels.notification.success"), + description: t("manageModels.notification.successDeleteDescription") + }) + }, + onError: (error) => { + notification.error({ + message: "Error", + description: error?.message || t("manageModels.notification.someError") + }) + } + }) + + const pullModel = async (modelName: string) => { + notification.info({ + message: 
t("manageModels.notification.pullModel"), + description: t("manageModels.notification.pullModelDescription", { + modelName + }) + }) + + form.reset() + + browser.runtime.sendMessage({ + type: "pull_model", + modelName + }) + + return true + } + + const { mutate: pullOllamaModel } = useMutation({ + mutationFn: pullModel + }) + + return ( +
    +
    + {status === "pending" && } + + {status === "success" && ( +
    +
    ( + + {`${text?.slice(0, 5)}...${text?.slice(-4)}`} + + ) + }, + { + title: t("manageModels.columns.modifiedAt"), + dataIndex: "modified_at", + key: "modified_at", + render: (text: string) => dayjs(text).fromNow(true) + }, + { + title: t("manageModels.columns.size"), + dataIndex: "size", + key: "size", + render: (text: number) => bytePerSecondFormatter(text) + }, + { + title: t("manageModels.columns.actions"), + render: (_, record) => ( +
    + + + + + + +
    + ) + } + ]} + expandable={{ + expandedRowRender: (record) => ( +
    + ), + defaultExpandAllRows: false + }} + bordered + dataSource={data} + rowKey={(record) => `${record.model}-${record.digest}`} + /> + + )} + + + ) +} diff --git a/src/components/Option/Models/index.tsx b/src/components/Option/Models/index.tsx index 1fd12ba..af3c866 100644 --- a/src/components/Option/Models/index.tsx +++ b/src/components/Option/Models/index.tsx @@ -1,22 +1,30 @@ import { useMutation, useQuery, useQueryClient } from "@tanstack/react-query" -import { Skeleton, Table, Tag, Tooltip, notification, Modal, Input } from "antd" -import { bytePerSecondFormatter } from "~/libs/byte-formater" -import { deleteModel, getAllModels } from "~/services/ollama" +import { + Skeleton, + Table, + Tag, + Tooltip, + notification, + Modal, + Input, + Segmented +} from "antd" import dayjs from "dayjs" import relativeTime from "dayjs/plugin/relativeTime" import { useState } from "react" import { useForm } from "@mantine/form" -import { Download, RotateCcw, Trash2 } from "lucide-react" +import { Download } from "lucide-react" import { useTranslation } from "react-i18next" -import { useStorage } from "@plasmohq/storage/hook" +import { OllamaModelsTable } from "./OllamaModelsTable" +import { CustomModelsTable } from "./CustomModelsTable" dayjs.extend(relativeTime) export const ModelsBody = () => { - const queryClient = useQueryClient() const [open, setOpen] = useState(false) - const { t } = useTranslation(["settings", "common"]) - const [selectedModel, setSelectedModel] = useStorage("selectedModel") + const [segmented, setSegmented] = useState("ollama") + + const { t } = useTranslation(["settings", "common", "openai"]) const form = useForm({ initialValues: { @@ -24,30 +32,6 @@ export const ModelsBody = () => { } }) - const { data, status } = useQuery({ - queryKey: ["fetchAllModels"], - queryFn: () => getAllModels({ returnEmpty: true }) - }) - - const { mutate: deleteOllamaModel } = useMutation({ - mutationFn: deleteModel, - onSuccess: () => { - 
queryClient.invalidateQueries({ - queryKey: ["fetchAllModels"] - }) - notification.success({ - message: t("manageModels.notification.success"), - description: t("manageModels.notification.successDeleteDescription") - }) - }, - onError: (error) => { - notification.error({ - message: "Error", - description: error?.message || t("manageModels.notification.someError") - }) - } - }) - const pullModel = async (modelName: string) => { notification.info({ message: t("manageModels.notification.pullModel"), @@ -86,130 +70,26 @@ export const ModelsBody = () => { - - - {status === "pending" && } - - {status === "success" && ( -
    -
    + ( - - {`${text?.slice(0, 5)}...${text?.slice(-4)}`} - - ) - }, - { - title: t("manageModels.columns.modifiedAt"), - dataIndex: "modified_at", - key: "modified_at", - render: (text: string) => dayjs(text).fromNow(true) - }, - { - title: t("manageModels.columns.size"), - dataIndex: "size", - key: "size", - render: (text: number) => bytePerSecondFormatter(text) - }, - { - title: t("manageModels.columns.actions"), - render: (_, record) => ( -
    - - - - - - -
    - ) + label: t("common:segmented.custom"), + value: "custom" } ]} - expandable={{ - expandedRowRender: (record) => ( -
    - ), - defaultExpandAllRows: false + onChange={(value) => { + setSegmented(value) }} - bordered - dataSource={data} - rowKey={(record) => `${record.model}-${record.digest}`} /> - )} + + + {segmented === "ollama" ? : } void +} + +export const OpenAIFetchModel = ({ openaiId, setOpenModelModal }: Props) => { + const { t } = useTranslation(["openai"]) + const [selectedModels, setSelectedModels] = useState([]) + const [searchTerm, setSearchTerm] = useState("") + + const { data, status } = useQuery({ + queryKey: ["openAIConfigs", openaiId], + queryFn: async () => { + const config = await getOpenAIConfigById(openaiId) + const models = await getAllOpenAIModels(config.baseUrl, config.apiKey) + return models + }, + enabled: !!openaiId + }) + + const filteredModels = useMemo(() => { + return ( + data?.filter((model) => + (model.name ?? model.id) + .toLowerCase() + .includes(searchTerm.toLowerCase()) + ) || [] + ) + }, [data, searchTerm]) + + const handleSelectAll = (checked: boolean) => { + if (checked) { + setSelectedModels(filteredModels.map((model) => model.id)) + } else { + setSelectedModels([]) + } + } + + const handleModelSelect = (modelId: string, checked: boolean) => { + if (checked) { + setSelectedModels((prev) => [...prev, modelId]) + } else { + setSelectedModels((prev) => prev.filter((id) => id !== modelId)) + } + } + + const onSave = async (models: string[]) => { + const payload = models.map((id) => ({ + model_id: id, + name: filteredModels.find((model) => model.id === id)?.name ?? id, + provider_id: openaiId + })) + + await createManyModels(payload) + + return true + } + + const { mutate: saveModels, isPending: isSaving } = useMutation({ + mutationFn: onSave, + onSuccess: () => { + setOpenModelModal(false) + message.success(t("modal.model.success")) + } + }) + + const handleSave = () => { + saveModels(selectedModels) + } + + if (status === "pending") { + return + } + + if (status === "error" || !data || data.length === 0) { + return
    {t("noModelFound")}
    + } + + return ( +
    +

    + {t("modal.model.subheading")} +

    + setSearchTerm(e.target.value)} + className="w-full" + /> +
    + 0 && + selectedModels.length < filteredModels.length + } + onChange={(e) => handleSelectAll(e.target.checked)}> + {t("selectAll")} + +
    + {`${selectedModels?.length} / ${data?.length}`} +
    +
    +
    +
    + {filteredModels.map((model) => ( + handleModelSelect(model.id, e.target.checked)}> + {model?.name || model.id} + + ))} +
    +
    + +
    + ) +} diff --git a/src/components/Option/Settings/openai.tsx b/src/components/Option/Settings/openai.tsx index ff3c9b4..5178628 100644 --- a/src/components/Option/Settings/openai.tsx +++ b/src/components/Option/Settings/openai.tsx @@ -8,7 +8,8 @@ import { updateOpenAIConfig } from "@/db/openai" import { useMutation, useQuery, useQueryClient } from "@tanstack/react-query" -import { Pencil, Trash2, Plus } from "lucide-react" +import { Pencil, Trash2, RotateCwIcon } from "lucide-react" +import { OpenAIFetchModel } from "./openai-fetch-model" export const OpenAIApp = () => { const { t } = useTranslation("openai") @@ -16,6 +17,8 @@ export const OpenAIApp = () => { const [editingConfig, setEditingConfig] = useState(null) const queryClient = useQueryClient() const [form] = Form.useForm() + const [openaiId, setOpenaiId] = useState(null) + const [openModelModal, setOpenModelModal] = useState(false) const { data: configs, isLoading } = useQuery({ queryKey: ["openAIConfigs"], @@ -24,12 +27,14 @@ export const OpenAIApp = () => { const addMutation = useMutation({ mutationFn: addOpenAICofig, - onSuccess: () => { + onSuccess: (data) => { queryClient.invalidateQueries({ queryKey: ["openAIConfigs"] }) setOpen(false) message.success(t("addSuccess")) + setOpenaiId(data) + setOpenModelModal(true) } }) @@ -129,6 +134,18 @@ export const OpenAIApp = () => { + + + +
    + + setOpenModelModal(false)}> + {openaiId ? ( + + ) : null} + ) diff --git a/src/db/models.ts b/src/db/models.ts new file mode 100644 index 0000000..207fe97 --- /dev/null +++ b/src/db/models.ts @@ -0,0 +1,176 @@ +import { getOpenAIConfigById as providerInfo } from "./openai" + +type Model = { + id: string + model_id: string + name: string + provider_id: string + lookup: string + db_type: string +} +export const generateID = () => { + return "model-xxxx-xxxx-xxx-xxxx".replace(/[x]/g, () => { + const r = Math.floor(Math.random() * 16) + return r.toString(16) + }) +} + +export const removeModelPrefix = (id: string) => { + return id.replace(/^model-/, "") +} +export class ModelDb { + db: chrome.storage.StorageArea + + constructor() { + this.db = chrome.storage.local + } + + getAll = async (): Promise => { + return new Promise((resolve, reject) => { + this.db.get(null, (result) => { + if (chrome.runtime.lastError) { + reject(chrome.runtime.lastError) + } else { + const data = Object.keys(result).map((key) => result[key]) + resolve(data) + } + }) + }) + } + + create = async (model: Model): Promise => { + return new Promise((resolve, reject) => { + this.db.set({ [model.id]: model }, () => { + if (chrome.runtime.lastError) { + reject(chrome.runtime.lastError) + } else { + resolve() + } + }) + }) + } + + getById = async (id: string): Promise => { + return new Promise((resolve, reject) => { + this.db.get(id, (result) => { + if (chrome.runtime.lastError) { + reject(chrome.runtime.lastError) + } else { + resolve(result[id]) + } + }) + }) + } + + update = async (model: Model): Promise => { + return new Promise((resolve, reject) => { + this.db.set({ [model.id]: model }, () => { + if (chrome.runtime.lastError) { + reject(chrome.runtime.lastError) + } else { + resolve() + } + }) + }) + } + + delete = async (id: string): Promise => { + return new Promise((resolve, reject) => { + this.db.remove(id, () => { + if (chrome.runtime.lastError) { + reject(chrome.runtime.lastError) + } 
else { + resolve() + } + }) + }) + } + + deleteAll = async (): Promise => { + return new Promise((resolve, reject) => { + this.db.clear(() => { + if (chrome.runtime.lastError) { + reject(chrome.runtime.lastError) + } else { + resolve() + } + }) + }) + } +} + +export const createManyModels = async ( + data: { model_id: string; name: string; provider_id: string }[] +) => { + const db = new ModelDb() + + const models = data.map((item) => { + return { + ...item, + lookup: `${item.model_id}_${item.provider_id}`, + id: `${item.model_id}_${generateID()}`, + db_type: "openai_model" + } + }) + + for (const model of models) { + const isExist = await isLookupExist(model.lookup) + + if (isExist) { + continue + } + + await db.create(model) + } +} + +export const createModel = async ( + model_id: string, + name: string, + provider_id: string +) => { + const db = new ModelDb() + const id = generateID() + const model: Model = { + id: `${model_id}_${id}`, + model_id, + name, + provider_id, + lookup: `${model_id}_${provider_id}`, + db_type: "openai_model" + } + await db.create(model) + return model +} + +export const getModelInfo = async (id: string) => { + const db = new ModelDb() + const model = await db.getById(id) + return model +} + +export const getAllCustomModels = async () => { + const db = new ModelDb() + const models = (await db.getAll()).filter( + (model) => model.db_type === "openai_model" + ) + const modelsWithProvider = await Promise.all( + models.map(async (model) => { + const provider = await providerInfo(model.provider_id) + return { ...model, provider } + }) + ) + return modelsWithProvider +} + +export const deleteModel = async (id: string) => { + const db = new ModelDb() + await db.delete(id) +} + +export const isLookupExist = async (lookup: string) => { + const db = new ModelDb() + const models = await db.getAll() + const model = models.find((model) => model.lookup === lookup) + return model ? 
true : false +} diff --git a/src/db/openai.ts b/src/db/openai.ts index 501ecfd..45963cf 100644 --- a/src/db/openai.ts +++ b/src/db/openai.ts @@ -1,9 +1,12 @@ +import { cleanUrl } from "@/libs/clean-url" + type OpenAIModelConfig = { id: string name: string baseUrl: string apiKey?: string createdAt: number + db_type: string } export const generateID = () => { return "openai-xxxx-xxx-xxxx".replace(/[x]/g, () => { @@ -95,9 +98,10 @@ export const addOpenAICofig = async ({ name, baseUrl, apiKey }: { name: string, const config: OpenAIModelConfig = { id, name, - baseUrl, + baseUrl: cleanUrl(baseUrl), apiKey, - createdAt: Date.now() + createdAt: Date.now(), + db_type: "openai" } await openaiDb.create(config) return id @@ -107,7 +111,7 @@ export const addOpenAICofig = async ({ name, baseUrl, apiKey }: { name: string, export const getAllOpenAIConfig = async () => { const openaiDb = new OpenAIModelDb() const configs = await openaiDb.getAll() - return configs + return configs.filter(config => config.db_type === "openai") } export const updateOpenAIConfig = async ({ id, name, baseUrl, apiKey }: { id: string, name: string, baseUrl: string, apiKey: string }) => { @@ -115,9 +119,10 @@ export const updateOpenAIConfig = async ({ id, name, baseUrl, apiKey }: { id: st const config: OpenAIModelConfig = { id, name, - baseUrl, + baseUrl: cleanUrl(baseUrl), apiKey, - createdAt: Date.now() + createdAt: Date.now(), + db_type: "openai" } await openaiDb.update(config) @@ -137,10 +142,18 @@ export const updateOpenAIConfigApiKey = async (id: string, { name, baseUrl, apiK const config: OpenAIModelConfig = { id, name, - baseUrl, + baseUrl: cleanUrl(baseUrl), apiKey, - createdAt: Date.now() + createdAt: Date.now(), + db_type: "openai" } await openaiDb.update(config) +} + + +export const getOpenAIConfigById = async (id: string) => { + const openaiDb = new OpenAIModelDb() + const config = await openaiDb.getById(id) + return config } \ No newline at end of file diff --git a/src/libs/openai.ts 
b/src/libs/openai.ts new file mode 100644 index 0000000..8b6230e --- /dev/null +++ b/src/libs/openai.ts @@ -0,0 +1,25 @@ +type Model = { + id: string + name?: string +} + +export const getAllOpenAIModels = async (baseUrl: string, apiKey?: string) => { + const url = `${baseUrl}/models` + const headers = apiKey + ? { + Authorization: `Bearer ${apiKey}` + } + : {} + + const res = await fetch(url, { + headers + }) + + if (!res.ok) { + return [] + } + + const data = (await res.json()) as { data: Model[] } + + return data.data +} From c8620637f804f6d06ec870a703c56aa826b34c03 Mon Sep 17 00:00:00 2001 From: n4ze3m Date: Sun, 29 Sep 2024 19:57:26 +0530 Subject: [PATCH 03/33] feat: add OpenAI model support Adds support for OpenAI models, allowing users to leverage various OpenAI models directly from the application. This includes custom OpenAI models and OpenAI-specific configurations for seamless integration. --- bun.lockb | Bin 439218 -> 440402 bytes package.json | 2 + src/components/Common/ModelSelect.tsx | 4 +- src/components/Common/ProviderIcon.tsx | 4 +- src/components/Layouts/Header.tsx | 1 - src/components/Option/Models/index.tsx | 8 +-- src/db/models.ts | 32 ++++++++++++ src/models/index.ts | 65 ++++++++++++++++++------- src/services/ollama.ts | 12 +++-- 9 files changed, 97 insertions(+), 31 deletions(-) diff --git a/bun.lockb b/bun.lockb index 546445c09c822c3dbc01c60525679e180948265b..deaa93f5559b303cdfcdc7f40997e7fbad481eeb 100644 GIT binary patch delta 78979 zcmeFaeVCQw-v7U@nO4o!qmT+o5lxwDqSj<;mQs3-(xW6(O|`0Nnx_=HXWB%uL;I4m z5JEK3CZs}}5QPxh3L!)jAvS8;_`TlOd9E?HqkA9wdmP{U`}?PLwB~(&Ugz`Ye7Mf* zn$?=ezBuu%Z%@3XU3y*f>+4&JfBiJ?;M^O#fB$K-pMN^_y!u_AIDX#v$ifS1KJ)(b z>uN^y8n9?Y^OQT|i)NkbznU6})Qv>S&s?;;anmPjL?XvvtUw#0bBSz><}BLYeDHVV zt&dTO`sl)1kw|ScKaro8F=1Tfzpxu%4^Fe`a*JnAA3rrwvWxU8w4}6X`hR^EX?CSVu|)R5OHSQYfZCaU}uN|US7@gMytk5QW%R?3eSs9LKA zT!?)J+RpKYsGRT>k*&~fCq^RtD?gbZ%J&-l)JR#ReA7vh2>;5@rFrteT^OgK^U$W~ zSCpcRZ=&kK6Hb?*)KPw&(*(-D^7|>cHJXm9;Zvwhd^oBOent1?;k#Sn(J}%P^2U`; 
z!o~T;{NVKo5%R!wv_dZLf~|tir2sYHG*lJWK%2{^d+NZQs45sgzMyn^2@H=Z-xIh+ z<(+h9=<@O#PxpKbX)sNB35RXMwem%fIo*PEQ(09Cpt z9M7FmR4^_#(kAj7(^^YmWoO&GIx*{xU$8YU%du79-ND>w4#9oi8%R{=` zW*1>=O(jqt%?&oyY}|B24_o;ds4BY{RhRn(2Wy^L)(uNFcJF0r?)ZWt&7b?2#+t1y zF0f_RLp7Ub!R3-gsJcF}w{7XAs7h_w$8Hb{Q*8R$*jj)8Af6>!ehvB5vj!{|T2oeD zvtJ}~3g8Y@3x7U(96GkYt@tHW8IoI}=hoRODo`ii8enz&v|{a?kxIBK+%VAc8AXZW z{3*qe=pb9pZl`afwIfp_<EFfH zc4A~M^!;0 zGi!R@tddCaB{sgrXv>!qFTY=ds+@gr&Gi|%C6hbihk_Y-({mG%$TyeT^rOev{5fbt zT>(BMLOpyFRS)mF%vSsywmi{(tc`!frMm*(s$sP*w>?SPvs{C}hRYMvOY<{|3yMkx z4fn(g{T;HWO)JeCmnfPr2wM)zEt)hVx2QPp zb#_NN?mW2qKaGBmF7kIvM0*^v(v7q(j_l*Ka*eWK8<|pW)oEQ zl;Vupx%tx~Nw?H<{9hbvG(F~fp|&hfPN?KWx@x8^D1SmH`6bdbI9#VW3)N1;d^Q1f zX9B8bjS33uHf~tRbyC)suJH4NrFEN^CAZ~dQ=jI1psI7HmuNvpI{wMpO;Bx3m8iCs zqVX9Tf=F@+HGr%1ugh)S9=2Rex{{*2yvS46+O|DD*UpSf=Y{2TEWZ+G+$hUwv#f>=50R-}~HbcYv3%wSrcmTClgE^8Z}4L8L8nhd^V%l;VQv z9rFt#-4|JhkAkb9?bz~P?_f>+#tonOi=~gCYG&hLSN-N?@7!h!Uyo{?+~Mp$q1yH* zpt>%dhn|j}ik^z5IR5eCNTen9CRFR{UZ>Zh%9rQtzNpG?hw8G@0;LsY$1inuUl$*9{4~ewp(l|3 z@Le|FCun1B#IF%h#0FFauW$(hRPHZxb^=wp;i%GeM;oCTF21pgPj&XeWj24iTW!Zq zMpfRg57=@xqjkFic0Fh#)}eCcov2*2&m~-sYQVOm8t#7t_N@Oxu;!S?WqlvAxz9y4 zU3Ff~omL!q>0x_oQh{pr+=Qyswl3W>tL$`m<*#U zos>M_k76gOL;i`6+SaT^8K<)Hd8GQbF}vX^KY1|c06#5JToUFY4nwuBUdOLzpeZSq=RRfgf9crRP&Chs9W{+mt5{c~bQZgFw4OMkql zc$akAQU2>W>(WA0JsP^n@;{74S-BgGssw+$tw`Q(ByTGwzTkEWH>fXDp~fzGyK(i4 zwxL%{PU!wRk|@rbrOSKjW;@CLj;cXA?PN?X%bT6Nb`<3m7fhR(7diNn9n3G#+A0{; zATz7D-HJroZm|V)e%kt$9Uz=Fr{OWBI|kJvdyRN`q9(S=Y5a=KcL=U@&tlguBQQgL z=k*XkgMNSn8my08!X2mzIR7(Yt2#U+IlP?T3%tTw(0SI-u`X=mkbRO|LP z3f2%zm^M@OMgHm<%>C)q#01VOpf}&OuHS%aq0iiD*XI=+qxEX~o?Tz{Q4P_Ri5VI6ED}C%B0hP0Is1J( z6qkNr8!{MG`YCXE?5f?ioC9S9)Y4B-C17=h73Dkr&4<<#y+5+9Ux_Mxi;pc&rk{?j zxlk9?#*nP=AEa-BosG()Md3*_5*a~5G?Zmp&-v_0Z9lOMm{eeUYCg3EeUHjRe|$!f zya%|BczG_4s)CWI8hGzs>-yv~jj4IF!^u(kxvl3W;u)&4@^mt&<;8%L(3#k(aH7-x zsK)SPS19FaKCNngfHzJ8G+WL#_|CK>QK;UBv3^+ zd}R}S9ppCdUAFIQ8~Y)uW`0eqs^5VvO`cL`l+Mr$iwyYIrZ35#A+yTSgWQbhifRfk 
zIcU?J?RcB-Y>gVjV~)Xodg2e-`fV9h+VxQ$&Gp=@y$q=S5#C`l#w^< z@IPz?^+>O7m6qyA!o#Q^Y<_lyEA&jF{g1XIlZv>@Wgkj)<^Kw=BmaCyK#u+d)xhLW z)mr9xP_U^<T-!!bL;=pHe(DbyP!Cucyb~?GTh~#SWKHs zigJ_BOLQ=tn3$ILB`ZzcpOrtYBhLV;ZuMOH?5Ygw{$(r3osgfwvW$E}#&Cf0#_@P> zeBRrxpyZ+8vR{M0G(E%nBG}Nhy}$D}Tg$V-!KRHn)=BZgHYV@?zV*CtYVL5Fd;$=4 zRh&>GIQPUe%96*wA=L#O)DCl@{=cdudwjIf7|HBDhIIBMo=APU*{nI8k_V1_`%`2!1vxkMe*?)8P z+F;Ykjmx@V9ZPKdB-_`HsB*oCs_J&cO9z~6+s;8`yn6LN)v}iK@>#*eyzqAe4NvjH zy>&ufafy8Lbwk_M53QD!zwUqzl|>yRk)fOH&?L`H=bvhwmpnTaO`I@Z&U?1Gb@pW~ ztR^4b{_ZR5%Yx~7CH#gZ@@PwI4dZB9A_-TpK1o5+ZHikq_D+q2s<&^Y4-Z}hFv+uKD&nb<| zK4;FT(B%04ahwY{D(dJtefEreRgxU{qJWF$Do*2*BD|z=6 z-fj`^?y^p+y3LxFm_9X6kCP_Po}v9V6<^A|$7Ke$H_s`{JKK6YIo~eD4!x^~SQ+`n z$@%ujcAe453l9ww6O&3g6-T<0?<9&%ZfwbYB)Q)tH?HKq;x@3d@_wDY@V+y-Atg7Q zq>Wl*tV&x$`AAs^kus+)0xgTyiJ1J0tTmxyAnR#&&!nBQbqqL1cU; ztA^LSZgyoAjqeiLZ!scrWJ&R~#5l-0Y$0Ll55 z+rwS&(xoC}KvP}#4bI^R8x>cD!3 zv#&=_;Cxj+hk&lPg{T(5$vuMoEza<3_O(;^=b&lJ=H8N^OUw4T69(8dbFtH3oSsg5 z&mikZ1MM#GHmahYK~-mRdM0nZ)tl`N|4pA!{aO4eVdjx?Y*b-9{cs$&W|sNy;;xeA0&FkXgwcxFGJm6jbo@UqK~5bAt5VasQfVBr=>-Ua;o=te~QI-0K-s@^f~O z-Y4!o7$o|{QzEs3Ref^&p0%nMXoB>^;<(x9?m+`A=6?;rO)d>$jFcCh9R>hB--)&`aQ{3%Eu z5cfs}i2-pxV1}tDDZ#;ES!o#OgvOd)G2g?#=V8?d*7S+_-LVvx8m#FZ^GmTh*pyf+ zuttVfVgHzy8>A14dp8G(L2>_C_QQ6>-~t+aAgF{j=L)I3Q8E8QEL&&hfLQbztc!xx zeX^r(6B?qB-->NXrH5ri=U@#N_udXFE{glda2!+z(t^xBF|S{cJ~-}Q3%dZ8qPBWp z2Ni?k-r}H=n62C<^dY83Kui73TsT!SEx~m+1r@0N}wL! 
z#_XM9R=4-M)@Z`TGc0oop%PM1^MIKDA=VUY1YgQss$0 zPH0M)yBV8=ZMXXP0oG+UcduCVLN*jq4qYR$^VG9(cWj^wDvDWlKAX z){Bf7>{K?hv%>tERRepF=!jHCWe&~$EvIt z6Zao&Wd|&(wd-qBvD2?`Xv{wcO9j`^(D=_`X#vovF)=^mOgjOXI>Te$twH+Ocyv3g zUy#8hJ*l;2)O}iF6KgsqYtK*%A*9j``PPWrbzP-OmQ;6XO0)j#*!|r=Xle;Ae>s)Knkf1Qh~8J6;kvuv4NC|(mRH7xUHEV(2tw6-~m?hKFl zbvoIBWHK?V1F@|0G^{sbg}a!)$EBiY7sUM27#+3Wws-`VoE2K$x*&a8+;7T`UgZgY zOqHcN9>!7~w6uRLx*zM@&@1g3SG)je9f}rO7#_%3c?o;X9ejc zaqp5KQ4;r;#O=JYll~Pf+dSn-A%lEW%U*A=6i0P~W8U>VVQEyNx>ovE zV^O(vh4)O5m=*UA!|Z}w(>p7TzRT}aF)8Mc$8vsWkGUgA%#QopU|LeFQFf5uu&7;~ z9U9}*lzyd7B8e;6uzOjr+jZWV0aHiVNtr71u`aT;a1q`aB(CD3&Ng7Dh@4o0MbWfF z>DOba6P$O5%VK>DwYj;}|2e3*I_`e}8wpDd7v4F2lPR?0&JPlE;{Ho8b)BipCHQnE z#Ds9b*AluE&HyoO8}_$*tle=-u+(Zhl<#1vO{BydO@j2YxIcs@(^1Y_nU`dxVK8sY zj}6uoXVs7q9JnCcKVe{%PZ@#>utt%nLFm6%v8?km^J3m{L1lT|?>8vfcP-s7oyDew zzuRAEJz=e>Sen_^dJ#+O!dkU)@MYFw4NS(mq{@08OTK1DE{XZ|2ir33WbTJ0?@|hz zNooV;}m;J2gnZHXfaYgE+$U&GudkDz1&Eq+T5Czcz=8AYKj&hb z!8Yj;EUgTt&X}0@X;3*Y?wu2)Ul;eT2@==Eqm|U#J;=WnVeOFX=4v* zV+g6|;a2Le!D98PIOfH7SnPJ?4MVFf1E@vXASfIZ^Ck!BH?XK-w$Q?5Sv4?ng4IK^ z{SJ6tt*ReRnx#SdjT|9hoP;Fb+NdFl-G#JUoCpI4KDAM=Kl+-rO4o19ZM+~6Rf&BC;HMDW{Bo_^cW@sp_{V( zO9;uOB*YmNSUFhCcuosnVCnK>H=Ogv+WEy{`Maz%3}s_y>K#jYeQeO?zMPcfE)S+4 zLoW|j-IwFvbh#~y%ioV#HFB9`;Sw7~C@ajVky(VLc5*G65cA*0(h{@#U8`})xP$#- zeleDoEVBpeS!bn$yPP*Z*nd-wKWDrh3D#f#Sjto5gDE%X_&)(Tm1v$_9rIgFNX|M< z_EA`wVO-{=G5-cEF7?_rZjAX`v21;7xMulz$q^`=9rFvY-0*TOe-^8A*tZ`D**w}- zI!&bX;J~&VE<@BM22T#!+?tc}=;UC^tvUWJQ*HemLP=78TCgA3dzvjJV%D6* z1qMr{uuO2kOIRJO#fttBOQW9_6i&-Z%eUiXul3hrDH}^)2V<*b}+EvaiIl1!*$;h^48)p#iJijAWICk7cD{ zsMK%;L|0>R{_C5avUf%>Wob@I$HHLM(i}gxuxfKB8QATFit>|p=> zIezgRI~eR2-0b17RI4+3!ajagHL2t&zylaEXinzBmNM99?9y2f8+Q?f% zXk?HPWc#lXs%mLU&Fg|T59Rm+ud`mFOIODH8?ky;b@x*&%^%{1WTjniYi1qb>Rc>2 zm#CtccXv?naNK_v7K7CfF9`MLCudmZLrfejrDsRyfdnU_)$!mQpZ}=*>zjwTf+aUd;cz%EAvRr{5H`c|6B2xhc7Fl;Lk!>LL^4 ziL4rm3Nr4_j*h&U>t3*WQnq(bka!{<{RmqRX1a2k-Ty?6Klheowu582ox$oyAuLTU z&UF^Kl@yNMg;*M7Ry)_z088y-wNKzd&XaNftiRZjI8%Pe9EWU!XcdIpX9{;`Q`Y5p 
zJ%WmLasR5oUXB?$3LAcF)$%Ik2^^LNlEGrRd=*rzk4F#Q7Kw}wRwlZ zl8~*p!JWDeWIWw9jezDDCk#&G%Y({im@6ET#OUTCFN66;*k`MVXUvNA%0 zLbi>Njcs~gRVjpQ-UkWU^1iHMXWyU9QAWtNZ!;lITh!5DMKbRQLbl}ls@Ru=YPb$vTDFdRuvC>jgzm(;2rHaEe(Oh*3tpFz;-K;so;bt0 z+7uVWqCaEh1sR*OqnEEyZEF1cggTOft0x;ny~lz!ujTOYH}M+%eax=q@IE!A?c>4z z*K+(D9%mqJi9=#3ZvyA#q||>RShX$3U-3k;m-IRM3l>jOU&{7I1c}$<(Vx~bSV2br zY(Mi!yDivT(n(m|DUr>QTi!!K;*GdJd|k4iY$G(M@{PE+JxG5uo^smyV9J|0-jtx? z&3JUn`fyg!jrtpGv+Vvn6l*v|)(UWNbQRWgbvC8RQ^Ef2InmOmc(fKA*q*KX{I}x% z2JsV=*Fs3$fQ^_8|$p#Kz_DAhmbZeE<+p@Uv{bJ4-a8} zsfsJSJ}d2SRr_H{Opo2)iF;oL={w?n&u48HIr}lNS7T`>s2vpU%}T@2v}UU(>L8Xp zZg2Qw&)GOSIw2Ol35#*OHQRsFagM9Yv(h#t*M+V~>#@|>@QURBh^76-7MJyWa)KOW zzb?U2#oSU(&q{Mq_Hwe{S=JA-e7Tp`9f4~9dWscn1P@^*maeF&F+kcEuFVf-D8)E)XSlVcL&_mr>TWtB^1nmom4;-yYlTJhm=#-E2J588ITfYr0gs`Xm3E-o$p04%kG9x_9(!?NvH8((xO z*{L`>*58)gZwmjKm4=~g_WoqCv%-gL-g`mCCvm^d>&XS8Ve5~j4pI&7zaGmCKGkf- za<1UO*Zd9J1hy`2PAwaUOzHd=~dlc+-||ACnBgvIC_jFw3ycBNt_G zGW-fl(}I+Fv1rHb;bW2k*?u9Rb~Y7T@7-8#Lu9VLk7c)0E#nq%RkucA$({C*;xfer zt9fR%gOJMO+_F0>?QOevu{)0qjS<8!5%9$wM5%R5zG>8ibx~;PHn;6Nb|J9kah59% z(ktWshcJ6`TC+J8ZM}os!GTxUY6x`*OVu)2jnx~A)x|^8e*}sB@o4+M>%5-7Kij{8 zP)A}}+keTbfpLMFT<;q~YDz>ewnY0>r2V_e=c#(!JMG#@`@*1@$gZ+}pE zAnrH*AUP*B1*Txh-E{0~{Q{-p>v;44?1CWw)2?Z|?W2(JH@(rHv3dp>1G1y>52-SI zTr`J}M#Wb55S9wF^Y&+~^RW1(D6{gCkL&^s@5KBkv2wzES{NB0+u>wee>>*S!m=|* zk2xN|vNJ~yOFqFGKtARkk9^wg3EF&@6V2a4GsEBgJxl03qQgBQ`Wx0+LH=jie&BTVy%sAgyRkG{c!cLaXYLKA{E*{6ve&lD?ik--*$m2l_UCp#;tIk(P>Q9v@K*!ghM?ld zc=R7I>+X(UR88mntTYUH#%_Rr!_q3S_d1RC+4kCz7=xuffVD$64OR~JS=|q3mEa&(>BFvYV_%^b=>Xv*%`|eQWm-J3B^T*^-oSF;+XuWqmO&FJKJ{8=#5M z@L+PxNSV_A;NtyWj(;Ce+3juK9+xD%jgEHyjsXk%HItCGWTxPbtTYU}*B1UOG~71! 
z2{-BACzqCq)%#|^P)qkm}J_*JJrme9D6>k_sJOJ%cexwh2!Avt!M zS3R)KCytX9k0)=&>JVgHne9JINVAqD#|Zz7rJ-RiveRY#X!{(_%xDqTIpO1@2MF1j zjxVDAPt+53v^OEmQ9JXl$LfxSr*Fwh!%)E-qI0uq9CDK-W$Ga_r528S2%^Epk(>|r znaWxozoa`{)d4P91z0W#r5f!)G|7H6WUWEJ}SZYcg zlbPaWrC}*w_?$8NA{Li1PFpp8aZ4-Zf?v#(x^#UG*jIgk113qrdOc<1%a=vG#9OLD9fvGr#;v1!S$uXi` zy|LtSJUKfSorBdi$QYgNZy=;TvK!nKiyp+fD9E_CYg#ISFgYXSEi#E?y?E*esb28V zfNp+^8lD?4P0X=aRdc~#xT(--93g9&BC9i}bT;iwO3y!KZ!?rleKTe!mC*@_X>T z--9jvsV9yoKn|JIT6fznFRaeyKrJuZyUJ8F^WxEs z5Y7{9cMbUOVp=QVii`He;#mQguNw)eef9$Qt+;Iwo8qN>0QVAWQS+}@Zcg%4A%~A( z+gZWgLG(&2T1$Bk6Vh}KKgAF|i%)nnLs^Aa6H-0Ye_>V{h89@V9871yV`=q-zdiO_ z9&fAh%)u<;uzC__m*SnKvIQRg0H`g?_BHrw0G53#g*Qp}q6 z6wN1o6~}R(-zs3(%|mPUS}eOrWIc%`mveW@ja3sqY|hgxJ{&WVkX&jn&G%sSsj~KA z+3il7PJccGZkKH)2QUm3&RsFLw;0-0Y#BY9TKBUYb(4jq?bKezU&2xuwqw7#xX>?t z&WY7qG7C!`3l9nYU$Me`=3r|&jWwXEzCL`Y-EN?2<)5%L_zV&E6mOV{*7&+UpL3V5 zZ39PR+1aAY*K#aIo#!!c6H+&9b6Pa3nnj#H#$(w_n|9p$uvDeJM(oDY&@+EW$5L9J zV)nOTSxw_J^70Tjfdlxx6P6r{Q`l-h|8G`1KAZ01sA(paO5^-l!fzb0R8zQ~{M1uz zD>%ul&Pv13erfme2eIN<{GO9SKF6{POg9xL@acF>bLQq3S!vD)pJznx#~L1FOz)b+ z_CqF}TiOX|C+C$|8Vh@R{aaO>T#<5GvPp90KrDW(%~H9Mkj8;Y%jZ6}ne+~fL)z&{ zFKVdzV5wQQ%4@NPW7(Nf z;IOa-;|Vz*aLEX;Y-PHDyy4>Pgs6L_tqf0Yj`@SII#KK~!5XfE*I=pdq-5)R152kU zHoJ*2|Jc^H0i>kNLM(exW{9ItV{zL#AUnlNH*Gr6+#c!4C8PCNf~D5sVO()P)W6^uRnk5n%36tUH1NE5SAv1ozr(= zx&4dV{?D-FRlCGmp5=wV_pWW$)MklcX%^aJ^7UBZiZPirY@C*#9k-?#wiNrzqY+q2 z$yAw;m4;z=AYIYlH|ggx2kNx5(6+A|mYpV=5oK5!Bpdg%OUZG(AD@-Q;?X&G7-zTl zk`D=)fYY(GHL&1E$GitjdRH&*ePR+w^yCh>)#M+`wd7BxQW5W&^lmhwd8QrJaF_6h zVaXA8Zw#>bHR%98Ol;C~NL-_%t%3eN5%arZ*-mLfOvlnN;zQQ;T`r}4R`_?UF2wO0 zU*Z~^ZS(ozwm1XJ&S>oif5p}@)h-R!jZR$|F}_Ombh zvE)7byX~A#Uie_2F0utI#IoC|%6tJ!vm&f7<+#pfN)Jv$mv?4DnfxA{hVCcG@k7D= z1Uca-*fCa>U?xFF|3;9LlM>X*f|L08zV1YOz>2)eYbyHwE`1UZJOguf9?7IyCM zg%NZmEFtL1{or?cdUjQ|83bL^pCrgRP31=x5dXJy}D>6WTdcoo@+rJ0y!Kkw4r zv>8DD3%c7G$r|E>GZRbK7Q6ochIJVhec^1J-NUpQ$hcpBz84v3G6v$NkA4rvdNM~W zy`JE38{AFsG8@e9RYeyO9BS!afG%k`tK5q(S~qU`}KRr+NdwX9u}+N7>%Xn#ioR{6ie4e 
zT*}kJohC8Fi>GwyXI2ftUu*k$kV{$>j5-`d{`A2izELA;NZ zO|aLClZLLCNc& ztU@eZ`FPmKY4~LY7uU5oeSyk$?|t;#3zR{BZ2_4*IAtSX@joX%SX7V%a= z#k}?UKT$QLl(+N>-YRDnZ@sFkyl@r&`!JG5;2Op9k}Bdl-YQ@|ZxyhBw;Hz4**Bqj zNfm#y(_5S_LX|(@t(R2!4R6(VH*du+SN~POy}YBmA9VIgXRk(U@_yX$Cr~wT9d8x% zth1j-^^&T*7kD4T`)%H;_+8$5NoBw9wCp1n_!L#apF5jpkKq{em@I7R_m2OB*5#c_ zEe+A*QB}x|PWX~4$Q?}hs;=sB6S(T(Yu5Li`kn6s;^55EkqA(`OftE+lA5w567E?%nOWM@ki zOh`{9FvWqXs0uDrtW(b%80|IIEQypjELA--oh{XnUg>P9+IbBsbS*#R@#~#EUy#@T zfrj-TF@qX%gR5YnD_E-FP0p^aLN_}uRsKaz1IMeY(5)WZxL&uqgtwuJF#7QaRe?)g zx;w%|-Nty0GnMR4Op#?y?~=hws^H!HkR$IyQ_O0znP$dK*J1X+QlDqTw2p~8|(Ue*x`StYR0cFom9c!oc-@q-SG6osj{Rv8!Zb9N_9Z0{?|f< ze8;8Qf}5f$;6%rzN`DHf4eksVf2NDCuJThGxHQAXXQZ$k1v)rTT~$#gTorUeWp{SU z1J>|WU4^>1_;X#nR1NEa3iWinO2hf)B2+;y5=bv_+S_H2D%jWYevV6(zCWst3_=xk zk$(ISs`P`2mzE8238l3GmpZ$;s^GDXOO^3*RGyjOxKvYgskylwVa70zY zd{k?2bR6U!pMWpmMYy!XG=Bq4?A0`4zEeI^%JmTyx!@vsK$7+Q++YIUQ%Uz z6IFwEx%iJzt@AHj{8vs7qMAF0oc#-`d^Kr}($_B|po)%n;AE74kyH7hids2sconLbR2lw+s-XEU zej%!Si&4F#D)2U^#&N0g--W8&`%slrVdKlffWRXF74WEy2vr%^Ia{ju^{9HX$?+qp z%6pl3O~$ueI;l?CpE}zr_b;-$*p%IO)X%4vf-EYu_4Ab?HVY(hNB!4tj zaaXwNq#CQM(HiE!bgy|h!~cY-WY@T4QrXu!`$($zc`m-XY9=pq`EPQ1vrBgbwfDZa zxCH-BHI9FA>7=r6b+%LuTI_78?Ax3zRlo0W_V0Q=)I})6oi2e?!F!!uU6t-W$3r#E zW_V|st~0ze&DY3k7bjICA3>GpQOA#@O7|G?nsiUPbVpK6>J2Vls(eqQ>fCcR-NyR7 zOCVK&FFE@^QKf&yrI)IL*PJa?N47azs`T5Py}c&;gEMfd-~o zFo z?0=)-(Jf4(c@EtEyXs{;Y52Hfg&vE#Nh?-S5&#Rly2pOJzTRs=SrXu5j@cWyt_44?KbjJ?Z$7RIc6N z;-#wKDQ8z#RrrjH-{|6{8p4;H-6>oQWmAwa5`#*E`UT5z^^{TEK;RB9K)vSZgM(xm51L3oa!!E&*R9k0M7xX`< z8dlTsBdPq7=HjJF@29z`;{$XkY~d38Khv`R#R|0tw{zu7<$(^UdY^JS;YTbNCLQPUNM(B{w@xF#ME6998`Jyj5XOr@c_Uq$;?N3|>c4GBS$|Zk;=q4`XLFX!+-m*gjU}~Jfl@JNk5z_ zs1HZzB~|d~ha~WV9Cq|W5}FoA`mlti${NCGDu>BACQNJl>;;dYMdACAB;;fE%)vmE`9#L*8)gqIk7I6^O} zCdQFI9HDq|#aI7ugt%-yi(csBkA6r(`_Iu2NgVxK~2}m#xkE=!Ybnqx7MPqaTvc)c8+6#c}jQ694~y zNaB=#{~?KWUGDT=9Cu#Ew^`}uC7S=%Hub7c{rZP8?ikbdw+=7wcS}|v!O}K z2y^H+ubyev6mXhJG))3MRwn&q5{&c!3r_}|X?6*ur2x7$1EiY;%>e5K 
z4hXb0T}}Z^P6hn(tJmMlFnxdXMwliw0IPokv^R$Zwg487AxGxo3Np-!5?{~5KHH4) z0Ih2RHVSkyehOfxKz<4!W;O`SuLWq93g}`Iser6Bz&3$w)3gR)pTL|NfNo~1z!D#j z5e3A}>?oj5ZNP4U?k2q^;E=$=nt=1oE`gPG0NrWH{X%2mB&1(DbbhXwm?%x;EfKb68-Dz?eFK!KR`PVAe5!dUXL8 zn^AQEts4S13Jf!TJ-|+Z{Ca>9W`n@|V*$viOr^kjfq};ZrkbV4112{G{34KV`ZfYIIT5hB5um^v7T6*%<^({YsW<^J z>m)$E#(-iosxhGT$$*UlrN-AZ-YKw6)A$P0^hCh?W`H>-0%n`70$Ha3GBmNTGP6$t z>=Rga3V?5J(X?IC95Apspv)|74(M|#;1_|prthhMLjtQ$1zcke3#@Db7}EkU&s4Mk zjBE+0*Aj5O8PyVyb{b%#zyjl+23RkUe;VLMvq50;>40XZ0~VUZ>3}A!0NVs^HceXr zwg}8=1z2RZ3d}kKkZ}eeFtg79v_2ECTVSzCKNGN1VBwhnV|EG5Zw=_y8nDzXXbs3p zPpNOdZk^K4yTf!z2kc8H#jXF8SsKREU=P4TVclV)x_RrQxOA<%mUQQ0=#TSWdYK<05%G2HGUVsdV%~dfLF~1 zfyw6rnw<;SW)kP7jPPDJnUG=f*o9`NE8n>@#a5mB#Oh>^HfRFUxsspEF3BO2*%vu% z7D)bS_DFs>+^G7Xx++G&1QI19l25ycp2f>=Kwi z6wqxbps86f6p%Fxa6sTB(`6W7pTM$VfM%vrV99X6z~O-AX6bN1pAmpx1X`HBBLIg4 zR*wLjW)2Ii90?dR641(2j0B7v1*kU)aHbhG3XpaQV52~~@h<_a7s$T^(AI1am^>QL zY&0OlBt`?8TngAG(B3q?6tG2L&ZU4%vsGZ$7(j-O6=$2-V*sr$1MC*)WYRAK>=amd z86akM3Ctf0=r$J6#Vi;L$hsVGKp@+6xg4-hVAZl+RTNiJYuE+B4}<^uYR1NI&SYYLNz?kuXUZ!F^VB`cqy$OKcX4C{gS{`7dKwsnM0oDuT=K=bg z4FZ!V0-8+(3^a*}fF_dw+XOB&O(y}i2+WxT7;Lr*%$f|ym<+hs%$^KrodE0>7-rHF zfSm#h6MzwBm%#iffNoO&qs)RSfUK#20|KK>m#Kh#0?VcX#+XWhCDQ-{rvb*ArPBa? 
z@&UgH%m)1*L$jnScWVSD7v|0s91&%>>Lbl>$qy01UhWP-d21 z0q8Re@Qc7)({~o&kihC$fNRWQft9lXV`c;9nTpwfkyirhT?x3}jJgt#b`@Zwzyjl6 z1z0bTe-+?Hvq50;)qrMK0~VUZ)qo~*0NVs^HcjUMwg}9b16X9X3e5U1K*oOo0yF!+ z0Ikaay9E}T^fJIsfrVuNV|EG5F9&oh2P`!U$^lt(Q|b-)_`*Jy-?DA^RoB1RX!eqe zKb*7Tuire_w`cbg_U3ke)RjTEGf3_*%fod4RP74;pVCAniIp-aJ5sStGDspwV@JhfMBu zfXUYbHVdpa4X+0@nGYzv9`K0SB(O!G)qKDjQ#c|XTXw$fFA^&HQg5i z`rHIqu@JDy927VtFzhD43ugIEfR#4`qBjFJo542&M&1HgEAX=MZULk%0_5ES*lN}Y ztQTms2=J=OT?Cl?7r0bbEm`ws(1X=}v?WQmQ%(@k@L*Q-G;#NTG z#ejLY0(O|~0y_mdEe5=6<}L=zzYVZgV3*0f4UlC3i*EzGZ}tf66X<0CyUij4Sh580 zgTP0o`w~E(rGOPn0DH_qfkOhrmI6LC%a;OH-VTV~4%ll3-wqgg2Vkwh7sk5-kai~^ z?+!qvStGDspwXRxFHP>9fXT}Mn*|P-hRXm=?gEr91AJpP32YH)br;~EDZC3X>u$gf zf$vR=y8*550nED_@PpYduv4JZJ%FFg+ z;1{##Uci$306z%)X1d=8=yN|{#eFILQ#iGJcVEhIb4X;^{Uk{>%kL-2$`yd<3P98h zUI7^S0AQ^^E#o}^NP7^F_W-~*YXsH{G=kHgG9LzH ztp+T97;uu=Bd|}P*J?mBvuHJ7$zK6K2sAg{{|e~y2w=ru0WHiyfkOhr9s!(YmOlbm z`6wXzD4>-Y{3u}L8o*kCGmWo&w;p2cN zPXJ0E2edbv1hxpYdIFGX3ZDSXS_{}AaJFf&7SQ@hz`V79PG-BnPJvEO0%B(FlYsf_ z0DA?xn9Oy6to4A!>j2qikH9{GUh4tf%%b%ie{)QwByPHIAiB>}M6cLDba!(Q;Po(l zpF++z%OyR{VM#AD_-W(kkfkOhrHUq|+<(mO3Ujjs5 z0_2&&F9Alr3|K2L$#^dV(zXEdUIrx08iDl!jkW-$n%pgb$y))N1@cY9t$-%407|z4 z3d|;fEds4x0Ti0TR{*nK1?&(gHZ5KSw0;dR?^Qsl*)FhCpwnxBE6m*20Q0v2_6p24 zncDzauLBlu16*bH2<#K+^*UgVS@b$!$s2$l1jzkH9{GUb_Ix%%WX@CGP=#5V+fPe-F^-eZY$M0L#rm zfkOhr-Ur-gmcI{J`2isM0bqq0`~hI(ZopcB2aUHIkoF-UZ#SUAtPxl*(C9=fwq31FR>`w3wFr+~cz8%*Y>fUM5|i$4WCZT1N46X^9BV53>|8DPm?zz+h?n(li6 zeLe@Q*bCTX4hkF+81^~f1+)Bfz{)QG(Jug-&EPKpBliK;3cPH*eSow|K;AyURVk06WZfft>=K4glUYa}NOKe+}3xu*+nA4aoWiu=s1h`(}^8K7n4}0Ct;2-vE|; z3;03cBh&p`K%awv72g8(n1cd`1cn_1d}@{-1g!iH5d99Y*9`s+F!FoAT7fT&_dOu( zAAr2?0hMNrz8wwg|NP5pd8H{s@@$ z6JUqH_ol^9fYyfq^L_&SV73EzGLn7>`N_poXDf!KG|2btuN~DIl{pXb7DP9fp9qOf+zW*XN)hw6PFoz{kGx!&zrm2wBGTyI9 zni(bW%^FE<T4mBRZN!bSpDB4SoiH5eqrfcs|nF zjFP0AHIg>QuZ^@dxstQY21$l#Scig|)TN-(Iuz92Y!cWa(5fyV(-hVP%&G_2A#k>7 zQ4i3%K44xwKqs?ZV5dN*`hb|3TOTmL0bs8{7n9ilkaY}TaRWfM*(0z|pw}^gZf4Oj 
zfF%t9KM2H4cLv4uITo;@A)vcCC~!z%*s*}~&GKUbD~|(2j|22FgO3A@JRY!Cpttdk z2c$Iu1bH|bD634gcX_R&Z4Z}#zi7^RQu=@b83&& ztHU2bhhBBV>N<2tJvn7j;krQ`Qcv)r9$!OVUv0Tqkq=UmZSfOlr}77^`kU2VQqPR; zzLo#(tOCnyEth7erZvi6Mo#`Kw0?J=S3IsXF>ONRk#&0}q!y=`kH@9fz^4bhrH+VB zeu$n`%lu}XCcd?Xk5#MM?^SKnFV|YnjGvZhH>KEq9^jKF`93tWXmsk&W?%Qz=c4oe zhSyZi(_tg(9lJUFUe)rG(POOVit>sJrp?TY7;~U!>W$IuTU^akZL`0+fSTXf=A2U} zMO8;1dx!u2x{hiZct|M_0{ zA6Cj695R;;Nj)Qa&moGg>QuciiRM3vmxm4zzE8!}{)Ruu#{ai1kNj$KhNjMpF8xg- zPqdv4A2d%*Gv5wPZQW=m&lULpFrkCh0bM(Z*ib z)#`)N{>OFrK&dd znpu6!y7^O6Pkc+2jH~^zzOIwk$Aiqn-D>2Rue#M}U8Zl<)9ZRZj-pSX=u;`p1bEGN zOxc=;gfG%}j1TWdT03@wV=Qd_jjr%@qhtEqM)Io#{tQ!D`Y`4s{ouRUBe%GO`rl0n z3nPnQ{0o0`o~N34-RjcmOTZ>Ow%9R!BBaEz+Z@wpA7+_@IW>+e>*I=D;-d8J1j%nu zTk4p;==rcK;&#XMKfmj$Q@rkQ>{!C}9J>>y70#TGG<57Pm+p92W6R1ScLVtsX~erJ zP>#OWW!3|B*xA zpMRl)54nUV!}K+#Dl_@jv>N#rT_yTy_&z&tk>Ho{b2I`8Wp(|;`!Ys0(Lu}vUwupa>GT{qs1U4cJ3 z#-Epoob1?7j>Td6zA>#Tebuo%cpmT1VOmuU=!F_n)}0^C-EwN^U=LUe$BuLCeAwxZ zHG-)zJ$awu*a?pHg0*(6v11p&+Bnt(Mtk(9Y$9#-9dUY{oX6g!j`Wt^4MV4Iq4u3gLCCV*?3ycC1AW>Q{IWZ~lf;_-g4AUWnDx zvC|y82-e%N(;XWO`;D+(`f_MR50T*}^%*d^`eNRmW364fq3XX{-5gDKa2R3z`G}UN zzGYgOhw~ojDn84l(;C*_5zs3GCWnmV{VePZw1Z2hzi89WrOR~e5?DU((ElAB91Upe zT6nf&m%;`)c8+6XU_Bkv*Iuja%Xs&4tg~ZdVclV^(HKk}xt#X`m##|<>R0Am-d)@} z*7s+tC*yc$!?eb`xy<7Uk99rK1zW)hymK8p&#^q1zN1RZxx33dk+8nM?f>iSEC8dp zx`v%yc5w)jkc18H1Oic9i$iet;K98VOK=Ls<>0~HEl?av1(!k#6qiz*I{rUrW|jn+ zzVG{gX&>&Ld(X9V{m$%+{(WM(b=LCh+q|^zR<;6n!M?$At75rD;kMCotBRXuuq%wR zBCKIW*bTQamRl_=Y`#?s(*bWxc1MU@JJmo^(Ek1s9taiH~s5^ zo2roIoX5A8TUSf24{m#L`vkL_<<=Mb0ZXpC<<<|kb+}c+?17u{nxy_*ti-W0W=~6S z0QN<=Rl(FZk!xlKLK)ojuQzV|Z+wNQoaNTvk{g731>Cgj4zS!l!(Q2v(>I!He1o<9 z{iQ|X?^DZh2=*3MeGanRhT_%|H|+(VS#HCyzX1IkY`G1`{=F4ee+jHs)o>sUP8;`7 z+_dUqV6xE&@r}k&^DqMR z(-**N*omMo7Sx6}#gh91d!*$y6*n#XB#5qsqyD+y7C#yLdFn|Up8L)5Q?TpnH?`r3 z72#CupW~)&VvglD4f_P#^lz@^HXZv!%Wa>-xhO3Q5_Zu*PP7|d0c+am1KRKNVK#!agv7G_v(Yc09OxXskJ zjmKbFXE`pxKFbnZkDKOtDa27tV=*^ba?7yqvfMUXZp(4gMmP?0i{-WgyS`UZ+s;Y$r{ifi)p>aS#oQ!kHAg;^kV?z 
z!DAiNw%m5(rUh9Kb#T)v+-u53du-q$h7ej`x`x-l8)1ayw%>Bwgxg5<&xM_3UvvuLAHWwMyuwq<+csG=-cL(U^!wrZpS`{!0M)JeGR+=^c|Wj zF^}Sg)nh08NJ&&{CoH)*?3##b?WEVg$^ANGb;A{Q;U{kS!<+HruBpTZ!hOcYh1D(F`AHv1fLrO>mx@*;4YFh9F-JNEDjNk>CKzFCUpqo$KZ2E(? zY;DP3>Z)ZA7HzHCI`u{5k0B{-JUOHQ-DRc*-B@;n`}jWq{o(E%xB>d6^w!V@+Codv z)~StA8=p3|0fhY&2Ekw$0z;F?ygH7Qwiww^$I&8Mcka4V*PXfU#C7Mr5OlY#o9o4} z1eSqrqHCe52&fJEC7o~JTlfx+!ErbN`l**b&=>TRK3ic2=zg~^=zey9ojuw>ET6$( z7%GPvJA9&>Vr>S^p#|v2kZM5$l!DSw2FgM?C=dGOrsD7k`MOIp4aTmYnMw(%AT{VV z*b~x0ddL77!3(@06Y$BZXb%ULUx?%ZJOte<--K`BJ2(cqSI!N(Kh6(2E$J+zvrc2s z^>st20p%cu_BR4X!D!I!vTlzjz(n{0Cc`j&x#(~#`i{^3p!;RrAM1X222G9_e!wOglXW%THhl}t7d<9408#oO6LHE@9eF8pa;IRqTkS04M~7ArOi{Q3wLv$c8`|=r`A%Q5pK} zrq}QWbW^Jzozm|=>c&+!s(jSggO3$^lmOkM@_AqnzWD1=7Rp0Kn}@V-=j7P3=$b;8Yr3?X47%!W3n`%*TqFbDnEJh@Is~r^^*}$dbr#OS z5zy~F>9?PDV(*ICQ$HM~-_yDZ`X#ZJ&qXRMFv~(IXauW4 zKVEVG^xGWAVLgn2u`nKV9~=WCfsY|czs3&ll$X%{Ww_#ywJjW3qd%dS6bIcOmxeM> z7K%Vpa6)S`p!;Io2e*fAD6Ru^gig>IxscNu!Y5D(Dnk{h3e})Gl!9V#htlsrL+fBY zY=Bj;8dg9oEQX~p7dGhvtpRmkAM`_ZyWmgIZEHQy52!YQX3!jT>slA;L49~ZxR>w> zUc(!B3)u;q19Czx$PG4lMHqe0;ji!;ME}k|f54xhd)uNA1pbf(ekZcwFcNeLs_SW8 zKkKTo7xaOH)axlY0UJQ~zEfZnj3JI5RH42ad=IRErLYXxfkqp@Om4?TQ_$VwZPa)U zj>BQlFGy^GjW8c(!W0+>x*OEp-={DbhCl?=fSRBm&|5|ZmV*8eR#@VGi_! 
zUeF98UE|!K({L0pgVZOJmSzd^w)zn z$hU<^Xb0_~1L#hn3+P6mKIjYZqx)kS0NtPm^n~7^AA4yI4M11_ji4Vih7{lpdOt<) zfkcwH?uGiGUi|>(NXCq@FdjY!eLZzQ7zCfeIpn_vK7thOF&K*;#_QpGE_g_;Zo)0l zbMa!J=h=FE{g4D7!9DmHvO*~64H!MoID?$M%2>Bon=;~)LjC0Dl7LM@f--z^g_yhigzu<3p0WaYd zyoQPtt`g{}V|NPEmUP;Iezf5Z`~W|~Rk#M7tY3kPMPT3P=g5AT^|cwBQNpz^&eEsPsB~4d20DSO+U% zAuNK$Fc+pl1-Oj{^>dQjDbY6A2ut*Hn+vccM_oxF8Q4Jur{O*w^x*9!yoNV$1d4#3 z1_dHp7ivIta6xvM0D6|DXJ>j=HX3=|dg>dIJHTBEa}OFND@u#SqCe)}4SG1HXJOl5 z5=@4%pyyd_A@cvKy1!AuSvU{J;3yn|4bX=ib%C+i$HV9F1&juD*CVKxXhe^W^m-B{ zzpSbYqL{%D0(uNI6SSz~Kwt2sW)pH92fM(nhWXeRfS$}ugXz!*IzUJ03|*lc)Pe8G zzzLX7oRcvp!D2nT7=}f2R0mUkP@tn#4CvuWPtdvwBTy(T26qNDV;Y%eQb5l<5@uRI z!O<7=jKdv9!;j>;HFSc`Is|Is?h=o}uBr^TB6GXzc}0I10H4BVFc^k_%4@>iiK`cM zg%;2X+zG1&n{wR*RCaMYIxBznpGNDMMb7AoT$hD1P#Ov#R0@-Pc$CB}4#glG^uC7P z@3?`C-VD)QXHm=?uo(NdxG%t*530#|m;$q57R-QYplj7oIAP-s=0pV3keDt6w3+Li zS`&OhOHl)KWgZHJ;x4pxlu8~QfIUCth1`$}vcodsdB^VSpr@8>nhp7eg#9k0n z2?AXY=-O7hWFY9)L5J9CPz5SOCHMqNLNORkOELBqVO*2|-CyWlL#Y7ep){0&a-heD zy5A@RvDkH$tgGdUTvr7YVH{Pd#)d*a=nGoCy+GSy59kEC81HDyXpXIsT(pHY&|0<( zab%NcQI7mhL!0lf9XwDJ(Ur4~#)#>POP2$>%BlqsPzP#51E>%6;1GFoFB{ZN{oMZQ zr*0ZvS8gij4yW-R#J{QKruJ6YS=mJ!OFVZZnuvNdw*>WC2TOI+@^paq&>eIs&>3_- z(K5S-k|^xGL2F4BrwZ#2hCVPqY$dGcbY+yVsSMy+*BrW98VtHv8U)cW9EO2L>ehV1 zOt=$q*VstpMp&|%$u?#M)a~y0DY-UhZI)8EtD~0Ikyo^?G(XH=;@L|=(9dB!i~~Kp zoPhZaXm>jbhe72I!9h3x`(ZvzhrKWc_P}n~rIitfWgF~-rLY~gLM&{CO|S&!z(&x; z{yJC-YhX64hPki`R>BHc28&@4EQFab1E#?gm;_(w%2b>E1DK41=6));-Sk>>t^q!F z(*!lr#QZd~>OKoT4yPG@ABOaeMBRx;#_sndHAC*q ze5{R+<>L#9{@sbkJN|b!cl{>J-2WQhT?nnZ+0p!?xzk>$T|>?FnEy+}iDkkaxmJy; zT3i3erQd>^W>#hXQU6*E?na{BGO$M(I>am0-?Uqq@J)GBE1VMUmL0-_K3_a3NSv{oJgL{?NLwr5T(*ynF zuo}BNTxLD6*HitppeOw9gfxJj`fCJ=MyRF+bj#=+04!z^^+=d=a_5l;a)V}06Vj5qGgTD3?oJCpKG2fo$JB$& z!cYhbf*xvWMpe$80k;;z^f5sG_n68*Q(!Vo0$rVc0TW>yjD;~U1O~$(_!I_0Z|DU* zK@-w;&=tEL{B?qk&>r+uwm4`e^pv(dMC(nMQcw~~fZmwV+q9)I%UIV9F{?l&&?DRm z@QG!wj5!|pYM51_E==XRHm2S_j=-!AH7s{^tBJi9G|>9j04iJu>O(#2x-n)WXaUWj 
z2{eV~&=%T2Ylwt)&;fL1swGtobjECJX+uZID9gPEW;al6+|aqaJC?rC2l_#O;P#er zXG_&Q8ivC#7z(Oxbsqs*eKDXiBViOw0F6)OKL?f7>KPB(H8q{SnCjL?BXH+@I(Du1 znZW-BbFs{^uG_uso4}r#Zs7;M-~*Y!3(`Xq&Re3jai!xz zAN|w^8VeACK1=urw6Q(J%mn&i<9+xU&cHoLj(BD@5&j*8(}6!Jq($N|~G4>S{cb5(D$Izb!lJ9q(q!Ef*!p21Ui z0&8F;tbk>pnO%;lY3bi;?f z2O4e{=4RLeaj*ln!gkmOJ7G8M)%sV5J@6$QhjZ{1oQ9)t3QociI1C42KOBHVZ~`>k zG58j~fv-X1(D2{ESvUilx$Bs#K?@W86aV}OS3vK7UdFrxYF>qFa0BjuYUd$5fL}oS z_I>yn>RM>ue~kTC_!j#gn7_lH@HeD~H}D!>Y5#wTX3~Kf6#i= zs>qBzHGy21S-~AB3wF(90V2zdnG5nl9>@nppdb_gU6KW27KQ*Q1bQ+(K??v{kkL)~ zM-v=~8IC|0ghE%YbruZ49t6SQAVTf4+I31nNhkq&NKg*5416&EXUm`yzVO6G2Ek{b?SC-lP#6XyVFW~j(KIoq z!6XYRVXlG|)^)U7cqI-hv>MhxBLc3)To0Q;BhyH? zU}}@w4Le{vXdj5f+zGqjOV|V2#+7fedmvMa@ZVrR3J2gT9rcf33BW^#i^JFt!9mb# zT_RM+?`y7WV19>r479r3^?ZtJt!K@k*7Zp^0q!b2i``vyXRxcr&%-(RJ_+@I8H+Z& zAD|?(1y!xPJ}+U{`c#d$>+?t4v_3UlW!$e}zN3WNyRKuu2{%Az-HjC0L{=K{Qtz%O#FfQ8~h5{aet1f4eTjs2A^O)hG+0QsK08% z)4^B>X&^PE0=?lc`C}Z_qBCOCl{lL*S<|9dt|}xu48}fAJuU_qmr}Zkpy8o6v3VoB8fmZHG$X*J0#NxN5Sek zUF)?*t|}=2fe?mUDCoks2iG?+_4v00ZfcgtEC&^!22_{NM{vhqPk`-++d!*JrzIC; z22V%}X}|$m#R+#L*>TGP{tyK2uz9iPf}Eh?(m^WF@HI&~B-1NdBGO*1A|= zSYS~WIdXQi!;c?+KE!b8*_y_Xr`)x)mm^Z;K`tn;gvwo#Y-1dLVGj}TLEw)o9$g-% zy*dnmqC^x_)X<4HK7}`}%~7xP_;UE*UMw)Egpc$X<7i_GkdtE^rTCF;im{Fmep6Rw zED5%!lqm?6@{dP-_mrqEMtnhm!NongOCRLI2B|6#*wQZFH{k`2Jw_muLIpAY_U_m- zGKvPYBW7xnshhVMKH*xZQF1`zoSffphMnxTpyGkSTH0+=VH{dHC@nBtI>4qyuHeR|eeSP(@WO5@P8JYDD^m~%*CmZF z0viH1p79JRGJ%wW!VLY|s2U9#aB;=aHWPk+sXkgMp{zLM+Bk==%VkL{4cgo%O`*9& zuH_3M@&b~6JedfQfbl4-th7;Mgv=Q4=**9QU*YGEh8++RJx^Ww#%z3nz#s;9$c zg%tUmz&oY-=Z?z!f`7{Ajz+FaoD8U^D`%3}dW%EvYHk9_X*drhy;JdD#cu+M43~%r zTuqYBdR0&6t8q&Dd`Xg*Nsk)}G2?goukKv!auwo0WvY!|B*{eNic9)U4nL_o(GlvZ zl+w)K+*`G7%?dve7B3tju@fB;mhgX&ni)l;r%-~(KI@v8A z7@}n}3vW9n*C!L{k1|aS%hAe)>F26Q|KUzWOr{qjy|R^0n~FGyLE~s7)8;$8k{b3X zxjvs5#z>#Vh+CzK6x$-M zev#+|jtR4wQEp6iBz5Q9iqWds0-4OV8uoDZ(!pQdqA|H^wwTnJMt@n3Kz@pLDCx)v zxx60CM7+6h@mrd^K^$b-NxFaHfbnO9ei7f3H>wN!!fQ03|{gycnT>CwM)&FfpS 
zNW9zt$u=E1vp(z|zH)E|Rcc;Zh5YXo?GFB5Tg3YW{AJ5*k}V_e)cBx@7){5WgT&ev zmAGkvBa@RF{wTpkl71G^9g+Yw5{cD{uB2>8AYjQw$txWF$7V4H+{lves+RfT+R1oT zS&4-$v?TIA80#OGJ*hT0t5&w-LvIcTRw>kD}wYeIjs2mZ&Z`;VUPj{46y6ZLUBCZ>AcocesO z!_V%>A*tsv0O!hKYPj@o=PEr|9PErQU`Z)Ak2$X~0y>Wr8Nd3_g5_m&jrK~N?~V5{MhL5~TmA2YLBmxdr`g$3%=#lkskt@J5!4zc%7Iyy)trWSXpt&~g=am14CT#_x8P*#_*0xysr>X>lM^s^0;v%45%Mlr(Z)O&T< zsoq`^eh>}o%o|e7W12kGP|IY?W(MD!i-~=^lv!*jh4DDV-y@%~iI4I9x_++KfA9)d zU35_AxaO0CiygjUE+jG#`RnXMvcCJYR~bX1C=;p2Kj)AV=9UQ--PS;;!s~j9ME~LAT-d2MeCvA{(2?ESbG0Qu&8Md@muLcOPSkn1rwai$`n&Mk| zrE=%31cydE>Nz{vi=T`HS(33+ks`}T@R^)lhn&5jxeyrI%CTuszaeqRSqp*mGEC*N z^Zq#WyLlP)sBD^%h8SsZkKhdiQkh;T_sOh6&*S{c*uPteq-K0B-d5o1OmDq zumAbQjai%4NQritakI1G0XHKP0k(O z^78X{F#~m5AC{%Ox!>WkrmA{+PAL`{-zDw9)ejE-&@ zE6O)AjG@q#v<7=|G=4J4%5IgzHXHjGbAe}!l{#tYdeuF`jz>Hr;!{q&oRaTZU>!r{ zMl-iY*!dE%1#zns9|YzNgFCj+aIA&>heflBW8^r*_O-+&D4ZFYJlg80ZClHG*&JR{ zXPd)m?TC1*d#`OYH*(AFs{(Z{_^(aWVS#D=->A&3(?D~arW}v8r{l~PPGv#2__@YjhL)$6iD!GE;I$Yaq zT@`}Y<=U~AH=w>>E1Vb49dzBR2-rruop?JCjOY*fx?CXIC!aa<0tMMlS$zC`h6xN93KI&aFkp>u2GHL@M7ZTwnGDSeqn@e~PFe6x#wN4p;wyT*{vR6NY7(tmKT z4#t>QIQ;g*s+FP-dFg6_d=&R6+e&($B#{=a%=&J&`r$W~mYn$1ZmWZf&N_=)OWsqC zV0)1^(j3!TfYeRa*7Q!%tMu0o>do6@w+#;q4Cche3*fpBHL6bLtlBW7{=m%c-BB2Kr8~-k{f=6$79CAJ9xqj;lnhzH zAW2xGe;PBWF|EImEC)D7O2q<1>vd_!#oDneUKB%A%Sye@>}0lt-zRo*9B)%}H$J+! 
zBbig3rTYO#!IU$)m|KyDU1aeAN-^FhPY$5J*SM!A&JJJfY#jB|q93uJZD*qY~$!<&HQ?*>`r8p+_9G{0~?GJ1!s9xX#?kG4Y9>>M9#A67XhM zd3%HjyqLVY$SSw!SB{bX4+*7BrB|Mz9>vdRSP&oTMOQiX6&Z4NGh1e{-N{@vYAwG) zhV)>O!{GGY#D0{HlNpJW#IdQv>ekEKzFLDsP+*AaDt9-jdz400xSK3GN)9S_lU;^>n1|{_*KX48|^K3_hvwnKd4>|Y^Ijq=2T7FC8?){A;)c@Zj{7BsHzIK~LlmA-^ zZblbu_wFTAiS|Flm>}WIcJlkT2G7+?3Vmnhs>*lh^-(YBUfb!CP2V|u-lv%`q|83X zd1WB~Xwo%~5l7eFGW-}}9ew45Ucc%)`#4c1ODCC+qmt77%#n1}=Z`z)jm)nL6sx+c z_mc%G`9(k3d7OaeDCjHp6U1%~0=3+oRz{!rKg~Ns2Feqiq5jwTC-XmN7oC5!_GRfw zM;CStsZKGIHj^tDt`39DwldB+J^z(bSEz4>UgNKacrx~crD0sq=4Pv3<#x{8p1Lj2 zk&|`Je;Kw$XvuG!MlIHW$riQn8BUXnL`l0d%n^fS(-{(&!Vf(4WYaUv#`+nK~ZbJ>HE~j5C{oFz&6DeirWkB)oH z5as3MS#($%0UiF9-;Z^#+9 zfUH3{?B56Be;<73$wDG`Z6QUjxVj^!)5C<+YtuwrZ(YoiBaLD56oIgRpA}{hv@k)h zyt;P%hwxg1*iQs#T&zKnp@hQqJ?4N|p?ui(7YkIM#9x4aFXVml` zYOW@do^4;WZIZ38$rEF`o)`W51XOgWq`g47D-qV0G}kut8JIbB_G4bQvSoBFP|TyH zRJcHzU8ThZM?~0QJ%k3 zGXF;w=1t_Z8f_)XCD!&)QtuMglVCy|d&yDh|FB~J{~bA9|2}e>yNP7RZh~oHKN|sF zfcnE_?xOzlaAg2bGU&o}52iNqPEox^?|I<6-%g{*Zmh7YO1UeX!8MC91zLYqFl~G1 zxvdPXoT{jPy2_+0=*PW@(L1XnnFY%Bu45zn8H zyCl7S#&{wl?y^faTEySd>L*TMQ;aaPx6!Y3@3%e2bHc@VYXtQkA?tr)3eJv%4*6q7 zAAY>I`_AOXlGqpl0!GLUBy8c5<9BjV4nbXLoa)nM(Yw+<+^`(ZU1=REs!8FiB;P=) zUgcP)EpplrUbd@HKL5q?eRiY1FNwNNr?QQdMb~NCo+D+~btWv|k@85dgGb7K zZ<#K7)wOF6+ZvkF0m8$mC~p~gg98v#*sMEmCpS4gp2(7ytS*z^bcBB3tJTxzCfP8u zSWSA}ayVu3Q%43_e9Ms>(TKZv8gaiD9V``YQH(ETfEo!!ypjg1N{t%N=;WxcN+v)xO}UpTTb6v7eO?;KJX`A(2CBjNXbG+uvRNm;4$K({LKLvgbyy$^y0 zGfCJ>*5LsU9X`26jW>JZ$Io4)v$=h&W^`1d9~x!WCh1n^HxDV&m-6$&gl#~xD77Db z)MjZs+QH)+Pe!@-=%Whsc7`+anGI3uJa!cOhr-?cA(KpfkTCFjbsG)DQed|y?#i|l zZq=GQ5^qWN)LkVBx=Xx<-2E!^NBKyoOl`R7pT(<$L1oED;zsKVl)3j3NzAH2MzIfT z4Rz^CF=0xXr2Vg`#2A4x$riPL*sWemmfzl2k<|UoeHv?(&1~vcvpX%1O<66ic6xJU zPSEx~>?eLqTl*Ng&56MXF<74e{!y;IlWK4BnYcV5Pso9pygbFEZ5ee;b| z2m9#VBRv>m*%3edShZK?Pj~Gls2fXNB%DdE{P_DuZ9bkq-P*(XUTxupe%m}9S> zd#-U+m}aiqTGU$nY{JUnCCQ%dHYlC3YRHsTr!-`AiR+oF+KEv)HVj54l*g~|c|RJt zTfd)t`O2+Dqx)DHGspNWK^~i?o0Dy;n)TCkT9sjtk)Bq+$6l%M+Gr`pz}e@uqnoKu 
zvJhDG$P9Chu>D2W%Tr?(?IKt)J>?AZI3?-d5bSpe!0`8tmY35AhIK$dlWKVF zz>;ylkH{P^FcKf#5S2?-uT$Ug-*t%hnLjIw)6caFe}DWvZ=DS)^!oVgc>nKb)pLe$ z)4oh{?)2T773=g1f47pLv65HFl7R?6F*zG20X(471{yZ^6C%Kf<~UZOfa8SxqK zSl^{h`jUg|8Q^ z``~!tUE-Mp;iEDQ!*zD0d2WzpT8*CV7ti1@zaX17{kt!%RNyTFTK+O$C(qv?`8PY_ z1$1x_WUrjt~T@&Ou*>JsgAR!-hx zotbsHmc^M_?m3;A&20GbVI|Fkt%n4p#F#8~l2Xz&>&?NrA1c9FwN{f-^+B3@G${zRTk@qKR7+{Wm8;)Y zb4YxVzGJtyp(p2(2xm=!L7^T4WC{Y||0Yn>WA_$wH2UoMjvud-+dG_~Jhh{bF(+P^ zW*xbDZZ`+1)|Z|I)?S?X^LTYmkTzP0RvlT5*Jyk`JbP;?+pHX;aqN*-$h%JNFvrV> z>pHGjbS`$M8KKt1Ln)Gq2yaRCRL%(3j-BSFE@XQ9!G{(u=8@>txM+_!BwJMOf*d!+ z@1}Y`Si0Pwq3Jc}wCC{gwIrP_BF3({Uf7F?q<6G;}#9G;0kX~si?{yjBhyETTr|oP)->&;w z)%ZnoWId4dqwGsd%zw*s#O+Dw|jjht!>z-)uWP!Om}$q*XdHmFR;v z#x1tKe8~wV?Ad45rYX-i*{*A_ANHGNKA1UA(mie3-Z08++yI;@8PhvMTsIKV!j4JT zvU~T@HuJteC%M5Mw@fJK_K1NwY=0~6-^EsoiJ_pT~vS{t}hnc+3jUirEH9DmY z$0r>TuKw(8uaLzq<`{ZGkPfSNbB#f`c;nI#Q){r%`A4}2_e$5yNYxmz`{1K3Hz?)vhXt#6IqbGp zxTr=G88!5{gJH#ZPD!6s#%?@ayx`o`Ba`dCFlt&Rk=fK87Y1+t#yYP@(vd?-m1oFxl2A~OctP&untm*inaXNWz? zQOWM*tYy0>J-krZrLWCMmUSIC;Ip?avKX0SnO4-}XIX*(pN8AxMX-Qx<(Zc=*m~B< zYIPyp)bHc%>}>ZrCYKq?Iq6K3i7*EwUnb`We$B9_l+(}u%M<2US?0HcTaKj*>p~L6 z%yKiUUCYE0r~XM(^#zMmd;NC{V_mNjl%#^B_i={!zdL1C>%A?Tp!k$0aMUC}P>xRYajfHl5(A zwD(1haS1tl#&dGQm!e#kck23yL}<57^oFsKL$hBaH{TC6nU8|`+P2Cse$D_UbMMT~ z5NDSQG%@M3$KfrTGdq*px=6Ro&IqZ?y(a%ef3TrvxgPFY(Z($)>;AFiyy@`Dkv%A! 
znP&G7X3MVg+riCEr+&)4=XJPf<31ygGdlxvUEwl4m;XFjaQh6DQZ8pe6Zih=pMq|m zikBF0h$`2~f-i;zc&D-3IF~fqT1R|z89e-!&)6F`EA~OJwCSRpB8-rIE>t%U0o}YU zY&zqI)A?Vvw-is^#mRF7Z0F)q`8$)?Y`fwZQ;XSLGmDt5A)EbH>v~U~^wt2VnvQZ+ zBC`^}RJqK|>g>Wa|0XN-{z!tdIV0@O%Mz1~Q8VpjbKt4+^SL&syWWm7GG*LI^SdnD zvpIuZu@R=KGQG`N`M{?0jQ=cJLiNV|0kLH##`6+};krdkIx0N8vvJb4GR^Pk4^fQW zGV|Rim3t|181^JrC?Gr}bt%;8{uvNb**K(1`hL%Ky^H)54+xFam1WiKwY#V&E6}Hbi z?%cC>H-Zf18(Z8SSEW#H`uh+hbjZD2V%qdHH;erpFL6uSBiS{Io>9Rap_xK2Dxe-!M~5 z8+7*0?yo{y8$L$R95-Zm9(2<4hRn%>0@2Y4H3M$V&g-mYN3XT>;$7gjWUWBU3b}0# zBas(xo=ZCPrapON&EMs2%K{|*Ya*e;e62!xw=K(?BNz!?3GidErnlt`5@DT?NQXq{ zl4Wau-|uxbB&c4^=0MA5#QYobYr$);@UaHMv63Ypo#|RjvrD(lFfPx=>4QgGk<*!u zOFaa5_P3kL{bof}G|8m?)4E5BA!n5Ly2K*j-|w!u6dhGJMb1HsK3#`^RxlA+C1rX4 zI3IOzU2^0n-lb9s!)_JQ?k>tN_ska5qd?YnYiG9OIUdypE!E@FeY2_7yIn83Qi@$K zNkH2NUOa=GpISxHsS8l6i8dr7jyqDW02|on56m*B$#bA|rL#pk5|bvvV!PHuGxZ{# z$=hX{w(S8v1k@X+<^#h{iqqtw{6sST9gx*2yX>$unQr%u)5kLP-bvVd3D<;2ro>+@ zF4m~FzrQyUR-1n!o&}wq6S?8;p2#l9w1OzLT@}#)@NpziLrx2LbhD!6`#8~@-&bd{rCY~1oQ!1g1UZ{%>lIG zg1^eK0Gf8G-{dXV=C00L3Keo@<=eeA3z4%aEEUtErNLLSr908S@MlD#6D_)aBGCx2 z;rbXZS(`?Z*_Bdy~+rj5}%_>5;BUCT~<7gg3M zy73fN-WReokl{Y(-U%0cM!N7)cWXZu-h204GR&6MN8Bw;?6FK>gLp{@%yoADIZj?ap=srhx9=M+Fk z74r8?B0`velBJW8AyjTEyX*{MeV4;{vQ_*;on7pi95Oc4x&DKLx)8qc`*|2sgfW9! 
z=Txlr%7y>inUT?158I`51EMQBFz2p6KbA;gjgG znBfyGt%{LAqNiJifN^T&XU`QY>x(e}wTw0U{fttNa&?_FZ@h6_WC$8(OS*`sAsdHn zUi3xmKGwNd{jz5Q{IjHA6(7Ppe>3x*=;@geKGCBx!^b#R^O7mWIm;U>vx?Iu%wO}k z=BBn8)5_uK+BbV$-=i~_H3}MgQ9sv81axhXCcSNRKc5lXjZVPwfY%#HPzjc@dnCF9 z@qH!RO0Z;4nMV9eQhL8MHe-L(+_rsh^&`_u8NtG}VdaxLC7tulV438zS}dviNw!if z$M;xCb?{oBYGT=%hgi}!0>;S!DcK@)=gY8c4V5rI#{U@ zSBi|%wk%oq%_xJ*Iy;#^^zbtt3@hub(W#C5od3CZOuSDCUs+I|Z0?XPwHRL9v$nC>GkzaqWXI38U%FMG zHyk4(9ft=t3C=QUXzgP}WG&Y&N-P50UEf{784(ujGMic9n(fE-|LLf%Z>&vaIeave zanDj@%$Q`{SbVIJ{3|I{ku22mm&O&*)DDtT!Hc!4ta>=*W+;MMp%j`-?ytz8moSsh z3Hl#0sY%cjE=svij7&EBguyd?R$2WC8ec0{aP{Ar%}g-GyJ@j)``@%#2?iJSDBVxe zRU*LyQlJt|zoE3Mgf0@oyzO}VO$b@QJZ0>-p%#_->B#t)T<$ZjQUr_J4| zY_z#|)V^)A1XaPeVlJswg+84~a@IbtMRS-kN7)X8ne=BwwC=C%G>lULU_nm4MRZhx%?gH59S+!$=`NkGm2 zp-%_f&A#nx5A>CdHCcL=phILJza@5*YLzq1ggYc^RR>+>R!vlA49&(ETP#6`K2htV zzGBq%|Inc$^f2wVbdEru*3sJgbMjF43AJ%!%6$s2PXd}ZGdQO&&YFdL8Qxfn=}MaW z8P>zO!S;+nvbZ*N$V0qe)~10ZcqZ6*v^aYm=Sc0J%j-B}TopphMa#e(Cx?A;H``+h zZVegc2q2B>I1N$}#OR_-s!M%G;H&lRo3&z#oFfWw8-{3% z-m^mvA;6LU6_smAaysy&&39(?t>|83;v>2wsyhGMRD_=8_HJ~#>Ca92M#tx;V~A9$ zNA2egGrxIlTN`;KYD(BPBv^}T`!JKz*@0%_>!0WiJ>x;`B#Y!mJ=VmDE)oohM3*jx zkGaS&f0yzYp&Kc_yLm<&L ziQ!|eJj~?G+j=<+_e9q!h7aqfS`8R&5?vA+65LQ~Kz3?`+l%F|uk-LrnGW~>YSY-{3N4OxmXsssV^NI%$IhWObcrA%Y)b2TV$es5hCS92DYxW>*L=7^Lj8w;7lMlqf5gZ|?v zW<0=P3}k+`KV%BSp>C1ZHld^VSCnfy_yt##zneH~x%yNz2eB$^->vcAQqj|BX~u$n zn6zn%*2W>A8`d=?X7#S`_iLiZdS{J?3~`y>_d$smqOu_t_>J2)CJuz7if;qUv2&A7##C3EaA>-J>N8ZQ_0i4<){Bd&@>W|HbrJ3RHs zei5vV-I<8Ae74U&@WW5{md%Uz86v})QSd$$Wll3^!4&RApEc=|f8$P(cLN<%|9P9} zNi}l;@(NRzg;fu>mLc8&eT}{vqC2*v)y*+{Sa4vr!x@w0jMtoX&u^V{ZqD2>M|L!K zhJ-b!Wma>@uA?{aEsA5sSCHcg{p(=)`^WQxluc?`n$65@z;(8w~7J;z65#}O0YDnwlhwgeV(Gxw+1IfdA zd~|FZ;yKa3^Q{&%+EC*aIr_Wu$1NDYjZxXpyu0j^M4v~}@z~nC8+T7nif3!5PnKdH zJ=U8yhyPmOd2{TpW0O#fbtl%iRcqd0{U2}FO7>RV(>8AdO37Bv;_jJ3ZwZ>Wnb^s# zYDEqc-l{h5=NdWsx0}^W)&Fv{T79@#-MTf?o;hC`xp4j4J>f8qlCrKf%1CgxvbtPv z{n0&aANiz>GfT=F`sGCa4tA2RZJgz!f309nJD;80^m^o=pTk~ronC9A$HhuFItQnU 
zVmQ&+WY?2C-_CfRJSq)8a8AdyXPbIH`R7zQ*Rfyk$iDqLdYq_O_RCjQTO3?v`F}fM z>+2(R>#dCATBo(!{YOTn>rnm5A+B{$UeT?^#7M&#v z7}I9RL>&jt&X^A9h|>47smx>277pz_+D_9_3PBDZ;!$qBKt=c?%1<^uMQpis)Jt3 zwX8|AOZjn0vr3kW&b$(H!P!D4Ty!3YoARSG)GjdxovGrg9B@`k9vAzKGdfMG4pE&u z_Um7`Q^)@8Wy#2-{<7(Uvu<4VADmTeaYrsY&$pJN-#fEOj*Urk#LX^|H2G&Kd&ea; o+QpzaS-mxBu8hy#Zg2X_yYG*$w%iZLC6!Vm9eE`AwxnzS9~+s99smFU delta 78486 zcmeFadz_Bd;{SbLGj7c#6C*-}gpd%@j3#qOIgC@LQdFujh8boIbBL*}85J3&ny$3y ztWr9tEeVxMrHoQdM=G^-q_f=?+NtOLxz@ThyRW^U@BY1h&+GO4;eNU2vp(;&u617P zI?O${w|+eG@vn}&v0LXxi$8Ai_2NsWKmB$0mam!2*}I4B>3-7fpO!UGJLj^D(JyQ~ zc18V&UPG$SYm;zK-|891_)jH9A`K&v$|I^5H*dM5UL?{Ob2FNPP9?HA+O@j2P1ZJ! zJq)uM_0hi^5{WcG3-b$eyN(|hDa1FzpO$3PWtWss$(fvAx`Fh{w6v^v%J`z`ybJTE z6h$J(2K~I|oefze6MGrj3hjuhKD|(tQd(S8HZd;}IW?H&HTO0Li@el|Q#qii(xT1K zMraFk4+pBcle0^6XVA=_sgCma7Op&NFsk7zROblvvg!GyIpkeRTUEp4CN`f4sydB^ zAB~PYJQ6tu-HZ}dQQ3w-55O5DP({{~u?pycC#rG{xv8sK(0{a}@;FLU#m4iaE&2nh z3S^+ip{*QOB6Y%tM0P+=I5HC9U*)0v&~Z=0kBL-7D*t*^B*MSS&Qwo5Fdx$nEk#?R zACij>d=6C$?sa+{N*R@9PDi8ss|?7v6WRh*#m7*Z_yAN5{E+6Whp#`D9<3lSK6hN% zM7p@JgdeC%iK1Oa`hz6I2ENbW9}DMxN%W0oS4`ASb7&Y)UD;MUUyY zd+8SC*PvtA<(1{AdNL2KpqDG-WBX4MYD|A{2jrF%h83Dr!aO6ClRf3qY${oLyv<+} zsIP~S&1;NmI9kOuI}TVs_a?Rd)woso!?|6X!qeW%&)Ly7_8U`D`Fw`ZTIm|JC{5 zn7vAOujAQMi;Ko(M~;ttPlT3&IoVe47gXc%tTo$c;%!YP(M;kv3NshW^#dP&%mk^M)JUc9vUBua@yrj|`9&nX(8OLfAb-ATNL z_}No!eogz@PI=vx^8%{5@-y6LU#T3NVOx6&s>*-u4wza}5SF|u$gkhMv*EB~h`kh5 zll%3v)y~FiO^rroiBHOSU<(m2SC|j#e=i2ly z<27Ed;nn{apJ(|{RO8bR)i`uP)!$E%UilX1X6KZ4&95l=D?kYxD7hW410SP64eULr z3drw~HYImPX=L2_Hoox%mfuLc`n?!cKJUXd$ERkO=Jli>Cc1o{ztC22N?BpolA_|$ z)?;=2Ns-EjiDA3amhnMsxCVbBptfBK0TR6V0AUHeF6hN!RIlM`xG5L4*n# zUSJO#S5#CmF1sYRQ*KavXt#>53T=gtqTTA(CaChC+I3o4Zdq>R1iD4O$Y~o?>Eh?3 zkEhsr*XKAj{GSY_Cd8zYuI1T<1(A41{pk27sAlTxR{K{LP0gK>O(UN#wj;9wRRM87 zf!$LF+yj^1f~ugx@x9e=kzPT+hHWb5Le)Kc%53p7@LC{Mm)fHyx}&FagsPWKK)J-d za%zN6V>eK5bw}0g_NdaAdbV~G3bIR;_Eb)xieGD+kx{TxlP64I5kPRXOAn$y>YHh{iw?YUQ(JnwPcC& zH=^n)crh26krH?OBo|*;RG6zvnXz~c+z3=%d5TNd#rapR4clCyjPeV!C+2!YsM)Pi 
z9r*cm_P_>?|4cJf(Qlmo&1tvmZ705iSH)gLH8T#s!H)S&cy&`5+6uh@Rc8-F8%55J zRQ4v&95AV*XiCq*X_1E2w!hoMmC@~Z^?6x=x7E5gsBYA}qVz7?h-_46$l<6aMY`i1op0*m6CD5k zPMdBsdIafTa()@A&NGf*jVj-%F8(4^oi@b!iprA+D1)}B5;jDep?mJI1$>Gs{&nY{ zL6yPYz&7J6RKDhZo9;$b!+zfbmS2gg-zK6u1FvxDu0oS_^Kd%>)%ZCw>V#g8YF=sh zdq+6;>4 z(snV$W*20S&%GP334R@_Ia7%?L>H32H97~?&=iwzGxQi#D{T=yd>p#dqdtl_;VGNJ z2SJY}%_}B9W8KtzZ73y?!pmqRufA*Tz?A0YPRXB89_i`)`J`6|$L}p3e%7YrzB+$u zc1cORIe)&_xQBGwU^1V#{dp=%b0$@`S!W~u-9W_S|9raoesMJwD?zT>pU>9bD% z^0lq#4_~v(so+lm%J_X$2NpTs>|0y0@?cu?K@}z6yMaL)kgA+m6@Lj{8o%b8S~iuL z6^Xq6gH2yrI8}CkKX0MGmHrh}Q@7nto9r?6mp>2#X~@z+U4Y4i$g#jma6*HH0mtN2A;$GhwVj9<*fFXG}CYwjX#|FxLA2J2rLzgmo66~?df;umr8 ztIGIQWc;cze)acE(Br5!J??Ahg?rxuR1+`$nCMKrCeji0Y^zIh$_g~n1A|#dwW&CT zSk0370Y}IHs&#{s>1tt)t!}*qgMU9Qcfdx^;yw`-0+eA?XA3UZyldoQmQ^Fr;k+Ee5)0eV;s=+ zU~x|_MAq4%iC^-(-o|!b{L-g*!uT9@-VnI@WXCa9<49~eP})gxcDx9Q5W%PuRRLK8b!e-WyDc)Sp9 zR`HX=)H1uB&O6Tb@c1I0#0}ttl2bH=>#*!dKQdO=mvbq>E)n^rqwTtnP<8#As4A9I zkYAQhd^wN25T3vU54CRHIlSs8I)2gr?`B_7u(NgZifPOlWf~vEr-}&Mj zReT4G@1*e!F20l6yHw_9e2e|_ZE8+_*Ze6HiXu5ZST(#F_q8jlIHynO$1)%*hbO=vqbKK;V!#%0$2`8Kk@JMA&ERYH8*VZMhMukB}t z=G*>Wcr*5v(`V2lNPaJ>0_UKrK%w)a&=&X%R2QtrIo||5LRUFS1hmwDr2s8}2l@rS zv^~zR9&D%ZoS^fuZM=3t=CR$ge;8`l%*RgW4zs>J^*xS5Ck?l|z(uIC8icCM`1FiE z?56Sk_DmeVBaYv?H(jyi*x!>@oOZ&9LzsAJv#8nNaRB05WRaZH|dVW`t!`SEMCj)D2!h>oro z)SeZtRoGrwFIZFM?{x|M?3jPW@$tz~oyg?C^(TdAst;iubYaB=$ph2LIGbY%ahKxKh)W1G zVIFlZF{pkrEy)?%VXet8BTQJ!%)St3yH8u<9YJPp%>MwU6`7X1r8 zBS;>S?mtCHb%};YwdGu@?oSA7k&WY@+r)k%sQkSk<;+z73`WEbRLX=@|9+g}>W8^} z>zvKA6WMF!CWd)lfm2T;h3*;WXvev!{x3KUBNK`4JhgYc&uDS?`hvG#@V@|<@8sacG2+knegi=!Mv#%e!p}c+5!)?U5ZnZLqfL^r@Gtj z{spHxux(OoPf8iMKW>I|wzD<_nZ+^x7_zk!Kr{LhoT?cX?my(>SepY=6MpX-3@OQo zj-sml!%j1V+=5~c!zdrl;B4|o1(~HWFEgkrjrrHa?4+_2eGSgmPp8nGICat?;pv9s z6-9AbsovP2rYz=H!}4Hu73{@nwCymR)z5Z{ox3;S&LYlsLqjHv)(cze;MC~(xKo4V zOVa%p3E9Q1aoFozc#iSApB^7%&XN9PoEl^=V;&BwX2ksMFy(4b^2gG6bv$KEO!d#g zxsGRhnHN-*bG-o5dZJUZQvL676i8z!n+vDR%VJ(dP<2_%e|&)Lc2+-UT?R}IVF#s( 
z({UqhA)JF>392sVWDc`aM17buC_W5I{{T)?i|ZWXI?`L`*rPcmULMr^CFZ{h8x5l{ zM!a2SJcah!=|R=Zn7@6PE8`Db?F6J z<5T@5I89``tL)w%mqK0xm?v5oj1sQexv+bpAK^|8lCMwq&%D^SxPfYuaNWhh#+x$y z8X#wU4ai?o{oOcQMa`=&m)K8zGCmh_v4G=zs`Ys-XDnP;q{-q+Ckba8T3w#%_sg!^ zjnz0Rz$yL}Lg6t%?S*Oe#*ulDoRjXS5mFV%n4#9E2i=1DCQLLe7(U@tZ(K0smW+h1 zIl;VJGW>}-b|jhpERBb7ngI0Eu+)SvbAllzBca{+U>S@JlAzl6|f5gq;(E zAqz7Sj?W9`EzIx>^X%cYk~TkxJ2C7Jt(Pr04N_7#WxD6vtu!2He)> z;c!1rs6VN~OK$%kIIaO$i(0hZr$!=NCK^5%YE9|yl_n!B0Cy;_}zbDm83^E^x`NJyWIcX6YoLWN) z#(EpB2adK)P4y4EGTu@xjnO!^K6*iY^%`z;+LZ<}t2c-M2 z6VlRS8r+iVH@MoKz-iOCRPW3n^P!k`b5QkAEczia?5YFPqp{iK9L&EZ-8Y2XswquP zsGS|mdpN^yF~@clTgYWBES&3ps`x0bKZVg4HrF38g41fTv+_Nh%ER56>L<^0C4`sYLvc1s=7Tpo$oy-}e+s7NaQ(=s^jF+K99OPf z#`O>U+P%vuZ#UEoLU&pifE1Eg8BEPN88=N znh7>dOy>r<=80JJAv~8ZgVPg!yCE2|GQ%HsWBkCBOH%#oa6`z(j@)~=lW}Y^ER`dx z?HJ?ehKq1^sc@?GmIpOY#{AtdRf2^OKDNnR#e<=n>_P~4@`TrJ3O26F@R9?6b<7_a z*!9ftkzw1L_ph_ERDUWijjWijte91Sza|!a=9Wn0f*^TKdbGLWf-($EC1lyFgw76G z>xF!>E(~2xC_fB+PAEGJ4OkS9y@yb4$PT$R61h0o^vtRC!hnicPsnC+>}`q^n?=YL z`vIZz#QYw&+lj`tGgsXOLFQT}3``RS#`gXzj$0Pa<^%7reZe4c>(AtRHWr<8muB=h%dM;%o?^5g}Vh$i{w7 z$mMcxJY6v%TiTQR*>8mG5ku~aA8{igo&r(E2ZU_7$1aJdn?lIdg^-HGkfMd61FPe-?!|5!RmV_CJ31^T06V9%Bok%i*%vX7;_E)=ml44+L z^krObFn>K4y~|aY>V7{VTUa%_!{+6|yw@^#SYPuR&3)W1=kQrm!q1NfLpEjjqgF7E zw#>6q6K-1(G?F^i6Str%Qln+KDN2>_*_vR;TN%;Qo{B^U1;gJ;*KaG{iuqlhj`v&j zj8u=?tGC13@wa3Co6qdO7MhqEt-qGjaF9GD-5*RyJy1(p2w~AoJar_heA@E{|%TvkhdwX7?I|OD99_o7gI^#GQikO?3j7`Z(J*o!b+h zw{f&|d}?$IuD>pA{o5S37tWdM;>$wk{Ht)PE}JOl{B^kgIA+&B(vn_?&yN(Ysbe@5 zOeQ2OcX3Q$+)C%{gT9@((@1G=`(iKJ0=T&3$?r^@Cam4}*SR?RO!ar%VB+k^pY>9_ zVk+TA=XeH0U3TN#B4ELEUhkZ#?o5~9Y?r8ckK+1=dCL8WvkQcqF2B>uwna3ZJ+m;V z`iP4Z*aYpU(SEOJFW{NFKa8U9{P3ZpPV7|4l+C zhKaK1qR6Xu!Phgj4d@1(UGQr7^*GhS*6A&rEs7J1-~2UO4VuRk&BEE*bE5Tcz$s<; zG|JzCQ!ZSeY)MPnWDm19N|!hnUNU$~0{^p^{~kNR95p@gRiXbevjiZzjUM&!2=-GjYt(`*CiTu!6t+7uWtR+XV?Gs~f*l zz@-J72B!OW6Y53|_EP?JTtDkrvW?#6jE-Xp&<_)Esvf;kKv}qvq0{5RpK)3N>~>tr zWdv0_V*Z2g#J7o*ms6uZ;kX@om3`)2JNV=}CM^jwhyz(xJQaN|sQEe;{RPIg`q$}x 
z|M&JU7q+TW9KX?{(oYfU%TWv(k8XFr7Yz9}!=L`X?R5JKgO#{pB;mS(mNefS-}G?_ zcWw@be3ubjvqe`&!|zP@8-5T!R99WeAKEdp_bI32dWIQsMe3CWRo`=U4%66EKBK)0 zr`bgt{u2K7@rRf{<0CtKY!J-Gqqf?oE)jK4bPkS7fMHzT5Mna%^e5@#_&uU#{mD4x zXs75LoI0N0ZnF5lz>UD!0qggPt+`zTt8p4WR`uJde&XNaQ$>$cdf@C-(LSZN9`1x-)92~_IzlR(hTol5@6&J+fd>)F0CVHY z#G8%lj%yg`@;UW0JHqy!^m?2oioNxI52x(xBggii+g2xrQ#1)TButsY!DD^~cdoH>M?hX`}4(?K6VR-LYr|%=Y$LLU!`1N0Yy@J!5ye zOq?dYy|cL+r~2BF*pAb<+sB~CZjWzunlShb={{po0sZ#%#U{Nv*P%D-U!@?unxM~xE*K5hLj2K{b=TSUWVUor#;@@ z<`v+SCcKS~uEU)c*3A36_8@-q7#&0?Eo^TEp+Tgjtj|*YH*t2I>3M6zpRBX(&BdjM zHRgmIy&K0PBc2R@O=z6u{7&y5wv+8sgd1?-5Sr>f3+cZd`15e&#hn5cHvarNdC3*ag2xw;@JGKAIA4#}yIl*N5B)M>} z-OOn(gL6MFoWJ2&=I=P$E}dn%{bDB;D~HU+O!1OtenQ!s}|wZRpI%^--Oe7g40AdjSLIc1Re5Qe0$TBycyRsJcg4@!ppyz zjfau%9_R_saPfKFOlBjxYxeJUE|Wcj`!>!dQPIES?7*iiOZ9t2yl{Om!0bo`xNh;x zqPOCh1|!lFUXPdtDPD%x+GM7X{oQ<`*w#e#dJCteOh=ZdMw1de7U=K`()}|Csf{el z>r?%|;M6j^^}XgC^NnkPqm6&K7fYO&=moD1>+3&~=(#~t?Z3vk!l?bx^{iuLSUDqb zCz2@}BTrect!L&nrAuCbYCRo86j$$~U=0ncs z0@)b2XiG_^urP^s#q)5za7jV+!n7oe)*Yk9X8IcL6r6o{SN|{uCfqa92#GV&tiWQN zh6wjHD-fp%#(LbH8hsOYvYDUcrF->FW@|4N9o)#%)dj!IyOoe~2^U@T9UQlNoXL)8 zY^!I_g_q-8jqXiL!ko_`;iU4b_-w76mU@;@gR^r&TU4}3SXr`5C8P@R9GhR5tip{9 z8*#|twit@1sl#zvLAKCxoK_IWaK>DQQ#p2Ge}+2)XP09~Q*|sIJh7>r(Dv{pI90{& zoj>4ofOWl^+05+SWj;!N#HKJAA52r5Yrj$FXdvPPe z$7nkVsY~s7vI`&c9=zX`;Ovg4jpsd_Te`Tz_<*-^=XRL&I18uqq|N0GoCbjP$o*AH zOE270?Rh*4r(A5)s$87CN~pp8QI|wMg04OcPcwcW+#knH!wuVC-rG1GXY)+vBkC%h zQQ{*P=bOyV^!6g4_Iz8xFL7#^{oKk4M~4H?lbDMLsTsC9kKn?2WUBd8VG_=s-KzP7 zA0KB|iS|5^htt?I+83lIENf-vbzxcUhN_3S!e&NvZf$#(>w@djlJ*;IzH9z)Oe>Gi zohvgIS81w$4o(Gy>&d?!r#je9{}ktTPlli;A83ye$FEn(Wg5<{Fm5y!<21YMukn6z z&OXWL-qtR3`{9wUQajs) z;WfNB+SHuLSOh@ZlbTj<}+p_^rV}+bg-Kd zl-;Yu)byl?CxGfU&kh~Vbp{>P`nY(3nz`rW6lW)f!Kpx2(98Vx6W1HpIM7+~u#UF* zq+|~~AEyf{_LT`~Nf>R2jl;$8E6(14FuKu!otSI715dcFliApt8gGKwrK5%UJ5Cj# zkNM?e_s(AUnL~R^u>fZ`75X?@BNsj~+($?`aQVyp>3+N&2|FuFaDOb6hjH4?fv;W@yYU}{cbq;7H0R&WDO{ckgC(9MpNjT?wlqq$n9TW8?7p60&e zSwiV>`f8y-a)`rlhm)(Z!BDe-;w*YniI@}+_yVu3pM`*qHV+Zw~{j5tF 
zP7BNauDu3#CXObuA++cn-&>XE2%M%xSYE=Ny-kDDxipx6T=OH^9+i=}+ zjQ8@?=rR1U#o+Kfnny?rjcte#yalK6q__Dk#XC5SHD`k2RKI?P7s(29(57}CZdB+v zF8T;=xGt0v_GXxQL)k3&FkECj@ys@Uzxo4C8%;`h`F!kYUPMoAnuRxSIXIOPu9k!a zruoDKw_icWQ8J&USF1j?`|BjH>NBE%JOEvykEYiCNGwfkT8jde<8V+_P zT>DIWreu@hH$yqbKii8Xyp(B%oK1h7HpGiu5FSWm!fiv$MrgvvLrjBnh;1{}3-8!D z9kX>$z^S(3r*;z-4>cP}<3)@=iml}glZhlO8fNB=;>fMTyvU@m3fhQg3}<@T3Nb&P z!Pyg{X3MX*bHf_w;`po)@z&}0X4m2DX}0>K)P&bYm?7s;Vv?ykkL_*9nRJPne;(W0 zmBt_KrC0Qggtw)UdMFbm;_+sK2$hpp_>v~_rnkb^fmFxwpdymMJCnCOXSnbs)$zlv z4ON~E<@himb3J@Xb>Lan{;Qg`qrK)8s^@vU58=(k4C_4(Rk`DN>+oFOdi^)5{0ew0 z|Eau{&oth8)zOMSga6@GkrLk0GTzGQQr>#iRdql)?pTk?nU&C7! zyHeT^VUnM3>NTKfp_p${9U|t{N22xyqEBnzu)+x?N<#~@>WJ`oqrzH>v`TvxQ=&Y-nG0{@VmVAlFDy!y4C46RQZ1H{B~4B!h^T4 zqB|Y0*hQcrZ;w(^&?YGVB8TgTQw6y_3SV_qE#_V(EQtGvup+ITav2-G>Z%&ZWmK4s zs}3Dk5#h`qzWy6MiX+$v!h%_zp=P;!qzazpyi~!nov*9D#{WVhG$!L*g1V{}j)yBM z*TqW}oZ!4v!HG`uoaUp-w@8sr6`aZs!THyEsj~!kw z0VOa_7r6vd1#fk}u6kzeSg*PHXsnl%sfDqaG|J;%m-c_7N_wA5FZE3FMP8c<^~q9) zr3yaC4;8b_@&BYs@~BHEResBzmn!(U^L16#e?o(z7lHk%f>+~};3=oiph~jV`R8PK zNwp)rR~`iuzJNoGRmQ zUHlJ@OT~9NUssj??g|&N`#-2E_N$ARD!9-2|D-D5cb85Y1_kv(FAxP=FGjQ_(?vD!76lYMH*JQ~nu#XqLYq<5U%R1+V<{EuVTxLi~lQ&7V{a%Y`eS2`*l$V7~MDi42Gm zOae$Jy9g;ag^@d)mn!~F=cStb_c$-rNIl@ZR8!_rrz=rSnWvquL-ksx6Os;m8C8wn za0%W;wa;vI@t->V3RTA6JO2}^<9##lX+;MDI-rXK zCpzto>YB12s+Uw53`CWF7^;Ftp{me@sCsBDs)BPJFFJ7B{9kGS0s z|C_Dom<2M&C)p5V0;$D>U8xd%%T>p*gqV@($@KQsLJf<`0=QY>w@aFUn^Y12~NAZ1a(yz^>AFO0(&`MS5<+N9hb`YK{a_} zs48%p(|#!bA{7H27$nC_sse|i%3y@!|Bb4GBi(UQ`Lmp!N->f-he9MjZUjk zy`+l2+3^LAOT`yDe;`%GZgug}L|tp%K|lrFsR*-d690ij#H^g;bqueT?{%^FDVCR1 z+1&5ERDLO{fqTSpsraMLOBMe(s?t_C{vXtyoL9PpQXTlD(^XDayZ8gC(ywvxQu(J) zq30Zzia(F4!Y?@gV$?J!@ETNzt#|BYRImS}I*GjQ(!b&KO;q{3<&Lkb;_oP_)zFFVIH%|S8T^Y^L3!=~ zsfMu-Rm-QLiZ6EYb=5O7r+Cf72Ww?6`asH)5R!G)l)l2{rtwg&mF}>AQ4=ll+L)t@ zc($z{;o7;%C6uatvz?bJqdCq?<*#vGs^(nh{DD;I=eu~Rf`Rj>J-Z7i;mt0huBx9F zI{u$jN$zy%q-x0BsPbLx`2SQUaDvI5%1)}O{E$m0)$xy@YT$C`SGaho>b1)G|BWjB 
z(=NSK`LA^ztq3dntOHUV@PhMFRd7A32EB}`Vz0V*srYM7H=$Yy?>PT1s@K0)I}l+4 zW%M2gsDKaMfpt~!AHj9;za3S@zH#ZKx{^##>Hm#Z{I>`EUrt|fpc*IBf<_dip*Y+X zcpz1eGfC#rNOIbK(Fm(tg9 zsq)D{6U~AWmi;3oZp9C9aRX81HQ4z~R4=KH9pZGT<5G3TnU0Tiysj$#9LJ@q(^%)x zNmvV1z(p?NK&tqQUA$Bk&30a@_;Jol<#U{lM}0H1l!wH(mwJuE)jiFn{okmj^K{Z{ z1TS+s(;a^x)mpidc-8T0RCSr{bj~5HZbi&>pbFJXs)X~LKalFU>xh?L@6y#(_4Z9J zpTMPC;L=Hz|1F2O)qRTt#wA?j{H>^7byYoZH(XH1csyWchd8z!#&Pz4cr=Y6HAjkhpt@w`zs>Xv|#!~rA=j*EY5V#tU z<>C*dN_Upa=jQwMreoNrB7C9vpM+52zAK8Pe#~h6$d{Vp+%t? zaqyE72R|7RIrzzlceET1elmhjNN_)?ec|9IBMyEt;y|B>U}}V)n9y^WgP)A}Z$7}F zp8H>#6Q1BE;pzhoX8ucc4#1D4qDhPe#xY z2l_;WI!OHBCnM+xePV)t2R|8c@RJb<`i&8<|5guvGD2JO!B0le3Hs!Oc9ZF-u8=Qt zdZ1545FHWMMfH`a9<3kzWW@eYM9?F8J$&$!5eGjR(d6JKBmUDzDYX6m|L>C#9kc=d zzkV`e)!y5^m&Tdi^%90INJ>aH^9guu&6auz=Pg)`?;FLpGgZ-q^Gu!x_)egM=~Exj zA^~tmeLzRELtulz@IwHd&7wm9GZF#&1iF~aBtWNnfMrR5u4b>mW`VIjpu2g{2h5EE zk{bYem@y3iY4rhX1$r6(P{4M9!b1VQ%^HD)hX7hN1f-h$hJe9p$|iw6re!i5~Fzkqo#a1u)3$5ZE9vya^!FENTLnaTs8qz)+KUIG|G_z_P;u!_8iS%>rYa0?srK zHU-RW3`lMU$TDM^0n$5-_?MV5`8zrt^`2q$2?Hj|60!Edr|r`W^+y;hO^i zdCdXe3FMkSM*~{40NimjV4~R}ut8vWD?t7NjrEL{3CRlzv6C09!FD>5=vE%S&@5^N z*etM5>5KSAK)~Fi0LxkfrkTA0X-5ObwgHrwd=Ic)V3YDFGc6MU3tItZCIF_J8iB#B z0bLUT<)%Clut#94z~$z_Hh`sV0LjMyW|}d_07f4JSSwIr{I-Ckwt&L6fLUgZz-ob3 z#{#Z0`NsnCjs*eYD&R(sRLks z2f%!@MPRc)-{Sx`n5yFdbB_ajCs1wrbOfYz1l-XP5SSeT+XaSq0xU3#Isq1T0_+no zd_x{!aA&}>&VWT`ufU$p35S`Bj!zik-DVy<94tUU%cL&Vq z4%jO2u<3jvpwo$f`6mLFnJogF1^V^?JZ7qT0Os}pd?&En^yvvm>j}7{Ct!uyA+TLw zcrU<8v#1wfVK2ZwfmJ5+B*5U40LxAStTB59_6Usa4S3o-*c-64Hz4_Bz*;lrWWeZ? 
z0c!=GGkz){DHTwd3Rq{>2&@)pl?HgxWc`DIG9B9k9u40eG*Q9;YI2m@34~J(cM1P9?h5^yv$K%T!C= zHajHmn0^_^yJnH(J+o8tzTrQQKF`~17E896y@=TpBcrjWkev>kx# zFy)f3O|9e`(|I8Bt(hhH&TNrp0)5W{G%{6Z0p^|s_)Z|j^f?=lb~fOSvjK;j9Rk}0 zhMxmyW)__TSa=R#pFnfIunI7E6kypXKufb%V2{Aqa{))02hRm8Jr|IC9-x&Oa~@#y zd4RP7ZTQ|PK+05c{4wh9b2ohJf1O$5xJ2pDd*2y7PUn+G`4ROJEY<^jGF$TEHM z0crVwJMsZ%n;in%1%^)oj53QR0TxaI>=QW8WKISQo(xzv88F7|71$#%wg7N}d9VPm zv;dH-^Z8garVuc?5U^I@V&hK%BuxPnP61?_H3F*zS``6uOnwm{uL!V7AlI~<3TQDE zFmozkqNx$sAkcLhAm5Zv1I(BP*eWpDbS?&TDhA9i1{9ht0-FW;mH>)ORS95j3E(?{ zX{JvpAgvT|M=7Ah>=4*4FuV*g*ds7@I^c5i z;B>&!>44-JfSG2@48Z6afVBb@#xDmXl>-XP0kg~+fz<-7E(2U;@-GAAT?W`BFx#}e z9MIx&z|6}5*O(fC4FX;N0;n?Oe*w(+3t+3jwWjk-K&P32`7;6Y%@%>p0)4Ll++eD% z0L;At@SQ-l=~Dqns{q_l0SL?vf$ajrD*+44qDsKRO29q=V=`v}2G0U4n*~^8vaST| z5m?vtP{wa z4d^fju*6K8186Y^@UFlErtLL=4Fa>T0X%4G1!i0W=sg$ku$eU%&}lB<3xQ>(M-^bR zz=A5kV`iJc+$z9;d4T1ndLAHc9$=Ti3e)dez;=Np*8*0WodOH51&q25u*xjH4lwvS zKy*G}jmer1*dwq~;A!Js4_G=Mkb6B~tywNG`g%aK8vxIl>>B_{HvrZPtTQP$0#*x@ z-3WNmtP{w)5zwI;u-;6o2DGRKyesgEX?qi3gTU;Y05zsoV8%^=-T~lMGb;de3IJaS zY%)D=25c5sa5LZyvrS;`&42+50JWxi0U&JwV3)w#rr#}q?E*_~0laH=3M{+@Fv^lHEOs&9-I{>}!1bky= z-3jP)C*TW#?@W)o0GkCC+y(f-Y!jG!7hu5MfSsoLZa~`IfL#JVnSP4_+Xa>^2JA9B z1r{y_jJgM~+bq5ZF!&xo^j^STlXWj(kHAWSe;V&Tz|wmGx%UC~ndJhb?*lYjq6?4& zGj544K$bw(L%0A*xt}Df1 zxxnbh0L}giXk)Vf3P}1ZV7)+Fld>GJTA*w>pq*JKkhdJr;c-9*GwpFei^l=)3UoAW zR{%B$%w7TLY-$B&tN`?W0?@_GdIHet3BVTuT}_XbfXxC6Rsy=4Z31&w0tP$@=wYg# z1f)F)*d@@*^jihkF0f=3ptsp6uy7S%)M`MgS-cuBcr_rp2GGZ3tpV&2SSgThyr%$5 z*8p;#0`xV@0bYjjpGIOP8!<^w6TSXvqWhVYXW;!!zT|YXPI886xfU5&uwhF8ixWIVp)z+8P*7bm~X1T!V^?+tC z11>h%F9VWZ2CNszHYu+FRtuE90?0Az1oB=1bl3pMHPbc#T5JHkD=^WttpRKhm|X+N zH?;yYY5=`A0w$YT8v&g*0=^I^G(BDgY!+DXDxk=06PWudV8CmDX{P!$K-z17T>>Sh z-zLCzfhC&&WoD( z7a08}pjj=T!erM1l4=3#1!kF)w*adJ%H9H8W!4Ggy#?s-Hej}y_BNo!+kkfkt}$)j z0c;SM{SKhY)C$aa2hjUnz_n)9yMRvb0=^KKZ+g53*etN%J-`iSo50-n00Z6!RGaGe z0cr08b_oQg-)6vefhC&(3(QV|g_{ARwg8M-yah0L3n2OdV3EoC0I)}3rNC{*`w+17 z13>PFfIG}`fzclVntcSg%Vd89Ncsq{USP3F*$P-KP_`9tuURLMw-wOgW55zK?PEZT 
zj{)xrJYd>>0@xrh`xC%}rdD9aCxG6613YYI{SDCRZ-6fZmYE*g0GkCCYy&)Iwh7GL z1{m-uV7aOO6p;2QV3)uO)9*9Dc7Y|I0aluw0t-I_jQSj~$}IjIF!*yo^b5cmll28) zkHAWSr;Ya|VCfft+%ExZ&2oX!Ujmwa1$fS6e+5YT3b0;aok`gaSS?Vt9q^)ACy=)t z&|wE)y_vQH&|(MRU4d6j+phr|1ZIB?s4=wyGrk7&{s!=>ne`2z(>H)G1U8u--vTxZ zEch1ihS?@C_glb#?*O%?`a3|{cYs|2Z<~JK1GWn+`5y4D*(tE_d%&n40PmZ{KL7^* z0Eqqw*kZDN1nd!5De$53b^@0E2*}+D*lLywjNS=o_IJQ1Cj0Mzq`w2!3v4qfKLJ(? zl>G$w%&Zg0`w7tDAAm2+w0{6v`~&c=z*na2F2DwX*}DKcOs&9-U4Y&{1HLh{eg<^< z8SsU`cc#Z~z-EC3y8%C#Z31(50|x8?>@?MT0BL&wy99nR{q_R33oO|S*kyJCc&L&2 z3$oiRmh3TmC3{WQKapR|gOYz5?^onkGe)w{EJqT4OEeAkC7he^d!iYK@~u5<_7Uru zl;4m9lP^g$>m>C|%iocxnI@@kY9xm&Xq$MRm$aaqpZ*a`OVZG6ktCZQ z3CLllO47(|L(JTSMB15{7+y0Z0@4xzy95q5{pta>3oNMzXl8Z_EUX6@6$Lami=%+S zQ9!glpry&G57;BHQs5}#9RgTdACP+ppp{uJF!~Tcvm`(plbr-eN&>7GXlqh@>UXTk zm$WnM5R>N<-Jt=|9n7=_@Z(I4q@!tjDALK4OFElc$?>LhL!^tDB{{)tk#sdZlF4Xu zLo!;BOh(bA zZVZT~0Q#7$6u=&Vl>+I;YXVrB0?2Iw=xde>jBWyGb~qqrvJVF&9S&G8(9fha1*{e* zYYI5stP{v<3h2-bFu+V}258X?@UFlh)Ak6!27%c}05VPO5s5c=PnqoIi4R0O^85IP ztFqcBe&z+01M02%N6W;&)K5P1gz)P*Dlb4An;vH;b~le4lb94u>&jp98d6U*$Fxms z=-s%gYum(cJoe|7?GhhgTC8iAcrA2z`^2jfy-Qcsc1(OaYNobNyjz(!D9kP_EGo^5 z)LyhIqkH0&iP6$R{vvu+_W6m2dgjeu6!7`fNaQdTaCTUm8@ndn?dkZDp-t)Y#OLHUR%(Q`Y$?dE54NSb; zi{82^5;=4g^*A&^$)Ypg;Vb^kwEl^|uext&;@m?LN()W5vlDwppW-XGeMJ;0UD*3a zoV}Z`5pfNi{XzK61eLe$Hr?7Lwoaa&m!FeI^}6h#0F&J>akqadN%-^X%CM0SCttQN z{5HYLYta^eRCHX~l=AE;eBZ1Yer{q8S7OW0O+4HUbdR=)NBP;~$4$;Jb~Mx9ZIHiM3%~=A#@cbADGZ(ooI4!Ay-M)?<&GIi7hR4ulCCI5B@>F71n)ziN1H zVizyrvO6?89}VlCe0I#!*CA9+MjNS~4=~#%B_8AD8E-PPu;XAa(%9BN%>R#>cHXLE z3KNff>(K3e^-)YdyfUnwS)3C++&mSlcf9$OAI**qBltojeiy9YQtMa3txen0>K$Hj zp8K2lDwvMf$6!V~Hs2-JM`_M??0T3E*XKemP-0%yE}fEG02!(ITkpkztNv! 
zSw-Y#2lYA8KCX}jj_I3w;@>BBi(|>KuU&@5F?~JQKF1b1ra$=q&9Ox=ttov+U4mM} z>$Wf*f199hzD#oPc9*aTtN~1&rEf^p!OWJ(p)TFsjx~ifbWGozs&vhGudqWAxeum1 z^tFItw;@X$)7K9T2*0<0!2J&D8!_}pD%kKVSk-`*yq|IzK0<^FIg+=&HdrrxHLHU9 z9{Oy@{_676$RFz1a>rW1zE-VxRXpxsYfO!UD;#SBYvSVbx@*})$jOoQo* zv$P6!!qkO*c)t(RDoCOksto_>N~E!y{rZ|-F@0Hn6UQ1l#(zQ-Y3A5rFjb~6@8(-u#C};p=F}2IKlW*2=L=*Z{{` zJ2ph~Zx=?dHVzIY{Ig@npsLI;-n$(;)}woq?{o*dyM&`*sj#-FzRFp>Fot)9OV`t-J0Etu+bep()Q}5! zp8(SuKiM67A>pyEA!+W|v1F zB=x^aXn9WLz16Wx+_8DE=U{qe!_%VfUQm{ z;C(wxYj=WUg@g@EOLwAUQwZMxi=lar6%p>{^2>+uPyZ7S|Ko~-g)ZSVB6`C5qf;C! zCVa9>SL9d;>~A#b40Nhvy1M#O&E%!8=T@c5cDf&|0V-%_WFLep0W6uX_ zxs*9JgYa(NdR^*RIpMd$gCmjYj$H<8&&1R!m;uu;UC#SzH(4)-so=lx*0(rm($3Ve z8vmKRdpLN7%kTK9q-r`PMoFauHrF_rX-Ae+bec$(wFFzh; zcIM2?8JFF=Yq<@^z6iJGm_u;WgonTq%WatDHWat0F*xeq2+MI8_CdI53mt`)O}642jr{=C*+$EMisd*4M}6O< zw$Q1T+gR-VaMQ-4Z%J2W#zBAF^l!T5HXi!`%Wa0`HUYPRR#>szCgRq?lACF`QKpk{ z)HkGRBb#LjPR3r+5}a+hO~I{)YQWzd%WW$5UY6Tj+_c`OK^xq(ku9*~renW=o3@>W zmfH;MxpXws#O&in_%WW3+ky^g|EyYdics7i-+?HE%b8s7D zh5goYn~U36OD<{!j;i%MSV27v#$06y&d2_p<+j>#TY#H3LTymyi|5G^c72K9Fid?1 zJ>DLRpgC?MFxOgci?QpQLACzZnQl=YOSsTC!D_v($5C^#6b9j@^{Pv0jc^&b-#fd} za$AmDDdhBT6K>`~hP)KQn$n#f9sRttaIEw@$J zJK(1Ey2El?ja}b-u63x3O%1XJv}Ckuc3E!UV;_&47Wr<=Z7uc*mfIe8-(H8Kz7uj0 z=3WG`8lUAlOir}e4q1VJz^;jCsU5c5Hej!7mBJC+v;;RoJ=~UK9<$^&VR!$M@3^`pAX;>Jcd8u2|R^oa=fO)74;VD-=Mov-G#mf{Yi@MJ`;cgoRA1~ z^O+cQbE%ujX&;`0eH|P#MpcnLkzR(Z)!vGivgFrVj(J&Z> zB#@W29Cf2~+dUg9qhA%M3i=6}jj#zeLkw(zt)L$@X$@^azlE~^7K3hdn?f_t4Q_L2 z0j;1lw1xIi3Q9v5l(948e}$zSl!ppX5eh>QC<^*1qEHA!vD-A-g4h$m7myf|fbO1? 
zLkg+%H5F^aYKJ?B--Uaio9XND6C8v?pnK`8p!?^Xpu>^QAUfLUDnZxAwV@*DdP?6> z*bRC>Ptg6e?w|X^02m1T{36Pu6PC`<1tLK=)VhDx{c;Cr1`*I4>Ow^-yAo7}*)Rv@ zg1&hEARLBca2&S5cGwB}68w#zdv1LL|58|{Uzu2dWg#pA-F$b0?$85zg8l?T-#}Rm zibDw~38kPkgh3hj3d%w`$O*YXUyZ4+%hY#&rUh^Cfehda86EuhMyiE7@+Nm;O$dqL z3%OUvkt<5yS^5wj!DILXo`7!YpTTqZ6JEf#^s*K39o*vjH_&aqzQVN{6 zt6qZ3a1~C&Pp}oXfxaGgZ6c3O9_z5^-u?&J02^TwY=%Gx0^R28wmvs}33(td?coi&ch%v>VNVN-g!#dEH z_UJc`mSAs;*#w$GGach1u;^yDCFqx^j(~oHs}fWO{Xm$0MXWgJ2DmCzgF;-FAu;_j zSw8p@GC+FJZD)PZ6?!8W0z;u0M1X#`O*hVuY+sQJEGJ+y#K12268OrtM_$MW_Xu7C*1dl#Jql8H9<8Ab=<>HcR07>khd~)A3q_$Aq$XN@8Avb$fPUd*KFkCC zw#6RU3yYyY41htPi_Kop2XsN9i-n`Q2iFyXt_V(mE&@)0PW(F2>%^`T`7dw|&cg)= zrrYTYYIX0OAN-*JWCnc~u)Zg_5sK=bTesK|pdU=D1-0R8r~~14#+15Pegb`QhrSE> z80>=GP!6Wyu0P`J50hXrbcJrv9ds|<6Lk9=0-lf>^!@TD;0&AveP6w9iT8kRgIB}% zy82uTy4{@xvtbU*1AW8)beI9-VG@jj(J&VDQv>=rf?+Ti@`G-3byphzK@bcfpgY(? zP#B6pQOE~Tx%uxjx!*t$eFrOH6)c10umtA8d{_tq%cz1{w2_*i--%lW`c<;~pkF)F zZSZX<3Hp)IaQFu5fo^^EL!~vKrhZ*mx45t1HN1hh@Hb>4upeZGERYr66X-SQ7ZQ2e zz5(|TbT{k|1t0^s;2FUOKs4yWQ&-WtYSvY9N6;j}E$>kYM`mhK^JSQ0~W&@Vme=OQn|UUIn)4#1Ca z7`lLdaYnxcqgy!LvgvkgBCH`J0@GnMG=*kR8A`%(lv@a0iF-QoJ+ODv_S+FULl-Co zVNeD}AlL``LVt*cw$KjJfPNB4-%(!%Y9e0?YQxu12f{)37WF~*4qw42WFs+KK_iH2 z!hcO60vbRSC=0q@(EUM6r~q$BEFq@eS@{Ng3~jhO!$oiC3;jUf+1wJ^LOVEw+z!x> z0(Al1ZDq&Z3$CEZ8Lj`bSoVXSjO$7G6%x?Xu=8*MQbG<`z(jG3x(p!a35YBaB!(oA z6rL0LpU?<7Jz3Sy66S};$oHh$c49_i>bYcL@Po`wFD6?oF312sz(A**tLMm+feh*wWmm%jm<|6>D1XB{cm=;Ar>As!0QU;s z=m+iIV%ZD%p%cZPosumNrJ)pfLOO^BJyp{~G(9vMg1qiQBcVB*qcqNg9+Xu8{c-zB z&_l8%uoMP?o^k0b1N2<0R-)4M1T66s#>xU%srUM^(4@vCG{Iif#{J^E;64RzJw8=r#)H#!(p7h+DI)G$;b-$4&0?P z4*Pf*u5K_CnnO6$g?i8s8bN9J5oLD5WD?LJXAsQ5-U(9`Dvdc3b07?YDCnSrqgG*F z1oMCfa4VpSX=G?=9Fpjvh^4h*@;wUUOyn>TyTfSs?p)V^Z=fD%;_e&|#;&EB&x&hE z1Q!~qo@YctD`*4lpgnW|4WKfOiL4nkgsM;-+zD$5R^qxMsBFPzv|9cO4v|64=`5wW z)H?&kK<^sppj!kJJv<6yhC&DgLjV+n>&O(q)Geq#rXT3OSND*!F=v66;tb5`Fb$@_ zB+#vE5FEvSjLjpb@xuSgT&Qx|_)CH}=tNoqbOE0$rdkU}u`i;sW6uVeAuD8o9FP-= 
zK@lhng+Mn0+5z*xFfya-+uWdQTV3PowW6DcASeL37zl*X$m>|E8;k|m^+1t>#_Gl{ zqbQDKz$jtOrA|*;m66aAnuFG!j*q%x@q>DxE15c=s~TehmeZXa734+-hl2-{v1ilA zpF27l8XJ^~NL7HZpcIsbG7tvkp&XQjy=bCxZZ|dEcJ)&^4PO;h&K*wU-GhG>%T4X9 zsdRl6Wo$Owk!T|7QOy$6>l&7uW-plgXPpuIpBIC^_Q7dU+&C3ampDB7pB zZw~-X*j*=a6?H3O)TI|fgCW`y)=I0b74()0I^NmQKI&tA<5xEUp+><-7y-JN9F6%C z9E1a~7gTN!?1o*i6K2C?*bc*B8*G6X*sK-w1C~v&2sVIDQ}bahd=CpjU=8T>wGzIA z6)+vXg_*D%mcde33=3c$%!R2i1t!4+7zbm)uCGSZo#A*c5=hsstfW<0mC=5oUYaR& z(=;@m`218qb)N>GhEpBm3!`o`K9?`~nXtPAHIZ3XWdE9wOzv*wLB;v1#~AAH!pD$b^Cu>U`vQt6L81*Y5qT!r`M_Q$BzFuH+PN3 zRqlTq=jPZq@hRahA*~wi8rsA^&1k?n993Ht`qWd~^Bl`x z`<{`A(QGnmkoeNvi2H<(Th^zs{#(%a%51BZdVD=fi)9D?ZY|x}wbG0Fs7rpTgpNJ& zC8%!uK)ajS?b)kf*YWE|(8N9sr*WzK5jbr5C)7u4G|@xguE2y?0>KG? zC*U~r(P7u+yr!4(w5Ow3@{?F^!A-a!MS3w^{)#mL5&eVdz|>>zci8n>kG(b3_i<#@ zJpeaVq9CLKJx0p`dQzY#1{z;V(35nPPmHNYYkI=2C+#b+Tj8QSd~iqsdfc82+{$PG zJ%-l^6pc_#4d|Bf#IAW!B~t@SDdRdF_Oy@&yg<+Ryg|e3(S|R$Ad?(G|8>kOWp$RmGW)J}_K<{a5PPGJD zVd{!e?K%v$vD`agwu44m|LWPEi>}ZGxqD3Ncwy9+MDPO3eb9!b&lh^ZUHBb-gA?#8B*H>HnBa z8T7$OePB{^_A};LI1Y!QIOrL^-qx!NMIjUxBD(;zIzse3-yg@EkQw|S6Ql=K;tNOw z2|yd@JNOG;z*G1G9>XJe2;ag|SOSYcm0gUfiRs^R?91Rj{0g_V{%>Nr0nwDibU!QY_wtv$5< zqcPv(@DJ1l8y*S42@V*JzitK6U{47tAPFRfD5qjllUMKS{UVETePkPACn8r!0*m{}k@WP==#A96!3&^1;*%)Ia=sP&$j-dk%3-+;=GK(0P!JQ9U&;J77C(gY~cu=E5A94YObd41f-x1BUJ<^|=JS z38)RQFJ@2B`Jo4aC2aQZ4U5lwrE(SKh5738F+=RIiHp3R!3f(9HT=ifF|M_qRY9%mqi_V=ReBP;mbO;i3CldK zpL{vR#TmE&+VFmX!q5b?Xx;UB9=q13mWaDP&*7%^so``*a|Kh^c{kuH_UmvB-g8|Y z&XAGYT;GD5P(BJr_X*KnJl#s%!Bgw@XE+P0P#^sN#8f4oV?Ko^kQw(sFtvd_0#)!K z<^y;P&p`dPG?F_Qv_NmE>y!9$Wr(9<)L;0#1W#Nv~!%ut62j@jhM zP)D9hS-H*vIyvXVGzJb5%MJM<2)97cC2a?;b*-j{tA%h=Q=cj*0i~qGFz%_B!5Rj7 zzG_2Umrq(UKo4me>UD@w>2r~ zAL3t-o+u{~2-RI*Y6NzA-uX5r*;mUffkMpV_@u|@#bu8+k5XJ6X!!&b^muBMOrsp$ z{6@FXC`VIU29Z&YVz!)ec9bK?6^O!_30C}8$!{djs33wB@(Z?DGeZ8)Cm*0PFw-qKn6fl-EsqiVbd3gTY zPr7*8ZK3`FhSvoduR)(!LGzwl_G8mAciw2+`1=PNYX_$9=m!%JAYqtduAl-SFevHL0v>ACe+;( zV^W?wm#~Z7mJk=!aT=y4=FV|{miPLj?@xAH9^%r!Ws-lKqnz!KbRXvkcOBx!fy%qG 
zCRk8wVeoINvVTB`f2hX=iP%Hw-IRCZNVlT|jOS{AR2k1zX^B!}r-V($@vt0MzYB6l z9dpU)2@W3#p5O@Pcft>_+ltH3365%(kK3`IGW$`JU@u!Xz ziBBty`09!06e8*&d8d3R5q`a%@&`gCr@wd;YD3QSX~)fJv6@gJv_GoaO2m|Nmx!5P zmtPtdV}v@A^~%e+2c~5*>d8pU%5hDpGMm^hAg3KNzwhuSGbiq;fE>L?b+Y8FtmcyS zQ_;}6vXV5fT;A!-9uV?i#^QblZqcgTapaVsX>`)52;?S_y$Oem$>w!`POLK9W$-kj zy)1JuTsi<`L~dQt?7K6Zy3i_CE~A`OfszR1L}2lcud>bVR;EC#KudY6t;4K2`yc5g zdN!4BURss>KQ{??@c*&xd`KXv?4((_@?~H+My~^fD^<^A`#pQCULUnDqusfcimze% z_HZ$#6H(V+!*1-EvpHF8M4#w1Uarn3zw0FBOpJI^v7-M{CdU!5GNGhQFJV|;~7lkL3I;bVWES)R;wc!#9UVwPp` z=V!{^Ul{0&Eut^+(ZWlTu+^Fx_x{#7k=a9gSUD*%kJ7D=fVTP+(Sw?-z20O;tbj4F zeCTwa4>PXZS0Uf#b#AGLr`l(G1dEzp~>q1@s3G4yK!1QkrwtJ1Bo?gSuOFS zGBwBLAl`t|bb%x2-&!@})5op)L#3O&`9IROn)UzK!mO(MkA^1RiyRqTJ+qlZeYtLR z{Fc`%HY9fXvqrqn3;(0CwFKrV&sMZqsdjGiO-FB@TYh<;$rWNy8skNA9 z|6ilM(OO!IZ4*;?H^x6*Zd@IBqKlV=noBHQzj#FBqeJPrqPc<1RLHfe{jcKw2UiQGQSVkwG1Fwz1lLFMLwr z6PSK=&VB&{M&iTTQwv~(G+ss$B1s!KePq%yM|IvjbRDOVeT2~h7*OqDrx}S}Um=XH z?ojIlKA!k=E&BEBqM26jG<=NMu1o&qX!;ZZEujI89c%k^?;nGJRlyz-fdIot|K*go zkE~UrncT*3^O`I^m-4sE0oChZ;kr#7bnw zN!OLAm`TQBxOz}V+Ni#%^>ekx&kn~K;D^ZWm4tdM$yYJj{SzR$S2+TCe@(+xj=YJd zQcPOlOJpLxA^*@gbUvB*ZB+2E!2Wkp*&6BAA*d>UyUo^OUc%LDt;`_5U9J+;b;uIf zRlDte`#)zVS*3znXT@_hie8t=4V^x1?;)q9m%Q=I+&x;JZf+!DRM%O2v?AYiu&wKo zwd^IktpH`J4P_XnCjPKxgYc`x_mm}!wL+hWk1D*nbHK(9k6X--P1Q`!bqfJau7Pdt zAS{} zk!B%lDAMcl+ZuxAlPv2TK~nX5M+V-yvp1*HhXwE8@9{C4nVi5UWLI%hk;7|CZfjYm zcxJmzJ0se8c$F~o{W^KpO=(Lnd}6mbaM4;Z+MSPtu4Q3;U)rxl!IyX?#%q3PiyG?> zwC-Uj7^3kMl?4d!qP(qZ>8xR4X4`L8^lid9^Los*+bCV*&s6t6+J1TIypG(}l%4C0 zYSXSFPu7vUxV^$hO0IVVy9$;wvz29s*PR-_ms*HES~VnQj&|0f*7Y}Xx^lkSz0aPt zUzT(rXLYqya(O-Je7qF?C@_Aybahp!V5YVDY|SrzZgjsEaTGE~hj5w9LYpn34zRo& ztH3sz`fHxW`GFWZA$|ym}MAS5Fh)CN-|&rp^cH( zN50=c{>^dMolI|ew}E*7eb62w0UKj;_Hp3qGHWA+Z0&_U%I<&U)5uDYYf}}om3lw; zAw`WYH5!r?Yo4$Q!YoWVwTWSJmOTB{;gztkR=&B3D)#R}*B>X4IT3+c%@p z3P~Kpz_7lW+0Lp+i5SZA&uUUHhGFA^jOEfbu)4X(Js7>}mznFA#aYGAkliY<9swPC z!va@j+q5I?Tr{)>l6?}kh18Boatznf8rB?Du54lNi*;vzCTEt6?`^7{El70l-UI6Q 
z9RD&=tQ@sGX^X?#HGrMCHn197=WopLOC6nctf|9O_HQ9$bH#HjqjIV`oFyTbY~`s_ zMICe7AV>eE_&m1bqdLDy{m_+qbL!G|TY!1;;c-m*64WfobPh^#yBt)9kE+dTb|G`a zO+^+Bx{|fRAV(?^u-e%=d~__z|NW5US&HrR(@dB;noU$@Y@?BFg1T*+a9Z{<(OA$?Lcy!w8wCTePb%-lX7~KWbMXi zby^d06W6E~T~6(D1p5l{>B{5iBWJy^oV!Eqwn2d$OyD}PnPlEi z@GKEhbibpjD}RL9oQ@VNUrhQht6@kQix^`@@?qxMzMrk&=QGzJ%^4G2tLE}VV_eum zk{od4NfO!8+^y_vDJ2h};E9$p?tsJF_d4#{{#%aQT(|9&`HziKque+JZYg^XP&RLo z(0MpP?L?ysy@_gogf*EajFc2V(hE~ZnpISI$c^9$^U9=0TXgU*q|*V%cRqGkq%``` z5pB;MDc1<#Ta*A=AulJK|MLEcxi7r5=>_Q-TGhxIzo2!;NXhpTg3$=F&5ADhu1LzS z=FT}~3KnLvSrjSFRAM6%x&-<$*X*9JOJ4Cqf)1?bEc+rQ(>db$B~rdeGV~6Tx?gkL z*j}^e#ePvpGG%F5rEO)loaok$tY7yJKj>w1;G(6Hr^$2ez1rHK?J0!H5x9%NXHgS<;t*zLZqI=+@wZ8M%rt)9RD&NrbAqyTuTgf28 z+9$S>nFk$ZIU>D!&{51jtCifi=mQdTyxM>5kX^C6|#bXqHU+w$kJnk~!K+^a)3xd=hjLtf zmiA!{wj3eeJj;xgI#Sa8%qU;3uQ~2K?|yH_+Q@s;X!F+UrGX6mnHe|=0c|&TkNM_o zS2yu`1cLm71JP!_tVh7MS&m}(9z!l2a+B-*^1}Z6H=7K(|J!(atFKgNdhF(HIn_c;0fsMlVAFg8vgoLpC_#Gb|(9a&clc4>)-m|To zF%Z4zC&kZGh$;G;k%y&jR(w&u6hn~Er4=nKn{+-;`DIM9mC37JZ&C;WDpgaj zh>uofp&e~AtjLg}jghQz+rXM>TuqS3kHqD-*RG!rt=ca((_`f+k=vF@(hC&MHsrJd zj`|dKy}IC^-H0hrwKycD5crQN>pVfVpZBVh&za-I@CgL9?g!rKFB2|sZu;+&+FQbE z2kw7u?#u8r7e*Ud3(;vLrCe6Mat$!`dio;Y$`Ou5uMsF>www@2d6BhXIca={FG>t- zJ>~GPKNuM?cENWCNY%@%cb*QA6PFo#PPCG{ml>Mg4G`Zew2x#1%@pz;ZT8LR6n(V% zt)+;!)VM+>vdU03@=DAVM>XGI#C1f<*|X^G+}{O-YDt8uJ{}*>uVw$g4cOND+)r2E zpC2HqCi=Ha#$IEy_4(08X`YDC5*>l5&AwIJ&cn9f_ugfglxlJyPl^`KGe0oPaMqziP>6(Gr{b;6LYM0fhpQ^W+8Mv!|LmvDIlf`ErmGp} zoegCo!XZ5o&}HA6M!EXct(Pbt0y<&P)u!O14PwjF2-my5SAR9960PW^GWj{_Y?aBs zvx(-m9Zw;M=M9d;e$zn?sKJIA;-X~St=?mZgN;z&WdQ>>rJl0i$>K)%RqAhR9)8JB%;Q0 z6vNfdl3ScSCSm*IC)nW)(j^rUb(i-Wit2{e&5TcTlCf# zsqxa0&Ot8rNygh0(J>;`ai_q5(p4+v{4yJnQi3}2Um7eOZqp5J50-_u=@#z>%Qmjf zEw+zq;}G+XX`@t8-(I`+pqWv&Mt_F3VmpGhW4gj zyt91PT_PGT$JO|tS)BMxt>To9p^)~}*>YO*yHyt5$1oyH=b&;2iQoNCiZguz%_MSh z;vtRn3F34HeACI-4>$__P!U!^8rjlOz#sQ~tKdEz5VT%c0)%?}`=q;7XhQyy z^vx%AojyUZSrqA{{XdK##`x&to&j9RCYy6c!>S9Pj9EUYC@o4CM2uC&!a%2LYeU;P4 
zHEXK*@XFB*ew9nLt2C4VJgUZdmB;Ss*PTATcktI`#E42It1S6*)H~Ck^|Ht7sp4gG z=HUsrQa0ycK1a0I=FDPiDPxN`gXBUHXLb9-Y0}E>3=DZa%{*V)?v*ZeNe_d%UFR{a$1!bDi73HAWnxa#t_{8b$K-W$(@ZzSqH6fy#Ue!0DCHNT>;48b`Pb!n%dIJhWB$ zfEkze#tILUt_W}gXRI1)WPNdW&;zm?r;rO)IvtuVet&NA*09*1kF7|Xr~5l`Nqe6IHH< z)T-xJYpB#p;?y-vA&*INGm+CPGeug+WA1YE4r<29l{>XqIF(bDJRauX2JuWp9ET;Z z8fKV;Mwr&pRGo%M*Je&8?1D>WY%{Kn6uc4|YhXj%3gu(VEN?Zy1c^vNfcyo!iDz@J zNAkrIRMK_!OgWhtmle{qxif7M9a#>qG*3M0_glL2McQln*rwIl?&71%$_lqSl*_X> zW~=E#H}Nnu;3g>ox+*SHBfLhn@l)AcXi%M}opO`-WlwUa!^)9FC2=}!UJ{hpS&n1U zhAo_#L=rnQ#wr)!R<5vIPfn@VUv2i#BkA6xO_K8IFK)#Odi0QtNyx-xKID#JDibG} zB5wP{6eMXDsHolka-}>@;`FxM5;CL|^e_s?ibcn1OWsg-x^&iFV|5<09%Nfmr?2bt z=R@6hYAoUGk~A5a7@NavIujQC5dLo9$#62EyRQ&BuarvW3~~jmGoPAm-ZguP(n(Vm zHUx}An`jwN3~v1meLBisbs8p@3Cf6`l5*m=^^KD|(`O9!D80e#k~jZ3{(bJ?pZHf_m|?V8Bcu=#j*p9$(0S0*au>hNqtC3+ z5_HP6D(OQTHrnQgT*a*+BZnhoEp-|)bCWs5K3EmGZ2p-Ao6Y!iN9z7`O6Qp;Qw|zc zRq&x*2hy}p%exfLY9Y~^&HZA~lotK=&RfXi(cN*;2b0HPYBzpREF$7?VD-cV)m3F^ zXyJttm6DQNBePO|7%N?$rj*L&9b>LrKU9Mj=VzTRpR`To46>izCi7D<#^2m#rn>O- zwl1erbedo&Y@AHJz(;qson9AtRjplZm*HcaGT7unDrerr-rLRbsjc{VI;-)RP)ASH z8!C%DNp_rU$I&Io=vumuZ`)HhM>TmyBvgpb;|nFiU|5Mx=$X{t=-PX zmGAN7GCOHw6T3vZrgjGT?ngj7e8J8qXH8B&PWr}?evkHX`I14>`Vhr=VXTA@Vr+VXP~Wt ze3yo7wBKQt(*0U3`qvzrdl13(?5PO90pQa_WOuvdRilKI^P*%%>@TxL*<|sMYl@c>eXpUo0dpk5Gn)N%_OI5Q9S?tXisq&m==Lb zd(2c;^!=xE&_Wk8jq0eyb5_=?z(Y&m)tmlp_SKulGyA{blAa{4%gu^TFNsR;bhQ?`!(AxJazkX;`gRv?cCbbaYQJW7L;R{fK4Or{qU2i)6jL1C-W4kq0@*Cf!wRyA)UPm_VS=C^>zmS+e1M#7NZFTu?WcF<7{oe zbyz0&&~$pqULTSfBbR+x>!pyVr5HJeA2U-c@qEwvBdJ34xdXlNLw1kiqpjOfFR;$> zge&!_KkKe)Mp>1Ch?2>V8Js~OM^2d4JhtVu^*b&d)>}t7sQtdr@Ls=pGTzeF!Xygl6Ni=Tp{Uwoz;BDS^aYUi0xmkc$Z?i zd3>V@UOO$(zT|YJY(S7K+A=zQ?AKe0Z$@S@vs;_4UeY3?)9*iuaG=b~=*-K~;BZD1 z4U%^mSr`Y2UnXZYsl$USSuzKNqFLGVW`nKqe9yXilX`PwITRNywuX|;&zUz{D=s~` zob}lr-tA+Pt_Ws#?}I)Ubo(5?Abb26RGwTgrXAz(Zs*f=IEvonFs94*tuG|WPF zxfrMeVH-l}x+QI@_1pVR>|{=q#_@ttq=T98!-_IwRn6wiz=t_oXQPtbpQqZ_=f?J< zRrwyc*(&H+vo6Z1Y))_c(u;CG8k6V}I8R@%T 
zSg~g~H>2N+nT5H}t3x;K&%+_5a?-crK5pmZd*!k@j})$aq*c%mnBs$ z=L!3+D{>`YCZ&eIQW5)T}@;N)HarmCG-4De2T)!gzxoH*mk)vic>7SeW?ReEp_EzG2F&#VPeU5-N z(e;tFxhemCayvJ5`qMRY7CBeCbk6i$Cayxf*l4tm_sr($&|4hZmgeO*p` z=?t`Axh(cPn451(XdY)(-&<5e8bZ`qbGrAV^FBkFn3nr|aMI*F$zA z=xTY_>=(6e*NQ5ec*`49w0;cKQJ&{>R@2hI5zMZLQI96qC_mZieBbPcXTJI6*7awT zrV+*}-T^W(KarkCKvz%&Qhw3gZ}Nuw2v9#dy&jU|U!mTu`*Mw#eE&dDC+v~~lc&4g zErzFwj0Fi}za_zg!FieMS<{vCcedukkZW}2Z}!+!I-vNhf9%JndBiHvS8l4fwL)l% zToR(+USxOKh_2i7SaP(K?oyxta^`}>G_2X)Tr}vtAAa@!hnbJD3;G2Gt^cJn;jLoM zChIkPO*x9;yZos+>}0Ngq`Oy#hQ*N67D;|LJ)KdIk>jUlrXp>tv`<@L%IIhWbbw~) zF!!9&yr44^&&m!iNTdy94u-4mLo<_mQx|*DwCa?jhVsUJ;9+vIAcc`i3I#BI8eiul zlYQ0rx5neB{qDT5K8afJF^pyyB1ki z0;lQ#U=1c?rGE&e+(K@LFcfx^^r6ms%#5}|q-s9wF2RMI!LDXDn=!X;>RYgBm7kJ7 zHDWNHAnz|j3+cLz=U)m@0yAv#un^0>R8pj{bFh6;0@+d6x%%V7H%5frMVLv_uO%{%TbVw~&W_Htbg3~dPHLtM`P3NFd2S~LNlx0zAU4m6=dC5}BnNE(D z;Bcpg+$-U1?K_=Fbouga7hN=F}auk6uJBFZ2C_oPCkgIfEX%B(D}XjoGHPt7|$l>WN=uy1S_Rvh#-e#$XUFHOi9RN$I3r zS$gQU7c##rqsP|trp^T?)$MxrP8S`ntqJZxdO1@TT}peKA3Qx*z5KTiCf*E=m2l70 z=B_Ujb@wknMJdFO;$=}eN+4W3!`bzoEJu@$AY$FrE}Jp`p)+%{Fhf@o=WQPHM>*!> zof)KLc{06WRV3g|sIqg<;%orH*IMe-abc`;mmoy~Fj z+tH6Ab4KLWpB`D8&C1fWB5m7G)>lG2yR1?1&wrESUW^${PLJ;1B48R(7A7t4TW&v6gU=G&(H-k0HBO94g;Uk9w>{|GmsIsM3chK32mwo}c+x z!OVoU0cNOdG{9<=nMR9cwHd3-;p4Vv>b(2T*w|!_$U&;9(34 zUb4G7z2V>M`@cpF8lEv~c-5dBmQcky$8=qO)uUG~A3f;Mr5DO}m53Uo_t6-Tw}iFu zc7F%C+~k(q`*ttH;gWj+5zpXY<%+@KpBjw4^gQpHl%+8)2g$LT&J6MOLTgBgrx&uy z(=1pAM>>Rvwj(|r{=XS7tfpv;7e2DN7MpYP=#JG3_gwUbXWYYk>t2`XlrT`ABQ$QP z=#7xV9>#&RS_^T{TW_oAv%rgMvm2jZ(Ck%x#`f5m=HZ+1l#u2KZF#u(2@;pOjRW5D>}zKm`>{Z2REHL1=2XVkab^sOr#!4@h~gPzw4|uY;&QbP!$Uknovw5# zN@IxivEK=leBq>Lm1ieu8cvR$1WLbf^j#k;-{Tu!U0Kn@)0nKWbcehpD&Kf-6&fco zx8_N~x1uowhRnsg1T_SD#Bg^pqs{DgJ}k3G*JmYd{zTXQVw>b_z=&)ff3k7w z-oRPTeyx;jYv8P7j$l5arx@yVqA1b&RLcP)+hwOob9$r;o&Vb)sJ|>_%3u)m@r`Ej zk%~C`?e@Ci<(c-i#%VTMRvR}O(viLmvl$i9HBX^LJ%YBqv^o+?w4Gt%--vb1-(fPK z5p%&8Wz6a7T1c(FjWf(Ej-;-U7>ZNN+D24UP6WJ3>}jRyjaMcsHPpebjKPTxP!gZ? 
z_*}d{O0rE^aXi*1T;37X)e!+71j3Wu8y;D|UxiqK5fadtUb3W&RBgRZuVCykIADlnN#T|EvA^!MABCP7xKWGF-H zKBI96?j`G+IE%0aeb9s%^I8?j+>{>qM-`iKj+1lm)CgxxUY728IMUO8;_g2*O#jLgxs|$g&$Z9?iQ^x`P=eS0hm7 zqxw$P)>Y%}9z)uu zYFX*>I)m_*^U*Ihn4VcTB$PYD9AC=nn1sv!AHd@XmjD0& diff --git a/package.json b/package.json index 43057df..c088aa4 100644 --- a/package.json +++ b/package.json @@ -19,6 +19,7 @@ "@headlessui/react": "^1.7.18", "@heroicons/react": "^2.1.1", "@langchain/community": "^0.0.41", + "@langchain/openai": "0.0.24", "@mantine/form": "^7.5.0", "@mantine/hooks": "^7.5.3", "@mozilla/readability": "^0.5.0", @@ -39,6 +40,7 @@ "lucide-react": "^0.350.0", "mammoth": "^1.7.2", "ml-distance": "^4.0.1", + "openai": "^4.65.0", "pdfjs-dist": "4.0.379", "property-information": "^6.4.1", "pubsub-js": "^1.9.4", diff --git a/src/components/Common/ModelSelect.tsx b/src/components/Common/ModelSelect.tsx index e39a9f6..1a9e8d0 100644 --- a/src/components/Common/ModelSelect.tsx +++ b/src/components/Common/ModelSelect.tsx @@ -38,10 +38,10 @@ export const ModelSelect: React.FC = () => { ), onClick: () => { - if (selectedModel === d.name) { + if (selectedModel === d.model) { setSelectedModel(null) } else { - setSelectedModel(d.name) + setSelectedModel(d.model) } } })) || [], diff --git a/src/components/Common/ProviderIcon.tsx b/src/components/Common/ProviderIcon.tsx index a97776f..83a8cca 100644 --- a/src/components/Common/ProviderIcon.tsx +++ b/src/components/Common/ProviderIcon.tsx @@ -1,4 +1,4 @@ -import { ChromeIcon } from "lucide-react" +import { ChromeIcon, CloudCog } from "lucide-react" import { OllamaIcon } from "../Icons/Ollama" export const ProviderIcons = ({ @@ -11,6 +11,8 @@ export const ProviderIcons = ({ switch (provider) { case "chrome": return + case "custom": + return default: return } diff --git a/src/components/Layouts/Header.tsx b/src/components/Layouts/Header.tsx index 65fab8e..67338c2 100644 --- a/src/components/Layouts/Header.tsx +++ b/src/components/Layouts/Header.tsx @@ -11,7 +11,6 @@ 
import { } from "lucide-react" import { useTranslation } from "react-i18next" import { useLocation, NavLink } from "react-router-dom" -import { OllamaIcon } from "../Icons/Ollama" import { SelectedKnowledge } from "../Option/Knowledge/SelectedKnwledge" import { ModelSelect } from "../Common/ModelSelect" import { PromptSelect } from "../Common/PromptSelect" diff --git a/src/components/Option/Models/index.tsx b/src/components/Option/Models/index.tsx index af3c866..bd14c30 100644 --- a/src/components/Option/Models/index.tsx +++ b/src/components/Option/Models/index.tsx @@ -1,9 +1,5 @@ -import { useMutation, useQuery, useQueryClient } from "@tanstack/react-query" +import { useMutation, } from "@tanstack/react-query" import { - Skeleton, - Table, - Tag, - Tooltip, notification, Modal, Input, @@ -23,7 +19,7 @@ dayjs.extend(relativeTime) export const ModelsBody = () => { const [open, setOpen] = useState(false) const [segmented, setSegmented] = useState("ollama") - + const { t } = useTranslation(["settings", "common", "openai"]) const form = useForm({ diff --git a/src/db/models.ts b/src/db/models.ts index 207fe97..ef615a9 100644 --- a/src/db/models.ts +++ b/src/db/models.ts @@ -18,6 +18,11 @@ export const generateID = () => { export const removeModelPrefix = (id: string) => { return id.replace(/^model-/, "") } + +export const isCustomModel = (model: string) => { + const customModelRegex = /_model-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{3,4}-[a-f0-9]{4}/ + return customModelRegex.test(model) +} export class ModelDb { db: chrome.storage.StorageArea @@ -174,3 +179,30 @@ export const isLookupExist = async (lookup: string) => { const model = models.find((model) => model.lookup === lookup) return model ? 
true : false } + + +export const ollamaFormatAllCustomModels = async () => { + + const allModles = await getAllCustomModels() + + const ollamaModels = allModles.map((model) => { + return { + name: model.name, + model: model.id, + modified_at: "", + provider: "custom", + size: 0, + digest: "", + details: { + parent_model: "", + format: "", + family: "", + families: [], + parameter_size: "", + quantization_level: "" + } + } + }) + + return ollamaModels +} \ No newline at end of file diff --git a/src/models/index.ts b/src/models/index.ts index ce3ab39..07c134e 100644 --- a/src/models/index.ts +++ b/src/models/index.ts @@ -1,5 +1,8 @@ +import { getModelInfo, isCustomModel } from "@/db/models" import { ChatChromeAI } from "./ChatChromeAi" import { ChatOllama } from "./ChatOllama" +import { getOpenAIConfigById } from "@/db/openai" +import { ChatOpenAI } from "@langchain/openai" export const pageAssistModel = async ({ model, @@ -22,23 +25,49 @@ export const pageAssistModel = async ({ seed?: number numGpu?: number }) => { - switch (model) { - case "chrome::gemini-nano::page-assist": - return new ChatChromeAI({ - temperature, - topK - }) - default: - return new ChatOllama({ - baseUrl, - keepAlive, - temperature, - topK, - topP, - numCtx, - seed, - model, - numGpu - }) + + if (model === "chrome::gemini-nano::page-assist") { + return new ChatChromeAI({ + temperature, + topK + }) } + + + const isCustom = isCustomModel(model) + + console.log("isCustom", isCustom, model) + + if (isCustom) { + const modelInfo = await getModelInfo(model) + const providerInfo = await getOpenAIConfigById(modelInfo.provider_id) + + return new ChatOpenAI({ + modelName: modelInfo.model_id, + openAIApiKey: providerInfo.apiKey || "", + temperature, + topP, + configuration: { + apiKey: providerInfo.apiKey || "", + baseURL: providerInfo.baseUrl || "", + } + }) as any + } + + + + return new ChatOllama({ + baseUrl, + keepAlive, + temperature, + topK, + topP, + numCtx, + seed, + model, + numGpu + }) + + + } 
diff --git a/src/services/ollama.ts b/src/services/ollama.ts index ee58b0e..5aff8c3 100644 --- a/src/services/ollama.ts +++ b/src/services/ollama.ts @@ -4,6 +4,7 @@ import { urlRewriteRuntime } from "../libs/runtime" import { getChromeAIModel } from "./chrome" import { setNoOfRetrievedDocs, setTotalFilePerKB } from "./app" import fetcher from "@/libs/fetcher" +import { ollamaFormatAllCustomModels } from "@/db/models" const storage = new Storage() @@ -193,9 +194,13 @@ export const fetchChatModels = async ({ } }) const chromeModel = await getChromeAIModel() + + const customModels = await ollamaFormatAllCustomModels() + return [ ...chatModels, - ...chromeModel + ...chromeModel, + ...customModels ] } catch (e) { console.error(e) @@ -207,10 +212,11 @@ export const fetchChatModels = async ({ } }) const chromeModel = await getChromeAIModel() - + const customModels = await ollamaFormatAllCustomModels() return [ ...models, - ...chromeModel + ...chromeModel, + ...customModels ] } } From 192e3893bb26de84617bd2794fcdfe9e48167c34 Mon Sep 17 00:00:00 2001 From: n4ze3m Date: Sun, 29 Sep 2024 23:59:15 +0530 Subject: [PATCH 04/33] feat: support custom models for messages This commit introduces support for custom models in the message history generation process. Previously, the history would format messages using LangChain's standard message structure, which is not compatible with custom models. This change allows for correct history formatting regardless of the selected model type, enhancing compatibility and user experience. 
--- src/hooks/useMessage.tsx | 44 ++++++++------ src/hooks/useMessageOption.tsx | 34 ++++++----- src/utils/generate-history.ts | 107 ++++++++++++++++++--------------- src/utils/human-message.tsx | 43 +++++++++++++ 4 files changed, 148 insertions(+), 80 deletions(-) create mode 100644 src/utils/human-message.tsx diff --git a/src/hooks/useMessage.tsx b/src/hooks/useMessage.tsx index 4dcac4e..3b6b4e5 100644 --- a/src/hooks/useMessage.tsx +++ b/src/hooks/useMessage.tsx @@ -9,7 +9,7 @@ import { } from "~/services/ollama" import { useStoreMessageOption, type Message } from "~/store/option" import { useStoreMessage } from "~/store" -import { HumanMessage, SystemMessage } from "@langchain/core/messages" +import { SystemMessage } from "@langchain/core/messages" import { getDataFromCurrentTab } from "~/libs/get-html" import { MemoryVectorStore } from "langchain/vectorstores/memory" import { memoryEmbedding } from "@/utils/memory-embeddings" @@ -33,6 +33,7 @@ import { getAllDefaultModelSettings } from "@/services/model-settings" import { getSystemPromptForWeb } from "@/web/web" import { pageAssistModel } from "@/models" import { getPrompt } from "@/services/application" +import { humanMessageFormatter } from "@/utils/human-message" export const useMessage = () => { const { @@ -313,7 +314,7 @@ export const useMessage = () => { ] } - let humanMessage = new HumanMessage({ + let humanMessage = humanMessageFormatter({ content: [ { text: systemPrompt @@ -321,10 +322,11 @@ export const useMessage = () => { .replace("{question}", query), type: "text" } - ] + ], + model: selectedModel }) - const applicationChatHistory = generateHistory(history) + const applicationChatHistory = generateHistory(history, selectedModel) const chunks = await ollama.stream( [...applicationChatHistory, humanMessage], @@ -500,16 +502,17 @@ export const useMessage = () => { const prompt = await systemPromptForNonRag() const selectedPrompt = await getPromptById(selectedSystemPrompt) - let humanMessage = new 
HumanMessage({ + let humanMessage = humanMessageFormatter({ content: [ { text: message, type: "text" } - ] + ], + model: selectedModel }) if (image.length > 0) { - humanMessage = new HumanMessage({ + humanMessage = humanMessageFormatter({ content: [ { text: message, @@ -519,11 +522,12 @@ export const useMessage = () => { image_url: image, type: "image_url" } - ] + ], + model: selectedModel }) } - const applicationChatHistory = generateHistory(history) + const applicationChatHistory = generateHistory(history, selectedModel) if (prompt && !selectedPrompt) { applicationChatHistory.unshift( @@ -760,16 +764,17 @@ export const useMessage = () => { // message = message.trim().replaceAll("\n", " ") - let humanMessage = new HumanMessage({ + let humanMessage = humanMessageFormatter({ content: [ { text: message, type: "text" } - ] + ], + model: selectedModel }) if (image.length > 0) { - humanMessage = new HumanMessage({ + humanMessage = humanMessageFormatter({ content: [ { text: message, @@ -779,11 +784,12 @@ export const useMessage = () => { image_url: image, type: "image_url" } - ] + ], + model: selectedModel }) } - const applicationChatHistory = generateHistory(history) + const applicationChatHistory = generateHistory(history, selectedModel) if (prompt) { applicationChatHistory.unshift( @@ -966,16 +972,17 @@ export const useMessage = () => { try { const prompt = await getPrompt(messageType) - let humanMessage = new HumanMessage({ + let humanMessage = humanMessageFormatter({ content: [ { text: prompt.replace("{text}", message), type: "text" } - ] + ], + model: selectedModel }) if (image.length > 0) { - humanMessage = new HumanMessage({ + humanMessage = humanMessageFormatter({ content: [ { text: prompt.replace("{text}", message), @@ -985,7 +992,8 @@ export const useMessage = () => { image_url: image, type: "image_url" } - ] + ], + model: selectedModel }) } diff --git a/src/hooks/useMessageOption.tsx b/src/hooks/useMessageOption.tsx index 4e633f8..bceb5a6 100644 --- 
a/src/hooks/useMessageOption.tsx +++ b/src/hooks/useMessageOption.tsx @@ -33,6 +33,7 @@ import { useStoreChatModelSettings } from "@/store/model" import { getAllDefaultModelSettings } from "@/services/model-settings" import { pageAssistModel } from "@/models" import { getNoOfRetrievedDocs } from "@/services/app" +import { humanMessageFormatter } from "@/utils/human-message" export const useMessageOption = () => { const { @@ -68,7 +69,7 @@ export const useMessageOption = () => { } = useStoreMessageOption() const currentChatModelSettings = useStoreChatModelSettings() const [selectedModel, setSelectedModel] = useStorage("selectedModel") - const [ speechToTextLanguage, setSpeechToTextLanguage ] = useStorage( + const [speechToTextLanguage, setSpeechToTextLanguage] = useStorage( "speechToTextLanguage", "en-US" ) @@ -207,16 +208,17 @@ export const useMessageOption = () => { // message = message.trim().replaceAll("\n", " ") - let humanMessage = new HumanMessage({ + let humanMessage = humanMessageFormatter({ content: [ { text: message, type: "text" } - ] + ], + model: selectedModel }) if (image.length > 0) { - humanMessage = new HumanMessage({ + humanMessage = humanMessageFormatter({ content: [ { text: message, @@ -226,11 +228,12 @@ export const useMessageOption = () => { image_url: image, type: "image_url" } - ] + ], + model: selectedModel }) } - const applicationChatHistory = generateHistory(history) + const applicationChatHistory = generateHistory(history, selectedModel) if (prompt) { applicationChatHistory.unshift( @@ -412,16 +415,17 @@ export const useMessageOption = () => { const prompt = await systemPromptForNonRagOption() const selectedPrompt = await getPromptById(selectedSystemPrompt) - let humanMessage = new HumanMessage({ + let humanMessage = humanMessageFormatter({ content: [ { text: message, type: "text" } - ] + ], + model: selectedModel }) if (image.length > 0) { - humanMessage = new HumanMessage({ + humanMessage = humanMessageFormatter({ content: [ { text: 
message, @@ -431,11 +435,12 @@ export const useMessageOption = () => { image_url: image, type: "image_url" } - ] + ], + model: selectedModel }) } - const applicationChatHistory = generateHistory(history) + const applicationChatHistory = generateHistory(history, selectedModel) if (prompt && !selectedPrompt) { applicationChatHistory.unshift( @@ -695,7 +700,7 @@ export const useMessageOption = () => { }) // message = message.trim().replaceAll("\n", " ") - let humanMessage = new HumanMessage({ + let humanMessage = humanMessageFormatter({ content: [ { text: systemPrompt @@ -703,10 +708,11 @@ export const useMessageOption = () => { .replace("{question}", message), type: "text" } - ] + ], + model: selectedModel }) - const applicationChatHistory = generateHistory(history) + const applicationChatHistory = generateHistory(history, selectedModel) const chunks = await ollama.stream( [...applicationChatHistory, humanMessage], diff --git a/src/utils/generate-history.ts b/src/utils/generate-history.ts index dd6f446..cb4b466 100644 --- a/src/utils/generate-history.ts +++ b/src/utils/generate-history.ts @@ -1,55 +1,66 @@ +import { isCustomModel } from "@/db/models" import { - HumanMessage, - AIMessage, - type MessageContent, + HumanMessage, + AIMessage, + type MessageContent } from "@langchain/core/messages" export const generateHistory = ( - messages: { - role: "user" | "assistant" | "system" - content: string - image?: string - }[] + messages: { + role: "user" | "assistant" | "system" + content: string + image?: string + }[], + model: string ) => { - let history = [] - for (const message of messages) { - if (message.role === "user") { - let content: MessageContent = [ - { - type: "text", - text: message.content - } - ] - - if (message.image) { - content = [ - { - type: "image_url", - image_url: message.image - }, - { - type: "text", - text: message.content - } - ] + let history = [] + const isCustom = isCustomModel(model) + for (const message of messages) { + if (message.role === 
"user") { + let content: MessageContent = isCustom + ? message.content + : [ + { + type: "text", + text: message.content } - history.push( - new HumanMessage({ - content: content - }) - ) - } else if (message.role === "assistant") { - history.push( - new AIMessage({ - content: [ - { - type: "text", - text: message.content - } - ] - }) - ) - } + ] + + if (message.image) { + content = [ + { + type: "image_url", + image_url: !isCustom + ? message.image + : { + url: message.image + } + }, + { + type: "text", + text: message.content + } + ] + } + history.push( + new HumanMessage({ + content: content + }) + ) + } else if (message.role === "assistant") { + history.push( + new AIMessage({ + content: isCustom + ? message.content + : [ + { + type: "text", + text: message.content + } + ] + }) + ) } - return history -} \ No newline at end of file + } + return history +} diff --git a/src/utils/human-message.tsx b/src/utils/human-message.tsx new file mode 100644 index 0000000..6712339 --- /dev/null +++ b/src/utils/human-message.tsx @@ -0,0 +1,43 @@ +import { isCustomModel } from "@/db/models" +import { HumanMessage, type MessageContent } from "@langchain/core/messages" + + +type HumanMessageType = { + content: MessageContent, + model: string +} + +export const humanMessageFormatter = ({ content, model }: HumanMessageType) => { + + const isCustom = isCustomModel(model) + + if(isCustom) { + if(typeof content !== 'string') { + if(content.length > 1) { + // this means that we need to reformat the image_url + const newContent: MessageContent = [ + { + type: "text", + //@ts-ignore + text: content[0].text + }, + { + type: "image_url", + image_url: { + //@ts-ignore + url: content[1].image_url + } + } + ] + + return new HumanMessage({ + content: newContent + }) + } + } + } + + return new HumanMessage({ + content, + }) +} \ No newline at end of file From d7510333a7a850e47416f9ba5304ea35e599ee08 Mon Sep 17 00:00:00 2001 From: n4ze3m Date: Sun, 29 Sep 2024 23:59:41 +0530 Subject: [PATCH 
05/33] Bump version to 1.3.0 Prepares the project for the next release with updated version information. --- wxt.config.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/wxt.config.ts b/wxt.config.ts index bc9001e..eac1c2b 100644 --- a/wxt.config.ts +++ b/wxt.config.ts @@ -50,7 +50,7 @@ export default defineConfig({ outDir: "build", manifest: { - version: "1.2.4", + version: "1.3.0", name: process.env.TARGET === "firefox" ? "Page Assist - A Web UI for Local AI Models" From efec675b9a2adc29a937c8b879eaf8326aaa5145 Mon Sep 17 00:00:00 2001 From: n4ze3m Date: Mon, 30 Sep 2024 10:28:35 +0530 Subject: [PATCH 06/33] Fix: Replace empty API keys with temporary placeholder The previous code used an empty string for the `apiKey` when no key was provided, which could lead to unexpected behavior. This commit replaces those with a temporary placeholder ("temp") to avoid potential errors and make the code more robust. --- src/models/index.ts | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/models/index.ts b/src/models/index.ts index 07c134e..c33e312 100644 --- a/src/models/index.ts +++ b/src/models/index.ts @@ -44,11 +44,11 @@ export const pageAssistModel = async ({ return new ChatOpenAI({ modelName: modelInfo.model_id, - openAIApiKey: providerInfo.apiKey || "", + openAIApiKey: providerInfo.apiKey || "temp", temperature, topP, configuration: { - apiKey: providerInfo.apiKey || "", + apiKey: providerInfo.apiKey || "temp", baseURL: providerInfo.baseUrl || "", } }) as any From caeb923d00da8db54aaff8acb5704954ae6883b7 Mon Sep 17 00:00:00 2001 From: n4ze3m Date: Mon, 30 Sep 2024 10:28:45 +0530 Subject: [PATCH 07/33] Fix: Remove debugging log statement Removed a debugging `console.log` statement that was printing the `isCustom` variable and the model name. This statement was no longer necessary and was potentially causing issues. 
--- src/models/index.ts | 1 - 1 file changed, 1 deletion(-) diff --git a/src/models/index.ts b/src/models/index.ts index c33e312..d459e66 100644 --- a/src/models/index.ts +++ b/src/models/index.ts @@ -36,7 +36,6 @@ export const pageAssistModel = async ({ const isCustom = isCustomModel(model) - console.log("isCustom", isCustom, model) if (isCustom) { const modelInfo = await getModelInfo(model) From 4363ac1b9cb85969abce726748217a1d9124aded Mon Sep 17 00:00:00 2001 From: Abubakar115e <113997856+Abubakar115e@users.noreply.github.com> Date: Sun, 6 Oct 2024 08:21:29 +0200 Subject: [PATCH 08/33] Added support for Danish Language --- src/assets/locale/da/chrome.json | 13 + src/assets/locale/da/common.json | 105 +++++++++ src/assets/locale/da/knowledge.json | 43 ++++ src/assets/locale/da/option.json | 12 + src/assets/locale/da/playground.json | 29 +++ src/assets/locale/da/settings.json | 339 +++++++++++++++++++++++++++ src/assets/locale/da/sidepanel.json | 7 + src/i18n/support-language.ts | 8 + 8 files changed, 556 insertions(+) create mode 100644 src/assets/locale/da/chrome.json create mode 100644 src/assets/locale/da/common.json create mode 100644 src/assets/locale/da/knowledge.json create mode 100644 src/assets/locale/da/option.json create mode 100644 src/assets/locale/da/playground.json create mode 100644 src/assets/locale/da/settings.json create mode 100644 src/assets/locale/da/sidepanel.json diff --git a/src/assets/locale/da/chrome.json b/src/assets/locale/da/chrome.json new file mode 100644 index 0000000..2b05470 --- /dev/null +++ b/src/assets/locale/da/chrome.json @@ -0,0 +1,13 @@ +{ + "heading": "Konfigurer Chrome AI", + "status": { + "label": "Tænd eller sluk Chrome AI Support på Page Assist" + }, + "error": { + "browser_not_supported": "Denne version af Chrome er ikke undersøttet af Gemini Nano modelen. Opdater venligst til version 127 eller nyere", + "ai_not_supported": "Indstillingen chrome://flags/#prompt-api-for-gemini-nano er ikke tændt. 
Venligst tænd for indstillingen.", + "ai_not_ready": "Gemini Nano er ikke tilgængelig; du er nødt til at double-cheke Chrome indstillingerne.", + "internal_error": "Der opstod en intern fejl. Prøv venligst igen senere." + }, + "errorDescription": "For at bruge Chrome AI skal du bruge en browserversion, der er nyere end 127, hvilket kun er tilgængelig i Dev og Canary channels. Efter at have downloadet den undersøttet version, følg disse steps:\n\n1. Gå til `chrome://flags/#prompt-api-for-gemini-nano` og vælg \"Enable\".\n2. Gå til `chrome://flags/#optimization-guide-on-device-model` og vælg \"EnabledBypassPrefRequirement\".\n3. Gå til `chrome://components`, søg efter \"Optimization Guide On Device Model\", og click \"Søg efter opdatering\". Dette vil downloade modelen. Hvis du ikke kan se indstillingerne, gentag trin 1 og 2 og genstart din browser." +} \ No newline at end of file diff --git a/src/assets/locale/da/common.json b/src/assets/locale/da/common.json new file mode 100644 index 0000000..5a60ec5 --- /dev/null +++ b/src/assets/locale/da/common.json @@ -0,0 +1,105 @@ +{ + "pageAssist": "Page Assist", + "selectAModel": "Vælg en Model", + "save": "Gem", + "saved": "Gemt", + "cancel": "Fortryd", + "retry": "Prøv igen", + "share": { + "tooltip": { + "share": "Del" + }, + "modal": { + "title": "Del link til Chatten" + }, + "form": { + "defaultValue": { + "name": "Anonym", + "title": "Unavngivet chat" + }, + "title": { + "label": "Chattitel", + "placeholder": "Indtast chattitel", + "required": "Chattitel er nødvendig" + }, + "name": { + "label": "Dit navn", + "placeholder": "Indtast dit navn", + "required": "Dit navn er nødvendig" + }, + "btn": { + "save": "Generer et Link", + "saving": "Generering af link..." 
+ } + }, + "notification": { + "successGenerate": "Link kopied til udklipsholder", + "failGenerate": "Kunne ikke generere link" + } + }, + "copyToClipboard": "Kopier til udklipsholder", + "webSearch": "Søger på internettet", + "regenerate": "Regenerer", + "edit": "Ændre", + "saveAndSubmit": "Gem & Indsend", + "editMessage": { + "placeholder": "Skriv en besked..." + }, + "submit": "Indsend", + "noData": "Igen data", + "noHistory": "Igen chat history", + "chatWithCurrentPage": "Chat med nuværende side", + "beta": "Beta", + "tts": "Læs op", + "currentChatModelSettings": "Nuværende chat model indstillinger", + "modelSettings": { + "label": "Model Indstillinger", + "description": "Konfigurer model indstillingerne alle chats", + "form": { + "keepAlive": { + "label": "Hold i live", + "help": "controls how long the model will stay loaded into memory following the request (standard: 5m)", + "placeholder": "Indtast længden af sessionen (fx. 5m, 10m, 1t)" + }, + "temperature": { + "label": "Temperatur", + "placeholder": "Indtast Temperatur værdi (fx. 0.7, 1.0)" + }, + "numCtx": { + "label": "Længden af Kontekst", + "placeholder": "Instast Længden af Kontekst værdien (standard: 2048)" + }, + "seed": { + "label": "Seed", + "placeholder": "Indtast Seed værdi (fx. 1234)", + "help": "Reproducerbarhed af modeloutput" + }, + "topK": { + "label": "Top K", + "placeholder": "Indtast Top K værdi (fx. 40, 100)" + }, + "topP": { + "label": "Top P", + "placeholder": "Indtast Top P value (fx. 0.9, 0.95)" + }, + "numGpu": { + "label": "Antal GPU'er", + "placeholder": "Indtast antallet af lag, som sendes til GPU('er)" + }, + "systemPrompt": { + "label": "Midlertidige System Prompt", + "placeholder": "Indtast System Prompt", + "help": "Dette er en hurtig måde at indstille systemprompten i den aktuelle chat, som vil tilsidesætte den valgte systemprompt, hvis den findes." 
+ } + }, + "advanced": "Flere Model Indstillinger" + }, + "copilot": { + "summary": "Opsummer", + "explain": "Forklar", + "rephrase": "Omskriv", + "translate": "Oversæt", + "custom": "Brugerdefineret" + }, + "citations": "Citater" +} \ No newline at end of file diff --git a/src/assets/locale/da/knowledge.json b/src/assets/locale/da/knowledge.json new file mode 100644 index 0000000..b1ae090 --- /dev/null +++ b/src/assets/locale/da/knowledge.json @@ -0,0 +1,43 @@ +{ + "addBtn": "Tilføj Ny Viden", + "columns": { + "title": "Titel", + "status": "Status", + "embeddings": "Embedding Model", + "createdAt": "Oprettet At", + "action": "Handlinger" + }, + "expandedColumns": { + "name": "Navn" + }, + "tooltip": { + "delete": "Slet" + }, + "confirm": { + "delete": "Er du sikker på du vil slette denne viden?" + }, + "deleteSuccess": "Viden slettet med success", + "status": { + "pending": "Venter", + "finished": "Færdig", + "processing": "Processerer", + "failed": "Fejlet" + }, + "addKnowledge": "Tilføj Viden", + "form": { + "title": { + "label": "Viden Titel", + "placeholder": "Indtast viden titel", + "required": "Viden titel er nødvendig" + }, + "uploadFile": { + "label": "Upload Filer", + "uploadText": "Træk og slip denne fil here og klik upload", + "uploadHint": "Understøttet filtyper: .pdf, .csv, .txt, .md, .docx", + "required": "En fil er nødvendig" + }, + "submit": "Indsend", + "success": "Viden tilføjet med success" + }, + "noEmbeddingModel": "Tilføj venligst en embedding model fra RAG indstillingerne først" +} \ No newline at end of file diff --git a/src/assets/locale/da/option.json b/src/assets/locale/da/option.json new file mode 100644 index 0000000..4548680 --- /dev/null +++ b/src/assets/locale/da/option.json @@ -0,0 +1,12 @@ +{ + "newChat": "Ny Chat", + "selectAPrompt": "Vælg en Prompt", + "githubRepository": "GitHub Repository", + "settings": "Indstillinger", + "sidebarTitle": "Chathistorik", + "error": "Fejl", + "somethingWentWrong": "Noget gik galt", + 
"validationSelectModel": "Venligst vælg en model for at forsæætte", + "deleteHistoryConfirmation": "Er du sikker på at du vil slette denne historik?", + "editHistoryTitle": "Indtast en ny titel" +} \ No newline at end of file diff --git a/src/assets/locale/da/playground.json b/src/assets/locale/da/playground.json new file mode 100644 index 0000000..f8df2b2 --- /dev/null +++ b/src/assets/locale/da/playground.json @@ -0,0 +1,29 @@ +{ + "ollamaState": { + "searching": "Søger efter din Ollama 🦙", + "running": "Ollama kør 🦙", + "notRunning": "Kan ikke oprette forbindelse til Ollama 🦙", + "connectionError": "Det lader til, at du har en forbindelsesfejl. Se venligst denne dokumentation for fejlfinding." + }, + "formError": { + "noModel": "Vælg venligst en model", + "noEmbeddingModel": "Vælg venligst en embedding model under indstillinger > RAG side" + }, + "form": { + "textarea": { + "placeholder": "Skriv en besked..." + }, + "webSearch": { + "on": "Til", + "off": "Fra" + } + }, + "tooltip": { + "searchInternet": "Søg Internettet", + "speechToText": "Tal til Tekst", + "uploadImage": "Upload Billed", + "stopStreaming": "Stop Streaming", + "knowledge": "Viden" + }, + "sendWhenEnter": "Søg, når Indtast trykkes" +} \ No newline at end of file diff --git a/src/assets/locale/da/settings.json b/src/assets/locale/da/settings.json new file mode 100644 index 0000000..7c98035 --- /dev/null +++ b/src/assets/locale/da/settings.json @@ -0,0 +1,339 @@ +{ + "generalSettings": { + "title": "Generelle Indstillinger", + "settings": { + "heading": "Web UI Indstillinger", + "speechRecognitionLang": { + "label": "Talegenkendelse Sprog", + "placeholder": "Vælg et sprog" + }, + "language": { + "label": "Sprog", + "placeholder": "Vælg et sprog" + }, + "darkMode": { + "label": "Ændre Tema", + "options": { + "light": "Lyst", + "dark": "Mørkt" + } + }, + "copilotResumeLastChat": { + "label": "Genoptag den sidste chat, når du åbner SidePanel (copilot)" + }, + "hideCurrentChatModelSettings": { + 
"label": "Skjul nuværende chatmodelindstillinger" + }, + "restoreLastChatModel": { + "label": "Gendan sidste brugte chatmodel fremtidigt" + }, + "sendNotificationAfterIndexing": { + "label": "Send besked efter færdigbehandling af vidensbasen" + }, + "generateTitle" :{ + "label": "Generer titel med AI" + } + }, + "sidepanelRag": { + "heading": "Copilot Chat med Website Indstillinger", + "ragEnabled": { + "label": "Chat med websitet ved brug af vektorindlejringer (vector embeddings)" + }, + "maxWebsiteContext": { + "label": "Normal webstedsindhold størrelse", + "placeholder": "Indholdsstørrelse (standard 4028)" + } + }, + "webSearch": { + "heading": "Administrer Web Søgning", + "searchMode": { + "label": "Søge Tilstand" + }, + "provider": { + "label": "Søgemaskine", + "placeholder": "Vælg en søgemaskine" + }, + "totalSearchResults": { + "label": "Antal søgeresultater", + "placeholder": "Indtast antal Søgeresultater" + }, + "visitSpecificWebsite": { + "label": "Besøg websitet nævnt i samtalen" + } + }, + "system": { + "heading": "Systemindstillinger", + "deleteChatHistory": { + "label": "Slet Chathistorik", + "button": "Slet", + "confirm": "Er du sikker på, at du vil slette din chathistorik? Denne handling kan ikke fortrydes." 
+ + }, + "export": { + "label": "Eksporter chathistorik, vidensbase og prompts", + "button": "Eksporter Data", + "success": "Export lykkedes" + }, + "import": { + "label": "Importer chathistorik, vidensbase og prompts", + "button": "Importer Data", + "success": "Import lykkedes", + "error": "Importfejl" + } + }, + "tts": { + "heading": "Tekst-til-tale Indstillinger", + "ttsEnabled": { + "label": "Tilføj Teskt-til-Tale" + }, + "ttsProvider": { + "label": "Tekst-til-Tale Udbyder", + "placeholder": "Vælg en udbyder" + }, + "ttsVoice": { + "label": "Tekst-til-Tale Stemme", + "placeholder": "Vælg en stemme" + }, + "ssmlEnabled": { + "label": "Aktiver SSML (Speech Synthesis Markup Language)" + } + } + }, + "manageModels": { + "title": "Administer Modeller", + "addBtn": "Tilføj ny Model", + "columns": { + "name": "Navn", + "digest": "Digest", + "modifiedAt": "Ændret den", + "size": "Størrelse", + "actions": "Handlinger" + }, + "expandedColumns": { + "parentModel": "Forælder model", + "format": "Format", + "family": "Familie", + "parameterSize": "Parameterstørrelse", + "quantizationLevel": "kvantificeringsniveau" + }, + "tooltip": { + "delete": "Slet Model", + "repull": "Hent Model Igen" + }, + "confirm": { + "delete": "Er du sikker på, at du vil slette denne model?", + "repull": "Er du sikker på, at du vil hente denne model igen?" + }, + "modal": { + "title": "Tilføj Ny Model", + "placeholder": "Indtast Modelnavn", + "pull": "Hent Model" + }, + "notification": { + "pullModel": "Henter Model", + "pullModelDescription": "Henter {{modelName}} model. For flere detaljer, tjek udvidelsesikonet.", + "success": "Det virkede", + "error": "Fejl", + "successDescription": "Det lykkedes at hente modellen", + "successDeleteDescription": "Det lykkedes at slette modellen", + "someError": "Noget gik galt. 
Venligst prøv igen senere" + } + }, + "managePrompts": { + "title": "Administrer Prompts", + "addBtn": "Tilføj Ny Prompt", + "option1": "Normal", + "option2": "RAG", + "questionPrompt": "Spørgsmålsprompt", + "segmented": { + "custom": "Brugerdefinerede Prompts", + "copilot": "Copilot Prompts" + }, + "columns": { + "title": "Titel", + "prompt": "Prompt", + "type": "Prompttype", + "actions": "Handlinger" + }, + "systemPrompt": "Systemprompt", + "quickPrompt": "Hurtig Prompt", + "tooltip": { + "delete": "Slet Prompt", + "edit": "Ændre Prompt" + }, + "confirm": { + "delete": "Er du sikker på, at du vil slette denne prompt? Denne handling kan ikke fortrydes." }, + "modal": { + "addTitle": "Tilføj ny Prompt", + "editTitle": "Ændre Prompt" + }, + "form": { + "title": { + "label": "Titel", + "placeholder": "Min Seje Prompt", + "required": "Indtast venligst en titel" + }, + "prompt": { + "label": "Prompt", + "placeholder": "Indtast Prompt", + "required": "Venligst indtast en prompt", + "help": "Du kan bruge {key} som variabel i din prompt.", + "missingTextPlaceholder": "Variablen {text} mangler i prompten. Tilføj venligst dette." + }, + "isSystem": { + "label": "Er Systemprompt" + }, + "btnSave": { + "saving": "Tilføjer Prompt...", + "save": "Tilføj Prompt" + }, + "btnEdit": { + "saving": "Opdaterer Prompt...", + "save": "Opdater Prompt" + } + }, + "notification": { + "addSuccess": "Prompt Tilføjet", + "addSuccessDesc": "Prompt blev tilføjet med succes", + "error": "Fejl", + "someError": "Noget gik galt. 
Prøv venligst igen senere", + "updatedSuccess": "Prompt Opdateret", + "updatedSuccessDesc": "Prompt blev opdateret med succes", + "deletedSuccess": "Prompt Slettet", + "deletedSuccessDesc": "Prompt blev slettet med succes" + } + }, + "manageShare": { + "title": "Administrer Deling", + "heading": "Konfigurerer Page deling URL", + "form": { + "url": { + "label": "Page Deling URL", + "placeholder": "Indtast websted deling URL", + "required": "Venligst indstast din Page deling URL!", + "help": "Af hensyn til privatliv kan du selv hoste side delingen og angive URL'en her. Lær Mere." } + }, + "webshare": { + "heading": "Web Deling", + "columns": { + "title": "Titel", + "url": "URL", + "actions": "Handlinger" + }, + "tooltip": { + "delete": "Slet Deling" + }, + "confirm": { + "delete": "Er du sikker på du vil slette denne deling? Dette kan ikke fortrydes." + }, + "label": "Administrer Page Deling", + "description": "Tilføj eller disable the page share feature" + }, + "notification": { + "pageShareSuccess": "Page Deling URL Updateret korrekt", + "someError": "Noget gik galt. 
Prøv venligst igen senere", + "webShareDeleteSuccess": "Webdeling er slettet korrekt" + } + }, + "ollamaSettings": { + "title": "Ollama Indstillinger", + "heading": "Konfigurerer Ollama", + "settings": { + "ollamaUrl": { + "label": "Ollama URL", + "placeholder": "Indtast Ollama URL" + }, + "advanced": { + "label": "Avanceret Ollama URL Konfiguration", + "urlRewriteEnabled": { + "label": "Aktiver eller Deaktiver Tilpasset Oprindelses-URL" + }, + "rewriteUrl": { + "label": "Tilpasset Oprindelses URL", + "placeholder": "Indtast tilpasset oprindelses URL" + }, + "headers": { + "label": "Tilpas Headers", + "Tilføj": "Tilføj Header", + "key": { + "label": "Header Værdi", + "placeholder": "Autorisation" + }, + "value": { + "label": "Header Value", + "placeholder": "Bearer token" + } + }, + "help": "Hvis du har forbindelsesproblemer med Ollama på Page Assist, kan du konfigurere en brugerdefineret oprindelses-URL. For mere information om konfigurationen, klik her." } + } + }, + "manageSearch": { + "title": "Administrer Web Search", + "heading": "Konfigurerer Web Search" + }, + "about": { + "title": "Om", + "heading": "Om", + "chromeVersion": "Page Assist Version", + "ollamaVersion": "Ollama Version", + "support": "Du kan støtte Page Assist-projektet ved at donere eller sponsorere via følgende platforme:", + "koFi": "Støt på Ko-fi", + "githubSponsor": "Sponsor på GitHub", + "githubRepo": "GitHub Repository" + }, + "manageKnowledge": { + "title": "Administrer Viden", + "heading": "konfigurer Videnbase" + }, + "rag": { + "title": "RAG Indstillinger", + "ragSettings": { + "label": "RAG Indstillinger", + "model": { + "label": "Embedding Model", + "required": "Vælg venligst en model", + "help": "Det anbefales stærkt at bruge indlejringsmodeller som `nomic-embed-text`.", + "placeholder": "Vælg a model" + }, + "chunkSize": { + "label": "Chunk Størrelse", + "placeholder": "Indtast Chunk Størrelse", + "required": "Venligst indtast en chunk størrelse" + }, + "chunkOverlap": { + 
"label": "Chunk Overlap", + "placeholder": "Indtast Chunk Overlap", + "required": "Indtast venligst chunk overlap" + }, + "totalFilePerKB": { + "label": "Videnbase Standard Fil Upload Grænse", + "placeholder": "Indtast standard fil upload grænse (f.eks. 10)", + "required": "Indtast venligst standard fil upload grænsen" + }, + "noOfRetrievedDocs": { + "label": "Antal Hentede Dokumenter", + "placeholder": "Indtast Number of Retrieved Documents", + "required": "Venligst indtast the number of retrieved documents" + } + }, + "prompt": { + "label": "Konfigurer RAG Prompt", + "option1": "Normal", + "option2": "Web", + "alert": "Konfigurering af systemprompt her er forældet. Venligst brug Administrer Prompts sektionen til, at tilføje eller ændre prompts. Denne sektion vil blive fjernet i fremtidige versioner.", + "systemPrompt": "System Prompt", + "systemPromptPlaceholder": "Indtast System Prompt", + "webSearchPrompt": "Websøgningsprompt", + "webSearchPromptHelp": "Fjern ikke `{search_results}` fra prompten.", + "webSearchPromptError": "Venligst indtast a web search prompt", + "webSearchPromptPlaceholder": "Indtast Websøgningsprompt", + "webSearchFollowUpPrompt": "Web Search Follow Up Prompt", + "webSearchFollowUpPromptHelp": "Do not remove `{chat_history}` og `{question}` from the prompt.", + "webSearchFollowUpPromptError": "Indtast venligst din websøgning opfølgende prompt!", + "webSearchFollowUpPromptPlaceholder": "Din Websøgnings opfølgende Prompt" + } + }, + "chromeAiSettings": { + "title": "Chrome AI Indstillinger" + } +} diff --git a/src/assets/locale/da/sidepanel.json b/src/assets/locale/da/sidepanel.json new file mode 100644 index 0000000..d9abf2f --- /dev/null +++ b/src/assets/locale/da/sidepanel.json @@ -0,0 +1,7 @@ +{ + "tooltip": { + "embed": "Det kan tage et par minutter at indlejre siden. 
Vent venligst...", + "clear": "Slet chat historiken", + "history": "Chat historik" + } +} \ No newline at end of file diff --git a/src/i18n/support-language.ts b/src/i18n/support-language.ts index 17658dc..c9bbfa1 100644 --- a/src/i18n/support-language.ts +++ b/src/i18n/support-language.ts @@ -43,5 +43,13 @@ export const supportLanguage = [ { label: "Deutsch", value: "de" + }, + { + label: "Dansk", + value: "da" + }, + { + label: "Norsk", + value: "no" } ] From 7b26e64dacc5dd687422814af0d82e260d594271 Mon Sep 17 00:00:00 2001 From: Abubakar115e <113997856+Abubakar115e@users.noreply.github.com> Date: Sun, 6 Oct 2024 08:46:24 +0200 Subject: [PATCH 09/33] Danish language support is fully functioning --- src/i18n/index.ts | 4 +++- src/i18n/lang/da.ts | 17 ++++++++++++++++ src/public/_locales/da/messages.json | 29 ++++++++++++++++++++++++++++ 3 files changed, 49 insertions(+), 1 deletion(-) create mode 100644 src/i18n/lang/da.ts create mode 100644 src/public/_locales/da/messages.json diff --git a/src/i18n/index.ts b/src/i18n/index.ts index 2a5029e..f812efb 100644 --- a/src/i18n/index.ts +++ b/src/i18n/index.ts @@ -10,6 +10,7 @@ import { ja } from "./lang/ja"; import { it } from "./lang/it"; import { es } from "./lang/es"; import { fa } from "./lang/fa"; +import { da } from "./lang/da"; import { de } from "./lang/de"; import LanguageDetector from 'i18next-browser-languagedetector'; @@ -32,7 +33,8 @@ i18n "ja-JP": ja, fa: fa, "fa-IR": fa, - de: de + de: de, + da: da }, fallbackLng: "en", lng: localStorage.getItem("i18nextLng") || "en", diff --git a/src/i18n/lang/da.ts b/src/i18n/lang/da.ts new file mode 100644 index 0000000..83caf80 --- /dev/null +++ b/src/i18n/lang/da.ts @@ -0,0 +1,17 @@ +import option from "@/assets/locale/da/option.json"; +import playground from "@/assets/locale/da/playground.json"; +import common from "@/assets/locale/da/common.json"; +import sidepanel from "@/assets/locale/da/sidepanel.json"; +import settings from "@/assets/locale/da/settings.json"; 
+import knowledge from "@/assets/locale/da/knowledge.json"; +import chrome from "@/assets/locale/da/chrome.json"; + +export const da = { + option, + playground, + common, + sidepanel, + settings, + knowledge, + chrome +} \ No newline at end of file diff --git a/src/public/_locales/da/messages.json b/src/public/_locales/da/messages.json new file mode 100644 index 0000000..b0b239d --- /dev/null +++ b/src/public/_locales/da/messages.json @@ -0,0 +1,29 @@ +{ + "extName": { + "message": "Page Assist - En Web UI for at køre AI modeller lokalt" + }, + "extDescription": { + "message": "Brug dine lokalt kørende AI-modeller til at hjælpe dig med at surfe på nettet." + }, + "openSidePanelToChat": { + "message": "Åben Copilot for at Chatte" + }, + "openOptionToChat": { + "message": "Åben Web UI for at Chatte" + }, + "contextSummarize": { + "message": "Opsummer" + }, + "contextExplain": { + "message": "Forklar" + }, + "contextRephrase": { + "message": "Omskriv" + }, + "contextTranslate" :{ + "message": "Oversæt" + }, + "contextCustom": { + "message": "Tilpasset" + } +} \ No newline at end of file From 43a88ab83c108efbdcaa95e8209f346461206d3f Mon Sep 17 00:00:00 2001 From: Abubakar115e <113997856+Abubakar115e@users.noreply.github.com> Date: Sun, 6 Oct 2024 08:54:33 +0200 Subject: [PATCH 10/33] Everythings works for Danish now --- src/i18n/lang/da.ts | 17 ++++++++++++++++ src/public/_locales/da/messages.json | 29 ++++++++++++++++++++++++++++ 2 files changed, 46 insertions(+) create mode 100644 src/i18n/lang/da.ts create mode 100644 src/public/_locales/da/messages.json diff --git a/src/i18n/lang/da.ts b/src/i18n/lang/da.ts new file mode 100644 index 0000000..83caf80 --- /dev/null +++ b/src/i18n/lang/da.ts @@ -0,0 +1,17 @@ +import option from "@/assets/locale/da/option.json"; +import playground from "@/assets/locale/da/playground.json"; +import common from "@/assets/locale/da/common.json"; +import sidepanel from "@/assets/locale/da/sidepanel.json"; +import settings from 
"@/assets/locale/da/settings.json"; +import knowledge from "@/assets/locale/da/knowledge.json"; +import chrome from "@/assets/locale/da/chrome.json"; + +export const da = { + option, + playground, + common, + sidepanel, + settings, + knowledge, + chrome +} \ No newline at end of file diff --git a/src/public/_locales/da/messages.json b/src/public/_locales/da/messages.json new file mode 100644 index 0000000..b0b239d --- /dev/null +++ b/src/public/_locales/da/messages.json @@ -0,0 +1,29 @@ +{ + "extName": { + "message": "Page Assist - En Web UI for at køre AI modeller lokalt" + }, + "extDescription": { + "message": "Brug dine lokalt kørende AI-modeller til at hjælpe dig med at surfe på nettet." + }, + "openSidePanelToChat": { + "message": "Åben Copilot for at Chatte" + }, + "openOptionToChat": { + "message": "Åben Web UI for at Chatte" + }, + "contextSummarize": { + "message": "Opsummer" + }, + "contextExplain": { + "message": "Forklar" + }, + "contextRephrase": { + "message": "Omskriv" + }, + "contextTranslate" :{ + "message": "Oversæt" + }, + "contextCustom": { + "message": "Tilpasset" + } +} \ No newline at end of file From 832c565303d086afd0dfa0f2a808ed6db2634361 Mon Sep 17 00:00:00 2001 From: Abubakar115e <113997856+Abubakar115e@users.noreply.github.com> Date: Sun, 6 Oct 2024 09:05:51 +0200 Subject: [PATCH 11/33] n4ze3m you can now merge the two git branches --- src/i18n/index.ts | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/src/i18n/index.ts b/src/i18n/index.ts index 2a5029e..ed0c96c 100644 --- a/src/i18n/index.ts +++ b/src/i18n/index.ts @@ -11,6 +11,7 @@ import { it } from "./lang/it"; import { es } from "./lang/es"; import { fa } from "./lang/fa"; import { de } from "./lang/de"; +import { da } from "./lang/da"; import LanguageDetector from 'i18next-browser-languagedetector'; i18n @@ -32,7 +33,8 @@ i18n "ja-JP": ja, fa: fa, "fa-IR": fa, - de: de + de: de, + da: da }, fallbackLng: "en", lng: localStorage.getItem("i18nextLng") || "en", From 
46d2e6a75b12ac9618ea0d4523510f524285f593 Mon Sep 17 00:00:00 2001 From: Abubakar115e <113997856+Abubakar115e@users.noreply.github.com> Date: Thu, 10 Oct 2024 17:29:08 +0200 Subject: [PATCH 12/33] Norwegian langauge support, but I need to do some small fixes --- src/assets/locale/da/chrome.json | 2 +- src/assets/locale/no/chrome.json | 13 ++ src/assets/locale/no/common.json | 105 +++++++++++ src/assets/locale/no/knowledge.json | 43 +++++ src/assets/locale/no/option.json | 12 ++ src/assets/locale/no/playground.json | 29 +++ src/assets/locale/no/settings.json | 262 +++++++++++++++++++++++++++ src/assets/locale/no/sidepanel.json | 7 + src/i18n/index.ts | 1 + src/i18n/lang/no.ts | 17 ++ src/public/_locales/no/messages.json | 29 +++ 11 files changed, 519 insertions(+), 1 deletion(-) create mode 100644 src/assets/locale/no/chrome.json create mode 100644 src/assets/locale/no/common.json create mode 100644 src/assets/locale/no/knowledge.json create mode 100644 src/assets/locale/no/option.json create mode 100644 src/assets/locale/no/playground.json create mode 100644 src/assets/locale/no/settings.json create mode 100644 src/assets/locale/no/sidepanel.json create mode 100644 src/i18n/lang/no.ts create mode 100644 src/public/_locales/no/messages.json diff --git a/src/assets/locale/da/chrome.json b/src/assets/locale/da/chrome.json index 2b05470..7c5e6d9 100644 --- a/src/assets/locale/da/chrome.json +++ b/src/assets/locale/da/chrome.json @@ -4,7 +4,7 @@ "label": "Tænd eller sluk Chrome AI Support på Page Assist" }, "error": { - "browser_not_supported": "Denne version af Chrome er ikke undersøttet af Gemini Nano modelen. Opdater venligst til version 127 eller nyere", + "browser_not_supported": "Denne version af Chrome er ikke understøttet af Gemini Nano modelen. Opdater venligst til version 127 eller nyere", "ai_not_supported": "Indstillingen chrome://flags/#prompt-api-for-gemini-nano er ikke tændt. 
Venligst tænd for indstillingen.", + "ai_not_ready": "Gemini Nano er ikke tilgængelig; du er nødt til at double-cheke Chrome indstillingerne.", + "internal_error": "Der opstod en intern fejl. Prøv venligst igen senere." diff --git a/src/assets/locale/no/chrome.json b/src/assets/locale/no/chrome.json new file mode 100644 index 0000000..a37c08d --- /dev/null +++ b/src/assets/locale/no/chrome.json @@ -0,0 +1,13 @@ +{ + "heading": "Konfigurer Chrome AI", + "status": { + "label": "Slå Chrome AI Support på eller av på Page Assist" + }, + "error": { + "browser_not_supported": "Denne versjonen av Chrome støttes ikke av Gemini Nano-modellen. Vennligst oppdater til versjon 127 eller nyere", + "ai_not_supported": "Innstillingen chrome://flags/#prompt-api-for-gemini-nano er ikke slått på. Slå på innstillingen.", + "ai_not_ready": "Gemini Nano er ikke tilgjengelig; du må dobbeltsjekke Chrome-innstillingene.", + "internal_error": "Det oppsto en intern feil. Vennligst prøv på nytt senere." + }, + "errorDescription": "For å bruke Chrome AI trenger du en nettleserversjon nyere enn 127, som kun er tilgjengelig i Dev- og Canary-kanaler. Etter å ha lastet ned den støttede versjonen, følg disse trinnene:\n\n1. Gå til `chrome://flags/#prompt-api-for-gemini-nano` og velg \"Enable\".\n2. Gå til `chrome://flags/#optimization-guide-on-device-model` og velg \"EnabledBypassPrefRequirement\".\n3. Gå til URL-linjen `chrome://components`, søk på \"Optimization Guide On Device Model\", og klikk \"Search for update\". Dette vil laste ned modellen. Hvis du ikke ser innstillingene, gjentar du trinn 1 og 2 og starter nettleseren på nytt."
+} \ No newline at end of file diff --git a/src/assets/locale/no/common.json b/src/assets/locale/no/common.json new file mode 100644 index 0000000..99c91ce --- /dev/null +++ b/src/assets/locale/no/common.json @@ -0,0 +1,105 @@ +{ + "pageAssist": "Sideassistent", + "selectAModel": "Velg en modell", + "save": "Lagre", + "saved": "Lagret", + "cancel": "Avbryt", + "retry": "Prøv igjen", + "share": { + "tooltip": { + "share": "Del" + }, + "modal": { + "title": "Del lenke til chatten" + }, + "form": { + "defaultValue": { + "name": "Anonym", + "title": "Navnløs chat" + }, + "title": { + "label": "Chattittel", + "placeholder": "Skriv inn chattittel", + "required": "Chattittel er nødvendig" + }, + "name": { + "label": "Ditt navn", + "placeholder": "Skriv inn ditt navn", + "required": "Ditt navn er nødvendig" + }, + "btn": { + "save": "Generer en lenke", + "saving": "Genererer lenke..." + } + }, + "notification": { + "successGenerate": "Lenke kopiert til utklippstavle", + "failGenerate": "Kunne ikke generere lenke" + } + }, + "copyToClipboard": "Kopier til utklippstavle", + "webSearch": "Søker på internett", + "regenerate": "Regenerer", + "edit": "Endre", + "saveAndSubmit": "Lagre & Send inn", + "editMessage": { + "placeholder": "Skriv en melding..." + }, + "submit": "Send inn", + "noData": "Ingen data", + "noHistory": "Ingen chathistorikk", + "chatWithCurrentPage": "Chat med nåværende side", + "beta": "Beta", + "tts": "Les opp", + "currentChatModelSettings": "Nåværende chatmodellinnstillinger", + "modelSettings": { + "label": "Modellinnstillinger", + "description": "Konfigurer modellinnstillingene for alle chatter", + "form": { + "keepAlive": { + "label": "Hold i live", + "help": "kontrollerer hvor lenge modellen vil forbli lastet i minnet etter forespørselen (standard: 5m)", + "placeholder": "Skriv inn lengden på økten (f.eks. 5m, 10m, 1t)" + }, + "temperature": { + "label": "Temperatur", + "placeholder": "Skriv inn temperaturverdi (f.eks. 
0.7, 1.0)" + }, + "numCtx": { + "label": "Kontekstlengde", + "placeholder": "Skriv inn kontekstlengdeverdi (standard: 2048)" + }, + "seed": { + "label": "Seed", + "placeholder": "Skriv inn seedverdi (f.eks. 1234)", + "help": "Reproduserbarhet av modellutdata" + }, + "topK": { + "label": "Topp K", + "placeholder": "Skriv inn Topp K-verdi (f.eks. 40, 100)" + }, + "topP": { + "label": "Topp P", + "placeholder": "Skriv inn Topp P-verdi (f.eks. 0.9, 0.95)" + }, + "numGpu": { + "label": "Antall GPUer", + "placeholder": "Skriv inn antall lag som sendes til GPU(er)" + }, + "systemPrompt": { + "label": "Midlertidig systemprompt", + "placeholder": "Skriv inn systemprompt", + "help": "Dette er en rask måte å sette systemprompt i den nåværende chatten, som vil overstyre den valgte systemprompt hvis den finnes." + } + }, + "advanced": "Flere modellinnstillinger" + }, + "copilot": { + "summary": "Oppsummer", + "explain": "Forklar", + "rephrase": "Omformulér", + "translate": "Oversett", + "custom": "Egendefinert" + }, + "citations": "Sitater" +} \ No newline at end of file diff --git a/src/assets/locale/no/knowledge.json b/src/assets/locale/no/knowledge.json new file mode 100644 index 0000000..c5f7311 --- /dev/null +++ b/src/assets/locale/no/knowledge.json @@ -0,0 +1,43 @@ +{ + "addBtn": "Legg Til Ny Kunnskap", + "columns": { + "title": "Tittel", + "status": "Status", + "embeddings": "Embedding Modell", + "createdAt": "Opprettet På", + "action": "Handlinger" + }, + "expandedColumns": { + "name": "Navn" + }, + "tooltip": { + "delete": "Slett" + }, + "confirm": { + "delete": "Er du sikker på at du vil slette denne kunnskapen?" 
+ }, + "deleteSuccess": "Kunnskap slettet med suksess", + "status": { + "pending": "Venter", + "finished": "Ferdig", + "processing": "Behandler", + "failed": "Mislyktes" + }, + "addKnowledge": "Legg Til Kunnskap", + "form": { + "title": { + "label": "Kunnskapstittel", + "placeholder": "Skriv inn kunnskapstittel", + "required": "Kunnskapstittel er nødvendig" + }, + "uploadFile": { + "label": "Last Opp Filer", + "uploadText": "Dra og slipp filen her og klikk for å laste opp", + "uploadHint": "Støttede filtyper: .pdf, .csv, .txt, .md, .docx", + "required": "En fil er nødvendig" + }, + "submit": "Send Inn", + "success": "Kunnskap lagt til med suksess" + }, + "noEmbeddingModel": "Vennligst legg til en embedding-modell fra RAG-innstillingene først" +} \ No newline at end of file diff --git a/src/assets/locale/no/option.json b/src/assets/locale/no/option.json new file mode 100644 index 0000000..76d335e --- /dev/null +++ b/src/assets/locale/no/option.json @@ -0,0 +1,12 @@ +{ + "newChat": "Ny Chat", + "selectAPrompt": "Velg en Prompt", + "githubRepository": "GitHub Repository", + "settings": "Innstillinger", + "sidebarTitle": "Chathistorikk", + "error": "Feil", + "somethingWentWrong": "Noe gikk galt", + "validationSelectModel": "Vennligst velg en modell for å fortsette", + "deleteHistoryConfirmation": "Er du sikker på at du vil slette denne historikken?", + "editHistoryTitle": "Skriv inn en ny tittel" +} diff --git a/src/assets/locale/no/playground.json b/src/assets/locale/no/playground.json new file mode 100644 index 0000000..b812649 --- /dev/null +++ b/src/assets/locale/no/playground.json @@ -0,0 +1,29 @@ +{ + "ollamaState": { + "searching": "Søker etter din Ollama 🦙", + "running": "Ollama kjører 🦙", + "notRunning": "Kan ikke koble til Ollama 🦙", + "connectionError": "Det ser ut til at du har en tilkoblingsfeil. Vennligst se denne dokumentasjonen for feilsøking." 
+ }, + "formError": { + "noModel": "Vennligst velg en modell", + "noEmbeddingModel": "Vennligst velg en embedding-modell under innstillinger > RAG-siden" + }, + "form": { + "textarea": { + "placeholder": "Skriv en melding..." + }, + "webSearch": { + "on": "På", + "off": "Av" + } + }, + "tooltip": { + "searchInternet": "Søk på Internett", + "speechToText": "Tale til Tekst", + "uploadImage": "Last opp Bilde", + "stopStreaming": "Stopp Streaming", + "knowledge": "Kunnskap" + }, + "sendWhenEnter": "Søk når Enter trykkes" +} diff --git a/src/assets/locale/no/settings.json b/src/assets/locale/no/settings.json new file mode 100644 index 0000000..2da5fb3 --- /dev/null +++ b/src/assets/locale/no/settings.json @@ -0,0 +1,262 @@ +{ + "generalSettings": { + "title": "Generelle Innstillinger", + "settings": { + "heading": "Web UI Innstillinger", + "speechRecognitionLang": { + "label": "Talegjenkjenning Språk", + "placeholder": "Velg et språk" + }, + "language": { + "label": "Språk", + "placeholder": "Velg et språk" + }, + "darkMode": { + "label": "Endre Tema", + "options": { + "light": "Lyst", + "dark": "Mørkt" + } + }, + "copilotResumeLastChat": { + "label": "Gjenoppta siste chat ved åpning av SidePanel (copilot)" + }, + "hideCurrentChatModelSettings": { + "label": "Skjul gjeldende chatmodellinnstillinger" + }, + "restoreLastChatModel": { + "label": "Gjenopprett sist brukte chatmodell for fremtidig bruk" + }, + "sendNotificationAfterIndexing": { + "label": "Send varsel etter ferdigbehandling av kunnskapsbasen" + }, + "generateTitle" :{ + "label": "Generer tittel med AI" + } + }, + "sidepanelRag": { + "heading": "Copilot Chat med Nettsted Innstillinger", + "ragEnabled": { + "label": "Chat med nettstedet ved bruk av vektorinnleggelser (vector embeddings)" + }, + "maxWebsiteContext": { + "label": "Normal nettstedinnholds størrelse", + "placeholder": "Innholdsstørrelse (standard 4028)" + } + }, + "webSearch": { + "heading": "Administrer Web Søk", + "searchMode": { + "label": 
"Søkemodus" + }, + "provider": { + "label": "Søkemotor", + "placeholder": "Velg en søkemotor" + }, + "totalSearchResults": { + "label": "Antall søkeresultater", + "placeholder": "Skriv inn antall søkeresultater" + }, + "visitSpecificWebsite": { + "label": "Besøk nettstedet nevnt i samtalen" + } + }, + "system": { + "heading": "Systeminnstillinger", + "deleteChatHistory": { + "label": "Slett Chathistorikk", + "button": "Slett", + "confirm": "Er du sikker på at du vil slette chathistorikken din? Denne handlingen kan ikke angres." + }, + "export": { + "label": "Eksporter chathistorikk, kunnskapsbase og prompts", + "button": "Eksporter Data", + "success": "Eksport vellykket" + }, + "import": { + "label": "Importer chathistorikk, kunnskapsbase og prompts", + "button": "Importer Data", + "success": "Import vellykket", + "error": "Importfeil" + } + }, + "tts": { + "heading": "Tekst-til-tale Innstillinger", + "ttsEnabled": { + "label": "Legg til Tekst-til-Tale" + }, + "ttsProvider": { + "label": "Tekst-til-Tale Tilbyder", + "placeholder": "Velg en tilbyder" + }, + "ttsVoice": { + "label": "Tekst-til-Tale Stemme", + "placeholder": "Velg en stemme" + }, + "ssmlEnabled": { + "label": "Aktiver SSML (Speech Synthesis Markup Language)" + } + } + }, + "manageModels": { + "title": "Administrer Modeller", + "addBtn": "Legg til ny Modell", + "columns": { + "name": "Navn", + "digest": "Digest", + "modifiedAt": "Endret den", + "size": "Størrelse", + "actions": "Handlinger" + }, + "expandedColumns": { + "parentModel": "Overordnet modell", + "format": "Format", + "family": "Familie", + "parameterSize": "Parameterstørrelse", + "quantizationLevel": "Kvantifiseringsnivå" + }, + "tooltip": { + "delete": "Slett Modell", + "repull": "Hent Modell På Nytt" + }, + "confirm": { + "delete": "Er du sikker på at du vil slette denne modellen?", + "repull": "Er du sikker på at du vil hente denne modellen på nytt?" 
+ }, + "modal": { + "title": "Legg til Ny Modell", + "placeholder": "Skriv inn Modellnavn", + "pull": "Hent Modell" + }, + "notification": { + "pullModel": "Henter Modell", + "pullModelDescription": "Henter {{modelName}} modell. For flere detaljer, sjekk utvidelsesikonet.", + "success": "Suksess", + "error": "Feil", + "successDescription": "Modellen ble hentet vellykket", + "successDeleteDescription": "Modellen ble slettet vellykket", + "someError": "Noe gikk galt. Vennligst prøv igjen senere" + } + }, + "managePrompts": { + "title": "Administrer Prompts", + "addBtn": "Legg til Ny Prompt", + "option1": "Normal", + "option2": "RAG", + "questionPrompt": "Spørsmålsprompt", + "segmented": { + "custom": "Tilpassede Prompts", + "copilot": "Copilot Prompts" + }, + "columns": { + "title": "Tittel", + "prompt": "Prompt", + "type": "Prompttype", + "actions": "Handlinger" + }, + "systemPrompt": "Systemprompt", + "quickPrompt": "Hurtigprompt", + "tooltip": { + "delete": "Slett Prompt", + "edit": "Endre Prompt" + }, + "confirm": { + "delete": "Er du sikker på at du vil slette denne prompten? Denne handlingen kan ikke angres." + }, + "modal": { + "addTitle": "Legg til ny Prompt", + "editTitle": "Endre Prompt" + }, + "form": { + "title": { + "label": "Tittel", + "placeholder": "Min Kule Prompt", + "required": "Vennligst skriv inn en tittel" + }, + "prompt": { + "label": "Prompt", + "placeholder": "Skriv inn Prompt", + "required": "Vennligst skriv inn en prompt", + "help": "Du kan bruke {key} som variabel i din prompt.", + "missingTextPlaceholder": "Variabelen {text} mangler i prompten. Vennligst legg til dette." 
+ }, + "isSystem": { + "label": "Er Systemprompt" + }, + "btnSave": { + "saving": "Legger til Prompt...", + "save": "Legg til Prompt" + }, + "btnEdit": { + "saving": "Oppdaterer Prompt...", + "save": "Oppdater Prompt" + } + }, + "notification": { + "addSuccess": "Prompt Lagt Til", + "addSuccessDesc": "Prompt ble lagt til vellykket", + "error": "Feil", + "someError": "Noe gikk galt. Vennligst prøv igjen senere", + "updatedSuccess": "Prompt Oppdatert", + "updatedSuccessDesc": "Prompt ble oppdatert vellykket", + "deletedSuccess": "Prompt Slettet", + "deletedSuccessDesc": "Prompt ble slettet vellykket" + } + }, + "manageShare": { + "title": "Administrer Deling", + "heading": "Konfigurer Side deling URL", + "form": { + "url": { + "label": "Side Deling URL", + "placeholder": "Skriv inn side deling URL", + "required": "Vennligst skriv inn din Side deling URL!", + "help": "For personvern kan du selv hoste side delingen og angi URL-en her. Lær Mer." + } + }, + "webshare": { + "heading": "Web Deling", + "columns": { + "title": "Tittel", + "url": "URL", + "actions": "Handlinger" + }, + "tooltip": { + "delete": "Slett Deling" + }, + "confirm": { + "delete": "Er du sikker på at du vil slette denne delingen? Dette kan ikke angres." + }, + "label": "Administrer Side Deling", + "description": "Legg til eller deaktiver side delingsfunksjonen" + }, + "notification": { + "pageShareSuccess": "Side Deling URL oppdatert vellykket", + "someError": "Noe gikk galt. 
Vennligst prøv igjen senere", + "webShareDeleteSuccess": "Webdeling ble slettet vellykket" + } + }, + "ollamaSettings": { + "title": "Ollama Innstillinger", + "heading": "Konfigurer Ollama Plugin", + "enabled": { + "label": "Aktiver Ollama Plugin" + }, + "provider": { + "label": "Ollama Tilbyder", + "placeholder": "Velg en Ollama tilbyder" + }, + "apiKey": { + "label": "Ollama API Nøkkel", + "placeholder": "Skriv inn Ollama API Nøkkel", + "required": "API Nøkkel er påkrevd" + }, + "saveBtn": { + "save": "Lagre" + }, + "notification": { + "saveSuccess": "Ollama Plugin lagret vellykket", + "someError": "Noe gikk galt. Vennligst prøv igjen senere" + } + } +} diff --git a/src/assets/locale/no/sidepanel.json b/src/assets/locale/no/sidepanel.json new file mode 100644 index 0000000..6df9d20 --- /dev/null +++ b/src/assets/locale/no/sidepanel.json @@ -0,0 +1,7 @@ +{ + "tooltip": { + "embed": "Det kan ta noen minutter å bygge din siden. Vennligst vent...", + "clear": "Slett chathistorikken", + "history": "Chathistorikk" + } +} diff --git a/src/i18n/index.ts b/src/i18n/index.ts index ed0c96c..82c6c6d 100644 --- a/src/i18n/index.ts +++ b/src/i18n/index.ts @@ -12,6 +12,7 @@ import { es } from "./lang/es"; import { fa } from "./lang/fa"; import { de } from "./lang/de"; import { da } from "./lang/da"; +import { no } from "./lang/no"; import LanguageDetector from 'i18next-browser-languagedetector'; i18n diff --git a/src/i18n/lang/no.ts b/src/i18n/lang/no.ts new file mode 100644 index 0000000..f7541a6 --- /dev/null +++ b/src/i18n/lang/no.ts @@ -0,0 +1,17 @@ +import option from "@/assets/locale/no/option.json"; +import playground from "@/assets/locale/no/playground.json"; +import common from "@/assets/locale/no/common.json"; +import sidepanel from "@/assets/locale/no/sidepanel.json"; +import settings from "@/assets/locale/no/settings.json"; +import knowledge from "@/assets/locale/no/knowledge.json"; +import chrome from "@/assets/locale/no/chrome.json"; + +export const no = { + 
option, + playground, + common, + sidepanel, + settings, + knowledge, + chrome +} \ No newline at end of file diff --git a/src/public/_locales/no/messages.json b/src/public/_locales/no/messages.json new file mode 100644 index 0000000..1bb0b1f --- /dev/null +++ b/src/public/_locales/no/messages.json @@ -0,0 +1,29 @@ +{ + "extName": { + "message": "Page Assist - Et Web UI for å kjøre AI-modeller lokalt" + }, + "extDescription": { + "message": "Bruk dine lokalt kjørende AI-modeller til å hjelpe deg med å surfe på nettet." + }, + "openSidePanelToChat": { + "message": "Åpne Copilot for å chatte" + }, + "openOptionToChat": { + "message": "Åpne Web UI for å chatte" + }, + "contextSummarize": { + "message": "Oppsummer" + }, + "contextExplain": { + "message": "Forklar" + }, + "contextRephrase": { + "message": "Omskriv" + }, + "contextTranslate": { + "message": "Oversett" + }, + "contextCustom": { + "message": "Tilpasset" + } +} \ No newline at end of file From d3ccbbfbe339dc58c1826cb8d17be8db77f81dd1 Mon Sep 17 00:00:00 2001 From: Abubakar115e <113997856+Abubakar115e@users.noreply.github.com> Date: Thu, 10 Oct 2024 17:38:16 +0200 Subject: [PATCH 13/33] spell fix --- src/assets/locale/da/settings.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/assets/locale/da/settings.json b/src/assets/locale/da/settings.json index 7c98035..461ca36 100644 --- a/src/assets/locale/da/settings.json +++ b/src/assets/locale/da/settings.json @@ -22,7 +22,7 @@ "label": "Genoptag den sidste chat, når du åbner SidePanel (copilot)" }, "hideCurrentChatModelSettings": { - "label": "Skjul nuværende chatmodelindstillinger" + "label": "Skjul nuværende chat model indstillinger" }, "restoreLastChatModel": { "label": "Gendan sidste brugte chatmodel fremtidigt" From 718e1c8501ad7b6d65156d8177137135f368c466 Mon Sep 17 00:00:00 2001 From: Abubakar115e <113997856+Abubakar115e@users.noreply.github.com> Date: Thu, 10 Oct 2024 17:39:01 +0200 Subject: [PATCH 14/33] Spell --- 
src/assets/locale/da/settings.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/assets/locale/da/settings.json b/src/assets/locale/da/settings.json index 461ca36..372213f 100644 --- a/src/assets/locale/da/settings.json +++ b/src/assets/locale/da/settings.json @@ -37,7 +37,7 @@ "sidepanelRag": { "heading": "Copilot Chat med Website Indstillinger", "ragEnabled": { - "label": "Chat med websitet ved brug af vektorindlejringer (vector embeddings)" + "label": "Chat med websitet ved brug af vector embeddings" }, "maxWebsiteContext": { "label": "Normal webstedsindhold størrelse", From ec3301503945ea443a2ad62a95b4c5d04d30d081 Mon Sep 17 00:00:00 2001 From: Abubakar115e <113997856+Abubakar115e@users.noreply.github.com> Date: Thu, 10 Oct 2024 21:28:04 +0200 Subject: [PATCH 15/33] Maybe it works? --- src/i18n/index.ts | 14 +++++++++++--- 1 file changed, 11 insertions(+), 3 deletions(-) diff --git a/src/i18n/index.ts b/src/i18n/index.ts index 82c6c6d..3480e59 100644 --- a/src/i18n/index.ts +++ b/src/i18n/index.ts @@ -15,6 +15,8 @@ import { da } from "./lang/da"; import { no } from "./lang/no"; import LanguageDetector from 'i18next-browser-languagedetector'; +const supportedLanguages = ['en', 'zh', 'ja', 'fa', 'de', 'da', 'no']; + i18n .use(LanguageDetector) .use(initReactI18next) @@ -35,10 +37,16 @@ i18n fa: fa, "fa-IR": fa, de: de, - da: da + da: da, + no: no }, fallbackLng: "en", + detection: { + order: ['localStorage', 'navigator'], + caches: ['localStorage'] + }, + supportedLngs: supportedLanguages, lng: localStorage.getItem("i18nextLng") || "en", - }) + }); -export default i18n; +export default i18n; \ No newline at end of file From 6fc29310bac954e77b64fd38cedafbfb03d789b8 Mon Sep 17 00:00:00 2001 From: Abubakar115e <113997856+Abubakar115e@users.noreply.github.com> Date: Fri, 11 Oct 2024 17:31:35 +0200 Subject: [PATCH 16/33] spell issues --- src/assets/locale/no/settings.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git 
a/src/assets/locale/no/settings.json b/src/assets/locale/no/settings.json index 2da5fb3..fb90aa5 100644 --- a/src/assets/locale/no/settings.json +++ b/src/assets/locale/no/settings.json @@ -22,7 +22,7 @@ "label": "Gjenoppta siste chat ved åpning av SidePanel (copilot)" }, "hideCurrentChatModelSettings": { - "label": "Skjul gjeldende chatmodellinnstillinger" + "label": "Skjul gjeldende chat modell innstillinger" }, "restoreLastChatModel": { "label": "Gjenopprett sist brukte chatmodell for fremtidig bruk" From d2c5785aacb3364fd9b6f2c2dafbd08f84d84ac5 Mon Sep 17 00:00:00 2001 From: Abubakar115e <113997856+Abubakar115e@users.noreply.github.com> Date: Fri, 11 Oct 2024 17:36:04 +0200 Subject: [PATCH 17/33] Added more language to support auto browser recognition --- src/i18n/index.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/i18n/index.ts b/src/i18n/index.ts index 1b97e69..4b3826a 100644 --- a/src/i18n/index.ts +++ b/src/i18n/index.ts @@ -15,7 +15,7 @@ import { da } from "./lang/da"; import { no } from "./lang/no"; import LanguageDetector from 'i18next-browser-languagedetector'; -const supportedLanguages = ['en', 'zh', 'ja', 'fa', 'de', 'da', 'no']; +const supportedLanguages = ['en', 'zh', 'ja', 'fa', 'de', 'da', 'no', 'es', 'fr', 'it', 'ml', 'pt-BR', 'ru', 'zh-CN', 'ru-RU', 'ja-JP']; i18n .use(LanguageDetector) From ff371d6eef3f178bd1047402dfc19c0d3b07e888 Mon Sep 17 00:00:00 2001 From: n4ze3m Date: Sat, 12 Oct 2024 16:53:42 +0530 Subject: [PATCH 18/33] feat: Add OpenAI Provider Selection Add a provider selection dropdown to the OpenAI settings, enabling users to choose from pre-configured options like "Azure" or "Custom." This streamlines setup and allows for more flexibility in configuring OpenAI API endpoints. The dropdown pre-populates base URLs and names based on the selected provider. The dropdown also automatically populates base URLs and names based on the selected provider, further simplifying the configuration process. 
--- src/assets/locale/en/openai.json | 5 +-- src/components/Common/Playground/Message.tsx | 7 ++-- src/components/Common/ProviderIcon.tsx | 4 +-- .../Layouts/SettingsOptionLayout.tsx | 4 +-- .../Option/Settings/openai-fetch-model.tsx | 17 +++++++--- src/components/Option/Settings/openai.tsx | 19 +++++++++-- src/db/models.ts | 7 ++-- src/db/openai.ts | 1 + src/hooks/useMessage.tsx | 21 ++---------- src/hooks/useMessageOption.tsx | 28 +++------------- src/utils/oai-api-providers.ts | 32 +++++++++++++++++++ 11 files changed, 86 insertions(+), 59 deletions(-) create mode 100644 src/utils/oai-api-providers.ts diff --git a/src/assets/locale/en/openai.json b/src/assets/locale/en/openai.json index 48f0430..460e450 100644 --- a/src/assets/locale/en/openai.json +++ b/src/assets/locale/en/openai.json @@ -17,7 +17,7 @@ }, "baseUrl": { "label": "Base URL", - "help": "The base URL of the OpenAI API provider. eg (http://loocalhost:8080/v1)", + "help": "The base URL of the OpenAI API provider. eg (http://localhost:1234/v1)", "required": "Base URL is required.", "placeholder": "Enter base URL" }, @@ -58,5 +58,6 @@ "confirm": { "delete": "Are you sure you want to delete this model?" } - } + }, + "noModelFound": "No model found. Make sure you have added correct provider with base URL and API key." } \ No newline at end of file diff --git a/src/components/Common/Playground/Message.tsx b/src/components/Common/Playground/Message.tsx index 268d0b6..c2d4290 100644 --- a/src/components/Common/Playground/Message.tsx +++ b/src/components/Common/Playground/Message.tsx @@ -15,6 +15,7 @@ import { useTranslation } from "react-i18next" import { MessageSource } from "./MessageSource" import { useTTS } from "@/hooks/useTTS" import { tagColors } from "@/utils/color" +import { removeModelSuffix } from "@/db/models" type Props = { message: string @@ -69,7 +70,9 @@ export const PlaygroundMessage = (props: Props) => { {props.isBot ? props.name === "chrome::gemini-nano::page-assist" ? 
"Gemini Nano" - : props.name + : removeModelSuffix( + props.name?.replaceAll(/accounts\/[^\/]+\/models\//g, "") + ) : "You"} @@ -135,7 +138,7 @@ export const PlaygroundMessage = (props: Props) => { key: "1", label: (
    - {t('citations')} + {t("citations")}
    ), children: ( diff --git a/src/components/Common/ProviderIcon.tsx b/src/components/Common/ProviderIcon.tsx index 83a8cca..54ecbf0 100644 --- a/src/components/Common/ProviderIcon.tsx +++ b/src/components/Common/ProviderIcon.tsx @@ -1,4 +1,4 @@ -import { ChromeIcon, CloudCog } from "lucide-react" +import { ChromeIcon, CpuIcon } from "lucide-react" import { OllamaIcon } from "../Icons/Ollama" export const ProviderIcons = ({ @@ -12,7 +12,7 @@ export const ProviderIcons = ({ case "chrome": return case "custom": - return + return default: return } diff --git a/src/components/Layouts/SettingsOptionLayout.tsx b/src/components/Layouts/SettingsOptionLayout.tsx index 6365381..8d4d2cf 100644 --- a/src/components/Layouts/SettingsOptionLayout.tsx +++ b/src/components/Layouts/SettingsOptionLayout.tsx @@ -7,7 +7,7 @@ import { InfoIcon, CombineIcon, ChromeIcon, - CloudCogIcon + CpuIcon } from "lucide-react" import { useTranslation } from "react-i18next" import { Link, useLocation } from "react-router-dom" @@ -93,7 +93,7 @@ export const SettingsLayout = ({ children }: { children: React.ReactNode }) => { diff --git a/src/components/Option/Settings/openai-fetch-model.tsx b/src/components/Option/Settings/openai-fetch-model.tsx index e847544..ce86dad 100644 --- a/src/components/Option/Settings/openai-fetch-model.tsx +++ b/src/components/Option/Settings/openai-fetch-model.tsx @@ -79,11 +79,15 @@ export const OpenAIFetchModel = ({ openaiId, setOpenModelModal }: Props) => { if (status === "pending") { return } - if (status === "error" || !data || data.length === 0) { - return
    {t("noModelFound")}
    + return ( +
    +

    + {t("noModelFound")} +

    +
    + ) } - return (

    @@ -116,7 +120,12 @@ export const OpenAIFetchModel = ({ openaiId, setOpenModelModal }: Props) => { key={model.id} checked={selectedModels.includes(model.id)} onChange={(e) => handleModelSelect(model.id, e.target.checked)}> - {model?.name || model.id} +

    + {`${model?.name || model.id}`.replaceAll( + /accounts\/[^\/]+\/models\//g, + "" + )} +
    ))}
    diff --git a/src/components/Option/Settings/openai.tsx b/src/components/Option/Settings/openai.tsx index 5178628..b3a9b1d 100644 --- a/src/components/Option/Settings/openai.tsx +++ b/src/components/Option/Settings/openai.tsx @@ -1,4 +1,4 @@ -import { Form, Input, Modal, Table, message, Tooltip } from "antd" +import { Form, Input, Modal, Table, message, Tooltip, Select } from "antd" import { useState } from "react" import { useTranslation } from "react-i18next" import { @@ -10,6 +10,7 @@ import { import { useMutation, useQuery, useQueryClient } from "@tanstack/react-query" import { Pencil, Trash2, RotateCwIcon } from "lucide-react" import { OpenAIFetchModel } from "./openai-fetch-model" +import { OAI_API_PROVIDERS } from "@/utils/oai-api-providers" export const OpenAIApp = () => { const { t } = useTranslation("openai") @@ -182,11 +183,25 @@ export const OpenAIApp = () => { form.resetFields() }} footer={null}> + {!editingConfig && ( + { const value = OAI_API_PROVIDERS.find((item) => item.value === e) form.setFieldsValue({ baseUrl: value?.baseUrl, name: value?.label }) + setProvider(e) }} className="w-full !mb-4" options={OAI_API_PROVIDERS} diff --git a/src/db/models.ts b/src/db/models.ts index 2eb6bb4..c284e58 100644 --- a/src/db/models.ts +++ b/src/db/models.ts @@ -1,4 +1,7 @@ -import { getOpenAIConfigById as providerInfo } from "./openai" +import { + getAllOpenAIConfig, + getOpenAIConfigById as providerInfo +} from "./openai" type Model = { id: string @@ -16,11 +19,15 @@ export const generateID = () => { } export const removeModelSuffix = (id: string) => { - return id.replace(/_model-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{3,4}-[a-f0-9]{4}/, "") + return id.replace( + /_model-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{3,4}-[a-f0-9]{4}/, + "" + ) } export const isCustomModel = (model: string) => { - const customModelRegex = /_model-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{3,4}-[a-f0-9]{4}/ + const customModelRegex = + /_model-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{3,4}-[a-f0-9]{4}/ return 
customModelRegex.test(model) } export class ModelDb { @@ -174,6 +181,17 @@ export const deleteModel = async (id: string) => { await db.delete(id) } +export const deleteAllModelsByProviderId = async (provider_id: string) => { + const db = new ModelDb() + const models = await db.getAll() + const modelsToDelete = models.filter( + (model) => model.provider_id === provider_id + ) + for (const model of modelsToDelete) { + await db.delete(model.id) + } +} + export const isLookupExist = async (lookup: string) => { const db = new ModelDb() const models = await db.getAll() @@ -181,17 +199,19 @@ export const isLookupExist = async (lookup: string) => { return model ? true : false } - export const ollamaFormatAllCustomModels = async () => { - const allModles = await getAllCustomModels() + const allProviders = await getAllOpenAIConfig() + const ollamaModels = allModles.map((model) => { return { name: model.name, model: model.id, modified_at: "", - provider: "custom", + provider: + allProviders.find((provider) => provider.id === model.provider_id) + ?.provider || "custom", size: 0, digest: "", details: { @@ -206,4 +226,4 @@ export const ollamaFormatAllCustomModels = async () => { }) return ollamaModels -} \ No newline at end of file +} diff --git a/src/db/openai.ts b/src/db/openai.ts index ee3a526..3fba1b8 100644 --- a/src/db/openai.ts +++ b/src/db/openai.ts @@ -1,4 +1,5 @@ import { cleanUrl } from "@/libs/clean-url" +import { deleteAllModelsByProviderId } from "./models" type OpenAIModelConfig = { id: string @@ -93,7 +94,7 @@ export class OpenAIModelDb { } -export const addOpenAICofig = async ({ name, baseUrl, apiKey }: { name: string, baseUrl: string, apiKey: string }) => { +export const addOpenAICofig = async ({ name, baseUrl, apiKey, provider }: { name: string, baseUrl: string, apiKey: string, provider?: string }) => { const openaiDb = new OpenAIModelDb() const id = generateID() const config: OpenAIModelConfig = { @@ -102,7 +103,8 @@ export const addOpenAICofig = async ({ 
name, baseUrl, apiKey }: { name: string, baseUrl: cleanUrl(baseUrl), apiKey, createdAt: Date.now(), - db_type: "openai" + db_type: "openai", + provider } await openaiDb.create(config) return id @@ -117,13 +119,15 @@ export const getAllOpenAIConfig = async () => { export const updateOpenAIConfig = async ({ id, name, baseUrl, apiKey }: { id: string, name: string, baseUrl: string, apiKey: string }) => { const openaiDb = new OpenAIModelDb() + const oldData = await openaiDb.getById(id) const config: OpenAIModelConfig = { + ...oldData, id, name, baseUrl: cleanUrl(baseUrl), apiKey, createdAt: Date.now(), - db_type: "openai" + db_type: "openai", } await openaiDb.update(config) @@ -135,6 +139,7 @@ export const updateOpenAIConfig = async ({ id, name, baseUrl, apiKey }: { id: st export const deleteOpenAIConfig = async (id: string) => { const openaiDb = new OpenAIModelDb() await openaiDb.delete(id) + await deleteAllModelsByProviderId(id) } diff --git a/src/libs/openai.ts b/src/libs/openai.ts index 8b6230e..377639f 100644 --- a/src/libs/openai.ts +++ b/src/libs/openai.ts @@ -1,25 +1,40 @@ type Model = { id: string name?: string + display_name?: string + type: string } export const getAllOpenAIModels = async (baseUrl: string, apiKey?: string) => { - const url = `${baseUrl}/models` - const headers = apiKey - ? { + try { + const url = `${baseUrl}/models` + const headers = apiKey + ? 
{ Authorization: `Bearer ${apiKey}` - } - : {} + } + : {} - const res = await fetch(url, { - headers - }) + const res = await fetch(url, { + headers + }) - if (!res.ok) { + if (!res.ok) { + return [] + } + + if (baseUrl === "https://api.together.xyz/v1") { + const data = (await res.json()) as Model[] + return data.map(model => ({ + id: model.id, + name: model.display_name, + })) + } + + const data = (await res.json()) as { data: Model[] } + + return data.data + } catch (e) { + console.log(e) return [] } - - const data = (await res.json()) as { data: Model[] } - - return data.data } diff --git a/src/utils/oai-api-providers.ts b/src/utils/oai-api-providers.ts index 57b3c28..25f845a 100644 --- a/src/utils/oai-api-providers.ts +++ b/src/utils/oai-api-providers.ts @@ -24,6 +24,11 @@ export const OAI_API_PROVIDERS = [ value: "together", baseUrl: "https://api.together.xyz/v1" }, + { + label: "OpenRouter", + value: "openrouter", + baseUrl: "https://openrouter.ai/api/v1" + }, { label: "Custsom", value: "custom", From ddb8993f17708a6347190ed1b48dc3b3e7332ab4 Mon Sep 17 00:00:00 2001 From: n4ze3m Date: Sat, 12 Oct 2024 19:05:21 +0530 Subject: [PATCH 21/33] feat: Support LMStudio models Adds support for LMStudio models, allowing users to access and use them within the application. This involves: - Adding new functions to `db/models.ts` to handle LMStudio model IDs and fetch their information from the OpenAI API. - Modifying the `ollamaFormatAllCustomModels` function to include LMStudio models in the list of available models. - Introducing a timeout mechanism in `libs/openai.ts` to prevent API requests from hanging. This change enhances the model selection experience, providing users with a wider range of models to choose from. 
--- src/db/models.ts | 79 ++++++++++++++++++++++++++++++++++++++++++--- src/libs/openai.ts | 16 +++++++-- src/models/index.ts | 2 +- 3 files changed, 89 insertions(+), 8 deletions(-) diff --git a/src/db/models.ts b/src/db/models.ts index c284e58..e985472 100644 --- a/src/db/models.ts +++ b/src/db/models.ts @@ -1,3 +1,4 @@ +import { getAllOpenAIModels } from "@/libs/openai" import { getAllOpenAIConfig, getOpenAIConfigById as providerInfo @@ -22,10 +23,27 @@ export const removeModelSuffix = (id: string) => { return id.replace( /_model-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{3,4}-[a-f0-9]{4}/, "" - ) + ).replace(/_lmstudio_openai-[a-f0-9]{4}-[a-f0-9]{3}-[a-f0-9]{4}/, "") +} +export const isLMStudioModel = (model: string) => { + const lmstudioModelRegex = /_lmstudio_openai-[a-f0-9]{4}-[a-f0-9]{3}-[a-f0-9]{4}/ + return lmstudioModelRegex.test(model) } +export const getLMStudioModelId = (model: string): { model_id: string, provider_id: string } => { + const lmstudioModelRegex = /_lmstudio_openai-[a-f0-9]{4}-[a-f0-9]{3}-[a-f0-9]{4}/ + const match = model.match(lmstudioModelRegex) + if (match) { + const modelId = match[0] + const providerId = match[0].replace("_lmstudio_openai-", "") + return { model_id: modelId, provider_id: providerId } + } + return null +} export const isCustomModel = (model: string) => { + if (isLMStudioModel(model)) { + return true + } const customModelRegex = /_model-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{3,4}-[a-f0-9]{4}/ return customModelRegex.test(model) @@ -158,6 +176,19 @@ export const createModel = async ( export const getModelInfo = async (id: string) => { const db = new ModelDb() + + if (isLMStudioModel(id)) { + const lmstudioId = getLMStudioModelId(id) + if (!lmstudioId) { + throw new Error("Invalid LMStudio model ID") + } + return { + model_id: id.replace(/_lmstudio_openai-[a-f0-9]{4}-[a-f0-9]{3}-[a-f0-9]{4}/, ""), + provider_id: `openai-${lmstudioId.provider_id}`, + name: id.replace(/_lmstudio_openai-[a-f0-9]{4}-[a-f0-9]{3}-[a-f0-9]{4}/, "") + } + } + 
const model = await db.getById(id) return model } @@ -199,12 +230,52 @@ export const isLookupExist = async (lookup: string) => { return model ? true : false } +export const dynamicFetchLMStudio = async ({ + baseUrl, + providerId +}: { + baseUrl: string + providerId: string +}) => { + const models = await getAllOpenAIModels(baseUrl) + const lmstudioModels = models.map((e) => { + return { + name: e?.name || e?.id, + id: `${e?.id}_lmstudio_${providerId}`, + provider: providerId, + lookup: `${e?.id}_${providerId}`, + provider_id: providerId, + } + }) + + return lmstudioModels +} + export const ollamaFormatAllCustomModels = async () => { - const allModles = await getAllCustomModels() + const [allModles, allProviders] = await Promise.all([ + getAllCustomModels(), + getAllOpenAIConfig() + ]) - const allProviders = await getAllOpenAIConfig() + const lmstudioProviders = allProviders.filter( + (provider) => provider.provider === "lmstudio" + ) - const ollamaModels = allModles.map((model) => { + const lmModelsPromises = lmstudioProviders.map((provider) => + dynamicFetchLMStudio({ + baseUrl: provider.baseUrl, + providerId: provider.id + }) + ) + + const lmModelsFetch = await Promise.all(lmModelsPromises) + + const lmModels = lmModelsFetch.flat() + + // merge allModels and lmModels + const allModlesWithLMStudio = [...allModles, ...lmModels] + + const ollamaModels = allModlesWithLMStudio.map((model) => { return { name: model.name, model: model.id, diff --git a/src/libs/openai.ts b/src/libs/openai.ts index 377639f..dad8d72 100644 --- a/src/libs/openai.ts +++ b/src/libs/openai.ts @@ -14,10 +14,16 @@ export const getAllOpenAIModels = async (baseUrl: string, apiKey?: string) => { } : {} + const controller = new AbortController() + const timeoutId = setTimeout(() => controller.abort(), 10000) + const res = await fetch(url, { - headers + headers, + signal: controller.signal }) + clearTimeout(timeoutId) + if (!res.ok) { return [] } @@ -27,14 +33,18 @@ export const getAllOpenAIModels = 
async (baseUrl: string, apiKey?: string) => { return data.map(model => ({ id: model.id, name: model.display_name, - })) + })) as Model[] } const data = (await res.json()) as { data: Model[] } return data.data } catch (e) { - console.log(e) + if (e instanceof DOMException && e.name === 'AbortError') { + console.log('Request timed out') + } else { + console.log(e) + } return [] } } diff --git a/src/models/index.ts b/src/models/index.ts index d459e66..4df2419 100644 --- a/src/models/index.ts +++ b/src/models/index.ts @@ -40,7 +40,7 @@ export const pageAssistModel = async ({ if (isCustom) { const modelInfo = await getModelInfo(model) const providerInfo = await getOpenAIConfigById(modelInfo.provider_id) - + console.log(modelInfo, providerInfo) return new ChatOpenAI({ modelName: modelInfo.model_id, openAIApiKey: providerInfo.apiKey || "temp", From 1300945b75a197bb687888747d60cddaa68ecc5b Mon Sep 17 00:00:00 2001 From: n4ze3m Date: Sat, 12 Oct 2024 19:28:08 +0530 Subject: [PATCH 22/33] Fix: Prevent model selection modal from appearing for LM Studio provider When using the LM Studio provider, the model selection modal was incorrectly appearing. This commit prevents the modal from appearing when the provider is set to LM Studio. This ensures that the user experience is consistent across all providers. 
--- src/components/Option/Settings/openai.tsx | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/src/components/Option/Settings/openai.tsx b/src/components/Option/Settings/openai.tsx index 07a6132..d0a889c 100644 --- a/src/components/Option/Settings/openai.tsx +++ b/src/components/Option/Settings/openai.tsx @@ -35,8 +35,11 @@ export const OpenAIApp = () => { }) setOpen(false) message.success(t("addSuccess")) - setOpenaiId(data) - setOpenModelModal(true) + if (provider !== "lmstudio") { + setOpenaiId(data) + setOpenModelModal(true) + } + setProvider("custom") } }) @@ -75,8 +78,6 @@ export const OpenAIApp = () => { provider }) } - - setProvider("custom") } const handleEdit = (record: any) => { From ba071ffeb19ec96931d19fdb00ad3df4cac19ce7 Mon Sep 17 00:00:00 2001 From: n4ze3m Date: Sat, 12 Oct 2024 21:12:45 +0530 Subject: [PATCH 23/33] add embedding support --- src/models/OAIEmbedding.ts | 234 +++++++++++++++++++++++++++++++++++++ src/models/types.ts | 26 +++++ src/models/utils/openai.ts | 70 +++++++++++ 3 files changed, 330 insertions(+) create mode 100644 src/models/OAIEmbedding.ts create mode 100644 src/models/types.ts create mode 100644 src/models/utils/openai.ts diff --git a/src/models/OAIEmbedding.ts b/src/models/OAIEmbedding.ts new file mode 100644 index 0000000..b2df653 --- /dev/null +++ b/src/models/OAIEmbedding.ts @@ -0,0 +1,234 @@ +import { type ClientOptions, OpenAI as OpenAIClient } from "openai" +import { Embeddings, type EmbeddingsParams } from "@langchain/core/embeddings" +import { chunkArray } from "@langchain/core/utils/chunk_array" +import { OpenAICoreRequestOptions, LegacyOpenAIInput } from "./types" +import { wrapOpenAIClientError } from "./utils/openai" + +/** + * Interface for OpenAIEmbeddings parameters. Extends EmbeddingsParams and + * defines additional parameters specific to the OpenAIEmbeddings class. 
+ */ +export interface OpenAIEmbeddingsParams extends EmbeddingsParams { + /** + * Model name to use + * Alias for `model` + */ + modelName: string + /** Model name to use */ + model: string + + /** + * The number of dimensions the resulting output embeddings should have. + * Only supported in `text-embedding-3` and later models. + */ + dimensions?: number + + /** + * Timeout to use when making requests to OpenAI. + */ + timeout?: number + + /** + * The maximum number of documents to embed in a single request. This is + * limited by the OpenAI API to a maximum of 2048. + */ + batchSize?: number + + /** + * Whether to strip new lines from the input text. This is recommended by + * OpenAI for older models, but may not be suitable for all use cases. + * See: https://github.com/openai/openai-python/issues/418#issuecomment-1525939500 + */ + stripNewLines?: boolean + + signal?: AbortSignal +} + +/** + * Class for generating embeddings using the OpenAI API. Extends the + * Embeddings class and implements OpenAIEmbeddingsParams and + * AzureOpenAIInput. + * @example + * ```typescript + * // Embed a query using OpenAIEmbeddings to generate embeddings for a given text + * const model = new OpenAIEmbeddings(); + * const res = await model.embedQuery( + * "What would be a good company name for a company that makes colorful socks?", + * ); + * console.log({ res }); + * + * ``` + */ +export class OAIEmbedding + extends Embeddings + implements OpenAIEmbeddingsParams { + modelName = "text-embedding-ada-002" + + model = "text-embedding-ada-002" + + batchSize = 512 + + // TODO: Update to `false` on next minor release (see: https://github.com/langchain-ai/langchainjs/pull/3612) + stripNewLines = true + + /** + * The number of dimensions the resulting output embeddings should have. + * Only supported in `text-embedding-3` and later models. 
+ */ + dimensions?: number + + timeout?: number + + azureOpenAIApiVersion?: string + + azureOpenAIApiKey?: string + + azureADTokenProvider?: () => Promise + + azureOpenAIApiInstanceName?: string + + azureOpenAIApiDeploymentName?: string + + azureOpenAIBasePath?: string + + organization?: string + + protected client: OpenAIClient + + protected clientConfig: ClientOptions + signal?: AbortSignal + + constructor( + fields?: Partial & { + verbose?: boolean + /** + * The OpenAI API key to use. + * Alias for `apiKey`. + */ + openAIApiKey?: string + /** The OpenAI API key to use. */ + apiKey?: string + configuration?: ClientOptions + }, + configuration?: ClientOptions & LegacyOpenAIInput + ) { + const fieldsWithDefaults = { maxConcurrency: 2, ...fields } + + super(fieldsWithDefaults) + + let apiKey = fieldsWithDefaults?.apiKey ?? fieldsWithDefaults?.openAIApiKey + + this.modelName = + fieldsWithDefaults?.model ?? fieldsWithDefaults?.modelName ?? this.model + this.model = this.modelName + this.batchSize = fieldsWithDefaults?.batchSize + this.stripNewLines = fieldsWithDefaults?.stripNewLines ?? this.stripNewLines + this.timeout = fieldsWithDefaults?.timeout + this.dimensions = fieldsWithDefaults?.dimensions + + if (fields.signal) { + this.signal = fields.signal + } + + + this.clientConfig = { + apiKey, + organization: this.organization, + baseURL: configuration?.basePath, + dangerouslyAllowBrowser: true, + defaultHeaders: configuration?.baseOptions?.headers, + defaultQuery: configuration?.baseOptions?.params, + ...configuration, + ...fields?.configuration + } + } + + /** + * Method to generate embeddings for an array of documents. Splits the + * documents into batches and makes requests to the OpenAI API to generate + * embeddings. + * @param texts Array of documents to generate embeddings for. + * @returns Promise that resolves to a 2D array of embeddings for each document. 
+ */ + async embedDocuments(texts: string[]): Promise { + const batches = chunkArray( + this.stripNewLines ? texts.map((t) => t.replace(/\n/g, " ")) : texts, + this.batchSize + ) + + const batchRequests = batches.map((batch) => { + const params: OpenAIClient.EmbeddingCreateParams = { + model: this.model, + input: batch + } + if (this.dimensions) { + params.dimensions = this.dimensions + } + return this.embeddingWithRetry(params) + }) + const batchResponses = await Promise.all(batchRequests) + + const embeddings: number[][] = [] + for (let i = 0; i < batchResponses.length; i += 1) { + const batch = batches[i] + const { data: batchResponse } = batchResponses[i] + for (let j = 0; j < batch.length; j += 1) { + embeddings.push(batchResponse[j].embedding) + } + } + return embeddings + } + + /** + * Method to generate an embedding for a single document. Calls the + * embeddingWithRetry method with the document as the input. + * @param text Document to generate an embedding for. + * @returns Promise that resolves to an embedding for the document. + */ + async embedQuery(text: string): Promise { + const params: OpenAIClient.EmbeddingCreateParams = { + model: this.model, + input: this.stripNewLines ? text.replace(/\n/g, " ") : text + } + if (this.dimensions) { + params.dimensions = this.dimensions + } + const { data } = await this.embeddingWithRetry(params) + return data[0].embedding + } + + /** + * Private method to make a request to the OpenAI API to generate + * embeddings. Handles the retry logic and returns the response from the + * API. + * @param request Request to send to the OpenAI API. + * @returns Promise that resolves to the response from the API. 
+ */ + protected async embeddingWithRetry( + request: OpenAIClient.EmbeddingCreateParams + ) { + const requestOptions: OpenAICoreRequestOptions = {} + if (this.azureOpenAIApiKey) { + requestOptions.headers = { + "api-key": this.azureOpenAIApiKey, + ...requestOptions.headers + } + requestOptions.query = { + "api-version": this.azureOpenAIApiVersion, + ...requestOptions.query + } + } + return this.caller.call(async () => { + try { + const res = await this.client.embeddings.create(request, { + ...requestOptions, + signal: this.signal + }) + return res + } catch (e) { + const error = wrapOpenAIClientError(e) + throw error + } + }) + } +} diff --git a/src/models/types.ts b/src/models/types.ts new file mode 100644 index 0000000..0fc96f6 --- /dev/null +++ b/src/models/types.ts @@ -0,0 +1,26 @@ +export type OpenAICoreRequestOptions< + Req extends object = Record +> = { + path?: string; + query?: Req | undefined; + body?: Req | undefined; + headers?: Record | undefined; + + maxRetries?: number; + stream?: boolean | undefined; + timeout?: number; + // eslint-disable-next-line @typescript-eslint/no-explicit-any + httpAgent?: any; + signal?: AbortSignal | undefined | null; + idempotencyKey?: string; +}; + +export interface LegacyOpenAIInput { + /** @deprecated Use baseURL instead */ + basePath?: string; + /** @deprecated Use defaultHeaders and defaultQuery instead */ + baseOptions?: { + headers?: Record; + params?: Record; + }; +} diff --git a/src/models/utils/openai.ts b/src/models/utils/openai.ts new file mode 100644 index 0000000..22ecb56 --- /dev/null +++ b/src/models/utils/openai.ts @@ -0,0 +1,70 @@ +import { + APIConnectionTimeoutError, + APIUserAbortError, + OpenAI as OpenAIClient, + } from "openai"; + import { zodToJsonSchema } from "zod-to-json-schema"; + import type { StructuredToolInterface } from "@langchain/core/tools"; + import { + convertToOpenAIFunction, + convertToOpenAITool, + } from "@langchain/core/utils/function_calling"; + + // eslint-disable-next-line 
@typescript-eslint/no-explicit-any + export function wrapOpenAIClientError(e: any) { + let error; + if (e.constructor.name === APIConnectionTimeoutError.name) { + error = new Error(e.message); + error.name = "TimeoutError"; + } else if (e.constructor.name === APIUserAbortError.name) { + error = new Error(e.message); + error.name = "AbortError"; + } else { + error = e; + } + return error; + } + + export { + convertToOpenAIFunction as formatToOpenAIFunction, + convertToOpenAITool as formatToOpenAITool, + }; + + export function formatToOpenAIAssistantTool(tool: StructuredToolInterface) { + return { + type: "function", + function: { + name: tool.name, + description: tool.description, + parameters: zodToJsonSchema(tool.schema), + }, + }; + } + + export type OpenAIToolChoice = + | OpenAIClient.ChatCompletionToolChoiceOption + | "any" + | string; + + export function formatToOpenAIToolChoice( + toolChoice?: OpenAIToolChoice + ): OpenAIClient.ChatCompletionToolChoiceOption | undefined { + if (!toolChoice) { + return undefined; + } else if (toolChoice === "any" || toolChoice === "required") { + return "required"; + } else if (toolChoice === "auto") { + return "auto"; + } else if (toolChoice === "none") { + return "none"; + } else if (typeof toolChoice === "string") { + return { + type: "function", + function: { + name: toolChoice, + }, + }; + } else { + return toolChoice; + } + } \ No newline at end of file From 768ff2e5558d759e3f5a0e3d1a171223014031f1 Mon Sep 17 00:00:00 2001 From: n4ze3m Date: Sat, 12 Oct 2024 23:32:00 +0530 Subject: [PATCH 24/33] feat: Improve model selection and embedding Refactor embedding models and their handling to improve performance and simplify the process. Add a new model selection mechanism, and enhance the UI for model selection, offering clearer and more user-friendly options for embedding models. Refactor embeddings to use a common model for page assist and RAG, further improving performance and streamlining the workflow. 
--- src/components/Option/Settings/rag.tsx | 28 +++++++++++++------- src/hooks/useMessage.tsx | 4 +-- src/hooks/useMessageOption.tsx | 4 +-- src/libs/process-knowledge.ts | 4 +-- src/models/OAIEmbedding.ts | 16 +----------- src/models/embedding.ts | 36 ++++++++++++++++++++++++++ src/models/index.ts | 2 +- src/services/ollama.ts | 24 ++++++++++++++++- src/utils/memory-embeddings.ts | 2 +- src/web/search-engines/brave.ts | 5 ++-- src/web/search-engines/duckduckgo.ts | 4 +-- src/web/search-engines/google.ts | 4 +-- src/web/search-engines/sogou.ts | 4 +-- src/web/website/index.ts | 4 +-- 14 files changed, 98 insertions(+), 43 deletions(-) create mode 100644 src/models/embedding.ts diff --git a/src/components/Option/Settings/rag.tsx b/src/components/Option/Settings/rag.tsx index fd5d576..8cc6bbf 100644 --- a/src/components/Option/Settings/rag.tsx +++ b/src/components/Option/Settings/rag.tsx @@ -5,13 +5,14 @@ import { defaultEmbeddingChunkOverlap, defaultEmbeddingChunkSize, defaultEmbeddingModelForRag, - getAllModels, + getEmbeddingModels, saveForRag } from "~/services/ollama" import { SettingPrompt } from "./prompt" import { useTranslation } from "react-i18next" import { getNoOfRetrievedDocs, getTotalFilePerKB } from "@/services/app" import { SidepanelRag } from "./sidepanel-rag" +import { ProviderIcons } from "@/components/Common/ProviderIcon" export const RagSettings = () => { const { t } = useTranslation("settings") @@ -29,7 +30,7 @@ export const RagSettings = () => { totalFilePerKB, noOfRetrievedDocs ] = await Promise.all([ - getAllModels({ returnEmpty: true }), + getEmbeddingModels({ returnEmpty: true }), defaultEmbeddingChunkOverlap(), defaultEmbeddingChunkSize(), defaultEmbeddingModelForRag(), @@ -113,18 +114,27 @@ export const RagSettings = () => { ]}> + + + + + ) +} diff --git a/src/components/Option/Models/index.tsx b/src/components/Option/Models/index.tsx index bd14c30..b2cab08 100644 --- a/src/components/Option/Models/index.tsx +++ 
b/src/components/Option/Models/index.tsx @@ -1,18 +1,11 @@ -import { useMutation, } from "@tanstack/react-query" -import { - notification, - Modal, - Input, - Segmented -} from "antd" +import { Segmented } from "antd" import dayjs from "dayjs" import relativeTime from "dayjs/plugin/relativeTime" import { useState } from "react" -import { useForm } from "@mantine/form" -import { Download } from "lucide-react" import { useTranslation } from "react-i18next" import { OllamaModelsTable } from "./OllamaModelsTable" import { CustomModelsTable } from "./CustomModelsTable" +import { AddOllamaModelModal } from "./AddOllamaModelModal" dayjs.extend(relativeTime) @@ -22,36 +15,6 @@ export const ModelsBody = () => { const { t } = useTranslation(["settings", "common", "openai"]) - const form = useForm({ - initialValues: { - model: "" - } - }) - - const pullModel = async (modelName: string) => { - notification.info({ - message: t("manageModels.notification.pullModel"), - description: t("manageModels.notification.pullModelDescription", { - modelName - }) - }) - - setOpen(false) - - form.reset() - - browser.runtime.sendMessage({ - type: "pull_model", - modelName - }) - - return true - } - - const { mutate: pullOllamaModel } = useMutation({ - mutationFn: pullModel - }) - return (
    @@ -60,7 +23,11 @@ export const ModelsBody = () => {
    @@ -88,28 +55,7 @@ export const ModelsBody = () => { {segmented === "ollama" ? : }
    - setOpen(false)}> -
    pullOllamaModel(values.model))}> - - - - -
    +
    ) } diff --git a/src/db/models.ts b/src/db/models.ts index e985472..c9d294d 100644 --- a/src/db/models.ts +++ b/src/db/models.ts @@ -10,6 +10,7 @@ type Model = { name: string provider_id: string lookup: string + model_type: string db_type: string } export const generateID = () => { @@ -140,7 +141,8 @@ export const createManyModels = async ( lookup: `${item.model_id}_${item.provider_id}`, id: `${item.model_id}_${generateID()}`, db_type: "openai_model", - name: item.name.replaceAll(/accounts\/[^\/]+\/models\//g, "") + name: item.name.replaceAll(/accounts\/[^\/]+\/models\//g, ""), + model_type: "chat" } }) @@ -168,7 +170,8 @@ export const createModel = async ( name, provider_id, lookup: `${model_id}_${provider_id}`, - db_type: "openai_model" + db_type: "openai_model", + model_type: "chat" } await db.create(model) return model diff --git a/src/i18n/index.ts b/src/i18n/index.ts index 4b3826a..6ee62a9 100644 --- a/src/i18n/index.ts +++ b/src/i18n/index.ts @@ -41,11 +41,6 @@ i18n de: de }, fallbackLng: "en", - detection: { - order: ['localStorage', 'navigator'], - caches: ['localStorage'] - }, - supportedLngs: supportedLanguages, lng: localStorage.getItem("i18nextLng") || "en", }); From 4e04155471cf4ac8909580e2d511e0e33000b55b Mon Sep 17 00:00:00 2001 From: n4ze3m Date: Sun, 13 Oct 2024 18:22:08 +0530 Subject: [PATCH 28/33] Fix: Update dependencies and documentation Updated dependencies to use Bun for package management and updated documentation to reflect the changes. Also updated the browser compatibility list to include LibreWolf and Zen Browser. --- CONTRIBUTING.md | 1 - README.md | 50 ++++++++++++++++++++++--------------------------- 2 files changed, 22 insertions(+), 29 deletions(-) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 54d889d..c37131c 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -1,4 +1,3 @@ - # Contributing to Page Assist Thank you for your interest in contributing to Page Assist! 
We welcome contributions from anyone, whether it's reporting bugs, suggesting improvements, or submitting code changes. diff --git a/README.md b/README.md index 750271b..d659a0b 100644 --- a/README.md +++ b/README.md @@ -11,7 +11,6 @@ Page Assist supports Chromium-based browsers like Chrome, Brave, Edge, and Firef [![Chrome Web Store](https://pub-35424b4473484be483c0afa08c69e7da.r2.dev/UV4C4ybeBTsZt43U4xis.png)](https://chrome.google.com/webstore/detail/page-assist/jfgfiigpkhlkbnfnbobbkinehhfdhndo) [![Firefox Add-on](https://pub-35424b4473484be483c0afa08c69e7da.r2.dev/get-the-addon.png)](https://addons.mozilla.org/en-US/firefox/addon/page-assist/) - Checkout the Demo (v1.0.0):
    @@ -30,15 +29,13 @@ Checkout the Demo (v1.0.0): want more features? Create an issue and let me know. - ### Manual Installation #### Pre-requisites -- Node.js (v18 or higher) - [Installation Guide](https://nodejs.org) -- npm +- Bun - [Installation Guide](https://bun.sh/) - Ollama (Local AI Provider) - [Installation Guide](https://ollama.com) - +- Any OpenAI API Compatible Endpoint (like LM Studio, llamafile etc.) 1. Clone the repository @@ -50,19 +47,19 @@ cd page-assist 2. Install the dependencies ```bash -npm install +bun install ``` 3. Build the extension (by default it will build for Chrome) ```bash -npm run build +bun run build ``` or you can build for Firefox ```bash -npm run build:firefox +bun run build:firefox ``` 4. Load the extension (chrome) @@ -101,45 +98,43 @@ Note: You can change the keyboard shortcuts from the extension settings on the C You can run the extension in development mode to make changes and test them. ```bash -npm run dev +bun dev ``` This will start a development server and watch for changes in the source files. You can load the extension in your browser and test the changes. ## Browser Support -| Browser | Sidebar | Chat With Webpage | Web UI | -| -------- | ------- | ----------------- | ------ | -| Chrome | ✅ | ✅ | ✅ | -| Brave | ✅ | ✅ | ✅ | -| Firefox | ✅ | ✅ | ✅ | -| Vivaldi | ✅ | ✅ | ✅ | -| Edge | ✅ | ❌ | ✅ | -| Opera | ❌ | ❌ | ✅ | -| Arc | ❌ | ❌ | ✅ | - - +| Browser | Sidebar | Chat With Webpage | Web UI | +| ----------- | ------- | ----------------- | ------ | +| Chrome | ✅ | ✅ | ✅ | +| Brave | ✅ | ✅ | ✅ | +| Firefox | ✅ | ✅ | ✅ | +| Vivaldi | ✅ | ✅ | ✅ | +| Edge | ✅ | ❌ | ✅ | +| LibreWolf | ✅ | ✅ | ✅ | +| Zen Browser | ✅ | ✅ | ✅ | +| Opera | ❌ | ❌ | ✅ | +| Arc | ❌ | ❌ | ✅ | ## Local AI Provider - [Ollama](https://github.com/ollama/ollama) -- Chrome AI (Gemini Nano) +- Chrome AI (Gemini Nano) -More providers will be added in the future. +- OpenAI API Compatible endpoints (like LM Studio, llamafile etc.) 
## Roadmap -- [X] Firefox Support -- [ ] Code Completion support for Web based IDEs (like Colab, Jupyter, etc.) -- [ ] More Local AI Providers +- [x] Firefox Support +- [x] More Local AI Providers - [ ] More Customization Options - [ ] Better UI/UX - ## Privacy -Page Assist does not collect any personal data. The only time the extension communicates with the server is when you are using the share feature, which can be disabled from the settings. +Page Assist does not collect any personal data. The only time the extension communicates with the server is when you are using the share feature, which can be disabled from the settings. All the data is stored locally in the browser storage. You can view the source code and verify it yourself. @@ -155,7 +150,6 @@ If you like the project and want to support it, you can buy me a coffee. It will or you can sponsor me on GitHub. - ## Blogs and Videos About Page Assist This are some of the blogs and videos about Page Assist. If you have written a blog or made a video about Page Assist, feel free to create a PR and add it here. From ff4473c35b8fcdb41668519d81f2d403f8b43b83 Mon Sep 17 00:00:00 2001 From: n4ze3m Date: Sun, 13 Oct 2024 18:22:16 +0530 Subject: [PATCH 29/33] feat: Add model type support Adds model type support for chat and embedding models. This allows users to specify which type of model they want to use when adding custom models. Additionally, this commit introduces a more descriptive interface for adding custom models, enhancing the clarity of the model selection process. 
--- src/assets/locale/en/openai.json | 32 ++++- .../Option/Models/AddCustomModelModal.tsx | 129 ++++++++++++++++++ .../Option/Models/AddOllamaModelModal.tsx | 3 +- .../Option/Models/CustomModelsTable.tsx | 18 +-- src/components/Option/Models/index.tsx | 9 ++ .../Option/Settings/openai-fetch-model.tsx | 44 +++++- src/components/Option/Settings/openai.tsx | 30 ++-- src/db/models.ts | 49 ++++--- src/services/ollama.ts | 6 +- 9 files changed, 277 insertions(+), 43 deletions(-) create mode 100644 src/components/Option/Models/AddCustomModelModal.tsx diff --git a/src/assets/locale/en/openai.json b/src/assets/locale/en/openai.json index 18a5f81..c0838cb 100644 --- a/src/assets/locale/en/openai.json +++ b/src/assets/locale/en/openai.json @@ -31,7 +31,7 @@ "deleteConfirm": "Are you sure you want to delete this provider?", "model": { "title": "Model List", - "subheading": "Please select the models you want to use with this provider.", + "subheading": "Please select the chat models you want to use with this provider.", "success": "Successfully added new models." }, "tipLMStudio": "Page Assist will automatically fetch the models you loaded on LM Studio. You don't need to add them manually." @@ -41,7 +41,8 @@ "updateSuccess": "Provider updated successfully.", "delete": "Delete", "edit": "Edit", - "refetch": "Refech Model List", + "newModel": "Add Models to Provider", + "noNewModel": "For LMStudio, we fetch dynamically. No manual addition needed.", "searchModel": "Search Model", "selectAll": "Select All", "save": "Save", @@ -49,6 +50,7 @@ "manageModels": { "columns": { "name": "Model Name", + "model_type": "Model Type", "model_id": "Model ID", "provider": "Provider Name", "actions": "Action" @@ -58,7 +60,31 @@ }, "confirm": { "delete": "Are you sure you want to delete this model?" + }, + "modal": { + "title": "Add Custom Model", + "form": { + "name": { + "label": "Model ID", + "placeholder": "llama3.2", + "required": "Model ID is required." 
+ }, + "provider": { + "label": "Provider", + "placeholder": "Select provider", + "required": "Provider is required." + }, + "type": { + "label": "Model Type" + } + } } }, - "noModelFound": "No model found. Make sure you have added correct provider with base URL and API key." + "noModelFound": "No model found. Make sure you have added correct provider with base URL and API key.", + "radio": { + "chat": "Chat Model", + "embedding": "Embedding Model", + "chatInfo": "is used for chat completion and conversation generation", + "embeddingInfo": "is used for RAG and other semantic search related tasks." + } } \ No newline at end of file diff --git a/src/components/Option/Models/AddCustomModelModal.tsx b/src/components/Option/Models/AddCustomModelModal.tsx new file mode 100644 index 0000000..c0315af --- /dev/null +++ b/src/components/Option/Models/AddCustomModelModal.tsx @@ -0,0 +1,129 @@ +import { createModel } from "@/db/models" +import { getAllOpenAIConfig } from "@/db/openai" +import { useMutation, useQuery, useQueryClient } from "@tanstack/react-query" +import { Input, Modal, Form, Select, Radio } from "antd" +import { Loader2 } from "lucide-react" +import { useTranslation } from "react-i18next" + +type Props = { + open: boolean + setOpen: (open: boolean) => void +} + +export const AddCustomModelModal: React.FC = ({ open, setOpen }) => { + const { t } = useTranslation(["openai"]) + const [form] = Form.useForm() + const queryClient = useQueryClient() + + const { data, isPending } = useQuery({ + queryKey: ["fetchProviders"], + queryFn: async () => { + const providers = await getAllOpenAIConfig() + return providers.filter((provider) => provider.provider !== "lmstudio") + } + }) + + const onFinish = async (values: { + model_id: string + model_type: "chat" | "embedding" + provider_id: string + }) => { + await createModel( + values.model_id, + values.model_id, + values.provider_id, + values.model_type + ) + + return true + } + + const { mutate: createModelMutation, 
isPending: isSaving } = useMutation({ + mutationFn: onFinish, + onSuccess: () => { + queryClient.invalidateQueries({ + queryKey: ["fetchCustomModels"] + }) + queryClient.invalidateQueries({ + queryKey: ["fetchModel"] + }) + setOpen(false) + form.resetFields() + } + }) + + return ( + setOpen(false)}> +
    + + + + + + + + + + + {t("radio.chat")} + {t("radio.embedding")} + + + + + + + +
    + ) +} diff --git a/src/components/Option/Models/AddOllamaModelModal.tsx b/src/components/Option/Models/AddOllamaModelModal.tsx index dd2bd01..7ca2972 100644 --- a/src/components/Option/Models/AddOllamaModelModal.tsx +++ b/src/components/Option/Models/AddOllamaModelModal.tsx @@ -1,5 +1,5 @@ import { useForm } from "@mantine/form" -import { useMutation } from "@tanstack/react-query" +import { useMutation, useQueryClient } from "@tanstack/react-query" import { Input, Modal, notification } from "antd" import { Download } from "lucide-react" import { useTranslation } from "react-i18next" @@ -11,6 +11,7 @@ type Props = { export const AddOllamaModelModal: React.FC = ({ open, setOpen }) => { const { t } = useTranslation(["settings", "common", "openai"]) + const queryClient = useQueryClient() const form = useForm({ initialValues: { diff --git a/src/components/Option/Models/CustomModelsTable.tsx b/src/components/Option/Models/CustomModelsTable.tsx index 4bc57b9..74ed12b 100644 --- a/src/components/Option/Models/CustomModelsTable.tsx +++ b/src/components/Option/Models/CustomModelsTable.tsx @@ -1,7 +1,7 @@ import { getAllCustomModels, deleteModel } from "@/db/models" import { useStorage } from "@plasmohq/storage/hook" import { useQuery, useQueryClient, useMutation } from "@tanstack/react-query" -import { Skeleton, Table, Tooltip } from "antd" +import { Skeleton, Table, Tag, Tooltip } from "antd" import { Trash2 } from "lucide-react" import { useTranslation } from "react-i18next" @@ -10,7 +10,6 @@ export const CustomModelsTable = () => { const { t } = useTranslation(["openai", "common"]) - const queryClient = useQueryClient() const { data, status } = useQuery({ @@ -27,7 +26,6 @@ export const CustomModelsTable = () => { } }) - return (
    @@ -37,16 +35,20 @@ export const CustomModelsTable = () => {
    ( + + {t(`radio.${txt}`)} + + ) + }, { title: t("manageModels.columns.provider"), dataIndex: "provider", diff --git a/src/components/Option/Models/index.tsx b/src/components/Option/Models/index.tsx index b2cab08..6a63700 100644 --- a/src/components/Option/Models/index.tsx +++ b/src/components/Option/Models/index.tsx @@ -6,11 +6,13 @@ import { useTranslation } from "react-i18next" import { OllamaModelsTable } from "./OllamaModelsTable" import { CustomModelsTable } from "./CustomModelsTable" import { AddOllamaModelModal } from "./AddOllamaModelModal" +import { AddCustomModelModal } from "./AddCustomModelModal" dayjs.extend(relativeTime) export const ModelsBody = () => { const [open, setOpen] = useState(false) + const [openAddModelModal, setOpenAddModelModal] = useState(false) const [segmented, setSegmented] = useState("ollama") const { t } = useTranslation(["settings", "common", "openai"]) @@ -26,6 +28,8 @@ export const ModelsBody = () => { onClick={() => { if (segmented === "ollama") { setOpen(true) + } else { + setOpenAddModelModal(true) } }} className="inline-flex items-center rounded-md border border-transparent bg-black px-2 py-2 text-md font-medium leading-4 text-white shadow-sm hover:bg-gray-800 focus:outline-none focus:ring-2 focus:ring-indigo-500 focus:ring-offset-2 dark:bg-white dark:text-gray-800 dark:hover:bg-gray-100 dark:focus:ring-gray-500 dark:focus:ring-offset-gray-100 disabled:opacity-50"> @@ -56,6 +60,11 @@ export const ModelsBody = () => { + + ) } diff --git a/src/components/Option/Settings/openai-fetch-model.tsx b/src/components/Option/Settings/openai-fetch-model.tsx index c64e030..5a67f01 100644 --- a/src/components/Option/Settings/openai-fetch-model.tsx +++ b/src/components/Option/Settings/openai-fetch-model.tsx @@ -1,10 +1,12 @@ import { getOpenAIConfigById } from "@/db/openai" import { getAllOpenAIModels } from "@/libs/openai" -import { useMutation, useQuery } from "@tanstack/react-query" +import { useMutation, useQuery, useQueryClient } 
from "@tanstack/react-query" import { useTranslation } from "react-i18next" -import { Checkbox, Input, Spin, message } from "antd" +import { Checkbox, Input, Spin, message, Radio } from "antd" import { useState, useMemo } from "react" import { createManyModels } from "@/db/models" +import { Popover } from "antd" +import { InfoIcon } from "lucide-react" type Props = { openaiId: string @@ -15,6 +17,8 @@ export const OpenAIFetchModel = ({ openaiId, setOpenModelModal }: Props) => { const { t } = useTranslation(["openai"]) const [selectedModels, setSelectedModels] = useState([]) const [searchTerm, setSearchTerm] = useState("") + const [modelType, setModelType] = useState("chat") + const queryClient = useQueryClient() const { data, status } = useQuery({ queryKey: ["openAIConfigs", openaiId], @@ -56,7 +60,8 @@ export const OpenAIFetchModel = ({ openaiId, setOpenModelModal }: Props) => { const payload = models.map((id) => ({ model_id: id, name: filteredModels.find((model) => model.id === id)?.name ?? id, - provider_id: openaiId + provider_id: openaiId, + model_type: modelType })) await createManyModels(payload) @@ -68,6 +73,9 @@ export const OpenAIFetchModel = ({ openaiId, setOpenModelModal }: Props) => { mutationFn: onSave, onSuccess: () => { setOpenModelModal(false) + queryClient.invalidateQueries({ + queryKey: ["fetchModel"] + }) message.success(t("modal.model.success")) } }) @@ -97,6 +105,7 @@ export const OpenAIFetchModel = ({ openaiId, setOpenModelModal }: Props) => {

    {t("modal.model.subheading")}

    + { ))} + +
    + setModelType(e.target.value)} + value={modelType}> + {t("radio.chat")} + {t("radio.embedding")} + + +

    + + {t("radio.chat")} + {" "} + {t("radio.chatInfo")} +

    +

    + + {t("radio.embedding")} + {" "} + {t("radio.embeddingInfo")} +

    +
    + }> + + + + - + +