From b3a455382c35c464345d5d58bb9ac5feb86c4e57 Mon Sep 17 00:00:00 2001 From: n4ze3m Date: Thu, 23 May 2024 00:39:44 +0530 Subject: [PATCH 01/14] chore: Update version to 1.1.9 and add Model Settings to Ollama settings page --- src/assets/locale/en/common.json | 36 +- .../Common/CurrentChatModelSettings.tsx | 138 ++++++ src/components/Icons/ChatSettings.tsx | 29 ++ src/components/Layouts/Layout.tsx | 20 +- .../Option/Settings/model-settings.tsx | 123 ++++++ src/components/Option/Settings/ollama.tsx | 4 + .../Option/Settings/search-mode.tsx | 1 - src/hooks/useMessageOption.tsx | 92 +++- src/models/ChatOllama.ts | 406 ++++++++++++++++++ src/models/utils/ollama.ts | 201 +++++++++ src/services/model-settings.ts | 101 +++++ src/store/model.tsx | 136 ++++++ wxt.config.ts | 2 +- 13 files changed, 1271 insertions(+), 18 deletions(-) create mode 100644 src/components/Common/CurrentChatModelSettings.tsx create mode 100644 src/components/Icons/ChatSettings.tsx create mode 100644 src/components/Option/Settings/model-settings.tsx create mode 100644 src/models/ChatOllama.ts create mode 100644 src/models/utils/ollama.ts create mode 100644 src/services/model-settings.ts create mode 100644 src/store/model.tsx diff --git a/src/assets/locale/en/common.json b/src/assets/locale/en/common.json index 41df538..553c5f7 100644 --- a/src/assets/locale/en/common.json +++ b/src/assets/locale/en/common.json @@ -50,5 +50,39 @@ "noHistory": "No chat history", "chatWithCurrentPage": "Chat with current page", "beta": "Beta", - "tts": "Read aloud" + "tts": "Read aloud", + "modelSettings": { + "label": "Model Settings", + "currentChatModelSettings":"Current Chat Model Settings", + "description": "Set the model options globally for all chats", + "form": { + "keepAlive": { + "label": "Keep Alive", + "help": "controls how long the model will stay loaded into memory following the request (default: 5m)", + "placeholder": "Enter Keep Alive duration (e.g. 5m, 10m, 1h)" + }, + "temperature": { + "label": "Temperature", + "placeholder": "Enter Temperature value (e.g. 0.7, 1.0)" + }, + "numCtx": { + "label": "Number of Contexts", + "placeholder": "Enter Number of Contexts value (default: 2048)" + }, + "seed": { + "label": "Seed", + "placeholder": "Enter Seed value (e.g. 1234)", + "help": "Reproducibility of the model output" + }, + "topK": { + "label": "Top K", + "placeholder": "Enter Top K value (e.g. 40, 100)" + }, + "topP": { + "label": "Top P", + "placeholder": "Enter Top P value (e.g. 
0.9, 0.95)" + } + }, + "advanced": "More Model Settings" + } } \ No newline at end of file diff --git a/src/components/Common/CurrentChatModelSettings.tsx b/src/components/Common/CurrentChatModelSettings.tsx new file mode 100644 index 0000000..d2b7cba --- /dev/null +++ b/src/components/Common/CurrentChatModelSettings.tsx @@ -0,0 +1,138 @@ +import { getAllModelSettings } from "@/services/model-settings" +import { useStoreChatModelSettings } from "@/store/model" +import { useQuery } from "@tanstack/react-query" +import { Collapse, Form, Input, InputNumber, Modal, Skeleton } from "antd" +import React from "react" +import { useTranslation } from "react-i18next" + +type Props = { + open: boolean + setOpen: (open: boolean) => void +} + +export const CurrentChatModelSettings = ({ open, setOpen }: Props) => { + const { t } = useTranslation("common") + const [form] = Form.useForm() + const cUserSettings = useStoreChatModelSettings() + const { isPending: isLoading } = useQuery({ + queryKey: ["fetchModelConfig2", open], + queryFn: async () => { + const data = await getAllModelSettings() + form.setFieldsValue({ + temperature: cUserSettings.temperature ?? data.temperature, + topK: cUserSettings.topK ?? data.topK, + topP: cUserSettings.topP ?? data.topP, + keepAlive: cUserSettings.keepAlive ?? data.keepAlive, + numCtx: cUserSettings.numCtx ?? data.numCtx, + seed: cUserSettings.seed + }) + return data + }, + enabled: open, + refetchOnMount: true + }) + return ( + setOpen(false)} + onCancel={() => setOpen(false)} + footer={null}> + {!isLoading ? ( +
{ + Object.entries(values).forEach(([key, value]) => { + cUserSettings.setX(key, value) + setOpen(false) + }) + }} + form={form} + layout="vertical"> + + + + + + + + + + + + + + + + + + + + + + + + ) + } + ]} + /> + + + + ) : ( + + )} +
+ ) +} diff --git a/src/components/Icons/ChatSettings.tsx b/src/components/Icons/ChatSettings.tsx new file mode 100644 index 0000000..e9cbf27 --- /dev/null +++ b/src/components/Icons/ChatSettings.tsx @@ -0,0 +1,29 @@ +import React from "react" + +export const ChatSettings = React.forwardRef< + SVGSVGElement, + React.SVGProps +>((props, ref) => { + return ( + + + + + + ) +}) diff --git a/src/components/Layouts/Layout.tsx b/src/components/Layouts/Layout.tsx index 2156710..676623d 100644 --- a/src/components/Layouts/Layout.tsx +++ b/src/components/Layouts/Layout.tsx @@ -7,6 +7,7 @@ import { useQuery } from "@tanstack/react-query" import { fetchChatModels, getAllModels } from "~/services/ollama" import { useMessageOption } from "~/hooks/useMessageOption" import { + BrainCog, ChevronLeft, CogIcon, ComputerIcon, @@ -24,6 +25,8 @@ import { SelectedKnowledge } from "../Option/Knowledge/SelectedKnwledge" import { useStorage } from "@plasmohq/storage/hook" import { ModelSelect } from "../Common/ModelSelect" import { PromptSelect } from "../Common/PromptSelect" +import { ChatSettings } from "../Icons/ChatSettings" +import { CurrentChatModelSettings } from "../Common/CurrentChatModelSettings" export default function OptionLayout({ children @@ -33,6 +36,7 @@ export default function OptionLayout({ const [sidebarOpen, setSidebarOpen] = useState(false) const { t } = useTranslation(["option", "common"]) const [shareModeEnabled] = useStorage("shareMode", false) + const [openModelSettings, setOpenModelSettings] = useState(false) const { selectedModel, @@ -108,9 +112,7 @@ export default function OptionLayout({ onClick={clearChat} className="inline-flex dark:bg-transparent bg-white items-center rounded-lg border dark:border-gray-700 bg-transparent px-3 py-2.5 text-xs lg:text-sm font-medium leading-4 text-gray-800 dark:text-white disabled:opacity-50 ease-in-out transition-colors duration-200 hover:bg-gray-100 dark:hover:bg-gray-800 dark:hover:text-white"> - - {t("newChat")} - + {t("newChat")} @@ -193,6 +195,13 @@ export default function OptionLayout({
+ + + {pathname === "/" && messages.length > 0 && !streaming && @@ -228,6 +237,11 @@ export default function OptionLayout({ open={sidebarOpen}> setSidebarOpen(false)} /> + +
) } diff --git a/src/components/Option/Settings/model-settings.tsx b/src/components/Option/Settings/model-settings.tsx new file mode 100644 index 0000000..a7ee8ef --- /dev/null +++ b/src/components/Option/Settings/model-settings.tsx @@ -0,0 +1,123 @@ +import { SaveButton } from "@/components/Common/SaveButton" +import { getAllModelSettings, setModelSetting } from "@/services/model-settings" +import { useQuery, useQueryClient } from "@tanstack/react-query" +import { Form, Skeleton, Input, Switch, InputNumber, Collapse } from "antd" +import React from "react" +import { useTranslation } from "react-i18next" +// keepAlive?: string +// temperature?: number +// topK?: number +// topP?: number + +export const ModelSettings = () => { + const { t } = useTranslation("common") + const [form] = Form.useForm() + const client = useQueryClient() + const { isPending: isLoading } = useQuery({ + queryKey: ["fetchModelConfig"], + queryFn: async () => { + const data = await getAllModelSettings() + form.setFieldsValue(data) + return data + } + }) + + return ( +
+
+

+ {t("modelSettings.label")} +

+

+ {t("modelSettings.description")} +

+
+
+ {!isLoading ? ( +
{ + Object.entries(values).forEach(([key, value]) => { + setModelSetting(key, value) + }) + client.invalidateQueries({ + queryKey: ["fetchModelConfig"] + }) + }} + form={form} + layout="vertical"> + + + + + + + + + + + + + + + + + + + + + ) + } + ]} + /> + +
+ +
+ + ) : ( + + )} +
+ ) +} diff --git a/src/components/Option/Settings/ollama.tsx b/src/components/Option/Settings/ollama.tsx index 969a845..688ae96 100644 --- a/src/components/Option/Settings/ollama.tsx +++ b/src/components/Option/Settings/ollama.tsx @@ -15,6 +15,7 @@ import { SettingPrompt } from "./prompt" import { Trans, useTranslation } from "react-i18next" import { useStorage } from "@plasmohq/storage/hook" import { AdvanceOllamaSettings } from "@/components/Common/AdvanceOllamaSettings" +import { ModelSettings } from "./model-settings" export const SettingsOllama = () => { const [ollamaURL, setOllamaURL] = useState("") @@ -219,6 +220,7 @@ export const SettingsOllama = () => {
+
@@ -229,6 +231,8 @@ export const SettingsOllama = () => {
+ + )} diff --git a/src/components/Option/Settings/search-mode.tsx b/src/components/Option/Settings/search-mode.tsx index cf09183..8973ec0 100644 --- a/src/components/Option/Settings/search-mode.tsx +++ b/src/components/Option/Settings/search-mode.tsx @@ -8,7 +8,6 @@ import { useTranslation } from "react-i18next" export const SearchModeSettings = () => { const { t } = useTranslation("settings") - const queryClient = useQueryClient() const form = useForm({ initialValues: { diff --git a/src/hooks/useMessageOption.tsx b/src/hooks/useMessageOption.tsx index 64100fa..40341ff 100644 --- a/src/hooks/useMessageOption.tsx +++ b/src/hooks/useMessageOption.tsx @@ -8,7 +8,6 @@ import { systemPromptForNonRagOption } from "~/services/ollama" import { type ChatHistory, type Message } from "~/store/option" -import { ChatOllama } from "@langchain/community/chat_models/ollama" import { HumanMessage, SystemMessage } from "@langchain/core/messages" import { useStoreMessageOption } from "~/store/option" import { @@ -29,8 +28,10 @@ import { OllamaEmbeddings } from "@langchain/community/embeddings/ollama" import { PageAssistVectorStore } from "@/libs/PageAssistVectorStore" import { formatDocs } from "@/chain/chat-with-x" import { useWebUI } from "@/store/webui" -import { isTTSEnabled } from "@/services/tts" import { useStorage } from "@plasmohq/storage/hook" +import { useStoreChatModelSettings } from "@/store/model" +import { getAllDefaultModelSettings } from "@/services/model-settings" +import { ChatOllama } from "@/models/ChatOllama" export const useMessageOption = () => { const { @@ -66,6 +67,7 @@ export const useMessageOption = () => { selectedKnowledge, setSelectedKnowledge } = useStoreMessageOption() + const currentChatModelSettings = useStoreChatModelSettings() const [selectedModel, setSelectedModel] = useStorage("selectedModel") const { ttsEnabled } = useWebUI() @@ -75,7 +77,6 @@ export const useMessageOption = () => { const navigate = useNavigate() const textareaRef = React.useRef(null) - const clearChat = () => { navigate("/") setMessages([]) @@ -85,6 +86,7 @@ export const useMessageOption = () => { setIsLoading(false) setIsProcessing(false) setStreaming(false) + currentChatModelSettings.reset() textareaRef?.current?.focus() } @@ -97,14 +99,25 @@ export const useMessageOption = () => { signal: AbortSignal ) => { const url = await getOllamaURL() - + const userDefaultModelSettings = await getAllDefaultModelSettings() if (image.length > 0) { image = `data:image/jpeg;base64,${image.split(",")[1]}` } const ollama = new ChatOllama({ model: selectedModel!, - baseUrl: cleanUrl(url) + baseUrl: cleanUrl(url), + keepAlive: + currentChatModelSettings?.keepAlive ?? + userDefaultModelSettings?.keepAlive, + temperature: + currentChatModelSettings?.temperature ?? + userDefaultModelSettings?.temperature, + topK: currentChatModelSettings?.topK ?? userDefaultModelSettings?.topK, + topP: currentChatModelSettings?.topP ?? userDefaultModelSettings?.topP, + numCtx: + currentChatModelSettings?.numCtx ?? userDefaultModelSettings?.numCtx, + seed: currentChatModelSettings?.seed }) let newMessage: Message[] = [] @@ -163,7 +176,21 @@ export const useMessageOption = () => { .replaceAll("{question}", message) const questionOllama = new ChatOllama({ model: selectedModel!, - baseUrl: cleanUrl(url) + baseUrl: cleanUrl(url), + keepAlive: + currentChatModelSettings?.keepAlive ?? + userDefaultModelSettings?.keepAlive, + temperature: + currentChatModelSettings?.temperature ?? 
+ userDefaultModelSettings?.temperature, + topK: + currentChatModelSettings?.topK ?? userDefaultModelSettings?.topK, + topP: + currentChatModelSettings?.topP ?? userDefaultModelSettings?.topP, + numCtx: + currentChatModelSettings?.numCtx ?? + userDefaultModelSettings?.numCtx, + seed: currentChatModelSettings?.seed }) const response = await questionOllama.invoke(promptForQuestion) query = response.content.toString() @@ -172,7 +199,7 @@ export const useMessageOption = () => { const { prompt, source } = await getSystemPromptForWeb(query) setIsSearchingInternet(false) - // message = message.trim().replaceAll("\n", " ") + // message = message.trim().replaceAll("\n", " ") let humanMessage = new HumanMessage({ content: [ @@ -314,6 +341,7 @@ export const useMessageOption = () => { signal: AbortSignal ) => { const url = await getOllamaURL() + const userDefaultModelSettings = await getAllDefaultModelSettings() if (image.length > 0) { image = `data:image/jpeg;base64,${image.split(",")[1]}` @@ -321,7 +349,18 @@ export const useMessageOption = () => { const ollama = new ChatOllama({ model: selectedModel!, - baseUrl: cleanUrl(url) + baseUrl: cleanUrl(url), + keepAlive: + currentChatModelSettings?.keepAlive ?? + userDefaultModelSettings?.keepAlive, + temperature: + currentChatModelSettings?.temperature ?? + userDefaultModelSettings?.temperature, + topK: currentChatModelSettings?.topK ?? userDefaultModelSettings?.topK, + topP: currentChatModelSettings?.topP ?? userDefaultModelSettings?.topP, + numCtx: + currentChatModelSettings?.numCtx ?? userDefaultModelSettings?.numCtx, + seed: currentChatModelSettings?.seed }) let newMessage: Message[] = [] @@ -521,10 +560,22 @@ export const useMessageOption = () => { signal: AbortSignal ) => { const url = await getOllamaURL() + const userDefaultModelSettings = await getAllDefaultModelSettings() const ollama = new ChatOllama({ model: selectedModel!, - baseUrl: cleanUrl(url) + baseUrl: cleanUrl(url), + keepAlive: + currentChatModelSettings?.keepAlive ?? + userDefaultModelSettings?.keepAlive, + temperature: + currentChatModelSettings?.temperature ?? + userDefaultModelSettings?.temperature, + topK: currentChatModelSettings?.topK ?? userDefaultModelSettings?.topK, + topP: currentChatModelSettings?.topP ?? userDefaultModelSettings?.topP, + numCtx: + currentChatModelSettings?.numCtx ?? userDefaultModelSettings?.numCtx, + seed: currentChatModelSettings?.seed }) let newMessage: Message[] = [] @@ -568,7 +619,10 @@ export const useMessageOption = () => { const ollamaUrl = await getOllamaURL() const ollamaEmbedding = new OllamaEmbeddings({ model: embeddingModle || selectedModel, - baseUrl: cleanUrl(ollamaUrl) + baseUrl: cleanUrl(ollamaUrl), + keepAlive: + currentChatModelSettings?.keepAlive ?? + userDefaultModelSettings?.keepAlive }) let vectorstore = await PageAssistVectorStore.fromExistingIndex( @@ -596,7 +650,21 @@ export const useMessageOption = () => { .replaceAll("{question}", message) const questionOllama = new ChatOllama({ model: selectedModel!, - baseUrl: cleanUrl(url) + baseUrl: cleanUrl(url), + keepAlive: + currentChatModelSettings?.keepAlive ?? + userDefaultModelSettings?.keepAlive, + temperature: + currentChatModelSettings?.temperature ?? + userDefaultModelSettings?.temperature, + topK: + currentChatModelSettings?.topK ?? userDefaultModelSettings?.topK, + topP: + currentChatModelSettings?.topP ?? userDefaultModelSettings?.topP, + numCtx: + currentChatModelSettings?.numCtx ?? 
+ userDefaultModelSettings?.numCtx, + seed: currentChatModelSettings?.seed }) const response = await questionOllama.invoke(promptForQuestion) query = response.content.toString() @@ -613,7 +681,7 @@ export const useMessageOption = () => { url: "" } }) - // message = message.trim().replaceAll("\n", " ") + // message = message.trim().replaceAll("\n", " ") let humanMessage = new HumanMessage({ content: [ diff --git a/src/models/ChatOllama.ts b/src/models/ChatOllama.ts new file mode 100644 index 0000000..69138bf --- /dev/null +++ b/src/models/ChatOllama.ts @@ -0,0 +1,406 @@ +import type { BaseLanguageModelCallOptions } from "@langchain/core/language_models/base"; +import { + SimpleChatModel, + type BaseChatModelParams, +} from "@langchain/core/language_models/chat_models"; +import { CallbackManagerForLLMRun } from "@langchain/core/callbacks/manager"; +import { + AIMessageChunk, + BaseMessage, + ChatMessage, +} from "@langchain/core/messages"; +import { ChatGenerationChunk } from "@langchain/core/outputs"; +import type { StringWithAutocomplete } from "@langchain/core/utils/types"; + +import { + createOllamaChatStream, + createOllamaGenerateStream, + type OllamaInput, + type OllamaMessage, +} from "./utils/ollama"; + +export interface ChatOllamaInput extends OllamaInput { } + +export interface ChatOllamaCallOptions extends BaseLanguageModelCallOptions { } + +export class ChatOllama + extends SimpleChatModel + implements ChatOllamaInput { + static lc_name() { + return "ChatOllama"; + } + + lc_serializable = true; + + model = "llama2"; + + baseUrl = "http://localhost:11434"; + + keepAlive = "5m"; + + embeddingOnly?: boolean; + + f16KV?: boolean; + + frequencyPenalty?: number; + + headers?: Record; + + logitsAll?: boolean; + + lowVram?: boolean; + + mainGpu?: number; + + mirostat?: number; + + mirostatEta?: number; + + mirostatTau?: number; + + numBatch?: number; + + numCtx?: number; + + numGpu?: number; + + numGqa?: number; + + numKeep?: number; + + numPredict?: number; + + numThread?: number; + + penalizeNewline?: boolean; + + presencePenalty?: number; + + repeatLastN?: number; + + repeatPenalty?: number; + + ropeFrequencyBase?: number; + + ropeFrequencyScale?: number; + + temperature?: number; + + stop?: string[]; + + tfsZ?: number; + + topK?: number; + + topP?: number; + + typicalP?: number; + + useMLock?: boolean; + + useMMap?: boolean; + + vocabOnly?: boolean; + + seed?: number; + + format?: StringWithAutocomplete<"json">; + + constructor(fields: OllamaInput & BaseChatModelParams) { + super(fields); + this.model = fields.model ?? this.model; + this.baseUrl = fields.baseUrl?.endsWith("/") + ? fields.baseUrl.slice(0, -1) + : fields.baseUrl ?? this.baseUrl; + this.keepAlive = fields.keepAlive ?? 
this.keepAlive; + this.embeddingOnly = fields.embeddingOnly; + this.f16KV = fields.f16KV; + this.frequencyPenalty = fields.frequencyPenalty; + this.headers = fields.headers; + this.logitsAll = fields.logitsAll; + this.lowVram = fields.lowVram; + this.mainGpu = fields.mainGpu; + this.mirostat = fields.mirostat; + this.mirostatEta = fields.mirostatEta; + this.mirostatTau = fields.mirostatTau; + this.numBatch = fields.numBatch; + this.numCtx = fields.numCtx; + this.numGpu = fields.numGpu; + this.numGqa = fields.numGqa; + this.numKeep = fields.numKeep; + this.numPredict = fields.numPredict; + this.numThread = fields.numThread; + this.penalizeNewline = fields.penalizeNewline; + this.presencePenalty = fields.presencePenalty; + this.repeatLastN = fields.repeatLastN; + this.repeatPenalty = fields.repeatPenalty; + this.ropeFrequencyBase = fields.ropeFrequencyBase; + this.ropeFrequencyScale = fields.ropeFrequencyScale; + this.temperature = fields.temperature; + this.stop = fields.stop; + this.tfsZ = fields.tfsZ; + this.topK = fields.topK; + this.topP = fields.topP; + this.typicalP = fields.typicalP; + this.useMLock = fields.useMLock; + this.useMMap = fields.useMMap; + this.vocabOnly = fields.vocabOnly; + this.format = fields.format; + this.seed = fields.seed; + } + + protected getLsParams(options: this["ParsedCallOptions"]) { + const params = this.invocationParams(options); + return { + ls_provider: "ollama", + ls_model_name: this.model, + ls_model_type: "chat", + ls_temperature: this.temperature ?? undefined, + ls_stop: this.stop, + ls_max_tokens: params.options.num_predict, + }; + } + + _llmType() { + return "ollama"; + } + + /** + * A method that returns the parameters for an Ollama API call. It + * includes model and options parameters. + * @param options Optional parsed call options. + * @returns An object containing the parameters for an Ollama API call. + */ + invocationParams(options?: this["ParsedCallOptions"]) { + return { + model: this.model, + format: this.format, + keep_alive: this.keepAlive, + options: { + embedding_only: this.embeddingOnly, + f16_kv: this.f16KV, + frequency_penalty: this.frequencyPenalty, + logits_all: this.logitsAll, + low_vram: this.lowVram, + main_gpu: this.mainGpu, + mirostat: this.mirostat, + mirostat_eta: this.mirostatEta, + mirostat_tau: this.mirostatTau, + num_batch: this.numBatch, + num_ctx: this.numCtx, + num_gpu: this.numGpu, + num_gqa: this.numGqa, + num_keep: this.numKeep, + num_predict: this.numPredict, + num_thread: this.numThread, + penalize_newline: this.penalizeNewline, + presence_penalty: this.presencePenalty, + repeat_last_n: this.repeatLastN, + repeat_penalty: this.repeatPenalty, + rope_frequency_base: this.ropeFrequencyBase, + rope_frequency_scale: this.ropeFrequencyScale, + temperature: this.temperature, + stop: options?.stop ?? 
this.stop, + tfs_z: this.tfsZ, + top_k: this.topK, + top_p: this.topP, + typical_p: this.typicalP, + use_mlock: this.useMLock, + use_mmap: this.useMMap, + vocab_only: this.vocabOnly, + seed: this.seed, + }, + }; + } + + _combineLLMOutput() { + return {}; + } + + /** @deprecated */ + async *_streamResponseChunksLegacy( + input: BaseMessage[], + options: this["ParsedCallOptions"], + runManager?: CallbackManagerForLLMRun + ): AsyncGenerator { + const stream = createOllamaGenerateStream( + this.baseUrl, + { + ...this.invocationParams(options), + prompt: this._formatMessagesAsPrompt(input), + }, + { + ...options, + headers: this.headers, + } + ); + for await (const chunk of stream) { + if (!chunk.done) { + yield new ChatGenerationChunk({ + text: chunk.response, + message: new AIMessageChunk({ content: chunk.response }), + }); + await runManager?.handleLLMNewToken(chunk.response ?? ""); + } else { + yield new ChatGenerationChunk({ + text: "", + message: new AIMessageChunk({ content: "" }), + generationInfo: { + model: chunk.model, + total_duration: chunk.total_duration, + load_duration: chunk.load_duration, + prompt_eval_count: chunk.prompt_eval_count, + prompt_eval_duration: chunk.prompt_eval_duration, + eval_count: chunk.eval_count, + eval_duration: chunk.eval_duration, + }, + }); + } + } + } + + async *_streamResponseChunks( + input: BaseMessage[], + options: this["ParsedCallOptions"], + runManager?: CallbackManagerForLLMRun + ): AsyncGenerator { + try { + const stream = await this.caller.call(async () => + createOllamaChatStream( + this.baseUrl, + { + ...this.invocationParams(options), + messages: this._convertMessagesToOllamaMessages(input), + }, + { + ...options, + headers: this.headers, + } + ) + ); + for await (const chunk of stream) { + if (!chunk.done) { + yield new ChatGenerationChunk({ + text: chunk.message.content, + message: new AIMessageChunk({ content: chunk.message.content }), + }); + await runManager?.handleLLMNewToken(chunk.message.content ?? ""); + } else { + yield new ChatGenerationChunk({ + text: "", + message: new AIMessageChunk({ content: "" }), + generationInfo: { + model: chunk.model, + total_duration: chunk.total_duration, + load_duration: chunk.load_duration, + prompt_eval_count: chunk.prompt_eval_count, + prompt_eval_duration: chunk.prompt_eval_duration, + eval_count: chunk.eval_count, + eval_duration: chunk.eval_duration, + }, + }); + } + } + // eslint-disable-next-line @typescript-eslint/no-explicit-any + } catch (e: any) { + if (e.response?.status === 404) { + console.warn( + "[WARNING]: It seems you are using a legacy version of Ollama. Please upgrade to a newer version for better chat support." 
+ ); + yield* this._streamResponseChunksLegacy(input, options, runManager); + } else { + throw e; + } + } + } + + protected _convertMessagesToOllamaMessages( + messages: BaseMessage[] + ): OllamaMessage[] { + return messages.map((message) => { + let role; + if (message._getType() === "human") { + role = "user"; + } else if (message._getType() === "ai") { + role = "assistant"; + } else if (message._getType() === "system") { + role = "system"; + } else { + throw new Error( + `Unsupported message type for Ollama: ${message._getType()}` + ); + } + let content = ""; + const images = []; + if (typeof message.content === "string") { + content = message.content; + } else { + for (const contentPart of message.content) { + if (contentPart.type === "text") { + content = `${content}\n${contentPart.text}`; + } else if ( + contentPart.type === "image_url" && + typeof contentPart.image_url === "string" + ) { + const imageUrlComponents = contentPart.image_url.split(","); + // Support both data:image/jpeg;base64, format as well + images.push(imageUrlComponents[1] ?? imageUrlComponents[0]); + } else { + throw new Error( + `Unsupported message content type. Must either have type "text" or type "image_url" with a string "image_url" field.` + ); + } + } + } + return { + role, + content, + images, + }; + }); + } + + /** @deprecated */ + protected _formatMessagesAsPrompt(messages: BaseMessage[]): string { + const formattedMessages = messages + .map((message) => { + let messageText; + if (message._getType() === "human") { + messageText = `[INST] ${message.content} [/INST]`; + } else if (message._getType() === "ai") { + messageText = message.content; + } else if (message._getType() === "system") { + messageText = `<> ${message.content} <>`; + } else if (ChatMessage.isInstance(message)) { + messageText = `\n\n${message.role[0].toUpperCase()}${message.role.slice( + 1 + )}: ${message.content}`; + } else { + console.warn( + `Unsupported message type passed to Ollama: "${message._getType()}"` + ); + messageText = ""; + } + return messageText; + }) + .join("\n"); + return formattedMessages; + } + + /** @ignore */ + async _call( + messages: BaseMessage[], + options: this["ParsedCallOptions"], + runManager?: CallbackManagerForLLMRun + ): Promise { + const chunks = []; + for await (const chunk of this._streamResponseChunks( + messages, + options, + runManager + )) { + chunks.push(chunk.message.content); + } + return chunks.join(""); + } +} \ No newline at end of file diff --git a/src/models/utils/ollama.ts b/src/models/utils/ollama.ts new file mode 100644 index 0000000..d3524fb --- /dev/null +++ b/src/models/utils/ollama.ts @@ -0,0 +1,201 @@ +import { IterableReadableStream } from "@langchain/core/utils/stream"; +import type { StringWithAutocomplete } from "@langchain/core/utils/types"; +import { BaseLanguageModelCallOptions } from "@langchain/core/language_models/base"; + +export interface OllamaInput { + embeddingOnly?: boolean; + f16KV?: boolean; + frequencyPenalty?: number; + headers?: Record; + keepAlive?: string; + logitsAll?: boolean; + lowVram?: boolean; + mainGpu?: number; + model?: string; + baseUrl?: string; + mirostat?: number; + mirostatEta?: number; + mirostatTau?: number; + numBatch?: number; + numCtx?: number; + numGpu?: number; + numGqa?: number; + numKeep?: number; + numPredict?: number; + numThread?: number; + penalizeNewline?: boolean; + presencePenalty?: number; + repeatLastN?: number; + repeatPenalty?: number; + ropeFrequencyBase?: number; + ropeFrequencyScale?: number; + temperature?: number; + 
stop?: string[]; + tfsZ?: number; + topK?: number; + topP?: number; + typicalP?: number; + useMLock?: boolean; + useMMap?: boolean; + vocabOnly?: boolean; + seed?: number; + format?: StringWithAutocomplete<"json">; +} + +export interface OllamaRequestParams { + model: string; + format?: StringWithAutocomplete<"json">; + images?: string[]; + options: { + embedding_only?: boolean; + f16_kv?: boolean; + frequency_penalty?: number; + logits_all?: boolean; + low_vram?: boolean; + main_gpu?: number; + mirostat?: number; + mirostat_eta?: number; + mirostat_tau?: number; + num_batch?: number; + num_ctx?: number; + num_gpu?: number; + num_gqa?: number; + num_keep?: number; + num_thread?: number; + num_predict?: number; + penalize_newline?: boolean; + presence_penalty?: number; + repeat_last_n?: number; + repeat_penalty?: number; + rope_frequency_base?: number; + rope_frequency_scale?: number; + temperature?: number; + stop?: string[]; + tfs_z?: number; + top_k?: number; + top_p?: number; + typical_p?: number; + use_mlock?: boolean; + use_mmap?: boolean; + vocab_only?: boolean; + }; +} + +export type OllamaMessage = { + role: StringWithAutocomplete<"user" | "assistant" | "system">; + content: string; + images?: string[]; +}; + +export interface OllamaGenerateRequestParams extends OllamaRequestParams { + prompt: string; +} + +export interface OllamaChatRequestParams extends OllamaRequestParams { + messages: OllamaMessage[]; +} + +export type BaseOllamaGenerationChunk = { + model: string; + created_at: string; + done: boolean; + total_duration?: number; + load_duration?: number; + prompt_eval_count?: number; + prompt_eval_duration?: number; + eval_count?: number; + eval_duration?: number; +}; + +export type OllamaGenerationChunk = BaseOllamaGenerationChunk & { + response: string; +}; + +export type OllamaChatGenerationChunk = BaseOllamaGenerationChunk & { + message: OllamaMessage; +}; + +export type OllamaCallOptions = BaseLanguageModelCallOptions & { + headers?: Record; +}; + +async function* createOllamaStream( + url: string, + params: OllamaRequestParams, + options: OllamaCallOptions +) { + let formattedUrl = url; + if (formattedUrl.startsWith("http://localhost:")) { + // Node 18 has issues with resolving "localhost" + // See https://github.com/node-fetch/node-fetch/issues/1624 + formattedUrl = formattedUrl.replace( + "http://localhost:", + "http://127.0.0.1:" + ); + } + const response = await fetch(formattedUrl, { + method: "POST", + body: JSON.stringify(params), + headers: { + "Content-Type": "application/json", + ...options.headers, + }, + signal: options.signal, + }); + if (!response.ok) { + let error; + const responseText = await response.text(); + try { + const json = JSON.parse(responseText); + error = new Error( + `Ollama call failed with status code ${response.status}: ${json.error}` + ); + // eslint-disable-next-line @typescript-eslint/no-explicit-any + } catch (e: any) { + error = new Error( + `Ollama call failed with status code ${response.status}: ${responseText}` + ); + } + // eslint-disable-next-line @typescript-eslint/no-explicit-any + (error as any).response = response; + throw error; + } + if (!response.body) { + throw new Error( + "Could not begin Ollama stream. Please check the given URL and try again." 
+ ); + } + + const stream = IterableReadableStream.fromReadableStream(response.body); + + const decoder = new TextDecoder(); + let extra = ""; + for await (const chunk of stream) { + const decoded = extra + decoder.decode(chunk); + const lines = decoded.split("\n"); + extra = lines.pop() || ""; + for (const line of lines) { + try { + yield JSON.parse(line); + } catch (e) { + console.warn(`Received a non-JSON parseable chunk: ${line}`); + } + } + } +} + +export async function* createOllamaGenerateStream( + baseUrl: string, + params: OllamaGenerateRequestParams, + options: OllamaCallOptions +): AsyncGenerator { + yield* createOllamaStream(`${baseUrl}/api/generate`, params, options); +} + +export async function* createOllamaChatStream( + baseUrl: string, + params: OllamaChatRequestParams, + options: OllamaCallOptions +): AsyncGenerator { + yield* createOllamaStream(`${baseUrl}/api/chat`, params, options); +} \ No newline at end of file diff --git a/src/services/model-settings.ts b/src/services/model-settings.ts new file mode 100644 index 0000000..278576d --- /dev/null +++ b/src/services/model-settings.ts @@ -0,0 +1,101 @@ +import { Storage } from "@plasmohq/storage" +const storage = new Storage() + +type ModelSettings = { + f16KV?: boolean + frequencyPenalty?: number + keepAlive?: string + logitsAll?: boolean + mirostat?: number + mirostatEta?: number + mirostatTau?: number + numBatch?: number + numCtx?: number + numGpu?: number + numGqa?: number + numKeep?: number + numPredict?: number + numThread?: number + penalizeNewline?: boolean + presencePenalty?: number + repeatLastN?: number + repeatPenalty?: number + ropeFrequencyBase?: number + ropeFrequencyScale?: number + temperature?: number + tfsZ?: number + topK?: number + topP?: number + typicalP?: number + useMLock?: boolean + useMMap?: boolean + vocabOnly?: boolean +} + +const keys = [ + "f16KV", + "frequencyPenalty", + "keepAlive", + "logitsAll", + "mirostat", + "mirostatEta", + "mirostatTau", + "numBatch", + "numCtx", + "numGpu", + "numGqa", + "numKeep", + "numPredict", + "numThread", + "penalizeNewline", + "presencePenalty", + "repeatLastN", + "repeatPenalty", + "ropeFrequencyBase", + "ropeFrequencyScale", + "temperature", + "tfsZ", + "topK", + "topP", + "typicalP", + "useMLock", + "useMMap", + "vocabOnly" +] + +const getAllModelSettings = async () => { + try { + const settings: ModelSettings = {} + for (const key of keys) { + const value = await storage.get(key) + settings[key] = value + if (!value && key === "keepAlive") { + settings[key] = "5m" + } + + } + return settings + } catch (error) { + console.error(error) + return {} + } +} + +const setModelSetting = async (key: string, + value: string | number | boolean) => { + await storage.set(key, value) +} + +export const getAllDefaultModelSettings = async (): Promise => { + const settings: ModelSettings = {} + for (const key of keys) { + const value = await storage.get(key) + settings[key] = value + if (!value && key === "keepAlive") { + settings[key] = "5m" + } + } + return settings +} + +export { getAllModelSettings, setModelSetting } \ No newline at end of file diff --git a/src/store/model.tsx b/src/store/model.tsx new file mode 100644 index 0000000..8fadca8 --- /dev/null +++ b/src/store/model.tsx @@ -0,0 +1,136 @@ +import { create } from "zustand" + +type CurrentChatModelSettings = { + f16KV?: boolean + frequencyPenalty?: number + keepAlive?: string + logitsAll?: boolean + mirostat?: number + mirostatEta?: number + mirostatTau?: number + numBatch?: number + numCtx?: number + 
numGpu?: number + numGqa?: number + numKeep?: number + numPredict?: number + numThread?: number + penalizeNewline?: boolean + presencePenalty?: number + repeatLastN?: number + repeatPenalty?: number + ropeFrequencyBase?: number + ropeFrequencyScale?: number + temperature?: number + tfsZ?: number + topK?: number + topP?: number + typicalP?: number + useMLock?: boolean + useMMap?: boolean + vocabOnly?: boolean + seed?: number + + setF16KV?: (f16KV: boolean) => void + setFrequencyPenalty?: (frequencyPenalty: number) => void + setKeepAlive?: (keepAlive: string) => void + setLogitsAll?: (logitsAll: boolean) => void + setMirostat?: (mirostat: number) => void + setMirostatEta?: (mirostatEta: number) => void + setMirostatTau?: (mirostatTau: number) => void + setNumBatch?: (numBatch: number) => void + setNumCtx?: (numCtx: number) => void + setNumGpu?: (numGpu: number) => void + setNumGqa?: (numGqa: number) => void + setNumKeep?: (numKeep: number) => void + setNumPredict?: (numPredict: number) => void + setNumThread?: (numThread: number) => void + setPenalizeNewline?: (penalizeNewline: boolean) => void + setPresencePenalty?: (presencePenalty: number) => void + setRepeatLastN?: (repeatLastN: number) => void + setRepeatPenalty?: (repeatPenalty: number) => void + setRopeFrequencyBase?: (ropeFrequencyBase: number) => void + setRopeFrequencyScale?: (ropeFrequencyScale: number) => void + setTemperature?: (temperature: number) => void + setTfsZ?: (tfsZ: number) => void + setTopK?: (topK: number) => void + setTopP?: (topP: number) => void + setTypicalP?: (typicalP: number) => void + setUseMLock?: (useMLock: boolean) => void + setUseMMap?: (useMMap: boolean) => void + setVocabOnly?: (vocabOnly: boolean) => void + seetSeed?: (seed: number) => void + + setX: (key: string, value: any) => void + reset: () => void +} + +export const useStoreChatModelSettings = create( + (set) => ({ + setF16KV: (f16KV: boolean) => set({ f16KV }), + setFrequencyPenalty: (frequencyPenalty: number) => + set({ frequencyPenalty }), + setKeepAlive: (keepAlive: string) => set({ keepAlive }), + setLogitsAll: (logitsAll: boolean) => set({ logitsAll }), + setMirostat: (mirostat: number) => set({ mirostat }), + setMirostatEta: (mirostatEta: number) => set({ mirostatEta }), + setMirostatTau: (mirostatTau: number) => set({ mirostatTau }), + setNumBatch: (numBatch: number) => set({ numBatch }), + setNumCtx: (numCtx: number) => set({ numCtx }), + setNumGpu: (numGpu: number) => set({ numGpu }), + setNumGqa: (numGqa: number) => set({ numGqa }), + setNumKeep: (numKeep: number) => set({ numKeep }), + setNumPredict: (numPredict: number) => set({ numPredict }), + setNumThread: (numThread: number) => set({ numThread }), + setPenalizeNewline: (penalizeNewline: boolean) => set({ penalizeNewline }), + setPresencePenalty: (presencePenalty: number) => set({ presencePenalty }), + setRepeatLastN: (repeatLastN: number) => set({ repeatLastN }), + setRepeatPenalty: (repeatPenalty: number) => set({ repeatPenalty }), + setRopeFrequencyBase: (ropeFrequencyBase: number) => + set({ ropeFrequencyBase }), + setRopeFrequencyScale: (ropeFrequencyScale: number) => + set({ ropeFrequencyScale }), + setTemperature: (temperature: number) => set({ temperature }), + setTfsZ: (tfsZ: number) => set({ tfsZ }), + setTopK: (topK: number) => set({ topK }), + setTopP: (topP: number) => set({ topP }), + setTypicalP: (typicalP: number) => set({ typicalP }), + setUseMLock: (useMLock: boolean) => set({ useMLock }), + setUseMMap: (useMMap: boolean) => set({ useMMap }), + setVocabOnly: 
(vocabOnly: boolean) => set({ vocabOnly }), + seetSeed: (seed: number) => set({ seed }), + setX: (key: string, value: any) => set({ [key]: value }), + reset: () => + set({ + f16KV: undefined, + frequencyPenalty: undefined, + keepAlive: undefined, + logitsAll: undefined, + mirostat: undefined, + mirostatEta: undefined, + mirostatTau: undefined, + numBatch: undefined, + numCtx: undefined, + numGpu: undefined, + numGqa: undefined, + numKeep: undefined, + numPredict: undefined, + numThread: undefined, + penalizeNewline: undefined, + presencePenalty: undefined, + repeatLastN: undefined, + repeatPenalty: undefined, + ropeFrequencyBase: undefined, + ropeFrequencyScale: undefined, + temperature: undefined, + tfsZ: undefined, + topK: undefined, + topP: undefined, + typicalP: undefined, + useMLock: undefined, + useMMap: undefined, + vocabOnly: undefined, + seed: undefined + }) + }) +) diff --git a/wxt.config.ts b/wxt.config.ts index aa890c4..69fab0a 100644 --- a/wxt.config.ts +++ b/wxt.config.ts @@ -48,7 +48,7 @@ export default defineConfig({ outDir: "build", manifest: { - version: "1.1.8", + version: "1.1.9", name: process.env.TARGET === "firefox" ? "Page Assist - A Web UI for Local AI Models" From fadf736f7034d5b2b685fdca46dac3d127f24e21 Mon Sep 17 00:00:00 2001 From: n4ze3m Date: Thu, 23 May 2024 11:22:46 +0530 Subject: [PATCH 02/14] refactor: Parse keepAlive value in OllamaEmbeddingsPageAssist --- src/models/ChatOllama.ts | 3 ++- src/models/OllamaEmbedding.ts | 7 ++++--- src/models/utils/ollama.ts | 10 +++++++++- 3 files changed, 15 insertions(+), 5 deletions(-) diff --git a/src/models/ChatOllama.ts b/src/models/ChatOllama.ts index 69138bf..2a4c28b 100644 --- a/src/models/ChatOllama.ts +++ b/src/models/ChatOllama.ts @@ -15,6 +15,7 @@ import type { StringWithAutocomplete } from "@langchain/core/utils/types"; import { createOllamaChatStream, createOllamaGenerateStream, + parseKeepAlive, type OllamaInput, type OllamaMessage, } from "./utils/ollama"; @@ -112,7 +113,7 @@ export class ChatOllama this.baseUrl = fields.baseUrl?.endsWith("/") ? fields.baseUrl.slice(0, -1) : fields.baseUrl ?? this.baseUrl; - this.keepAlive = fields.keepAlive ?? this.keepAlive; + this.keepAlive = parseKeepAlive(fields.keepAlive) ?? 
this.keepAlive; this.embeddingOnly = fields.embeddingOnly; this.f16KV = fields.f16KV; this.frequencyPenalty = fields.frequencyPenalty; diff --git a/src/models/OllamaEmbedding.ts b/src/models/OllamaEmbedding.ts index 2d57ef2..eadec2d 100644 --- a/src/models/OllamaEmbedding.ts +++ b/src/models/OllamaEmbedding.ts @@ -1,12 +1,13 @@ import { Embeddings, EmbeddingsParams } from "@langchain/core/embeddings" import type { StringWithAutocomplete } from "@langchain/core/utils/types" +import { parseKeepAlive } from "./utils/ollama" export interface OllamaInput { embeddingOnly?: boolean f16KV?: boolean frequencyPenalty?: number headers?: Record - keepAlive?: string + keepAlive?: any logitsAll?: boolean lowVram?: boolean mainGpu?: number @@ -98,7 +99,7 @@ interface OllamaEmbeddingsParams extends EmbeddingsParams { headers?: Record /** Defaults to "5m" */ - keepAlive?: string + keepAlive?: any /** Advanced Ollama API request parameters in camelCase, see * https://github.com/jmorganca/ollama/blob/main/docs/modelfile.md#valid-parameters-and-values @@ -138,7 +139,7 @@ export class OllamaEmbeddingsPageAssist extends Embeddings { } if (params?.keepAlive) { - this.keepAlive = params.keepAlive + this.keepAlive = parseKeepAlive(params.keepAlive) } if (params?.requestOptions) { diff --git a/src/models/utils/ollama.ts b/src/models/utils/ollama.ts index d3524fb..55ce05e 100644 --- a/src/models/utils/ollama.ts +++ b/src/models/utils/ollama.ts @@ -7,7 +7,7 @@ export interface OllamaInput { f16KV?: boolean; frequencyPenalty?: number; headers?: Record; - keepAlive?: string; + keepAlive?: any; logitsAll?: boolean; lowVram?: boolean; mainGpu?: number; @@ -198,4 +198,12 @@ export async function* createOllamaChatStream( options: OllamaCallOptions ): AsyncGenerator { yield* createOllamaStream(`${baseUrl}/api/chat`, params, options); +} + + +export const parseKeepAlive = (keepAlive: any) => { + if (keepAlive === "-1") { + return -1 + } + return keepAlive } \ No newline at end of file From 315163ca626e3616cbbb3618a0aa2d5a3b752c04 Mon Sep 17 00:00:00 2001 From: n4ze3m Date: Thu, 23 May 2024 22:48:46 +0530 Subject: [PATCH 03/14] chore: Update Lucide icons and add Current Chat Model Settings to Sidepanel --- src/assets/locale/en/common.json | 2 +- src/components/Layouts/Layout.tsx | 3 +-- src/components/Sidepanel/Chat/header.tsx | 16 +++++++++++++++- 3 files changed, 17 insertions(+), 4 deletions(-) diff --git a/src/assets/locale/en/common.json b/src/assets/locale/en/common.json index 553c5f7..d03b6b9 100644 --- a/src/assets/locale/en/common.json +++ b/src/assets/locale/en/common.json @@ -49,11 +49,11 @@ "noData": "No data", "noHistory": "No chat history", "chatWithCurrentPage": "Chat with current page", + "currentChatModelSettings":"Current Chat Model Settings", "beta": "Beta", "tts": "Read aloud", "modelSettings": { "label": "Model Settings", - "currentChatModelSettings":"Current Chat Model Settings", "description": "Set the model options globally for all chats", "form": { "keepAlive": { diff --git a/src/components/Layouts/Layout.tsx b/src/components/Layouts/Layout.tsx index 676623d..200d0ef 100644 --- a/src/components/Layouts/Layout.tsx +++ b/src/components/Layouts/Layout.tsx @@ -13,7 +13,6 @@ import { ComputerIcon, GithubIcon, PanelLeftIcon, - SlashIcon, SquarePen, ZapIcon } from "lucide-react" @@ -195,7 +194,7 @@ export default function OptionLayout({
- + +
+
) } From 11f5eba3edb0c3cfe0100e2aef98a6e90dc33fc0 Mon Sep 17 00:00:00 2001 From: n4ze3m Date: Thu, 23 May 2024 23:00:42 +0530 Subject: [PATCH 04/14] chore: Update Lucide icons and improve Current Chat Model Settings --- .../Common/CurrentChatModelSettings.tsx | 2 +- src/hooks/useMessage.tsx | 55 +++++++++++++++++-- 2 files changed, 50 insertions(+), 7 deletions(-) diff --git a/src/components/Common/CurrentChatModelSettings.tsx b/src/components/Common/CurrentChatModelSettings.tsx index d2b7cba..b1d1c07 100644 --- a/src/components/Common/CurrentChatModelSettings.tsx +++ b/src/components/Common/CurrentChatModelSettings.tsx @@ -33,7 +33,7 @@ export const CurrentChatModelSettings = ({ open, setOpen }: Props) => { }) return ( setOpen(false)} onCancel={() => setOpen(false)} diff --git a/src/hooks/useMessage.tsx b/src/hooks/useMessage.tsx index b38944f..40eb5d9 100644 --- a/src/hooks/useMessage.tsx +++ b/src/hooks/useMessage.tsx @@ -8,7 +8,6 @@ import { } from "~/services/ollama" import { type Message } from "~/store/option" import { useStoreMessage } from "~/store" -import { ChatOllama } from "@langchain/community/chat_models/ollama" import { HumanMessage, SystemMessage } from "@langchain/core/messages" import { getDataFromCurrentTab } from "~/libs/get-html" import { MemoryVectorStore } from "langchain/vectorstores/memory" @@ -27,6 +26,9 @@ import { usePageAssist } from "@/context" import { formatDocs } from "@/chain/chat-with-x" import { OllamaEmbeddingsPageAssist } from "@/models/OllamaEmbedding" import { useStorage } from "@plasmohq/storage/hook" +import { useStoreChatModelSettings } from "@/store/model" +import { ChatOllama } from "@/models/ChatOllama" +import { getAllDefaultModelSettings } from "@/services/model-settings" export const useMessage = () => { const { @@ -39,7 +41,7 @@ export const useMessage = () => { } = usePageAssist() const { t } = useTranslation("option") const [selectedModel, setSelectedModel] = useStorage("selectedModel") - + const currentChatModelSettings = useStoreChatModelSettings() const { history, setHistory, @@ -75,6 +77,7 @@ export const useMessage = () => { setIsLoading(false) setIsProcessing(false) setStreaming(false) + currentChatModelSettings.reset() } const chatWithWebsiteMode = async ( @@ -88,10 +91,22 @@ export const useMessage = () => { ) => { setStreaming(true) const url = await getOllamaURL() + const userDefaultModelSettings = await getAllDefaultModelSettings() const ollama = new ChatOllama({ model: selectedModel!, - baseUrl: cleanUrl(url) + baseUrl: cleanUrl(url), + keepAlive: + currentChatModelSettings?.keepAlive ?? + userDefaultModelSettings?.keepAlive, + temperature: + currentChatModelSettings?.temperature ?? + userDefaultModelSettings?.temperature, + topK: currentChatModelSettings?.topK ?? userDefaultModelSettings?.topK, + topP: currentChatModelSettings?.topP ?? userDefaultModelSettings?.topP, + numCtx: + currentChatModelSettings?.numCtx ?? userDefaultModelSettings?.numCtx, + seed: currentChatModelSettings?.seed }) let newMessage: Message[] = [] @@ -166,7 +181,10 @@ export const useMessage = () => { const ollamaEmbedding = new OllamaEmbeddingsPageAssist({ model: embeddingModle || selectedModel, baseUrl: cleanUrl(ollamaUrl), - signal: embeddingSignal + signal: embeddingSignal, + keepAlive: + currentChatModelSettings?.keepAlive ?? 
+ userDefaultModelSettings?.keepAlive }) let vectorstore: MemoryVectorStore @@ -204,7 +222,21 @@ export const useMessage = () => { .replaceAll("{question}", message) const questionOllama = new ChatOllama({ model: selectedModel!, - baseUrl: cleanUrl(url) + baseUrl: cleanUrl(url), + keepAlive: + currentChatModelSettings?.keepAlive ?? + userDefaultModelSettings?.keepAlive, + temperature: + currentChatModelSettings?.temperature ?? + userDefaultModelSettings?.temperature, + topK: + currentChatModelSettings?.topK ?? userDefaultModelSettings?.topK, + topP: + currentChatModelSettings?.topP ?? userDefaultModelSettings?.topP, + numCtx: + currentChatModelSettings?.numCtx ?? + userDefaultModelSettings?.numCtx, + seed: currentChatModelSettings?.seed }) const response = await questionOllama.invoke(promptForQuestion) query = response.content.toString() @@ -343,6 +375,7 @@ export const useMessage = () => { ) => { setStreaming(true) const url = await getOllamaURL() + const userDefaultModelSettings = await getAllDefaultModelSettings() if (image.length > 0) { image = `data:image/jpeg;base64,${image.split(",")[1]}` @@ -351,7 +384,17 @@ export const useMessage = () => { const ollama = new ChatOllama({ model: selectedModel!, baseUrl: cleanUrl(url), - verbose: true + keepAlive: + currentChatModelSettings?.keepAlive ?? + userDefaultModelSettings?.keepAlive, + temperature: + currentChatModelSettings?.temperature ?? + userDefaultModelSettings?.temperature, + topK: currentChatModelSettings?.topK ?? userDefaultModelSettings?.topK, + topP: currentChatModelSettings?.topP ?? userDefaultModelSettings?.topP, + numCtx: + currentChatModelSettings?.numCtx ?? userDefaultModelSettings?.numCtx, + seed: currentChatModelSettings?.seed }) let newMessage: Message[] = [] From 845b7259704ae3b69b4e729b329ef772bd8abcbf Mon Sep 17 00:00:00 2001 From: n4ze3m Date: Fri, 24 May 2024 00:22:24 +0530 Subject: [PATCH 05/14] feat: Add support for Brave search provider --- src/utils/search-provider.ts | 4 ++ src/web/search-engines/brave.ts | 112 ++++++++++++++++++++++++++++++++ src/web/web.ts | 3 + 3 files changed, 119 insertions(+) create mode 100644 src/web/search-engines/brave.ts diff --git a/src/utils/search-provider.ts b/src/utils/search-provider.ts index 5fb35e6..d623df2 100644 --- a/src/utils/search-provider.ts +++ b/src/utils/search-provider.ts @@ -10,5 +10,9 @@ export const SUPPORTED_SERACH_PROVIDERS = [ { label: "Sogou", value: "sogou" + }, + { + label: "Brave", + value: "brave" } ] \ No newline at end of file diff --git a/src/web/search-engines/brave.ts b/src/web/search-engines/brave.ts new file mode 100644 index 0000000..b387fbf --- /dev/null +++ b/src/web/search-engines/brave.ts @@ -0,0 +1,112 @@ +import { cleanUrl } from "@/libs/clean-url" +import { urlRewriteRuntime } from "@/libs/runtime" +import { PageAssistHtmlLoader } from "@/loader/html" +import { + defaultEmbeddingChunkOverlap, + defaultEmbeddingChunkSize, + defaultEmbeddingModelForRag, + getOllamaURL +} from "@/services/ollama" +import { + getIsSimpleInternetSearch, + totalSearchResults +} from "@/services/search" +import { OllamaEmbeddings } from "@langchain/community/embeddings/ollama" +import type { Document } from "@langchain/core/documents" +import * as cheerio from "cheerio" +import { RecursiveCharacterTextSplitter } from "langchain/text_splitter" +import { MemoryVectorStore } from "langchain/vectorstores/memory" + +export const localBraveSearch = async (query: string) => { + await urlRewriteRuntime(cleanUrl("https://search.brave.com/search?q=" + query), "duckduckgo") 
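+  // The Brave provider scrapes the public results page rather than calling a search API:
+  // urlRewriteRuntime above is invoked first (as in the other providers, presumably to set
+  // up the extension's request-rewrite rules for this URL), then the page is fetched with a
+  // 10-second abort timeout and "div#results div.snippet" nodes are parsed with cheerio into
+  // { title, link, content }, dropping any entry that is missing one of those fields.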
+ + const abortController = new AbortController() + setTimeout(() => abortController.abort(), 10000) + + const htmlString = await fetch( + "https://search.brave.com/search?q=" + query, + { + signal: abortController.signal + } + ) + .then((response) => response.text()) + .catch() + + const $ = cheerio.load(htmlString) + const $results = $("div#results") + const $snippets = $results.find("div.snippet") + + const searchResults = Array.from($snippets).map((result) => { + const link = $(result).find("a").attr("href") + const title = $(result).find("div.title").text() + const content = $(result).find("div.snippet-description").text() + return { title, link, content } + }).filter((result) => result.link && result.title && result.content) + + console.log(searchResults) + + return searchResults +} + +export const webBraveSearch = async (query: string) => { + const results = await localBraveSearch(query) + const TOTAL_SEARCH_RESULTS = await totalSearchResults() + const searchResults = results.slice(0, TOTAL_SEARCH_RESULTS) + + const isSimpleMode = await getIsSimpleInternetSearch() + + if (isSimpleMode) { + await getOllamaURL() + return searchResults.map((result) => { + return { + url: result.link, + content: result.content + } + }) + } + + const docs: Document>[] = [] + for (const result of searchResults) { + const loader = new PageAssistHtmlLoader({ + html: "", + url: result.link + }) + + const documents = await loader.loadByURL() + + documents.forEach((doc) => { + docs.push(doc) + }) + } + const ollamaUrl = await getOllamaURL() + + const embeddingModle = await defaultEmbeddingModelForRag() + const ollamaEmbedding = new OllamaEmbeddings({ + model: embeddingModle || "", + baseUrl: cleanUrl(ollamaUrl) + }) + + const chunkSize = await defaultEmbeddingChunkSize() + const chunkOverlap = await defaultEmbeddingChunkOverlap() + const textSplitter = new RecursiveCharacterTextSplitter({ + chunkSize, + chunkOverlap + }) + + const chunks = await textSplitter.splitDocuments(docs) + + const store = new MemoryVectorStore(ollamaEmbedding) + + await store.addDocuments(chunks) + + const resultsWithEmbeddings = await store.similaritySearch(query, 3) + + const searchResult = resultsWithEmbeddings.map((result) => { + return { + url: result.metadata.url, + content: result.pageContent + } + }) + + return searchResult +} diff --git a/src/web/web.ts b/src/web/web.ts index cfadf09..e9c1765 100644 --- a/src/web/web.ts +++ b/src/web/web.ts @@ -3,6 +3,7 @@ import { webGoogleSearch } from "./search-engines/google" import { webDuckDuckGoSearch } from "./search-engines/duckduckgo" import { getSearchProvider } from "@/services/search" import { webSogouSearch } from "./search-engines/sogou" +import { webBraveSearch } from "./search-engines/brave" const getHostName = (url: string) => { try { @@ -19,6 +20,8 @@ const searchWeb = (provider: string, query: string) => { return webDuckDuckGoSearch(query) case "sogou": return webSogouSearch(query) + case "brave": + return webBraveSearch(query) default: return webGoogleSearch(query) } From 8899a42331a2cc573d7d8fb8745aa86868e04542 Mon Sep 17 00:00:00 2001 From: n4ze3m Date: Fri, 24 May 2024 18:26:28 +0530 Subject: [PATCH 06/14] feat: Add support for Mammoth library for docx file uploads --- bun.lockb | Bin 408050 -> 412090 bytes package.json | 1 + src/assets/locale/en/knowledge.json | 2 +- src/assets/locale/ml/knowledge.json | 2 +- src/assets/locale/ru/knowledge.json | 2 +- .../Option/Knowledge/AddKnowledge.tsx | 7 ++-- src/db/index.ts | 19 ++++++++-- src/hooks/chat-helper/index.ts | 16 
+++++---- src/hooks/useMessage.tsx | 12 ++++--- src/libs/process-knowledge.ts | 32 +++++++++++++++++ src/loader/docx.ts | 33 ++++++++++++++++++ src/routes/sidepanel-chat.tsx | 23 ++++++++++-- src/utils/to-source.ts | 1 + 13 files changed, 130 insertions(+), 20 deletions(-) create mode 100644 src/loader/docx.ts diff --git a/bun.lockb b/bun.lockb index c6cbef9f5c016fc5a384a6a4834d305e8a188216..52cde83b1fbe06a48230c620b1ea9a1e8ed54d69 100644 GIT binary patch delta 63744 zcmeFadz_8c-~Ye&p4rUCIF*=iqt^c}sgeB$+=hUER#u0qWl-aa^M_$_~>ttwilQxTtD z(a{U~R}SQyT|Oa{m!6cKo|%*4YjB&-SIp;|TGM)3=2*9E=GdIn%nYAzMDFl00;97h zr7CCgV4tr9<-0ZZ`AXq&xBGlo;;Gq5Sy@RFeX+#KADc8LQ_H2LCd+)ihWLCTcsbTn z*5|u!>aeujYwgZ>=k^ObaRLv{0dP=lJ7mYg(PRedzV z=POP;A~QWVCp9g*%t$Xk3NHzNi+uSOTs1Gluf~U`BqwL3X8L{}eJeS<3=on3EYQhJ)Num?V~d>3AZ{LP-pD)AR71+2J1Ed8WAeP^q=@ctoKQD(++xb;iVZZr&rHOO1y<)v>@$vMn z+S4t^YhPikJ}(8oHrMN)inz9?G_LY}{9bcLrX{hY&sXaXFMqYyZf8UB26^}1>8&d% zBZtxO`7Ye$#YgdY>Uc5F=c|Bc4X>v$^<^>EIrUPr50S5RZJOe(`#oZ93*~aMl9PQi zrg?3<3(mBfTJvtNo_2P*C7xHG8av%<>=RT}j)M8b>cclEpvkr2F6X`ymGiD3)HbR9 zq+w~v^JjR)pSAVDvvZQiX0NpId|YD*&q~Tk_GR1UN89}L%=F|8dSx)NhN?TRzHDmC z)wS_Ev)q2jQ$?xiNu!eeWN5=H<67{m+1`T1EdQ15P(#17{-yQ$_j?`t39%aXCa$?~ z#T;+YA0$>s<>Hm_LAVB>D_+Xi)i<>fiSmHa*_jy))5rNrJmB^CwQyCmgjgL|mRJ>4 zW%RGWvvVeITsz@N8+XIwh;P6vVze?rh_maaB+iSGf|nI{IwBw}Kr-CAnYgCV!ZqGp9G;StHinAhp7Eww zN>X+`4kDvk)$q2s@Bq_bt=GV*xP~r)imT#_sIN3WIy*UgIQ87~tT(43&v_eI^Evuo zTa-?Qyyf#=mmX)qlEgc4O{zZYyak%zR}-&-YXvjwdpqI8q%`Jsj_>6cyq#+8dasLF zZBkm&h~&qJ)q`_z&6%mVcD_X~(Epmn_p+eICX0$Srd9BGd@)_DE_;=oNc{Q@UIkAP ztHGz(Dh)w)?(nRn>}21VOt9ja-ZT?;06!jdvMirJUTTeRTZaZ*K<3%#4fM=99#c%Jj8P8 z$yuY4=_8MGY&b+gb==6L?3{5_kd>U9twuibp0{PU?e-?pDqJ1+t}QY-j^GL`Icda*)U2_ezwb3PDPv-KQV#oB)+o1pD&@64w@2B_rk-KKX{Hv~iMIz= zfrU&!P1q@WoH|$5&-?x(Z~g7S%Dx%bw7T*WuLCc9?2XW7VjZK#PsnlC*JqzMuRG)F zXs#ja(LP_2dm!=oUfS<v3G1)p(0I@^wf;hbar9P<v#>gr>?hr2lCak zTaS3lyF3ptCV8Se=2MS)EniK6D^(F*5f8JVhNc>>L&;xXd2w0G(``>NM>EEz`+WV0 zRc_?h-iim|>XE`8qTF!x)$r7MsogV1X8O9>A<$8JVp3K{Ya4ev?zQ-JTn$=}Ys**R zs<5SvzdqsRcmCG-_Nwdh#(w8zcmCGvm{G*)pu(%m!NlsQob<8oh!u|HeRj%Bz%@gk zI_cGuX?c?$ybiB}YYL@iXAT>kJUnM8`HDOL=+>JzwJ$&`C~UyMP2P6AM}bQ4McBN6VGJcnelJ0iJ+0yBY~RH@y^+~$R8CgXW6izxxs z56jHW7@l0p)^pwO&Y=qR1J9jzipAA${n&Z0_Veu78uje4X{k9hE+-``Gj~*q@8b(z zV+wD63UBu|!Zlr&UT`MGRj$6o=XXzZb8$7c1Fmh!9-f<~Ge*Sccecb;YJI`)cRQ~k z`Rc^lxccsDTRwzqKQ4?L(5yJ(v00hvV{@{7ErL#beEmSJB2MS{%6XNF_}!@)!qxcm zLBG56{)nsgW4LW(EEtB51wKDD_ z{{Yv%)`Wa{zfxWU{%yrmz4dT7`}c}7M<#Qc;OeK0-#z?{NY2j5qQ&Q;y)8On{h)PS z4rMju^ldK~W|P(ibicyu(WW2p zx)GT<$r*QWoXgC}&P+@4ZKGm!DTlaG>@~h?;_V6=B)DT(6Xt{jMa#yNC#Bk%wx@@wA73- z$$G;hW#U*J1`6K;nP0_gT}o!=7~QXqN*$HVZs^RbR6FngojaQU^$pEUQ~&cj8fSFn z%DrBy?aetxWaO~S3BL05frdIKJ1tcgv48CK4k2IQI#9ojYhQmF*VX$>ygL40Bj_Be zTsg0B=VYIEk3fs+do$#n8~pCI_*&}*zlz20!c}b&uGMt3@r}5y0OD}XV84w|QlAbc zM{ylI_T!qtcjMIOjK8L8v9w0sq)BSzthgqUcRK9Nr6bnoBzW<)Y^81sGMaeDpJBMF z>Vj*P?yVhOcn4N^2UmDQRCwoBc&Fyn9-2RTQwp`w?*Am~KKmPu1pwth0`@Zq{+fLu7 z{2f>NUzxaM@rNvHpyhF zMmbgQ35RA9wIuR8%R7Wa?+~>o3OGqQ;ZVKXeZE_~s7E+Bg{ZMJI3qE%o|Lyt)ehm{ zX`%+s;lYWaYlnDkpyoE=&>*6w?s}FF3x^gGb#>R1)FT{QK$M_7wjtUnNU7`G(lRCx zH`IxLFcRuL)aPsB6{SfJ6E$-qRu=maQF9_6uNY@}p0{ABBsY=HzB)ZXyhK57ii6TVQ+d3SoIMO!JSxEuQM#V9F*&gN1@n;q>`?HdmEBND_GUQw z4@LszGMz)%>zPix6A6_bTi6cvgFxn3C*O$#-v)MeyQ}m#C;p*GaNs!F;8bdp7|JK5 z{l**09YoqXzf;gI91L)ZZ|E#-ofy2CRAZ$=qm`lrt!GpoBhvUqJIlL{sfWM7j(!A9LaSKEsBI9le{SaWEN)-X#pk#D_S+l$zL1^eFbE4 zF$Za9pzCBOK0gwAe6n}Qpxc?4`-!fnh~KH|gac7`I)^CI_0Gb|wDTAuZ4pDm>?$B? 
z5&7NE?XRc2yO;G4F{zXlU^JCJ6m9ftG5Y$EXj!q2S}kn~P}*rIF%hWeBXouIE5a}% zz4AS`OG{gim3$(N34AAU@DaN8D*?aw3qGpAg9OZvep&UQnFm1xNPdADzAf;I-sFT1quQ}7*v4*UZV`H zK=U7CnSqZ&;)*f3==tr9=zcN!$J>}Xl>pbyn4I3A>}gECZ^VZ9_a{Z;&qcd3awb-! zJQkZ#k%~$%S2fO(Bh^1{cvj(-Z5v<^uRPJKU(Wm$O=$QNG{)LQ{-NayoqX3ko_)&$ zvPvxuCX@z*;Ald7p1^pW303?PE}l|ZEv@|N_&qt zGqX~B6!*~_KOT+#FOF<5);qE8k1$S25hfP0D@;DkSsE01OAQVyc5_kf9!!CyaD}A> zMVBHXkS#axJziLjmZBe1EXes8Vrx}ZS$C7c=@Ikl2Zlfcj+5CmeGF}>N-02=*Kn8b zWev6h_-@Z}56y$0!}OFw+Rpa@R`ig|^H!vNfts&dk?sZf>gqK41!5K7JvDiWWmj7p z{w)!FJT`3o`P^#zF&v~eU|#8NLs>5&F~o){y+VmLq^dQx6#5Fo2i47Z1+~}*TFp$o zxd?eP1hVcwg}hpt$ih}%Q$^LcM*yfPhLZ~w zd0DnIWGzweu-(npdKitkZ;bb#j>B#JZJUbOUaL5ot zoGO#04bA@~P7;(ouw)nfyP%&AC;MqI2|Ffl&@}m9$L@FJs@B-^0QOgF{u;pAb{I+) zux;!CtD9C;t>2W2(Q|kZKFiRm+JE zR?t{`<55E=we+%Z-Lp$f-x^Dr0nE!DWDrkqxO>)lCx>>md$v@>(g`t-G27mWvIGrI zY=E!<;&M>}eKU9MDFY%Jzv%#kE(r)bAi9}8nlh-@*tQ15I&kd4X_bHD@ZvtJV+@?5 zPUKSovNwR>X4HR_bL!V>KTbCwUV>8toDrRLrjsImMAzyqGjBvQAglehk(`-xx~H7V zKR2nL^lgx|rjeWOSVzB}_(HBgE3r3gCc=(?@69fDle5PGKC;+m5ZMsG3eb*ni?x$vAE#Zje8~@i-;nX`cufHa}@TKR!IzZOh_zR~wY!42to>bK653TH}i;2d*@>>3( zCH$B0Nu(GP%^4vqiaJ!%I0)mTC`2izbBX8aTqZ=(Nu?}O%1V`Gyd#$^&n5mLkPxMu zj`Hi0ZSpGfYPWGKd#tZ*_XhVI1`}<%nSHYIxGrv|lZ>mJC*!^;slV5KOTzZ*xVHw* zvrF{eXWo~+8|G0ZQ;m<%+Lbz(Y8=`Y;O}KPtq2XL^h;S|TKoX_9Gd2r7yR}9& z-1Fd86`#9z@=SA=UDA*6Vb`azk<{=?%| { return e?.fileList }}> { const allowedTypes = [ "application/pdf", - // "application/vnd.openxmlformats-officedocument.wordprocessingml.document", "text/csv", - "text/plain" + "text/plain", + "text/markdown", + "application/vnd.openxmlformats-officedocument.wordprocessingml.document" ] .map((type) => type.toLowerCase()) .join(", ") diff --git a/src/db/index.ts b/src/db/index.ts index 1b85d11..ea60233 100644 --- a/src/db/index.ts +++ b/src/db/index.ts @@ -7,6 +7,7 @@ type HistoryInfo = { id: string title: string is_rag: boolean + message_source?: "copilot" | "web-ui" createdAt: number } @@ -224,10 +225,10 @@ export const generateID = () => { }) } -export const saveHistory = async (title: string, is_rag?: boolean) => { +export const saveHistory = async (title: string, is_rag?: boolean, message_source?: "copilot" | "web-ui") => { const id = generateID() const createdAt = Date.now() - const history = { id, title, createdAt, is_rag } + const history = { id, title, createdAt, is_rag, message_source } const db = new PageAssitDatabase() await db.addChatHistory(history) return history @@ -465,3 +466,17 @@ export const importPrompts = async (prompts: Prompts) => { await db.addPrompt(prompt) } } + +export const getRecentChatFromCopilot = async () => { + const db = new PageAssitDatabase() + const chatHistories = await db.getChatHistories() + if (chatHistories.length === 0) return null + const history = chatHistories.find( + (history) => history.message_source === "copilot" + ) + if (!history) return null + + const messages = await db.getChatHistory(history.id) + + return { history, messages } +} \ No newline at end of file diff --git a/src/hooks/chat-helper/index.ts b/src/hooks/chat-helper/index.ts index 15562ec..73f8494 100644 --- a/src/hooks/chat-helper/index.ts +++ b/src/hooks/chat-helper/index.ts @@ -11,7 +11,8 @@ export const saveMessageOnError = async ({ historyId, selectedModel, setHistoryId, - isRegenerating + isRegenerating, + message_source = "web-ui" }: { e: any setHistory: (history: ChatHistory) => void @@ -22,7 +23,8 @@ export const saveMessageOnError = async ({ historyId: string | null selectedModel: string setHistoryId: (historyId: string) => void - isRegenerating: boolean + 
isRegenerating: boolean, + message_source?: "copilot" | "web-ui" }) => { if ( e?.name === "AbortError" || @@ -65,7 +67,7 @@ export const saveMessageOnError = async ({ 2 ) } else { - const newHistoryId = await saveHistory(userMessage) + const newHistoryId = await saveHistory(userMessage, false, message_source) if (!isRegenerating) { await saveMessage( newHistoryId.id, @@ -103,7 +105,8 @@ export const saveMessageOnSuccess = async ({ message, image, fullText, - source + source, + message_source = "web-ui" }: { historyId: string | null setHistoryId: (historyId: string) => void @@ -112,7 +115,8 @@ export const saveMessageOnSuccess = async ({ message: string image: string fullText: string - source: any[] + source: any[], + message_source?: "copilot" | "web-ui" }) => { if (historyId) { if (!isRegenerate) { @@ -136,7 +140,7 @@ export const saveMessageOnSuccess = async ({ 2 ) } else { - const newHistoryId = await saveHistory(message) + const newHistoryId = await saveHistory(message, false, message_source) await saveMessage( newHistoryId.id, selectedModel, diff --git a/src/hooks/useMessage.tsx b/src/hooks/useMessage.tsx index 40eb5d9..8260795 100644 --- a/src/hooks/useMessage.tsx +++ b/src/hooks/useMessage.tsx @@ -329,7 +329,8 @@ export const useMessage = () => { message, image, fullText, - source + source, + message_source: "copilot" }) setIsProcessing(false) @@ -345,7 +346,8 @@ export const useMessage = () => { setHistory, setHistoryId, userMessage: message, - isRegenerating: isRegenerate + isRegenerating: isRegenerate, + message_source: "copilot" }) if (!errorSave) { @@ -535,7 +537,8 @@ export const useMessage = () => { message, image, fullText, - source: [] + source: [], + message_source: "copilot" }) setIsProcessing(false) @@ -551,7 +554,8 @@ export const useMessage = () => { setHistory, setHistoryId, userMessage: message, - isRegenerating: isRegenerate + isRegenerating: isRegenerate, + message_source: "copilot" }) if (!errorSave) { diff --git a/src/libs/process-knowledge.ts b/src/libs/process-knowledge.ts index 4e3f362..e44bfa0 100644 --- a/src/libs/process-knowledge.ts +++ b/src/libs/process-knowledge.ts @@ -9,6 +9,18 @@ import { RecursiveCharacterTextSplitter } from "langchain/text_splitter" import { PageAssistVectorStore } from "./PageAssistVectorStore" import { PageAssisCSVUrlLoader } from "@/loader/csv" import { PageAssisTXTUrlLoader } from "@/loader/txt" +import { PageAssistDocxLoader } from "@/loader/docx" + +const readAsArrayBuffer = (file: File): Promise => { + return new Promise((resolve, reject) => { + const reader = new FileReader() + reader.onload = () => { + resolve(reader.result as ArrayBuffer) + } + reader.onerror = reject + reader.readAsArrayBuffer(file) + }) +} export const processKnowledge = async (msg: any, id: string): Promise => { console.log(`Processing knowledge with id: ${id}`) @@ -58,6 +70,26 @@ export const processKnowledge = async (msg: any, id: string): Promise => { knownledge_id: knowledge.id, file_id: doc.source_id }) + } else if (doc.type === "docx" || doc.type === "application/vnd.openxmlformats-officedocument.wordprocessingml.document") { + try { + const loader = new PageAssistDocxLoader({ + fileName: doc.filename, + buffer: await toArrayBufferFromBase64( + doc.content + ) + }) + + let docs = await loader.load() + + const chunks = await textSplitter.splitDocuments(docs) + + await PageAssistVectorStore.fromDocuments(chunks, ollamaEmbedding, { + knownledge_id: knowledge.id, + file_id: doc.source_id + }) + } catch (error) { + console.error(`Error processing 
knowledge with id: ${id}`, error) + } } else { const loader = new PageAssisTXTUrlLoader({ name: doc.filename, diff --git a/src/loader/docx.ts b/src/loader/docx.ts new file mode 100644 index 0000000..3a0d50c --- /dev/null +++ b/src/loader/docx.ts @@ -0,0 +1,33 @@ +import { BaseDocumentLoader } from "langchain/document_loaders/base" +import { Document } from "@langchain/core/documents" +import * as mammoth from "mammoth" + +export interface WebLoaderParams { + fileName: string + buffer: ArrayBuffer +} + +export class PageAssistDocxLoader + extends BaseDocumentLoader + implements WebLoaderParams { + fileName: string + buffer: ArrayBuffer + + constructor({ fileName, buffer }: WebLoaderParams) { + super() + this.fileName = fileName + this.buffer = buffer + } + + public async load(): Promise { + const data = await mammoth.extractRawText({ + arrayBuffer: this.buffer + }) + const text = data.value + const meta = { source: this.fileName } + if (text) { + return [new Document({ pageContent: text, metadata: meta })] + } + return [] + } +} diff --git a/src/routes/sidepanel-chat.tsx b/src/routes/sidepanel-chat.tsx index 5921a2b..84e6c15 100644 --- a/src/routes/sidepanel-chat.tsx +++ b/src/routes/sidepanel-chat.tsx @@ -1,16 +1,34 @@ +import { + formatToChatHistory, + formatToMessage, + getRecentChatFromCopilot +} from "@/db" import React from "react" import { SidePanelBody } from "~/components/Sidepanel/Chat/body" import { SidepanelForm } from "~/components/Sidepanel/Chat/form" import { SidepanelHeader } from "~/components/Sidepanel/Chat/header" import { useMessage } from "~/hooks/useMessage" - const SidepanelChat = () => { +const SidepanelChat = () => { const drop = React.useRef(null) const [dropedFile, setDropedFile] = React.useState() const [dropState, setDropState] = React.useState< "idle" | "dragging" | "error" >("idle") - const {chatMode} = useMessage() + const { chatMode, messages, setHistory, setHistoryId, setMessages } = + useMessage() + + const setRecentMessagesOnLoad = async () => { + if (messages.length === 0) { + const recentChat = await getRecentChatFromCopilot() + if (recentChat) { + setHistoryId(recentChat.history.id) + setHistory(formatToChatHistory(recentChat.messages)) + setMessages(formatToMessage(recentChat.messages)) + } + } + } + React.useEffect(() => { if (!drop.current) { return @@ -67,6 +85,7 @@ import { useMessage } from "~/hooks/useMessage" } } }, []) + return (
    => {
      })
 }
+
 export const toArrayBufferFromBase64 = async (base64: string) => {
   const res = await fetch(base64)
   const blob = await res.blob()

From 2a7d34f4dfe7c4533f4c4fbd82f206f95fed4ba1 Mon Sep 17 00:00:00 2001
From: n4ze3m
Date: Fri, 24 May 2024 19:58:02 +0530
Subject: [PATCH 07/14] chore: Update SidepanelForm to include chat mode checkbox for normal and rag modes

---
 src/components/Sidepanel/Chat/form.tsx | 15 ++++++++++++++-
 1 file changed, 14 insertions(+), 1 deletion(-)

diff --git a/src/components/Sidepanel/Chat/form.tsx b/src/components/Sidepanel/Chat/form.tsx
index 08c0659..148ebfd 100644
--- a/src/components/Sidepanel/Chat/form.tsx
+++ b/src/components/Sidepanel/Chat/form.tsx
@@ -95,7 +95,8 @@ export const SidepanelForm = ({ dropedFile }: Props) => {
     chatMode,
     speechToTextLanguage,
     stopStreamingRequest,
-    streaming
+    streaming,
+    setChatMode
   } = useMessage()

   const { isListening, start, stop, transcript } = useSpeechRecognition()
@@ -264,6 +265,18 @@ export const SidepanelForm = ({ dropedFile }: Props) => {
                     {t("sendWhenEnter")}
                 )
+              },
+              {
+                key: 2,
+                label: (
+                   {
+                    setChatMode(e.target.checked ? "rag" : "normal")
+                  }}>
+                  {t("common:chatWithCurrentPage")}
+
+                )
               }
             ]
           }}>

From 9e2ef724864719a8a42130e2a6719bd01d3c937d Mon Sep 17 00:00:00 2001
From: n4ze3m
Date: Fri, 24 May 2024 20:00:09 +0530
Subject: [PATCH 08/14] chore: Update Lucide icons and improve Current Chat Model Settings

---
 src/components/Common/{ => Settings}/AdvanceOllamaSettings.tsx | 0
 .../Common/{ => Settings}/CurrentChatModelSettings.tsx         | 0
 src/components/Layouts/Layout.tsx                              | 2 +-
 src/components/Option/Settings/model-settings.tsx              | 2 +-
 src/components/Option/Settings/ollama.tsx                      | 2 +-
 src/components/Option/Settings/other.tsx                       | 2 +-
 src/components/Sidepanel/Chat/header.tsx                       | 2 +-
 src/components/Sidepanel/Settings/body.tsx                     | 2 +-
 8 files changed, 6 insertions(+), 6 deletions(-)
 rename src/components/Common/{ => Settings}/AdvanceOllamaSettings.tsx (100%)
 rename src/components/Common/{ => Settings}/CurrentChatModelSettings.tsx (100%)

diff --git a/src/components/Common/AdvanceOllamaSettings.tsx b/src/components/Common/Settings/AdvanceOllamaSettings.tsx
similarity index 100%
rename from src/components/Common/AdvanceOllamaSettings.tsx
rename to src/components/Common/Settings/AdvanceOllamaSettings.tsx
diff --git a/src/components/Common/CurrentChatModelSettings.tsx b/src/components/Common/Settings/CurrentChatModelSettings.tsx
similarity index 100%
rename from src/components/Common/CurrentChatModelSettings.tsx
rename to src/components/Common/Settings/CurrentChatModelSettings.tsx
diff --git a/src/components/Layouts/Layout.tsx b/src/components/Layouts/Layout.tsx
index 200d0ef..9ff5fcc 100644
--- a/src/components/Layouts/Layout.tsx
+++ b/src/components/Layouts/Layout.tsx
@@ -25,7 +25,7 @@ import { useStorage } from "@plasmohq/storage/hook"
 import { ModelSelect } from "../Common/ModelSelect"
 import { PromptSelect } from "../Common/PromptSelect"
 import { ChatSettings } from "../Icons/ChatSettings"
-import { CurrentChatModelSettings } from "../Common/CurrentChatModelSettings"
+import { CurrentChatModelSettings } from "../Common/Settings/CurrentChatModelSettings"
 
 export default function OptionLayout({
   children
diff --git a/src/components/Option/Settings/model-settings.tsx b/src/components/Option/Settings/model-settings.tsx
index a7ee8ef..8db7661 100644
--- a/src/components/Option/Settings/model-settings.tsx
+++ b/src/components/Option/Settings/model-settings.tsx
@@ -1,7 +1,7 @@
 import { SaveButton } from "@/components/Common/SaveButton"
 import { getAllModelSettings, setModelSetting } from "@/services/model-settings"
 import { useQuery, useQueryClient } from "@tanstack/react-query"
-import { Form, Skeleton, Input, Switch, InputNumber, Collapse } from "antd"
+import { Form, Skeleton, Input, InputNumber, Collapse } from "antd"
 import React from "react"
 import { useTranslation } from "react-i18next"
 // keepAlive?: string
diff --git a/src/components/Option/Settings/ollama.tsx b/src/components/Option/Settings/ollama.tsx
index 688ae96..3315a1f 100644
--- a/src/components/Option/Settings/ollama.tsx
+++ b/src/components/Option/Settings/ollama.tsx
@@ -14,7 +14,7 @@ import {
 import { SettingPrompt } from "./prompt"
 import { Trans, useTranslation } from "react-i18next"
 import { useStorage } from "@plasmohq/storage/hook"
-import { AdvanceOllamaSettings } from "@/components/Common/AdvanceOllamaSettings"
+import { AdvanceOllamaSettings } from "@/components/Common/Settings/AdvanceOllamaSettings"
 import { ModelSettings } from "./model-settings"
 
 export const SettingsOllama = () => {
diff --git a/src/components/Option/Settings/other.tsx b/src/components/Option/Settings/other.tsx
index edb4349..d4c7c25 100644
--- a/src/components/Option/Settings/other.tsx
+++ b/src/components/Option/Settings/other.tsx
@@ -29,7 +29,7 @@ export const SettingOther = () => {
 
-          {t("generalSettings.settings.heading")}
+          {t("generalSettings.title")}
 
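A note on PATCH 07 above: the antd Checkbox markup it adds to the SidepanelForm dropdown survives only as fragments (the bare "{" and "}}>" lines in that hunk). A minimal TSX sketch of what the new menu item plausibly looks like, assuming the "normal" | "rag" chat mode exposed by useMessage(); the checked expression and exact props are assumptions, not part of the patch:

    import { Checkbox } from "antd"
    import React from "react"

    type ChatMode = "normal" | "rag"

    type Props = {
      chatMode: ChatMode
      setChatMode: (mode: ChatMode) => void
      label: string // e.g. t("common:chatWithCurrentPage")
    }

    // Checking the box switches the sidepanel into "rag" mode (chat with the
    // current page); unchecking it falls back to the plain "normal" chat mode,
    // matching the setChatMode call visible in the PATCH 07 hunk.
    export const ChatWithPageToggle = ({ chatMode, setChatMode, label }: Props) => {
      // Assumption: "rag" is the chat-with-current-page mode.
      const checked = chatMode === "rag"
      return (
        <Checkbox
          checked={checked}
          onChange={(e) => setChatMode(e.target.checked ? "rag" : "normal")}>
          {label}
        </Checkbox>
      )
    }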
diff --git a/src/components/Sidepanel/Chat/header.tsx b/src/components/Sidepanel/Chat/header.tsx
index d714627..c90cadb 100644
--- a/src/components/Sidepanel/Chat/header.tsx
+++ b/src/components/Sidepanel/Chat/header.tsx
@@ -4,7 +4,7 @@ import { Link } from "react-router-dom"
 import { Tooltip } from "antd"
 import { BoxesIcon, BrainCog, CogIcon, EraserIcon } from "lucide-react"
 import { useTranslation } from "react-i18next"
-import { CurrentChatModelSettings } from "@/components/Common/CurrentChatModelSettings"
+import { CurrentChatModelSettings } from "@/components/Common/Settings/CurrentChatModelSettings"
 import React from "react"
 
 export const SidepanelHeader = () => {
   const { clearChat, isEmbedding, messages, streaming } = useMessage()
diff --git a/src/components/Sidepanel/Settings/body.tsx b/src/components/Sidepanel/Settings/body.tsx
index 8d177de..88f4450 100644
--- a/src/components/Sidepanel/Settings/body.tsx
+++ b/src/components/Sidepanel/Settings/body.tsx
@@ -23,7 +23,7 @@ import { MoonIcon, SunIcon } from "lucide-react"
 import { Trans, useTranslation } from "react-i18next"
 import { useI18n } from "@/hooks/useI18n"
 import { TTSModeSettings } from "@/components/Option/Settings/tts-mode"
-import { AdvanceOllamaSettings } from "@/components/Common/AdvanceOllamaSettings"
+import { AdvanceOllamaSettings } from "@/components/Common/Settings/AdvanceOllamaSettings"
 
 export const SettingsBody = () => {
   const { t } = useTranslation("settings")

From 961f5180c653a8dde8a777615c94cd1b7730f5d4 Mon Sep 17 00:00:00 2001
From: n4ze3m
Date: Fri, 24 May 2024 21:01:10 +0530
Subject: [PATCH 09/14] Added ability to resume previous chat on copilot

---
 src/components/Common/Beta.tsx           |  4 +-
 src/components/Layouts/Layout.tsx        | 21 ++++----
 .../Option/Settings/model-settings.tsx   | 11 ++---
 src/components/Option/Settings/other.tsx | 40 +++++++++++++++++--
 src/components/Sidepanel/Chat/header.tsx | 22 ++++----
 src/routes/sidepanel-chat.tsx            | 11 +++++
 src/services/app.ts                      |  5 +++
 7 files changed, 88 insertions(+), 26 deletions(-)

diff --git a/src/components/Common/Beta.tsx b/src/components/Common/Beta.tsx
index c381ae3..2151dea 100644
--- a/src/components/Common/Beta.tsx
+++ b/src/components/Common/Beta.tsx
@@ -1,8 +1,8 @@
 import { Tag } from "antd"
 import { useTranslation } from "react-i18next"
 
-export const BetaTag = () => {
+export const BetaTag = ({className} : {className?: string}) => {
   const { t } = useTranslation("common")
-  return {t("beta")}
+  return {t("beta")}
 }
diff --git a/src/components/Layouts/Layout.tsx b/src/components/Layouts/Layout.tsx
index 9ff5fcc..0fe494f 100644
--- a/src/components/Layouts/Layout.tsx
+++ b/src/components/Layouts/Layout.tsx
@@ -24,7 +24,6 @@ import { SelectedKnowledge } from "../Option/Knowledge/SelectedKnwledge"
 import { useStorage } from "@plasmohq/storage/hook"
 import { ModelSelect } from "../Common/ModelSelect"
 import { PromptSelect } from "../Common/PromptSelect"
-import { ChatSettings } from "../Icons/ChatSettings"
 import { CurrentChatModelSettings } from "../Common/Settings/CurrentChatModelSettings"
 
 export default function OptionLayout({
@@ -36,6 +35,10 @@ export default function OptionLayout({
   const { t } = useTranslation(["option", "common"])
   const [shareModeEnabled] = useStorage("shareMode", false)
   const [openModelSettings, setOpenModelSettings] = useState(false)
+  const [hideCurrentChatModelSettings] = useStorage(
+    "hideCurrentChatModelSettings",
+    false
+  )
 
   const {
     selectedModel,
-
-
-
+            {!hideCurrentChatModelSettings && (
+
+
+
+            )}
             {pathname === "/" && messages.length > 0 && !streaming &&
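The Layout.tsx hunk above gates the per-chat model settings trigger behind the new hideCurrentChatModelSettings flag, but the JSX inside the conditional survives only as bare "+" lines. A hedged sketch of the idea, assuming antd's Tooltip and the lucide-react BrainCog icon already imported elsewhere in this series; the exact markup and class names are assumptions:

    import { Tooltip } from "antd"
    import { BrainCog } from "lucide-react"
    import React from "react"

    type Props = {
      hide: boolean        // mirrors the "hideCurrentChatModelSettings" storage flag
      label: string        // e.g. t("common:currentChatModelSettings")
      onOpen: () => void   // opens the CurrentChatModelSettings modal
    }

    // When the user enables "hide current chat model settings", nothing is
    // rendered for this control; otherwise the tooltip-wrapped button appears.
    export const ChatModelSettingsButton = ({ hide, label, onOpen }: Props) => {
      if (hide) return null
      return (
        <Tooltip title={label}>
          <button onClick={onOpen} className="text-gray-500 dark:text-gray-300">
            <BrainCog className="h-5 w-5" />
          </button>
        </Tooltip>
      )
    }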
diff --git a/src/components/Option/Settings/model-settings.tsx b/src/components/Option/Settings/model-settings.tsx
index 8db7661..7996a73 100644
--- a/src/components/Option/Settings/model-settings.tsx
+++ b/src/components/Option/Settings/model-settings.tsx
@@ -1,13 +1,11 @@
+import { BetaTag } from "@/components/Common/Beta"
 import { SaveButton } from "@/components/Common/SaveButton"
 import { getAllModelSettings, setModelSetting } from "@/services/model-settings"
 import { useQuery, useQueryClient } from "@tanstack/react-query"
 import { Form, Skeleton, Input, InputNumber, Collapse } from "antd"
 import React from "react"
 import { useTranslation } from "react-i18next"
-// keepAlive?: string
-// temperature?: number
-// topK?: number
-// topP?: number
+
 export const ModelSettings = () => {
   const { t } = useTranslation("common")
@@ -25,9 +23,12 @@ export const ModelSettings = () => {
   return (
+
+
 
-        {t("modelSettings.label")}
+        {t("modelSettings.label")}
 
+
 
         {t("modelSettings.description")}
 
diff --git a/src/components/Option/Settings/other.tsx b/src/components/Option/Settings/other.tsx
index d4c7c25..e05a9b8 100644
--- a/src/components/Option/Settings/other.tsx
+++ b/src/components/Option/Settings/other.tsx
@@ -2,7 +2,7 @@ import { useQueryClient } from "@tanstack/react-query"
 import { useDarkMode } from "~/hooks/useDarkmode"
 import { useMessageOption } from "~/hooks/useMessageOption"
 import { PageAssitDatabase } from "@/db"
-import { Select } from "antd"
+import { Select, Switch } from "antd"
 import { SUPPORTED_LANGUAGES } from "~/utils/supporetd-languages"
 import { MoonIcon, SunIcon } from "lucide-react"
 import { SearchModeSettings } from "./search-mode"
@@ -14,11 +14,20 @@ import {
   importPageAssistData
 } from "@/libs/export-import"
 import { BetaTag } from "@/components/Common/Beta"
+import { useStorage } from "@plasmohq/storage/hook"
 
 export const SettingOther = () => {
   const { clearChat, speechToTextLanguage, setSpeechToTextLanguage } =
     useMessageOption()
+  const [copilotResumeLastChat, setCopilotResumeLastChat] = useStorage(
+    "copilotResumeLastChat",
+    false
+  )
+
+  const [hideCurrentChatModelSettings, setHideCurrentChatModelSettings] =
+    useStorage("hideCurrentChatModelSettings", false)
+
   const queryClient = useQueryClient()
 
   const { mode, toggleDarkMode } = useDarkMode()
@@ -34,7 +43,7 @@ export const SettingOther = () => {
-
+
           {t("generalSettings.settings.speechRecognitionLang.label")}
@@ -76,6 +85,29 @@ export const SettingOther = () => {
           }} />
+
+          {t("generalSettings.settings.copilotResumeLastChat.label")}
+
+            setCopilotResumeLastChat(checked)} />
+
+
+
+          {t("generalSettings.settings.hideCurrentChatModelSettings.label")}
+
+            setHideCurrentChatModelSettings(checked)} />
 
           {t("generalSettings.settings.darkMode.label")}
@@ -129,7 +161,7 @@ export const SettingOther = () => {
-          {t("generalSettings.system.export.label")}
+          {t("generalSettings.system.export.label")}
-          {t("generalSettings.system.import.label")}
+          {t("generalSettings.system.import.label")}
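The two settings rows above persist their values with Plasmo's useStorage hook, though only fragments of the antd Switch markup survive in the hunk. A minimal sketch of the pattern using the storage keys from the patch; the surrounding layout, labels, and class names are assumptions rather than the patch's exact markup:

    import { useStorage } from "@plasmohq/storage/hook"
    import { Switch } from "antd"
    import React from "react"

    // Two persisted toggles: the storage keys match PATCH 09, everything else
    // is illustrative.
    export const CopilotToggles = () => {
      const [copilotResumeLastChat, setCopilotResumeLastChat] = useStorage(
        "copilotResumeLastChat",
        false
      )
      const [hideCurrentChatModelSettings, setHideCurrentChatModelSettings] =
        useStorage("hideCurrentChatModelSettings", false)

      return (
        <div className="flex flex-col gap-4">
          <div className="flex items-center justify-between">
            <span>Resume the last chat when the copilot opens</span>
            <Switch
              checked={copilotResumeLastChat}
              onChange={(checked) => setCopilotResumeLastChat(checked)}
            />
          </div>
          <div className="flex items-center justify-between">
            <span>Hide the current chat model settings button</span>
            <Switch
              checked={hideCurrentChatModelSettings}
              onChange={(checked) => setHideCurrentChatModelSettings(checked)}
            />
          </div>
        </div>
      )
    }

Per the diffstat, src/routes/sidepanel-chat.tsx and src/services/app.ts are the files that presumably read copilotResumeLastChat so the copilot can restore the previous session instead of starting an empty chat.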