Merge pull request #41 from n4ze3m/next

Next
Muhammed Nazeem 2024-04-17 00:42:27 +05:30 committed by GitHub
commit 50d4ae4f09
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
97 changed files with 3997 additions and 643 deletions

BIN
bun.lockb Normal file

Binary file not shown.

View File

@ -29,14 +29,17 @@
"antd": "^5.13.3",
"axios": "^1.6.7",
"cheerio": "^1.0.0-rc.12",
"d3-dsv": "2",
"dayjs": "^1.11.10",
"html-to-text": "^9.0.5",
"i18next": "^23.10.1",
"i18next-browser-languagedetector": "^7.2.0",
"langchain": "^0.1.28",
"lucide-react": "^0.350.0",
"ml-distance": "^4.0.1",
"pdfjs-dist": "^4.0.379",
"property-information": "^6.4.1",
"pubsub-js": "^1.9.4",
"react": "18.2.0",
"react-dom": "18.2.0",
"react-i18next": "^14.1.0",
@ -47,17 +50,21 @@
"rehype-mathjax": "4.0.3",
"remark-gfm": "3.0.1",
"remark-math": "5.1.1",
"turndown": "^7.1.3",
"yt-transcript": "^0.0.2",
"zustand": "^4.5.0"
},
"devDependencies": {
"@plasmohq/prettier-plugin-sort-imports": "4.0.1",
"@types/chrome": "0.0.259",
"@types/d3-dsv": "^3.0.7",
"@types/html-to-text": "^9.0.4",
"@types/node": "20.11.9",
"@types/pubsub-js": "^1.8.6",
"@types/react": "18.2.48",
"@types/react-dom": "18.2.18",
"@types/react-syntax-highlighter": "^15.5.11",
"@types/turndown": "^5.0.4",
"autoprefixer": "^10.4.17",
"postcss": "^8.4.33",
"prettier": "3.2.4",
@ -69,4 +76,4 @@
"resolutions": {
"@langchain/core": "0.1.45"
}
}
}

View File

@ -48,5 +48,7 @@
"submit": "Submit",
"noData": "No data",
"noHistory": "No chat history",
"chatWithCurrentPage": "Chat with current page"
"chatWithCurrentPage": "Chat with current page",
"beta": "Beta",
"tts": "Read aloud"
}

View File

@ -0,0 +1,42 @@
{
"addBtn": "Add New Knowledge",
"columns": {
"title": "Title",
"status": "Status",
"embeddings": "Embedding Model",
"createdAt": "Created At",
"action": "Actions"
},
"expandedColumns": {
"name": "Name"
},
"tooltip": {
"delete": "Delete"
},
"confirm": {
"delete": "Are you sure you want to delete this knowledge?"
},
"deleteSuccess": "Knowledge deleted successfully",
"status": {
"pending": "Pending",
"finished": "Finished",
"processing": "Processing"
},
"addKnowledge": "Add Knowledge",
"form": {
"title": {
"label": "Knowledge Title",
"placeholder": "Enter knowledge title",
"required": "Knowledge title is required"
},
"uploadFile": {
"label": "Upload File",
"uploadText": "Drag and drop a file here or click to upload",
"uploadHint": "Supported file types: .pdf, .csv, .txt, .md",
"required": "File is required"
},
"submit": "Submit",
"success": "Knowledge added successfully"
},
"noEmbeddingModel": "Please add an embedding model first from the Ollama settings page"
}

View File

@ -21,7 +21,8 @@
"searchInternet": "Search Internet",
"speechToText": "Speech to Text",
"uploadImage": "Upload Image",
"stopStreaming": "Stop Streaming"
"stopStreaming": "Stop Streaming",
"knowledge": "Knowledge"
},
"sendWhenEnter": "Send when Enter pressed"
}

View File

@ -51,6 +51,23 @@
"success": "Import Success",
"error": "Import Error"
}
},
"tts": {
"heading": "Text-to-Speech Settings",
"ttsEnabled": {
"label": "Enable Text-to-Speech"
},
"ttsProvider": {
"label": "Text-to-Speech Provider",
"placeholder": "Select a provider"
},
"ttsVoice": {
"label": "Text-to-Speech Voice",
"placeholder": "Select a voice"
},
"ssmlEnabled": {
"label": "Enable SSML (Speech Synthesis Markup Language)"
}
}
},
"manageModels": {
@ -242,5 +259,9 @@
"koFi": "Support on Ko-fi",
"githubSponsor": "Sponsor on GitHub",
"githubRepo": "GitHub Repository"
},
"manageKnowledge": {
"title": "Manage Knowledge",
"heading": "Configure Knowledge Base"
}
}

View File

@ -1,5 +1,7 @@
{
"tooltip": {
"embed": "It may take a few minutes to embed the page. Please wait..."
"embed": "It may take a few minutes to embed the page. Please wait...",
"clear": "Erase chat history",
"history": "Chat history"
}
}

View File

@ -48,5 +48,7 @@
"submit": "送信",
"noData": "データがありません",
"noHistory": "チャット履歴がありません",
"chatWithCurrentPage": "現在のページでチャット"
"chatWithCurrentPage": "現在のページでチャット",
"beta": "ベータ",
"tts": "読み上げ"
}

View File

@ -0,0 +1,42 @@
{
"addBtn": "新しい知識を追加",
"columns": {
"title": "タイトル",
"status": "ステータス",
"embeddings": "埋め込みモデル",
"createdAt": "作成日",
"action": "アクション"
},
"expandedColumns": {
"name": "名前"
},
"tooltip": {
"delete": "削除"
},
"confirm": {
"delete": "この知識を削除してもよろしいですか?"
},
"deleteSuccess": "知識が正常に削除されました",
"status": {
"pending": "保留中",
"finished": "完了",
"processing": "処理中"
},
"addKnowledge": "知識を追加",
"form": {
"title": {
"label": "知識タイトル",
"placeholder": "知識のタイトルを入力してください",
"required": "知識のタイトルは必須です"
},
"uploadFile": {
"label": "ファイルをアップロード",
"uploadText": "ファイルをここにドラッグアンドドロップするか、クリックしてアップロード",
"uploadHint": "サポートされているファイルタイプ: .pdf、.csv、.txt",
"required": "ファイルは必須です"
},
"submit": "送信",
"success": "知識が正常に追加されました"
},
"noEmbeddingModel": "最初にOllamaの設定ページから埋め込みモデルを追加してください"
}

View File

@ -21,7 +21,8 @@
"searchInternet": "インターネットを検索",
"speechToText": "音声入力",
"uploadImage": "画像をアップロード",
"stopStreaming": "ストリーミングを停止"
"stopStreaming": "ストリーミングを停止",
"knowledge": "知識"
},
"sendWhenEnter": "Enterキーを押すと送信"
}

View File

@ -54,6 +54,23 @@
"success": "インポート成功",
"error": "インポートエラー"
}
},
"tts": {
"heading": "テキスト読み上げ設定",
"ttsEnabled": {
"label": "テキスト読み上げを有効にする"
},
"ttsProvider": {
"label": "テキスト読み上げプロバイダー",
"placeholder": "プロバイダーを選択"
},
"ttsVoice": {
"label": "テキスト読み上げの音声",
"placeholder": "音声を選択"
},
"ssmlEnabled": {
"label": "SSML (Speech Synthesis Markup Language) を有効にする"
}
}
},
"manageModels": {
@ -245,5 +262,9 @@
"koFi": "Ko-fiで支援する",
"githubSponsor": "GitHubでスポンサーする",
"githubRepo": "GitHubリポジトリ"
}
},
"manageKnowledge": {
"title": "知識を管理する",
"heading": "知識ベースを構成する"
}
}

View File

@ -48,5 +48,6 @@
"submit": "സമർപ്പിക്കുക",
"noData": "ഡാറ്റ ലഭ്യമല്ല",
"noHistory": "ചാറ്റ് ചരിത്രം ലഭ്യമല്ല",
"chatWithCurrentPage": "നിലവിലെ പേജിനുമായി ചാറ്റ് ചെയ്യുക"
"chatWithCurrentPage": "നിലവിലെ പേജിനുമായി ചാറ്റ് ചെയ്യുക",
"beta": "ബീറ്റ"
}

View File

@ -0,0 +1,42 @@
{
"addBtn": "പുതിയ വിജ്ഞാനം ചേര്‍ക്കുക",
"columns": {
"title": "തലക്കെട്ട്",
"status": "സ്ഥിതി",
"embeddings": "എംബെഡിംഗ് മോഡല്‍",
"createdAt": "സൃഷ്ടിച്ചത്",
"action": "പ്രവർത്തനങ്ങൾ"
},
"expandedColumns": {
"name": "നാമം"
},
"tooltip": {
"delete": "ഇല്ലാതാക്കുക"
},
"confirm": {
"delete": "നിങ്ങൾക്ക് ഈ വിജ്ഞാനം ഇല്ലാതാക്കണമെന്ന് ഉറപ്പാണോ?"
},
"deleteSuccess": "വിജ്ഞാനം വിജയകരമായി ഇല്ലാതാക്കി",
"status": {
"pending": "തീരുമാനിക്കാനുണ്ട്",
"finished": "പൂർത്തീകരിച്ചു",
"processing": "പ്രോസസ്സിംഗ്"
},
"addKnowledge": "വിജ്ഞാനം ചേര്‍ക്കുക",
"form": {
"title": {
"label": "വിജ്ഞാനത്തിന്റെ തലക്കെട്ട്",
"placeholder": "വിജ്ഞാനത്തിന്റെ തലക്കെട്ട് നല്‍കുക",
"required": "വിജ്ഞാനത്തിന്റെ തലക്കെട്ട് ആവശ്യമാണ്"
},
"uploadFile": {
"label": "ഫയല്‍ അപ്‌ലോഡ് ചെയ്യുക",
"uploadText": "ഇവിടെ ഒരു ഫയല്‍ എടുത്തിടുക അല്ലെങ്കില്‍ അപ്‌ലോഡ് ചെയ്യാന്‍ ക്ലിക്ക് ചെയ്യുക",
"uploadHint": "പിന്തുണയുള്ള ഫയല്‍ തരങ്ങള്‍: .pdf, .csv, .txt, .md",
"required": "ഫയല്‍ ആവശ്യമാണ്"
},
"submit": "സമര്‍പ്പിക്കുക",
"success": "വിജ്ഞാനം വിജയകരമായി ചേര്‍ത്തു"
},
"noEmbeddingModel": "ദയവായി ആദ്യം Ollama ക്രമീകരണ പേജില്‍ നിന്ന് ഒരു എംബെഡിംഗ് മോഡല്‍ ചേര്‍ക്കുക"
}

View File

@ -21,7 +21,8 @@
"searchInternet": "ഇന്റര്‍നെറ്റ് തിരയുക",
"speechToText": "സംഭാഷണം ടെക്സ്റ്റായി",
"uploadImage": "ഇമേജ് അപ്‌ലോഡ് ചെയ്യുക",
"stopStreaming": "സ്ട്രീമിംഗ് നിർത്തുക"
"stopStreaming": "സ്ട്രീമിംഗ് നിർത്തുക",
"knowledge": "അറിവ്"
},
"sendWhenEnter": "എന്റര്‍ അമര്‍ത്തുമ്പോള്‍ അയയ്ക്കുക"
}

View File

@ -54,6 +54,23 @@
"success": "ഇമ്പോർട്ട് വിജയകരമായി",
"error": "ഇമ്പോർട്ട് പരാജയപ്പെട്ടു"
}
},
"tts": {
"heading": "ടെക്സ്റ്റ്-ടു-സ്പീച്ച് ക്രമീകരണങ്ങൾ",
"ttsEnabled": {
"label": "ടെക്സ്റ്റ്-ടു-സ്പീച്ച് പ്രവർത്തനക്ഷമമാക്കുക"
},
"ttsProvider": {
"label": "ടെക്സ്റ്റ്-ടു-സ്പീച്ച് പ്രോവൈഡർ",
"placeholder": "ഒരു പ്രോവൈഡർ തിരഞ്ഞെടുക്കുക"
},
"ttsVoice": {
"label": "ടെക്സ്റ്റ്-ടു-സ്പീച്ച് വോയ്സ്",
"placeholder": "ഒരു വോയ്സ് തിരഞ്ഞെടുക്കുക"
},
"ssmlEnabled": {
"label": "SSML (സ്പീച്ച് സിന്തസിസ് മാർക്കപ്പ് ലാംഗ്വേജ്) പ്രവർത്തനക്ഷമമാക്കുക"
}
}
},
"manageModels": {
@ -245,6 +262,9 @@
"koFi": "കോഫിയിൽ പിന്തുണയ്ക്കുക",
"githubSponsor": "ഗിറ്റ്ഹബ്ബിൽ സ്പോൺസർ ചെയ്യുക",
"githubRepo": "ഗിറ്റ്ഹബ്ബ് റെപ്പോസിറ്ററി"
}
},
"manageKnowledge": {
"title": "വിജ്ഞാനം നിര്‍വ്വഹിക്കുക",
"heading": "വിജ്ഞാനാധാരം കോണ്‍ഫിഗര്‍ ചെയ്യുക"
}
}

View File

@ -48,5 +48,7 @@
"submit": "提交",
"noData": "无数据",
"noHistory": "无聊天记录",
"chatWithCurrentPage": "与当前页面聊天"
"chatWithCurrentPage": "与当前页面聊天",
"beta": "Beta",
"tts": "朗读"
}

View File

@ -0,0 +1,42 @@
{
"addBtn": "添加新知识",
"columns": {
"title": "标题",
"status": "状态",
"embeddings": "嵌入模型",
"createdAt": "创建于",
"action": "操作"
},
"expandedColumns": {
"name": "名称"
},
"tooltip": {
"delete": "删除"
},
"confirm": {
"delete": "您确定要删除此知识吗?"
},
"deleteSuccess": "知识删除成功",
"status": {
"pending": "待定",
"finished": "已完成",
"processing": "处理中"
},
"addKnowledge": "添加知识",
"form": {
"title": {
"label": "知识标题",
"placeholder": "输入知识标题",
"required": "知识标题是必需的"
},
"uploadFile": {
"label": "上传文件",
"uploadText": "将文件拖放到此处或点击上传",
"uploadHint": "支持的文件类型: .pdf, .csv, .txt, .md",
"required": "文件是必需的"
},
"submit": "提交",
"success": "知识添加成功"
},
"noEmbeddingModel": "请先从Ollama设置页面添加一个嵌入模型"
}

View File

@ -21,7 +21,8 @@
"searchInternet": "搜索互联网",
"speechToText": "语音到文本",
"uploadImage": "上传图片",
"stopStreaming": "停止流媒体"
"stopStreaming": "停止流媒体",
"knowledge": "知识"
},
"sendWhenEnter": "按Enter发送"
}

View File

@ -54,6 +54,23 @@
"success": "导入成功",
"error": "导入错误"
}
},
"tts": {
"heading": "文本转语音设置",
"ttsEnabled": {
"label": "启用文本转语音"
},
"ttsProvider": {
"label": "文本转语音提供商",
"placeholder": "选择一个提供商"
},
"ttsVoice": {
"label": "文本转语音语音",
"placeholder": "选择一种语音"
},
"ssmlEnabled": {
"label": "启用SSML(语音合成标记语言)"
}
}
},
"manageModels": {
@ -246,5 +263,9 @@
"koFi": "在Ko-fi上支持",
"githubSponsor": "在GitHub上赞助",
"githubRepo": "GitHub仓库"
}
},
"manageKnowledge": {
"title": "管理知识",
"heading": "配置知识库"
}
}

View File

@ -55,3 +55,13 @@
background-position: 0% 50%;
}
}
/* Hide scrollbar for Chrome, Safari and Opera */
.no-scrollbar::-webkit-scrollbar {
display: none;
}
/* Hide scrollbar for IE, Edge and Firefox */
.no-scrollbar {
-ms-overflow-style: none; /* IE and Edge */
scrollbar-width: none; /* Firefox */
}

158
src/chain/chat-with-x.ts Normal file
View File

@ -0,0 +1,158 @@
import { BaseLanguageModel } from "@langchain/core/language_models/base"
import { Document } from "@langchain/core/documents"
import {
ChatPromptTemplate,
MessagesPlaceholder,
PromptTemplate
} from "@langchain/core/prompts"
import { AIMessage, BaseMessage, HumanMessage } from "@langchain/core/messages"
import { StringOutputParser } from "@langchain/core/output_parsers"
import {
Runnable,
RunnableBranch,
RunnableLambda,
RunnableMap,
RunnableSequence
} from "@langchain/core/runnables"
type RetrievalChainInput = {
chat_history: string
question: string
}
const formatChatHistoryAsString = (history: BaseMessage[]) => {
return history
.map((message) => `${message._getType()}: ${message.content}`)
.join("\n")
}
export const formatDocs = (docs: Document[]) => {
return docs
.filter(
(doc, i, self) =>
self.findIndex((d) => d.pageContent === doc.pageContent) === i
)
.map((doc, i) => `<doc id='${i}'>${doc.pageContent}</doc>`)
.join("\n")
}
const serializeHistory = (input: any) => {
const chatHistory = input.chat_history || []
const convertedChatHistory = []
for (const message of chatHistory) {
if (message.human !== undefined) {
convertedChatHistory.push(new HumanMessage({ content: message.human }))
}
if (message["ai"] !== undefined) {
convertedChatHistory.push(new AIMessage({ content: message.ai }))
}
}
return convertedChatHistory
}
const createRetrieverChain = (
llm: BaseLanguageModel,
retriever: Runnable,
question_template: string
) => {
const CONDENSE_QUESTION_PROMPT =
PromptTemplate.fromTemplate(question_template)
const condenseQuestionChain = RunnableSequence.from([
CONDENSE_QUESTION_PROMPT,
llm,
new StringOutputParser()
]).withConfig({
runName: "CondenseQuestion"
})
const hasHistoryCheckFn = RunnableLambda.from(
(input: RetrievalChainInput) => input.chat_history.length > 0
).withConfig({ runName: "HasChatHistoryCheck" })
const conversationChain = condenseQuestionChain.pipe(retriever).withConfig({
runName: "RetrievalChainWithHistory"
})
const basicRetrievalChain = RunnableLambda.from(
(input: RetrievalChainInput) => input.question
)
.withConfig({
runName: "Itemgetter:question"
})
.pipe(retriever)
.withConfig({ runName: "RetrievalChainWithNoHistory" })
return RunnableBranch.from([
[hasHistoryCheckFn, conversationChain],
basicRetrievalChain
]).withConfig({
runName: "FindDocs"
})
}
export const createChatWithXChain = ({
llm,
question_template,
question_llm,
retriever,
response_template
}: {
llm: BaseLanguageModel
question_llm: BaseLanguageModel
retriever: Runnable
question_template: string
response_template: string
}) => {
const retrieverChain = createRetrieverChain(
question_llm,
retriever,
question_template
)
const context = RunnableMap.from({
context: RunnableSequence.from([
({ question, chat_history }) => {
return {
question: question,
chat_history: formatChatHistoryAsString(chat_history)
}
},
retrieverChain,
RunnableLambda.from(formatDocs).withConfig({
runName: "FormatDocumentChunks"
})
]),
question: RunnableLambda.from(
(input: RetrievalChainInput) => input.question
).withConfig({
runName: "Itemgetter:question"
}),
chat_history: RunnableLambda.from(
(input: RetrievalChainInput) => input.chat_history
).withConfig({
runName: "Itemgetter:chat_history"
})
}).withConfig({ tags: ["RetrieveDocs"] })
const prompt = ChatPromptTemplate.fromMessages([
["system", response_template],
new MessagesPlaceholder("chat_history"),
["human", "{question}"]
])
const responseSynthesizerChain = RunnableSequence.from([
prompt,
llm,
new StringOutputParser()
]).withConfig({
tags: ["GenerateResponse"]
})
return RunnableSequence.from([
{
question: RunnableLambda.from(
(input: RetrievalChainInput) => input.question
).withConfig({
runName: "Itemgetter:question"
}),
chat_history: RunnableLambda.from(serializeHistory).withConfig({
runName: "SerializeHistory"
})
},
context,
responseSynthesizerChain
])
}
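
A minimal usage sketch for createChatWithXChain (not from this commit): the ChatOllama / OllamaEmbeddings / MemoryVectorStore wiring and both template strings are illustrative assumptions, chosen only to show the input shape the chain expects ({ question, chat_history: [{ human, ai }] }) and the {context} / {chat_history} / {question} placeholders it fills in. The "@/chain/chat-with-x" path is inferred from the file added above.

import { ChatOllama } from "@langchain/community/chat_models/ollama"
import { OllamaEmbeddings } from "@langchain/community/embeddings/ollama"
import { MemoryVectorStore } from "langchain/vectorstores/memory"
import { createChatWithXChain } from "@/chain/chat-with-x"

const demo = async () => {
  // Any BaseLanguageModel works; a local Ollama model is assumed here.
  const llm = new ChatOllama({ model: "llama2", baseUrl: "http://localhost:11434" })

  // A throwaway in-memory store stands in for the knowledge-base retriever.
  const store = await MemoryVectorStore.fromTexts(
    ["Page Assist lets you chat with your browser using local models."],
    [{}],
    new OllamaEmbeddings({ model: "nomic-embed-text" })
  )

  const chain = createChatWithXChain({
    llm,
    question_llm: llm,
    retriever: store.asRetriever(),
    // Used by the condense-question step when chat history is present.
    question_template:
      "Chat history:\n{chat_history}\n\nRephrase the follow-up as a standalone question: {question}",
    // Used as the system prompt; {context} receives the formatted <doc> chunks.
    response_template:
      "Answer the question using only this context:\n{context}"
  })

  // chat_history entries follow the { human, ai } shape handled by serializeHistory.
  const answer = await chain.invoke({
    question: "What does Page Assist do?",
    chat_history: []
  })
  console.log(answer)
}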

View File

@ -1,7 +1,6 @@
import { Prism as SyntaxHighlighter } from "react-syntax-highlighter"
import remarkGfm from "remark-gfm"
import { nightOwl } from "react-syntax-highlighter/dist/cjs/styles/prism"
import rehypeMathjax from "rehype-mathjax"
import remarkMath from "remark-math"
import ReactMarkdown from "react-markdown"
import "property-information"
@ -19,7 +18,6 @@ export default function Markdown({ message }: { message: string }) {
<ReactMarkdown
className="prose break-words dark:prose-invert prose-p:leading-relaxed prose-pre:p-0 dark:prose-dark"
remarkPlugins={[remarkGfm, remarkMath]}
rehypePlugins={[rehypeMathjax]}
components={{
code({ node, inline, className, children, ...props }) {
const match = /language-(\w+)/.exec(className || "")

View File

@ -11,6 +11,8 @@ export const PageAssistProvider = ({
const [controller, setController] = React.useState<AbortController | null>(
null
)
const [embeddingController, setEmbeddingController] =
React.useState<AbortController | null>(null)
return (
<PageAssistContext.Provider
@ -19,7 +21,10 @@ export const PageAssistProvider = ({
setMessages,
controller,
setController
setController,
embeddingController,
setEmbeddingController
}}>
{children}
</PageAssistContext.Provider>

View File

@ -2,9 +2,18 @@ import Markdown from "../../Common/Markdown"
import React from "react"
import { Image, Tooltip } from "antd"
import { WebSearch } from "./WebSearch"
import { CheckIcon, ClipboardIcon, Pen, RotateCcw } from "lucide-react"
import {
CheckIcon,
ClipboardIcon,
Pen,
PlayIcon,
RotateCcw,
Square
} from "lucide-react"
import { EditMessageForm } from "./EditMessageForm"
import { useTranslation } from "react-i18next"
import { MessageSource } from "./MessageSource"
import { useTTS } from "@/hooks/useTTS"
type Props = {
message: string
@ -23,6 +32,8 @@ type Props = {
isSearchingInternet?: boolean
sources?: any[]
hideEditAndRegenerate?: boolean
onSourceClick?: (source: any) => void
isTTSEnabled?: boolean
}
export const PlaygroundMessage = (props: Props) => {
@ -30,6 +41,7 @@ export const PlaygroundMessage = (props: Props) => {
const [editMode, setEditMode] = React.useState(false)
const { t } = useTranslation("common")
const { cancel, isSpeaking, speak } = useTTS()
return (
<div className="group w-full text-gray-800 dark:text-gray-100">
@ -95,13 +107,11 @@ export const PlaygroundMessage = (props: Props) => {
{props.isBot && props?.sources && props?.sources.length > 0 && (
<div className="mb-3 flex flex-wrap gap-2">
{props?.sources?.map((source, index) => (
<a
<MessageSource
onSourceClick={props.onSourceClick}
key={index}
href={source?.url}
target="_blank"
className="inline-flex cursor-pointer transition-shadow duration-300 ease-in-out hover:shadow-lg items-center rounded-md bg-gray-100 p-1 text-xs text-gray-800 border border-gray-300 dark:bg-gray-800 dark:border-gray-700 dark:text-gray-100 opacity-80 hover:opacity-100">
<span className="text-xs">{source.name}</span>
</a>
source={source}
/>
))}
</div>
)}
@ -112,11 +122,31 @@ export const PlaygroundMessage = (props: Props) => {
? "hidden group-hover:flex"
: "flex"
}`}>
{props.isTTSEnabled && (
<Tooltip title={t("tts")}>
<button
onClick={() => {
if (isSpeaking) {
cancel()
} else {
speak({
utterance: props.message
})
}
}}
className="flex items-center justify-center w-6 h-6 rounded-full bg-gray-100 dark:bg-gray-800 hover:bg-gray-200 dark:hover:bg-gray-700 transition-colors duration-200 focus:outline-none focus:ring-2 focus:ring-offset-2 focus:ring-gray-500">
{!isSpeaking ? (
<PlayIcon className="w-3 h-3 text-gray-400 group-hover:text-gray-500" />
) : (
<Square className="w-3 h-3 text-red-400 group-hover:text-red-500" />
)}
</button>
</Tooltip>
)}
{props.isBot && (
<>
{!props.hideCopy && (
<Tooltip title={t("copyToClipboard")}
>
<Tooltip title={t("copyToClipboard")}>
<button
onClick={() => {
navigator.clipboard.writeText(props.message)
@ -137,8 +167,7 @@ export const PlaygroundMessage = (props: Props) => {
{!props.hideEditAndRegenerate &&
props.currentMessageIndex === props.totalMessages - 1 && (
<Tooltip title={t("regenerate")}
>
<Tooltip title={t("regenerate")}>
<button
onClick={props.onRengerate}
className="flex items-center justify-center w-6 h-6 rounded-full bg-gray-100 dark:bg-gray-800 hover:bg-gray-200 dark:hover:bg-gray-700 transition-colors duration-200 focus:outline-none focus:ring-2 focus:ring-offset-2 focus:ring-gray-500">
@ -149,8 +178,7 @@ export const PlaygroundMessage = (props: Props) => {
</>
)}
{!props.hideEditAndRegenerate && (
<Tooltip title={t("edit")}
>
<Tooltip title={t("edit")}>
<button
onClick={() => setEditMode(true)}
className="flex items-center justify-center w-6 h-6 rounded-full bg-gray-100 dark:bg-gray-800 hover:bg-gray-200 dark:hover:bg-gray-700 transition-colors duration-200 focus:outline-none focus:ring-2 focus:ring-offset-2 focus:ring-gray-500">
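
The useTTS hook consumed above is not shown in this excerpt. A minimal sketch of what such a hook could look like on top of the browser SpeechSynthesis API, covering only the speak({ utterance }) / cancel / isSpeaking surface used by PlaygroundMessage; the provider, voice and SSML settings added elsewhere in this PR are ignored here.

import React from "react"

// Minimal sketch, assuming plain Web Speech API playback.
export const useTTS = () => {
  const [isSpeaking, setIsSpeaking] = React.useState(false)

  const speak = ({ utterance }: { utterance: string }) => {
    const u = new SpeechSynthesisUtterance(utterance)
    // Reset the flag when playback finishes or fails.
    u.onend = () => setIsSpeaking(false)
    u.onerror = () => setIsSpeaking(false)
    setIsSpeaking(true)
    window.speechSynthesis.speak(u)
  }

  const cancel = () => {
    window.speechSynthesis.cancel()
    setIsSpeaking(false)
  }

  return { speak, cancel, isSpeaking }
}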

View File

@ -0,0 +1,37 @@
import { KnowledgeIcon } from "@/components/Option/Knowledge/KnowledgeIcon"
type Props = {
source: {
name?: string
url?: string
mode?: string
type?: string
pageContent?: string
content?: string
}
onSourceClick?: (source: any) => void
}
export const MessageSource: React.FC<Props> = ({ source, onSourceClick }) => {
if (source?.mode === "rag") {
return (
<button
onClick={() => {
onSourceClick && onSourceClick(source)
}}
className="inline-flex gap-2 cursor-pointer transition-shadow duration-300 ease-in-out hover:shadow-lg items-center rounded-md bg-gray-100 p-1 text-xs text-gray-800 border border-gray-300 dark:bg-gray-800 dark:border-gray-700 dark:text-gray-100 opacity-80 hover:opacity-100">
<KnowledgeIcon type={source.type} className="h-3 w-3" />
<span className="text-xs">{source.name}</span>
</button>
)
}
return (
<a
href={source?.url}
target="_blank"
className="inline-flex cursor-pointer transition-shadow duration-300 ease-in-out hover:shadow-lg items-center rounded-md bg-gray-100 p-1 text-xs text-gray-800 border border-gray-300 dark:bg-gray-800 dark:border-gray-700 dark:text-gray-100 opacity-80 hover:opacity-100">
<span className="text-xs">{source.name}</span>
</a>
)
}

View File

@ -0,0 +1,52 @@
import { KnowledgeIcon } from "@/components/Option/Knowledge/KnowledgeIcon"
import { Modal } from "antd"
type Props = {
source: any
open: boolean
setOpen: (open: boolean) => void
}
export const MessageSourcePopup: React.FC<Props> = ({
source,
open,
setOpen
}) => {
return (
<Modal
open={open}
// mask={false}
zIndex={10000}
onCancel={() => setOpen(false)}
footer={null}
onOk={() => setOpen(false)}>
<div className="flex flex-col gap-2 mt-6">
<h4 className="bg-gray-100 text-md dark:bg-gray-800 inline-flex gap-2 items-center text-gray-800 dark:text-gray-100 font-semibold p-2">
{source?.type && (
<KnowledgeIcon type={source?.type} className="h-4 w-5" />
)}
{source?.name}
</h4>
{source?.type === "pdf" ? (
<>
<p className="text-gray-500 text-sm">{source?.pageContent}</p>
<div className="flex flex-wrap gap-3">
<span className="border border-gray-300 dark:border-gray-700 rounded-md p-1 text-gray-500 text-xs">
{`Page ${source?.metadata?.page}`}
</span>
<span className="border border-gray-300 dark:border-gray-700 rounded-md p-1 text-xs text-gray-500">
{`Line ${source?.metadata?.loc?.lines?.from} - ${source?.metadata?.loc?.lines?.to}`}
</span>
</div>
</>
) : (
<>
<p className="text-gray-500 text-sm">{source?.pageContent}</p>
</>
)}
</div>
</Modal>
)
}

View File

@ -7,7 +7,7 @@ import React from "react"
import { useMutation } from "@tanstack/react-query"
import { getPageShareUrl } from "~/services/ollama"
import { cleanUrl } from "~/libs/clean-url"
import { getUserId, saveWebshare } from "~/libs/db"
import { getUserId, saveWebshare } from "@/db"
import { useTranslation } from "react-i18next"
type Props = {

View File

@ -0,0 +1,44 @@
import React from "react"
export const CSVIcon = React.forwardRef<
SVGSVGElement,
React.SVGProps<SVGSVGElement>
>((props, ref) => {
return (
<svg
xmlns="http://www.w3.org/2000/svg"
version="1.1"
viewBox="0 0 303.188 303.188"
xmlSpace="preserve"
ref={ref}
{...props}>
<path
fill="#E4E4E4"
d="M219.821 0L32.842 0 32.842 303.188 270.346 303.188 270.346 50.525z"></path>
<path
fill="#007934"
d="M227.64 25.263L32.842 25.263 32.842 0 219.821 0z"></path>
<g fill="#A4A9AD">
<path d="M114.872 227.984c-2.982 0-5.311 1.223-6.982 3.666-1.671 2.444-2.507 5.814-2.507 10.109 0 8.929 3.396 13.393 10.188 13.393 2.052 0 4.041-.285 5.967-.856a59.8 59.8 0 005.808-2.063v10.601c-3.872 1.713-8.252 2.57-13.14 2.57-7.004 0-12.373-2.031-16.107-6.094-3.734-4.062-5.602-9.934-5.602-17.615 0-4.803.904-9.023 2.714-12.663 1.809-3.64 4.411-6.438 7.808-8.395 3.396-1.957 7.39-2.937 11.98-2.937 5.016 0 9.808 1.09 14.378 3.27l-3.841 9.871a42.982 42.982 0 00-5.141-2.031c-1.714-.55-3.554-.826-5.523-.826zM166.732 250.678c0 2.878-.729 5.433-2.191 7.665-1.459 2.232-3.565 3.967-6.315 5.205-2.751 1.237-5.977 1.856-9.681 1.856-3.089 0-5.681-.217-7.775-.65-2.095-.434-4.274-1.191-6.538-2.27v-11.172a37.254 37.254 0 007.458 2.872c2.582.689 4.951 1.032 7.109 1.032 1.862 0 3.227-.322 4.095-.969.867-.645 1.302-1.476 1.302-2.491 0-.635-.175-1.19-.524-1.666-.349-.477-.91-.958-1.682-1.444-.772-.486-2.83-1.48-6.173-2.983-3.026-1.375-5.296-2.708-6.809-3.999s-2.634-2.771-3.364-4.443-1.095-3.65-1.095-5.936c0-4.273 1.555-7.605 4.666-9.997 3.109-2.391 7.384-3.587 12.822-3.587 4.803 0 9.7 1.111 14.694 3.333l-3.841 9.681c-4.337-1.989-8.082-2.984-11.234-2.984-1.63 0-2.814.286-3.555.857s-1.111 1.28-1.111 2.127c0 .91.471 1.725 1.412 2.443.941.72 3.496 2.031 7.665 3.936 3.999 1.799 6.776 3.729 8.331 5.792 1.557 2.063 2.334 4.661 2.334 7.792zM199.964 218.368h14.027l-15.202 46.401H184.03l-15.139-46.401h14.092l6.316 23.519c1.312 5.227 2.031 8.865 2.158 10.918.148-1.481.443-3.333.889-5.555.443-2.222.835-3.967 1.174-5.236l6.444-23.646z"></path>
</g>
<path fill="#D1D3D3" d="M219.821 50.525L270.346 50.525 219.821 0z"></path>
<path fill="#007934" d="M134.957 80.344H168.231V95.762H134.957z"></path>
<path fill="#007934" d="M175.602 80.344H208.875V95.762H175.602z"></path>
<path fill="#007934" d="M134.957 102.661H168.231V118.08H134.957z"></path>
<path fill="#007934" d="M175.602 102.661H208.875V118.08H175.602z"></path>
<path fill="#007934" d="M134.957 124.979H168.231V140.397H134.957z"></path>
<path fill="#007934" d="M175.602 124.979H208.875V140.397H175.602z"></path>
<path
fill="#007934"
d="M94.312 124.979H127.58500000000001V140.397H94.312z"></path>
<path fill="#007934" d="M134.957 147.298H168.231V162.716H134.957z"></path>
<path fill="#007934" d="M175.602 147.298H208.875V162.716H175.602z"></path>
<path
fill="#007934"
d="M94.312 147.298H127.58500000000001V162.716H94.312z"></path>
<path
fill="#007934"
d="M127.088 116.162h-10.04l-6.262-10.041-6.196 10.041h-9.821l10.656-16.435L95.406 84.04h9.624l5.8 9.932 5.581-9.932h9.909l-10.173 16.369 10.941 15.753z"></path>
</svg>
)
})

View File

@ -0,0 +1,30 @@
import React from "react"
export const PDFIcon = React.forwardRef<
SVGSVGElement,
React.SVGProps<SVGSVGElement>
>((props, ref) => {
return (
<svg
xmlns="http://www.w3.org/2000/svg"
version="1.1"
viewBox="0 0 303.188 303.188"
xmlSpace="preserve"
ref={ref}
{...props}>
<path
fill="#E8E8E8"
d="M219.821 0L32.842 0 32.842 303.188 270.346 303.188 270.346 50.525z"></path>
<path
fill="#FB3449"
d="M230.013 149.935c-3.643-6.493-16.231-8.533-22.006-9.451-4.552-.724-9.199-.94-13.803-.936-3.615-.024-7.177.154-10.693.354-1.296.087-2.579.199-3.861.31a93.594 93.594 0 01-3.813-4.202c-7.82-9.257-14.134-19.755-19.279-30.664 1.366-5.271 2.459-10.772 3.119-16.485 1.205-10.427 1.619-22.31-2.288-32.251-1.349-3.431-4.946-7.608-9.096-5.528-4.771 2.392-6.113 9.169-6.502 13.973-.313 3.883-.094 7.776.558 11.594.664 3.844 1.733 7.494 2.897 11.139a165.324 165.324 0 003.588 9.943 171.593 171.593 0 01-2.63 7.603c-2.152 5.643-4.479 11.004-6.717 16.161l-3.465 7.507c-3.576 7.855-7.458 15.566-11.815 23.02-10.163 3.585-19.283 7.741-26.857 12.625-4.063 2.625-7.652 5.476-10.641 8.603-2.822 2.952-5.69 6.783-5.941 11.024-.141 2.394.807 4.717 2.768 6.137 2.697 2.015 6.271 1.881 9.4 1.225 10.25-2.15 18.121-10.961 24.824-18.387 4.617-5.115 9.872-11.61 15.369-19.465l.037-.054c9.428-2.923 19.689-5.391 30.579-7.205 4.975-.825 10.082-1.5 15.291-1.974 3.663 3.431 7.621 6.555 11.939 9.164 3.363 2.069 6.94 3.816 10.684 5.119 3.786 1.237 7.595 2.247 11.528 2.886 1.986.284 4.017.413 6.092.335 4.631-.175 11.278-1.951 11.714-7.57.134-1.72-.237-3.228-.98-4.55zm-110.869 10.31a170.827 170.827 0 01-6.232 9.041c-4.827 6.568-10.34 14.369-18.322 17.286-1.516.554-3.512 1.126-5.616 1.002-1.874-.11-3.722-.937-3.637-3.065.042-1.114.587-2.535 1.423-3.931.915-1.531 2.048-2.935 3.275-4.226 2.629-2.762 5.953-5.439 9.777-7.918 5.865-3.805 12.867-7.23 20.672-10.286-.449.71-.897 1.416-1.34 2.097zm27.222-84.26a38.169 38.169 0 01-.323-10.503 24.858 24.858 0 011.038-4.952c.428-1.33 1.352-4.576 2.826-4.993 2.43-.688 3.177 4.529 3.452 6.005 1.566 8.396.186 17.733-1.693 25.969-.299 1.31-.632 2.599-.973 3.883a121.219 121.219 0 01-1.648-4.821c-1.1-3.525-2.106-7.091-2.679-10.588zm16.683 66.28a236.508 236.508 0 00-25.979 5.708c.983-.275 5.475-8.788 6.477-10.555 4.721-8.315 8.583-17.042 11.358-26.197 4.9 9.691 10.847 18.962 18.153 27.214.673.749 1.357 1.489 2.053 2.22-4.094.441-8.123.978-12.062 1.61zm61.744 11.694c-.334 1.805-4.189 2.837-5.988 3.121-5.316.836-10.94.167-16.028-1.542-3.491-1.172-6.858-2.768-10.057-4.688-3.18-1.921-6.155-4.181-8.936-6.673 3.429-.206 6.9-.341 10.388-.275 3.488.035 7.003.211 10.475.664 6.511.726 13.807 2.961 18.932 7.186 1.009.833 1.331 1.569 1.214 2.207z"></path>
<path
fill="#FB3449"
d="M227.64 25.263L32.842 25.263 32.842 0 219.821 0z"></path>
<g fill="#A4A9AD">
<path d="M126.841 241.152c0 5.361-1.58 9.501-4.742 12.421-3.162 2.921-7.652 4.381-13.472 4.381h-3.643v15.917H92.022v-47.979h16.606c6.06 0 10.611 1.324 13.652 3.971 3.041 2.647 4.561 6.41 4.561 11.289zm-21.856 6.235h2.363c1.947 0 3.495-.546 4.644-1.641 1.149-1.094 1.723-2.604 1.723-4.529 0-3.238-1.794-4.857-5.382-4.857h-3.348v11.027zM175.215 248.864c0 8.007-2.205 14.177-6.613 18.509s-10.606 6.498-18.591 6.498h-15.523v-47.979h16.606c7.701 0 13.646 1.969 17.836 5.907 4.189 3.938 6.285 9.627 6.285 17.065zm-13.455.46c0-4.398-.87-7.657-2.609-9.78-1.739-2.122-4.381-3.183-7.926-3.183h-3.773v26.877h2.888c3.939 0 6.826-1.143 8.664-3.43 1.837-2.285 2.756-5.78 2.756-10.484zM196.579 273.871h-12.766v-47.979h28.355v10.403h-15.589v9.156h14.374v10.403h-14.374v18.017z"></path>
</g>
<path fill="#D1D3D3" d="M219.821 50.525L270.346 50.525 219.821 0z"></path>
</svg>
)
})

View File

@ -0,0 +1,32 @@
import React from "react"
export const TXTIcon = React.forwardRef<
SVGSVGElement,
React.SVGProps<SVGSVGElement>
>((props, ref) => {
return (
<svg
xmlns="http://www.w3.org/2000/svg"
viewBox="-4 0 64 64"
ref={ref}
{...props}>
<path
fill="#F9CA06"
fillRule="evenodd"
d="M5.151-.036A5.074 5.074 0 00.077 5.038v53.841a5.073 5.073 0 005.074 5.074h45.774a5.074 5.074 0 005.074-5.074V20.274L37.097-.036H5.151z"
clipRule="evenodd"></path>
<g fillRule="evenodd" clipRule="evenodd">
<path
fill="#F7BC04"
d="M56.008 20.316v1H43.209s-6.312-1.26-6.129-6.708c0 0 .208 5.708 6.004 5.708h12.924z"></path>
<path
fill="#fff"
d="M37.106-.036v14.561c0 1.656 1.104 5.792 6.104 5.792h12.799L37.106-.036z"
opacity="0.5"></path>
</g>
<path
fill="#fff"
d="M18.763 43.045h-3.277v10.047a.734.734 0 01-.756.738.73.73 0 01-.738-.738V43.045h-3.259c-.36 0-.648-.288-.648-.684 0-.36.288-.648.648-.648h8.03c.36 0 .648.288.648.685a.645.645 0 01-.648.647zm11.7 10.803a.64.64 0 01-.541-.27l-3.727-4.97-3.745 4.97a.639.639 0 01-.54.27.71.71 0 01-.72-.72c0-.144.036-.306.144-.432l3.889-5.131-3.619-4.826a.721.721 0 01-.144-.414c0-.343.288-.721.72-.721.216 0 .432.108.576.288l3.439 4.627 3.439-4.646a.642.642 0 01.541-.27c.378 0 .738.306.738.721a.7.7 0 01-.126.414l-3.619 4.808 3.89 5.149c.09.126.126.27.126.415a.739.739 0 01-.721.738zm11.195-10.803h-3.277v10.047a.734.734 0 01-.756.738.73.73 0 01-.738-.738V43.045h-3.259c-.36 0-.648-.288-.648-.684 0-.36.288-.648.648-.648h8.03c.36 0 .648.288.648.685a.644.644 0 01-.648.647z"></path>
</svg>
)
})

View File

@ -4,7 +4,7 @@ import { useLocation, NavLink } from "react-router-dom"
import { Sidebar } from "../Option/Sidebar"
import { Drawer, Select, Tooltip } from "antd"
import { useQuery } from "@tanstack/react-query"
import { getAllModels } from "~/services/ollama"
import { fetchChatModels, getAllModels } from "~/services/ollama"
import { useMessageOption } from "~/hooks/useMessageOption"
import {
ChevronLeft,
@ -15,10 +15,11 @@ import {
SquarePen,
ZapIcon
} from "lucide-react"
import { getAllPrompts } from "~/libs/db"
import { getAllPrompts } from "@/db"
import { ShareBtn } from "~/components/Common/ShareBtn"
import { useTranslation } from "react-i18next"
import { OllamaIcon } from "../Icons/Ollama"
import { SelectedKnowledge } from "../Option/Knowledge/SelectedKnwledge"
export default function OptionLayout({
children
@ -45,7 +46,7 @@ export default function OptionLayout({
isFetching: isModelsFetching
} = useQuery({
queryKey: ["fetchModel"],
queryFn: () => getAllModels({ returnEmpty: true }),
queryFn: () => fetchChatModels({ returnEmpty: true }),
refetchInterval: 15000
})
@ -106,7 +107,10 @@ export default function OptionLayout({
<div>
<Select
value={selectedModel}
onChange={setSelectedModel}
onChange={(e) => {
setSelectedModel(e)
localStorage.setItem("selectedModel", e)
}}
size="large"
loading={isModelsLoading || isModelsFetching}
filterOption={(input, option) =>
@ -166,6 +170,7 @@ export default function OptionLayout({
}))}
/>
</div>
<SelectedKnowledge />
</div>
<div className="flex flex-1 justify-end px-4">
<div className="ml-4 flex items-center md:ml-6">

View File

@ -1,7 +1,15 @@
import { Book, BrainCircuit, Orbit, Share, BlocksIcon , InfoIcon} from "lucide-react"
import {
Book,
BrainCircuit,
Orbit,
Share,
BlocksIcon,
InfoIcon
} from "lucide-react"
import { useTranslation } from "react-i18next"
import { Link, useLocation } from "react-router-dom"
import { OllamaIcon } from "../Icons/Ollama"
import { Tag } from "antd"
function classNames(...classes: string[]) {
return classes.filter(Boolean).join(" ")
@ -9,7 +17,7 @@ function classNames(...classes: string[]) {
const LinkComponent = (item: {
href: string
name: string
name: string | JSX.Element
icon: any
current: string
}) => {
@ -40,12 +48,12 @@ const LinkComponent = (item: {
export const SettingsLayout = ({ children }: { children: React.ReactNode }) => {
const location = useLocation()
const { t } = useTranslation("settings")
const { t } = useTranslation(["settings", "common"])
return (
<>
<div className="mx-auto max-w-7xl lg:flex lg:gap-x-16 lg:px-8">
<aside className="flex lg:rounded-md bg-white lg:p-4 lg:mt-20 overflow-x-auto lg:border-0 border-b py-4 lg:block lg:w-64 lg:flex-none dark:bg-[#171717] dark:border-gray-600">
<aside className="flex lg:rounded-md bg-white lg:p-4 lg:mt-20 overflow-x-auto lg:border-0 border-b py-4 lg:block lg:w-72 lg:flex-none dark:bg-[#171717] dark:border-gray-600">
<nav className="flex-none px-4 sm:px-6 lg:px-0">
<ul
role="list"
@ -68,12 +76,17 @@ export const SettingsLayout = ({ children }: { children: React.ReactNode }) => {
current={location.pathname}
icon={BrainCircuit}
/>
{/* <LinkComponent
<LinkComponent
href="/settings/knowledge"
name={t("manageKnowledge.title")}
name={
<div className="inline-flex items-center gap-2">
{t("manageKnowledge.title")}
<Tag color="yellow">{t("common:beta")}</Tag>
</div>
}
icon={BlocksIcon}
current={location.pathname}
/> */}
/>
<LinkComponent
href="/settings/prompt"
name={t("managePrompts.title")}

View File

@ -0,0 +1,140 @@
import { Source, createKnowledge } from "@/db/knowledge"
import { defaultEmbeddingModelForRag } from "@/services/ollama"
import { convertToSource } from "@/utils/to-source"
import { useMutation } from "@tanstack/react-query"
import { Modal, Form, Input, Upload, message, UploadFile } from "antd"
import { InboxIcon } from "lucide-react"
import { useTranslation } from "react-i18next"
import PubSub from "pubsub-js"
import { KNOWLEDGE_QUEUE } from "@/queue"
type Props = {
open: boolean
setOpen: React.Dispatch<React.SetStateAction<boolean>>
}
export const AddKnowledge = ({ open, setOpen }: Props) => {
const { t } = useTranslation(["knowledge", "common"])
const [form] = Form.useForm()
const onUploadHandler = async (data: {
title: string
file: UploadFile[]
}) => {
const defaultEM = await defaultEmbeddingModelForRag()
if (!defaultEM) {
throw new Error(t("noEmbeddingModel"))
}
const source: Source[] = []
for (const file of data.file) {
const data = await convertToSource(file)
source.push(data)
}
const knowledge = await createKnowledge({
embedding_model: defaultEM,
source,
title: data.title
})
return knowledge.id
}
const { mutate: saveKnowledge, isPending: isSaving } = useMutation({
mutationFn: onUploadHandler,
onError: (error) => {
message.error(error.message)
},
onSuccess: async (id) => {
message.success(t("form.success"))
PubSub.publish(KNOWLEDGE_QUEUE, id)
form.resetFields()
setOpen(false)
}
})
return (
<Modal
title={t("addKnowledge")}
open={open}
footer={null}
onCancel={() => setOpen(false)}>
<Form onFinish={saveKnowledge} form={form} layout="vertical">
<Form.Item
rules={[
{
required: true,
message: t("form.title.required")
}
]}
name="title"
label={t("form.title.label")}>
<Input size="large" placeholder={t("form.title.placeholder")} />
</Form.Item>
<Form.Item
name="file"
label={t("form.uploadFile.label")}
rules={[
{
required: true,
message: t("form.uploadFile.required")
}
]}
getValueFromEvent={(e) => {
if (Array.isArray(e)) {
return e
}
return e?.fileList
}}>
<Upload.Dragger
accept={".pdf, .csv, .txt, .md"}
multiple={true}
maxCount={10}
beforeUpload={(file) => {
const allowedTypes = [
"application/pdf",
// "application/vnd.openxmlformats-officedocument.wordprocessingml.document",
"text/csv",
"text/plain"
]
.map((type) => type.toLowerCase())
.join(", ")
if (!allowedTypes.includes(file.type.toLowerCase())) {
message.error(
t("form.uploadFile.uploadError", { allowedTypes })
)
return Upload.LIST_IGNORE
}
return false
}}>
<div className="p-3">
<p className="flex justify-center ant-upload-drag-icon">
<InboxIcon className="w-10 h-10 text-gray-400" />
</p>
<p className="ant-upload-text">
{t("form.uploadFile.uploadText")}
</p>
<p className="ant-upload-hint">
{t("form.uploadFile.uploadHint")}
</p>
</div>
</Upload.Dragger>
</Form.Item>
<Form.Item>
<button
type="submit"
disabled={isSaving}
className="inline-flex items-center justify-center w-full px-2 py-2 font-medium leading-4 text-center text-white bg-black border border-transparent rounded-md shadow-sm text-md hover:bg-gray-800 focus:outline-none focus:ring-2 focus:ring-indigo-500 focus:ring-offset-2 dark:bg-white dark:text-gray-800 dark:hover:bg-gray-100 dark:focus:ring-gray-500 dark:focus:ring-offset-gray-100 disabled:opacity-50">
{t("form.submit")}
</button>
</Form.Item>
</Form>
</Modal>
)
}

View File

@ -0,0 +1,18 @@
import { CSVIcon } from "@/components/Icons/CSVIcon"
import { PDFIcon } from "@/components/Icons/PDFIcon"
import { TXTIcon } from "@/components/Icons/TXTIcon"
type Props = {
type: string
className?: string
}
export const KnowledgeIcon = ({ type, className = "w-6 h-6" }: Props) => {
if (type === "pdf" || type === "application/pdf") {
return <PDFIcon className={className} />
} else if (type === "csv" || type === "text/csv") {
return <CSVIcon className={className} />
} else if (type === "txt" || type === "text/plain") {
return <TXTIcon className={className} />
}
}

View File

@ -0,0 +1,64 @@
import { getAllKnowledge } from "@/db/knowledge"
import { useMessageOption } from "@/hooks/useMessageOption"
import { useQuery } from "@tanstack/react-query"
import { Dropdown, Tooltip } from "antd"
import { Blocks } from "lucide-react"
import React from "react"
import { useTranslation } from "react-i18next"
export const KnowledgeSelect: React.FC = () => {
const { t } = useTranslation("playground")
const { setSelectedKnowledge, selectedKnowledge } = useMessageOption()
const { data } = useQuery({
queryKey: ["getAllKnowledge"],
queryFn: async () => {
const data = await getAllKnowledge("finished")
return data
},
refetchInterval: 1000
})
return (
<>
{data && data.length > 0 && (
<Dropdown
menu={{
items:
data?.map((d) => ({
key: d.id,
label: (
<div className="w-52 gap-2 text-lg truncate inline-flex line-clamp-3 items-center dark:border-gray-700">
<div>
<Blocks className="h-6 w-6 text-gray-400" />
</div>
{d.title}
</div>
),
onClick: () => {
const knowledge = data?.find((k) => k.id === d.id)
if (selectedKnowledge?.id === d.id) {
setSelectedKnowledge(null)
} else {
setSelectedKnowledge(knowledge)
}
}
})) || [],
style: {
maxHeight: 500,
overflowY: "scroll"
},
className: "no-scrollbar",
activeKey: selectedKnowledge?.id
}}
placement={"topLeft"}
trigger={["click"]}>
<Tooltip title={t("tooltip.knowledge")}>
<button type="button" className="dark:text-gray-300">
<Blocks className="h-6 w-6" />
</button>
</Tooltip>
</Dropdown>
)}
</>
)
}

View File

@ -0,0 +1,32 @@
import { Blocks, XIcon } from "lucide-react"
import { useMessageOption } from "@/hooks/useMessageOption"
export const SelectedKnowledge = () => {
const { selectedKnowledge: knowledge, setSelectedKnowledge } =
useMessageOption()
if (!knowledge) return <></>
return (
<div className="flex flex-row items-center gap-3">
<span className="text-lg font-thin text-zinc-300 dark:text-zinc-600">
{"/"}
</span>
<div className="border flex justify-between items-center rounded-full px-2 py-1 gap-2 bg-gray-100 dark:bg-slate-800 dark:border-slate-700">
<div className="inline-flex items-center gap-2">
<Blocks className="h-5 w-5 text-gray-400" />
<span className="text-xs font-semibold dark:text-gray-100">
{knowledge.title}
</span>
</div>
<div>
<button
onClick={() => setSelectedKnowledge(null)}
className="flex items-center justify-center bg-white dark:bg-[#262626] p-1 rounded-full hover:bg-gray-100 dark:hover:bg-gray-600 text-black dark:text-gray-100">
<XIcon className="h-3 w-3" />
</button>
</div>
</div>
</div>
)
}

View File

@ -0,0 +1,141 @@
import { useState } from "react"
import { useTranslation } from "react-i18next"
import { AddKnowledge } from "./AddKnowledge"
import { useMutation, useQuery, useQueryClient } from "@tanstack/react-query"
import { deleteKnowledge, getAllKnowledge } from "@/db/knowledge"
import { Skeleton, Table, Tag, Tooltip, message } from "antd"
import { Trash2 } from "lucide-react"
import { KnowledgeIcon } from "./KnowledgeIcon"
import { useMessageOption } from "@/hooks/useMessageOption"
export const KnowledgeSettings = () => {
const { t } = useTranslation(["knowledge", "common"])
const [open, setOpen] = useState(false)
const queryClient = useQueryClient()
const { selectedKnowledge, setSelectedKnowledge } = useMessageOption()
const { data, status } = useQuery({
queryKey: ["fetchAllKnowledge"],
queryFn: () => getAllKnowledge(),
refetchInterval: 1000
})
const { mutate: deleteKnowledgeMutation, isPending: isDeleting } =
useMutation({
mutationFn: deleteKnowledge,
onSuccess: () => {
queryClient.invalidateQueries({
queryKey: ["fetchAllKnowledge"]
})
message.success(t("deleteSuccess"))
},
onError: (error) => {
message.error(error.message)
}
})
const statusColor = {
finished: "green",
processing: "blue",
pending: "gray"
}
return (
<div>
<div>
{/* Add new model button */}
<div className="mb-6">
<div className="-ml-4 -mt-2 flex flex-wrap items-center justify-end sm:flex-nowrap">
<div className="ml-4 mt-2 flex-shrink-0">
<button
onClick={() => setOpen(true)}
className="inline-flex items-center rounded-md border border-transparent bg-black px-2 py-2 text-md font-medium leading-4 text-white shadow-sm hover:bg-gray-800 focus:outline-none focus:ring-2 focus:ring-indigo-500 focus:ring-offset-2 dark:bg-white dark:text-gray-800 dark:hover:bg-gray-100 dark:focus:ring-gray-500 dark:focus:ring-offset-gray-100 disabled:opacity-50">
{t("addBtn")}
</button>
</div>
</div>
</div>
{status === "pending" && <Skeleton paragraph={{ rows: 8 }} />}
{status === "success" && (
<Table
columns={[
{
title: t("columns.title"),
dataIndex: "title",
key: "title"
},
{
title: t("columns.status"),
dataIndex: "status",
key: "status",
render: (text: string) => (
<Tag color={statusColor[text]}>{t(`status.${text}`)}</Tag>
)
},
{
title: t("columns.embeddings"),
dataIndex: "embedding_model",
key: "embedding_model"
},
{
title: t("columns.createdAt"),
dataIndex: "createdAt",
key: "createdAt",
render: (text: number) => new Date(text).toLocaleString()
},
{
title: t("columns.action"),
key: "action",
render: (text: string, record: any) => (
<div className="flex gap-4">
<Tooltip title={t("tooltip.delete")}>
<button
disabled={isDeleting}
onClick={() => {
if (window.confirm(t("confirm.delete"))) {
deleteKnowledgeMutation(record.id)
if (selectedKnowledge?.id === record?.id) {
setSelectedKnowledge(null)
}
}
}}
className="text-red-500 dark:text-red-400">
<Trash2 className="w-5 h-5" />
</button>
</Tooltip>
</div>
)
}
]}
expandable={{
expandedRowRender: (record) => (
<Table
pagination={false}
columns={[
{
title: t("expandedColumns.name"),
key: "filename",
dataIndex: "filename"
}
]}
dataSource={record.source}
locale={{
emptyText: t("common:noData")
}}
/>
),
defaultExpandAllRows: false
}}
bordered
dataSource={data}
rowKey={(record) => `${record.name}-${record.id}`}
/>
)}
</div>
<AddKnowledge open={open} setOpen={setOpen} />
</div>
)
}

View File

@ -8,6 +8,7 @@ import { useState } from "react"
import { useForm } from "@mantine/form"
import { Download, RotateCcw, Trash2 } from "lucide-react"
import { useTranslation } from "react-i18next"
import { useStorage } from "@plasmohq/storage/hook"
dayjs.extend(relativeTime)
@ -15,6 +16,7 @@ export const ModelsBody = () => {
const queryClient = useQueryClient()
const [open, setOpen] = useState(false)
const { t } = useTranslation(["settings", "common"])
const [selectedModel, setSelectedModel] = useStorage("selectedModel")
const form = useForm({
initialValues: {
@ -131,6 +133,12 @@ export const ModelsBody = () => {
window.confirm(t("manageModels.confirm.delete"))
) {
deleteOllamaModel(record.model)
if (
selectedModel &&
selectedModel === record.model
) {
setSelectedModel(null)
}
}
}}
className="text-red-500 dark:text-red-400">
@ -193,8 +201,7 @@ export const ModelsBody = () => {
}}
/>
),
defaultExpandAllRows: false,
defaultExpandAllRows: false
}}
bordered
dataSource={data}

View File

@ -1,15 +1,21 @@
import React from "react"
import { PlaygroundForm } from "./PlaygroundForm"
import { PlaygroundChat } from "./PlaygroundChat"
import { useMessageOption } from "@/hooks/useMessageOption"
export const Playground = () => {
const drop = React.useRef<HTMLDivElement>(null)
const [dropedFile, setDropedFile] = React.useState<File | undefined>()
const { selectedKnowledge } = useMessageOption()
const [dropState, setDropState] = React.useState<
"idle" | "dragging" | "error"
>("idle")
React.useEffect(() => {
if (selectedKnowledge) {
return
}
if (!drop.current) {
return
}
@ -64,7 +70,7 @@ export const Playground = () => {
drop.current.removeEventListener("dragleave", handleDragLeave)
}
}
}, [])
}, [selectedKnowledge])
return (
<div
ref={drop}

View File

@ -2,6 +2,7 @@ import React from "react"
import { useMessageOption } from "~/hooks/useMessageOption"
import { PlaygroundEmpty } from "./PlaygroundEmpty"
import { PlaygroundMessage } from "~/components/Common/Playground/Message"
import { MessageSourcePopup } from "@/components/Common/Playground/MessageSourcePopup"
export const PlaygroundChat = () => {
const {
@ -9,44 +10,60 @@ export const PlaygroundChat = () => {
streaming,
regenerateLastMessage,
isSearchingInternet,
editMessage
editMessage,
ttsEnabled
} = useMessageOption()
const divRef = React.useRef<HTMLDivElement>(null)
const [isSourceOpen, setIsSourceOpen] = React.useState(false)
const [source, setSource] = React.useState<any>(null)
React.useEffect(() => {
if (divRef.current) {
divRef.current.scrollIntoView({ behavior: "smooth" })
}
})
return (
<div className="grow flex flex-col md:translate-x-0 transition-transform duration-300 ease-in-out">
{messages.length === 0 && (
<div className="mt-32">
<PlaygroundEmpty />
</div>
)}
{/* {messages.length > 0 && <div className="w-full h-16 flex-shrink-0"></div>} */}
{messages.map((message, index) => (
<PlaygroundMessage
key={index}
isBot={message.isBot}
message={message.message}
name={message.name}
images={message.images || []}
currentMessageIndex={index}
totalMessages={messages.length}
onRengerate={regenerateLastMessage}
isProcessing={streaming}
isSearchingInternet={isSearchingInternet}
sources={message.sources}
onEditFormSubmit={(value) => {
editMessage(index, value, !message.isBot)
}}
/>
))}
{messages.length > 0 && (
<div className="w-full h-32 md:h-48 flex-shrink-0"></div>
)}
<div ref={divRef} />
</div>
<>
{" "}
<div className="grow flex flex-col md:translate-x-0 transition-transform duration-300 ease-in-out">
{messages.length === 0 && (
<div className="mt-32">
<PlaygroundEmpty />
</div>
)}
{/* {messages.length > 0 && <div className="w-full h-16 flex-shrink-0"></div>} */}
{messages.map((message, index) => (
<PlaygroundMessage
key={index}
isBot={message.isBot}
message={message.message}
name={message.name}
images={message.images || []}
currentMessageIndex={index}
totalMessages={messages.length}
onRengerate={regenerateLastMessage}
isProcessing={streaming}
isSearchingInternet={isSearchingInternet}
sources={message.sources}
onEditFormSubmit={(value) => {
editMessage(index, value, !message.isBot)
}}
onSourceClick={(data) => {
setSource(data)
setIsSourceOpen(true)
}}
isTTSEnabled={ttsEnabled}
/>
))}
{messages.length > 0 && (
<div className="w-full h-32 md:h-48 flex-shrink-0"></div>
)}
<div ref={divRef} />
</div>
<MessageSourcePopup
open={isSourceOpen}
setOpen={setIsSourceOpen}
source={source}
/>
</>
)
}

View File

@ -4,7 +4,7 @@ import React from "react"
import useDynamicTextareaSize from "~/hooks/useDynamicTextareaSize"
import { toBase64 } from "~/libs/to-base64"
import { useMessageOption } from "~/hooks/useMessageOption"
import { Checkbox, Dropdown, Switch, Tooltip } from "antd"
import { Checkbox, Dropdown, Select, Switch, Tooltip } from "antd"
import { Image } from "antd"
import { useSpeechRecognition } from "~/hooks/useSpeechRecognition"
import { useWebUI } from "~/store/webui"
@ -12,6 +12,8 @@ import { defaultEmbeddingModelForRag } from "~/services/ollama"
import { ImageIcon, MicIcon, StopCircleIcon, X } from "lucide-react"
import { getVariable } from "~/utils/select-varaible"
import { useTranslation } from "react-i18next"
import { KnowledgeSelect } from "../Knowledge/KnowledgeSelect"
import { SelectedKnowledge } from "../Knowledge/SelectedKnwledge"
type Props = {
dropedFile: File | undefined
@ -32,7 +34,8 @@ export const PlaygroundForm = ({ dropedFile }: Props) => {
setWebSearch,
selectedQuickPrompt,
textareaRef,
setSelectedQuickPrompt
setSelectedQuickPrompt,
selectedKnowledge
} = useMessageOption()
const textAreaFocus = () => {
@ -224,31 +227,34 @@ export const PlaygroundForm = ({ dropedFile }: Props) => {
/>
<div className="mt-4 flex justify-between items-center">
<div className="flex">
<Tooltip title={t("tooltip.searchInternet")}>
<div className="inline-flex items-center gap-2">
<svg
xmlns="http://www.w3.org/2000/svg"
fill="none"
viewBox="0 0 24 24"
strokeWidth={1.5}
stroke="currentColor"
className="w-5 h-5 dark:text-gray-300">
<path
strokeLinecap="round"
strokeLinejoin="round"
d="M12 21a9.004 9.004 0 0 0 8.716-6.747M12 21a9.004 9.004 0 0 1-8.716-6.747M12 21c2.485 0 4.5-4.03 4.5-9S14.485 3 12 3m0 18c-2.485 0-4.5-4.03-4.5-9S9.515 3 12 3m0 0a8.997 8.997 0 0 1 7.843 4.582M12 3a8.997 8.997 0 0 0-7.843 4.582m15.686 0A11.953 11.953 0 0 1 12 10.5c-2.998 0-5.74-1.1-7.843-2.918m15.686 0A8.959 8.959 0 0 1 21 12c0 .778-.099 1.533-.284 2.253m0 0A17.919 17.919 0 0 1 12 16.5c-3.162 0-6.133-.815-8.716-2.247m0 0A9.015 9.015 0 0 1 3 12c0-1.605.42-3.113 1.157-4.418"
{!selectedKnowledge && (
<Tooltip title={t("tooltip.searchInternet")}>
<div className="inline-flex items-center gap-2">
<svg
xmlns="http://www.w3.org/2000/svg"
fill="none"
viewBox="0 0 24 24"
strokeWidth={1.5}
stroke="currentColor"
className="w-5 h-5 dark:text-gray-300">
<path
strokeLinecap="round"
strokeLinejoin="round"
d="M12 21a9.004 9.004 0 0 0 8.716-6.747M12 21a9.004 9.004 0 0 1-8.716-6.747M12 21c2.485 0 4.5-4.03 4.5-9S14.485 3 12 3m0 18c-2.485 0-4.5-4.03-4.5-9S9.515 3 12 3m0 0a8.997 8.997 0 0 1 7.843 4.582M12 3a8.997 8.997 0 0 0-7.843 4.582m15.686 0A11.953 11.953 0 0 1 12 10.5c-2.998 0-5.74-1.1-7.843-2.918m15.686 0A8.959 8.959 0 0 1 21 12c0 .778-.099 1.533-.284 2.253m0 0A17.919 17.919 0 0 1 12 16.5c-3.162 0-6.133-.815-8.716-2.247m0 0A9.015 9.015 0 0 1 3 12c0-1.605.42-3.113 1.157-4.418"
/>
</svg>
<Switch
value={webSearch}
onChange={(e) => setWebSearch(e)}
checkedChildren={t("form.webSearch.on")}
unCheckedChildren={t("form.webSearch.off")}
/>
</svg>
<Switch
value={webSearch}
onChange={(e) => setWebSearch(e)}
checkedChildren={t("form.webSearch.on")}
unCheckedChildren={t("form.webSearch.off")}
/>
</div>
</Tooltip>
</div>
</Tooltip>
)}
</div>
<div className="flex !justify-end gap-3">
<KnowledgeSelect />
<Tooltip title={t("tooltip.speechToText")}>
<button
type="button"
@ -273,18 +279,21 @@ export const PlaygroundForm = ({ dropedFile }: Props) => {
)}
</button>
</Tooltip>
<Tooltip title={t("tooltip.uploadImage")}>
<button
type="button"
onClick={() => {
inputRef.current?.click()
}}
className={`flex items-center justify-center dark:text-gray-300 ${
chatMode === "rag" ? "hidden" : "block"
}`}>
<ImageIcon className="h-5 w-5" />
</button>
</Tooltip>
{!selectedKnowledge && (
<Tooltip title={t("tooltip.uploadImage")}>
<button
type="button"
onClick={() => {
inputRef.current?.click()
}}
className={`flex items-center justify-center dark:text-gray-300 ${
chatMode === "rag" ? "hidden" : "block"
}`}>
<ImageIcon className="h-5 w-5" />
</button>
</Tooltip>
)}
{!isSending ? (
<Dropdown.Button
htmlType="submit"

View File

@ -8,7 +8,6 @@ import {
Input,
Form,
Switch,
Empty
} from "antd"
import { Trash2, Pen, Computer, Zap } from "lucide-react"
import { useState } from "react"
@ -18,7 +17,7 @@ import {
getAllPrompts,
savePrompt,
updatePrompt
} from "~/libs/db"
} from "@/db"
export const PromptBody = () => {
const queryClient = useQueryClient()

View File

@ -3,6 +3,7 @@ import { useTranslation } from "react-i18next"
import { useQuery } from "@tanstack/react-query"
import { Skeleton } from "antd"
import { cleanUrl } from "@/libs/clean-url"
import { Descriptions } from "antd"
export const AboutApp = () => {
const { t } = useTranslation("settings")
@ -41,37 +42,23 @@ export const AboutApp = () => {
{status === "pending" && <Skeleton paragraph={{ rows: 4 }} active />}
{status === "success" && (
<div className="flex flex-col space-y-4">
<div>
<div>
<h2 className="text-base font-semibold leading-7 text-gray-900 dark:text-white">
{t("about.heading")}
</h2>
<div className="border border-b border-gray-200 dark:border-gray-600 mt-3 mb-6"></div>
</div>
</div>
<div>
<div className="flex flex-col space-y-6">
<div className="flex gap-6">
<span className="text-sm text-gray-500 dark:text-gray-400">
{t("about.chromeVersion")}
</span>
<span className="text-sm text-gray-900 dark:text-white">
{data.chromeVersion}
</span>
</div>
<div className="flex gap-6">
<span className="text-sm text-gray-500 dark:text-gray-400">
{t("about.ollamaVersion")}
</span>
<span className="text-sm text-gray-900 dark:text-white">
{data.ollama}
</span>
</div>
</div>
</div>
<Descriptions
title={t("about.heading")}
column={1}
size="middle"
items={[
{
key: 1,
label: t("about.chromeVersion"),
children: data.chromeVersion
},
{
key: 1,
label: t("about.ollamaVersion"),
children: data.ollama
}
]}
/>
<div>
<p className="text-sm text-gray-500 dark:text-gray-400 mb-4">
{t("about.support")}

View File

@ -1,13 +1,14 @@
import { useQueryClient } from "@tanstack/react-query"
import { useDarkMode } from "~/hooks/useDarkmode"
import { useMessageOption } from "~/hooks/useMessageOption"
import { PageAssitDatabase } from "~/libs/db"
import { PageAssitDatabase } from "@/db"
import { Select } from "antd"
import { SUPPORTED_LANGUAGES } from "~/utils/supporetd-languages"
import { MoonIcon, SunIcon } from "lucide-react"
import { SearchModeSettings } from "./search-mode"
import { useTranslation } from "react-i18next"
import { useI18n } from "@/hooks/useI18n"
import { TTSModeSettings } from "./tts-mode"
export const SettingOther = () => {
const { clearChat, speechToTextLanguage, setSpeechToTextLanguage } =
@ -89,6 +90,7 @@ export const SettingOther = () => {
</button>
</div>
<SearchModeSettings />
<TTSModeSettings />
<div>
<div className="mb-5">
<h2 className="text-base font-semibold leading-7 text-gray-900 dark:text-white">

View File

@ -1,22 +1,20 @@
import { useQuery, useQueryClient } from "@tanstack/react-query"
import { Skeleton, Radio, Form, Alert } from "antd"
import { Skeleton, Radio, Form, Input } from "antd"
import React from "react"
import { useTranslation } from "react-i18next"
import { SaveButton } from "~/components/Common/SaveButton"
import {
getWebSearchPrompt,
setSystemPromptForNonRagOption,
systemPromptForNonRagOption,
geWebSearchFollowUpPrompt,
setWebPrompts
setWebPrompts,
promptForRag,
setPromptForRag
} from "~/services/ollama"
export const SettingPrompt = () => {
const { t } = useTranslation("settings")
const [selectedValue, setSelectedValue] = React.useState<"normal" | "web">(
"web"
)
const [selectedValue, setSelectedValue] = React.useState<"web" | "rag">("rag")
const queryClient = useQueryClient()
@ -25,7 +23,7 @@ export const SettingPrompt = () => {
queryFn: async () => {
const [prompt, webSearchPrompt, webSearchFollowUpPrompt] =
await Promise.all([
systemPromptForNonRagOption(),
promptForRag(),
getWebSearchPrompt(),
geWebSearchFollowUpPrompt()
])
@ -48,46 +46,60 @@ export const SettingPrompt = () => {
<Radio.Group
defaultValue={selectedValue}
onChange={(e) => setSelectedValue(e.target.value)}>
<Radio.Button value="normal">
{t("ollamaSettings.settings.prompt.option1")}
</Radio.Button>
<Radio.Button value="rag">RAG</Radio.Button>
<Radio.Button value="web">
{t("ollamaSettings.settings.prompt.option2")}
</Radio.Button>
</Radio.Group>
</div>
{selectedValue === "normal" && (
{selectedValue === "rag" && (
<Form
layout="vertical"
onFinish={(values) => {
setSystemPromptForNonRagOption(values?.prompt || "")
// setSystemPromptForNonRagOption(values?.prompt || "")
setPromptForRag(
values?.systemPrompt || "",
values?.questionPrompt || ""
)
queryClient.invalidateQueries({
queryKey: ["fetchOllaPrompt"]
})
}}
initialValues={{
prompt: data.prompt
systemPrompt: data.prompt.ragPrompt,
questionPrompt: data.prompt.ragQuestionPrompt
}}>
<Form.Item>
<Alert
message={t("ollamaSettings.settings.prompt.alert")}
type="warning"
showIcon
closable
<Form.Item
label={t("managePrompts.systemPrompt")}
name="systemPrompt"
rules={[
{
required: true,
message: "Enter a prompt."
}
]}>
<Input.TextArea
value={data.webSearchPrompt}
rows={5}
placeholder="Enter a prompt."
/>
</Form.Item>
<Form.Item
label={t("ollamaSettings.settings.prompt.systemPrompt")}
name="prompt">
<textarea
value={data.prompt}
label={t("managePrompts.questionPrompt")}
name="questionPrompt"
rules={[
{
required: true,
message: "Enter a follow up prompt."
}
]}>
<Input.TextArea
value={data.webSearchFollowUpPrompt}
rows={5}
id="ollamaPrompt"
placeholder={t(
"ollamaSettings.settings.prompt.systemPromptPlaceholder"
"ollamaSettings.settings.prompt.webSearchFollowUpPromptPlaceholder"
)}
className="w-full p-2 border border-gray-300 rounded-md dark:bg-[#262626] dark:text-gray-100"
/>
</Form.Item>
<Form.Item>
@ -126,32 +138,36 @@ export const SettingPrompt = () => {
)
}
]}>
<textarea
<Input.TextArea
value={data.webSearchPrompt}
rows={5}
id="ollamaWebSearchPrompt"
placeholder={t(
"ollamaSettings.settings.prompt.webSearchPromptPlaceholder"
)}
className="w-full p-2 border border-gray-300 rounded-md dark:bg-[#262626] dark:text-gray-100"
/>
</Form.Item>
<Form.Item
label={t("ollamaSettings.settings.prompt.webSearchFollowUpPrompt")}
label={t(
"ollamaSettings.settings.prompt.webSearchFollowUpPrompt"
)}
name="webSearchFollowUpPrompt"
help={t("ollamaSettings.settings.prompt.webSearchFollowUpPromptHelp")}
help={t(
"ollamaSettings.settings.prompt.webSearchFollowUpPromptHelp"
)}
rules={[
{
required: true,
message: t("ollamaSettings.settings.prompt.webSearchFollowUpPromptError")
message: t(
"ollamaSettings.settings.prompt.webSearchFollowUpPromptError"
)
}
]}>
<textarea
<Input.TextArea
value={data.webSearchFollowUpPrompt}
rows={5}
id="ollamaWebSearchFollowUpPrompt"
placeholder={t("ollamaSettings.settings.prompt.webSearchFollowUpPromptPlaceholder")}
className="w-full p-2 border border-gray-300 rounded-md dark:bg-[#262626] dark:text-gray-100"
placeholder={t(
"ollamaSettings.settings.prompt.webSearchFollowUpPromptPlaceholder"
)}
/>
</Form.Item>
<Form.Item>

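Note: promptForRag and setPromptForRag come from ~/services/ollama, which this diff does not include. The hook code later in this commit substitutes {context}, {question} and {chat_history} into the stored templates, so the two values edited in this form look roughly like the pair below (wording is an example, not the shipped default).

// Illustrative templates only; the real defaults live in ~/services/ollama.
const ragPrompt = `You are a helpful assistant. Use the following context to answer.

{context}

Question: {question}`

const ragQuestionPrompt = `Given the conversation below, rephrase the follow-up question so it can stand on its own.

Chat history:
{chat_history}

Follow-up question: {question}`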
View File

@ -44,43 +44,50 @@ export const SearchModeSettings = () => {
await setSearchSettings(values)
})}
className="space-y-4">
<div className="flex flex-row justify-between">
<div className="flex sm:flex-row flex-col space-y-4 sm:space-y-0 sm:justify-between">
<span className="text-gray-500 dark:text-neutral-50 ">
{t("generalSettings.webSearch.provider.label")}
</span>
<Select
placeholder={t("generalSettings.webSearch.provider.placeholder")}
showSearch
style={{ width: "200px" }}
options={SUPPORTED_SERACH_PROVIDERS}
filterOption={(input, option) =>
option!.label.toLowerCase().indexOf(input.toLowerCase()) >= 0 ||
option!.value.toLowerCase().indexOf(input.toLowerCase()) >= 0
}
{...form.getInputProps("searchProvider")}
/>
<div>
<Select
placeholder={t("generalSettings.webSearch.provider.placeholder")}
showSearch
className="w-full mt-4 sm:mt-0 sm:w-[200px]"
options={SUPPORTED_SERACH_PROVIDERS}
filterOption={(input, option) =>
option!.label.toLowerCase().indexOf(input.toLowerCase()) >= 0 ||
option!.value.toLowerCase().indexOf(input.toLowerCase()) >= 0
}
{...form.getInputProps("searchProvider")}
/>
</div>
</div>
<div className="flex flex-row justify-between">
<div className="flex sm:flex-row flex-col space-y-4 sm:space-y-0 sm:justify-between">
<span className="text-gray-500 dark:text-neutral-50 ">
{t("generalSettings.webSearch.searchMode.label")}
</span>
<Switch
{...form.getInputProps("isSimpleInternetSearch", {
type: "checkbox"
})}
/>
<div>
<Switch
className="mt-4 sm:mt-0"
{...form.getInputProps("isSimpleInternetSearch", {
type: "checkbox"
})}
/>
</div>
</div>
<div className="flex flex-row justify-between">
<div className="flex sm:flex-row flex-col space-y-4 sm:space-y-0 sm:justify-between">
<span className="text-gray-500 dark:text-neutral-50 ">
{t("generalSettings.webSearch.totalSearchResults.label")}
</span>
<InputNumber
placeholder={t(
"generalSettings.webSearch.totalSearchResults.placeholder"
)}
{...form.getInputProps("totalSearchResults")}
style={{ width: "200px" }}
/>
<div>
<InputNumber
placeholder={t(
"generalSettings.webSearch.totalSearchResults.placeholder"
)}
{...form.getInputProps("totalSearchResults")}
className="!w-full mt-4 sm:mt-0 sm:w-[200px]"
/>
</div>
</div>
<div className="flex justify-end">

View File

@ -0,0 +1,116 @@
import { SaveButton } from "@/components/Common/SaveButton"
import { getTTSSettings, setTTSSettings } from "@/services/tts"
import { useWebUI } from "@/store/webui"
import { useForm } from "@mantine/form"
import { useQuery } from "@tanstack/react-query"
import { Select, Skeleton, Switch } from "antd"
import { useTranslation } from "react-i18next"
export const TTSModeSettings = ({ hideBorder }: { hideBorder?: boolean }) => {
const { t } = useTranslation("settings")
const { setTTSEnabled } = useWebUI()
const form = useForm({
initialValues: {
ttsEnabled: false,
ttsProvider: "",
voice: "",
ssmlEnabled: false
}
})
const { status, data } = useQuery({
queryKey: ["fetchTTSSettings"],
queryFn: async () => {
const data = await getTTSSettings()
form.setValues(data)
return data
}
})
if (status === "pending" || status === "error") {
return <Skeleton active />
}
return (
<div>
<div className="mb-5">
<h2 className="text-base font-semibold leading-7 text-gray-900 dark:text-white">
{t("generalSettings.tts.heading")}
</h2>
{!hideBorder && (
<div className="border border-b border-gray-200 dark:border-gray-600 mt-3"></div>
)}
</div>
<form
onSubmit={form.onSubmit(async (values) => {
await setTTSSettings(values)
setTTSEnabled(values.ttsEnabled)
})}
className="space-y-4">
<div className="flex sm:flex-row flex-col space-y-4 sm:space-y-0 sm:justify-between">
<span className="text-gray-500 dark:text-neutral-50 ">
{t("generalSettings.tts.ttsEnabled.label")}
</span>
<div>
<Switch
className="mt-4 sm:mt-0"
{...form.getInputProps("ttsEnabled", {
type: "checkbox"
})}
/>
</div>
</div>
<div className="flex sm:flex-row flex-col space-y-4 sm:space-y-0 sm:justify-between">
<span className="text-gray-500 dark:text-neutral-50 ">
{t("generalSettings.tts.ttsProvider.label")}
</span>
<div>
<Select
placeholder={t("generalSettings.tts.ttsProvider.placeholder")}
className="w-full mt-4 sm:mt-0 sm:w-[200px]"
options={[{ label: "Browser TTS", value: "browser" }]}
{...form.getInputProps("ttsProvider")}
/>
</div>
</div>
<div className="flex sm:flex-row flex-col space-y-4 sm:space-y-0 sm:justify-between">
<span className="text-gray-500 dark:text-neutral-50 ">
{t("generalSettings.tts.ttsVoice.label")}
</span>
<div>
<Select
placeholder={t("generalSettings.tts.ttsVoice.placeholder")}
className="w-full mt-4 sm:mt-0 sm:w-[200px]"
              options={
                data?.browserTTSVoices?.map((voice) => ({
                  label: `${voice.voiceName} - ${voice.lang}`.trim(),
                  value: voice.voiceName
                })) || []
              }
{...form.getInputProps("voice")}
/>
</div>
</div>
<div className="flex sm:flex-row flex-col space-y-4 sm:space-y-0 sm:justify-between">
<span className="text-gray-500 dark:text-neutral-50 ">
{t("generalSettings.tts.ssmlEnabled.label")}
</span>
<div>
<Switch
className="mt-4 sm:mt-0"
{...form.getInputProps("ssmlEnabled", {
type: "checkbox"
})}
/>
</div>
</div>
<div className="flex justify-end">
<SaveButton btnType="submit" />
</div>
</form>
</div>
)
}
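getTTSSettings and setTTSSettings are imported from @/services/tts, which is not part of this diff. A rough sketch of a compatible service, assuming the values are kept in @plasmohq/storage and the voice list comes from chrome.tts.getVoices (both assumptions, not confirmed by this commit):

// Hypothetical sketch of @/services/tts; the real module is not shown in this diff.
import { Storage } from "@plasmohq/storage"

const storage = new Storage()

export type TTSSettings = {
  ttsEnabled: boolean
  ttsProvider: string
  voice: string
  ssmlEnabled: boolean
}

export const getTTSSettings = async () => {
  // Defaults are assumptions; the extension may ship different ones.
  const ttsEnabled = (await storage.get<boolean>("ttsEnabled")) ?? false
  const ttsProvider = (await storage.get<string>("ttsProvider")) ?? "browser"
  const voice = (await storage.get<string>("voice")) ?? ""
  const ssmlEnabled = (await storage.get<boolean>("ssmlEnabled")) ?? false
  // The voice <Select> above is populated from the browser's TTS voices.
  const browserTTSVoices = await new Promise<chrome.tts.TtsVoice[]>((resolve) =>
    chrome.tts.getVoices((voices) => resolve(voices))
  )
  return { ttsEnabled, ttsProvider, voice, ssmlEnabled, browserTTSVoices }
}

export const setTTSSettings = async (settings: TTSSettings) => {
  await Promise.all(
    Object.entries(settings).map(([key, value]) => storage.set(key, value))
  )
}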

View File

@ -3,7 +3,7 @@ import { Form, Input, Skeleton, Table, Tooltip, message } from "antd"
import { Trash2 } from "lucide-react"
import { Trans, useTranslation } from "react-i18next"
import { SaveButton } from "~/components/Common/SaveButton"
import { deleteWebshare, getAllWebshares, getUserId } from "~/libs/db"
import { deleteWebshare, getAllWebshares, getUserId } from "@/db"
import { getPageShareUrl, setPageShareUrl } from "~/services/ollama"
import { verifyPageShareURL } from "~/utils/verify-page-share"

View File

@ -5,7 +5,7 @@ import {
formatToMessage,
deleteByHistoryId,
updateHistory
} from "~/libs/db"
} from "@/db"
import { Empty, Skeleton } from "antd"
import { useMessageOption } from "~/hooks/useMessageOption"
import { PencilIcon, Trash2 } from "lucide-react"

View File

@ -2,10 +2,12 @@ import React from "react"
import { PlaygroundMessage } from "~/components/Common/Playground/Message"
import { useMessage } from "~/hooks/useMessage"
import { EmptySidePanel } from "../Chat/empty"
import { useWebUI } from "@/store/webui"
export const SidePanelBody = () => {
const { messages, streaming } = useMessage()
const divRef = React.useRef<HTMLDivElement>(null)
const { ttsEnabled } = useWebUI()
React.useEffect(() => {
if (divRef.current) {
divRef.current.scrollIntoView({ behavior: "smooth" })
@ -16,7 +18,7 @@ export const SidePanelBody = () => {
{messages.length === 0 && <EmptySidePanel />}
{messages.map((message, index) => (
<PlaygroundMessage
onEditFormSubmit={(value) => {}}
onEditFormSubmit={(value) => {}}
key={index}
isBot={message.isBot}
message={message.message}
@ -27,6 +29,7 @@ export const SidePanelBody = () => {
onRengerate={() => {}}
isProcessing={streaming}
hideEditAndRegenerate
isTTSEnabled={ttsEnabled}
/>
))}
<div className="w-full h-32 md:h-48 flex-shrink-0"></div>

View File

@ -8,7 +8,8 @@ import {
getAllModels,
getOllamaURL,
isOllamaRunning,
setOllamaURL as saveOllamaURL
setOllamaURL as saveOllamaURL,
fetchChatModels
} from "~/services/ollama"
export const EmptySidePanel = () => {
@ -24,7 +25,7 @@ export const EmptySidePanel = () => {
queryFn: async () => {
const ollamaURL = await getOllamaURL()
const isOk = await isOllamaRunning()
const models = await getAllModels({ returnEmpty: false })
const models = await fetchChatModels({ returnEmpty: false })
return {
isOk,
@ -96,6 +97,7 @@ export const EmptySidePanel = () => {
<Select
onChange={(e) => {
setSelectedModel(e)
localStorage.setItem("selectedModel", e)
}}
value={selectedModel}
size="large"
@ -134,11 +136,11 @@ export const EmptySidePanel = () => {
viewBox="0 0 20 20"
fill="currentColor"
stroke="currentColor"
stroke-width="1">
strokeWidth="1">
<path
fill-rule="evenodd"
fillRule="evenodd"
d="M16.707 5.293a1 1 0 010 1.414l-8 8a1 1 0 01-1.414 0l-4-4a1 1 0 011.414-1.414L8 12.586l7.293-7.293a1 1 0 011.414 0z"
clip-rule="evenodd"></path>
clipRule="evenodd"></path>
</svg>
</span>
</label>

View File

@ -8,7 +8,7 @@ import { Checkbox, Dropdown, Image, Tooltip } from "antd"
import { useSpeechRecognition } from "~/hooks/useSpeechRecognition"
import { useWebUI } from "~/store/webui"
import { defaultEmbeddingModelForRag } from "~/services/ollama"
import { ImageIcon, MicIcon, X } from "lucide-react"
import { ImageIcon, MicIcon, StopCircleIcon, X } from "lucide-react"
import { useTranslation } from "react-i18next"
type Props = {
@ -56,8 +56,13 @@ export const SidepanelForm = ({ dropedFile }: Props) => {
useDynamicTextareaSize(textareaRef, form.values.message, 120)
const { onSubmit, selectedModel, chatMode, speechToTextLanguage } =
useMessage()
const {
onSubmit,
selectedModel,
chatMode,
speechToTextLanguage,
stopStreamingRequest
} = useMessage()
const { isListening, start, stop, transcript } = useSpeechRecognition()
React.useEffect(() => {
@ -217,59 +222,70 @@ export const SidepanelForm = ({ dropedFile }: Props) => {
<ImageIcon className="h-5 w-5" />
</button>
</Tooltip>
<Dropdown.Button
htmlType="submit"
disabled={isSending}
className="!justify-end !w-auto"
icon={
<svg
xmlns="http://www.w3.org/2000/svg"
fill="none"
viewBox="0 0 24 24"
strokeWidth={1.5}
stroke="currentColor"
className="w-5 h-5">
<path
strokeLinecap="round"
strokeLinejoin="round"
d="m19.5 8.25-7.5 7.5-7.5-7.5"
/>
</svg>
}
menu={{
items: [
{
key: 1,
label: (
<Checkbox
checked={sendWhenEnter}
onChange={(e) =>
setSendWhenEnter(e.target.checked)
}>
{t("sendWhenEnter")}
</Checkbox>
)
}
]
}}>
<div className="inline-flex gap-2">
{sendWhenEnter ? (
{!isSending ? (
<Dropdown.Button
htmlType="submit"
disabled={isSending}
className="!justify-end !w-auto"
icon={
<svg
xmlns="http://www.w3.org/2000/svg"
fill="none"
viewBox="0 0 24 24"
strokeWidth={1.5}
stroke="currentColor"
strokeLinecap="round"
strokeLinejoin="round"
strokeWidth="2"
className="h-5 w-5"
viewBox="0 0 24 24">
<path d="M9 10L4 15 9 20"></path>
<path d="M20 4v7a4 4 0 01-4 4H4"></path>
className="w-5 h-5">
<path
strokeLinecap="round"
strokeLinejoin="round"
d="m19.5 8.25-7.5 7.5-7.5-7.5"
/>
</svg>
) : null}
{t("common:submit")}
</div>
</Dropdown.Button>
}
menu={{
items: [
{
key: 1,
label: (
<Checkbox
checked={sendWhenEnter}
onChange={(e) =>
setSendWhenEnter(e.target.checked)
}>
{t("sendWhenEnter")}
</Checkbox>
)
}
]
}}>
<div className="inline-flex gap-2">
{sendWhenEnter ? (
<svg
xmlns="http://www.w3.org/2000/svg"
fill="none"
stroke="currentColor"
strokeLinecap="round"
strokeLinejoin="round"
strokeWidth="2"
className="h-5 w-5"
viewBox="0 0 24 24">
<path d="M9 10L4 15 9 20"></path>
<path d="M20 4v7a4 4 0 01-4 4H4"></path>
</svg>
) : null}
{t("common:submit")}
</div>
</Dropdown.Button>
) : (
<Tooltip title={t("tooltip.stopStreaming")}>
<button
type="button"
onClick={stopStreamingRequest}
className="text-gray-800 dark:text-gray-300">
<StopCircleIcon className="h-6 w-6" />
</button>
</Tooltip>
)}
</div>
</div>
</form>

View File

@ -2,10 +2,10 @@ import logoImage from "~/assets/icon.png"
import { useMessage } from "~/hooks/useMessage"
import { Link } from "react-router-dom"
import { Tooltip } from "antd"
import { BoxesIcon, CogIcon, RefreshCcw } from "lucide-react"
import { BoxesIcon, CogIcon, EraserIcon, HistoryIcon } from "lucide-react"
import { useTranslation } from "react-i18next"
export const SidepanelHeader = () => {
const { clearChat, isEmbedding } = useMessage()
const { clearChat, isEmbedding, messages, streaming } = useMessage()
const { t } = useTranslation(["sidepanel", "common"])
return (
@ -25,13 +25,21 @@ export const SidepanelHeader = () => {
<BoxesIcon className="h-5 w-5 text-gray-500 dark:text-gray-400 animate-bounce animate-infinite" />
</Tooltip>
) : null}
<button
onClick={() => {
clearChat()
}}
className="flex items-center space-x-1 focus:outline-none focus-visible:ring-2 focus-visible:ring-pink-700">
<RefreshCcw className="h-5 w-5 text-gray-500 dark:text-gray-400" />
</button>
{messages.length > 0 && !streaming && (
<button
title={t("tooltip.clear")}
onClick={() => {
clearChat()
}}
className="flex items-center space-x-1 focus:outline-none focus-visible:ring-2 focus-visible:ring-pink-700">
<EraserIcon className="h-5 w-5 text-gray-500 dark:text-gray-400" />
</button>
)}
{/* <Tooltip title={t("tooltip.history")}>
<Link to="/history">
<HistoryIcon className="h-5 w-5 text-gray-500 dark:text-gray-400" />
</Link>
</Tooltip> */}
<Link to="/settings">
<CogIcon className="h-5 w-5 text-gray-500 dark:text-gray-400" />
</Link>

View File

@ -22,6 +22,7 @@ import { useMessage } from "~/hooks/useMessage"
import { MoonIcon, SunIcon } from "lucide-react"
import { useTranslation } from "react-i18next"
import { useI18n } from "@/hooks/useI18n"
import { TTSModeSettings } from "@/components/Option/Settings/tts-mode"
export const SettingsBody = () => {
const { t } = useTranslation("settings")
@ -285,6 +286,9 @@ export const SettingsBody = () => {
</div>
</Form>
</div>
<div className="border border-gray-300 dark:border-gray-700 rounded p-4 bg-white dark:bg-[#171717]">
<TTSModeSettings hideBorder />
</div>
<div className="border border-gray-300 dark:border-gray-700 rounded p-4 bg-white dark:bg-[#171717]">
<h2 className="text-md mb-4 font-semibold dark:text-white">
{t("generalSettings.settings.language.label")}{" "}

View File

@ -7,6 +7,9 @@ interface PageAssistContext {
controller: AbortController | null
setController: Dispatch<SetStateAction<AbortController>>
embeddingController: AbortController | null
setEmbeddingController: Dispatch<SetStateAction<AbortController>>
}
export const PageAssistContext = createContext<PageAssistContext>({
@ -14,7 +17,10 @@ export const PageAssistContext = createContext<PageAssistContext>({
setMessages: () => {},
controller: null,
setController: () => {}
setController: () => {},
embeddingController: null,
setEmbeddingController: () => {}
})
export const usePageAssist = () => {

192
src/db/knowledge.ts Normal file
View File

@ -0,0 +1,192 @@
import { deleteVector, deleteVectorByFileId } from "./vector"
export type Source = {
source_id: string
type: string
filename?: string
content: string
}
export type Knowledge = {
id: string
db_type: string
title: string
status: string
embedding_model: string
source: Source[]
knownledge: any
createdAt: number
}
export const generateID = () => {
return "pa_knowledge_xxxx-xxxx-xxx-xxxx".replace(/[x]/g, () => {
const r = Math.floor(Math.random() * 16)
return r.toString(16)
})
}
export class PageAssistKnowledge {
db: chrome.storage.StorageArea
constructor() {
this.db = chrome.storage.local
}
getAll = async (): Promise<Knowledge[]> => {
return new Promise((resolve, reject) => {
this.db.get(null, (result) => {
if (chrome.runtime.lastError) {
reject(chrome.runtime.lastError)
} else {
const data = Object.keys(result).map((key) => result[key])
resolve(data)
}
})
})
}
getById = async (id: string): Promise<Knowledge> => {
return new Promise((resolve, reject) => {
this.db.get(id, (result) => {
if (chrome.runtime.lastError) {
reject(chrome.runtime.lastError)
} else {
resolve(result[id])
}
})
})
}
create = async (knowledge: Knowledge): Promise<void> => {
return new Promise((resolve, reject) => {
this.db.set({ [knowledge.id]: knowledge }, () => {
if (chrome.runtime.lastError) {
reject(chrome.runtime.lastError)
} else {
resolve()
}
})
})
}
update = async (knowledge: Knowledge): Promise<void> => {
return new Promise((resolve, reject) => {
this.db.set({ [knowledge.id]: knowledge }, () => {
if (chrome.runtime.lastError) {
reject(chrome.runtime.lastError)
} else {
resolve()
}
})
})
}
delete = async (id: string): Promise<void> => {
return new Promise((resolve, reject) => {
this.db.remove(id, () => {
if (chrome.runtime.lastError) {
reject(chrome.runtime.lastError)
} else {
resolve()
}
})
})
}
deleteSource = async (id: string, source_id: string): Promise<void> => {
return new Promise((resolve, reject) => {
this.db.get(id, (result) => {
if (chrome.runtime.lastError) {
reject(chrome.runtime.lastError)
} else {
const data = result[id] as Knowledge
data.source = data.source.filter((s) => s.source_id !== source_id)
this.db.set({ [id]: data }, () => {
if (chrome.runtime.lastError) {
reject(chrome.runtime.lastError)
} else {
resolve()
}
})
}
})
})
}
}
export const createKnowledge = async ({
source,
title,
embedding_model
}: {
title: string
source: Source[]
embedding_model: string
}) => {
const db = new PageAssistKnowledge()
const id = generateID()
const knowledge: Knowledge = {
id,
title,
db_type: "knowledge",
source,
status: "pending",
knownledge: {},
embedding_model,
createdAt: Date.now()
}
await db.create(knowledge)
return knowledge
}
export const getKnowledgeById = async (id: string) => {
const db = new PageAssistKnowledge()
return db.getById(id)
}
export const updateKnowledgeStatus = async (id: string, status: string) => {
const db = new PageAssistKnowledge()
const knowledge = await db.getById(id)
await db.update({
...knowledge,
status
})
}
export const getAllKnowledge = async (status?: string) => {
const db = new PageAssistKnowledge()
const data = await db.getAll()
if (status) {
return data
.filter((d) => d.db_type === "knowledge")
.filter((d) => d.status === status)
.map((d) => {
d.source.forEach((s) => {
delete s.content
})
return d
})
.sort((a, b) => b.createdAt - a.createdAt)
}
return data
.filter((d) => d.db_type === "knowledge")
.map((d) => {
d.source.forEach((s) => {
delete s.content
})
return d
})
.sort((a, b) => b.createdAt - a.createdAt)
}
export const deleteKnowledge = async (id: string) => {
const db = new PageAssistKnowledge()
await db.delete(id)
await deleteVector(`vector:${id}`)
}
export const deleteSource = async (id: string, source_id: string) => {
const db = new PageAssistKnowledge()
await db.deleteSource(id, source_id)
await deleteVectorByFileId(`vector:${id}`, source_id)
}
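Taken together, these helpers form a small CRUD layer for knowledge bases on top of chrome.storage.local. A usage sketch (titles, model name, and file contents below are placeholders, not values from this commit):

import { createKnowledge, getAllKnowledge, deleteKnowledge } from "@/db/knowledge"

const demo = async () => {
  // Each source carries the raw file as `content`; a data URL stands in for a real upload here.
  const knowledge = await createKnowledge({
    title: "Team handbook",
    embedding_model: "nomic-embed-text:latest",
    source: [
      {
        source_id: crypto.randomUUID(),
        type: "pdf",
        filename: "handbook.pdf",
        content: "data:application/pdf;base64,..." // placeholder
      }
    ]
  })
  console.log("created", knowledge.id, knowledge.status) // status starts as "pending"

  // getAllKnowledge strips `content` from each source before returning, so listings stay small.
  const finished = await getAllKnowledge("finished")
  console.log(finished.map((k) => k.title))

  // Deleting a knowledge entry also removes its `vector:<id>` record.
  await deleteKnowledge(knowledge.id)
}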

129
src/db/vector.ts Normal file
View File

@ -0,0 +1,129 @@
interface PageAssistVector {
file_id: string
content: string
embedding: number[]
metadata: Record<string, any>
}
export type VectorData = {
id: string
vectors: PageAssistVector[]
}
export class PageAssistVectorDb {
db: chrome.storage.StorageArea
constructor() {
this.db = chrome.storage.local
}
insertVector = async (
id: string,
vector: PageAssistVector[]
): Promise<void> => {
return new Promise((resolve, reject) => {
this.db.get(id, (result) => {
if (chrome.runtime.lastError) {
reject(chrome.runtime.lastError)
} else {
const data = result[id] as VectorData
if (!data) {
this.db.set({ [id]: { id, vectors: vector } }, () => {
if (chrome.runtime.lastError) {
reject(chrome.runtime.lastError)
} else {
resolve()
}
})
} else {
this.db.set(
{
[id]: {
...data,
vectors: data.vectors.concat(vector)
}
},
() => {
if (chrome.runtime.lastError) {
reject(chrome.runtime.lastError)
} else {
resolve()
}
}
)
}
}
})
})
}
deleteVector = async (id: string): Promise<void> => {
return new Promise((resolve, reject) => {
this.db.remove(id, () => {
if (chrome.runtime.lastError) {
reject(chrome.runtime.lastError)
} else {
resolve()
}
})
})
}
deleteVectorByFileId = async (id: string, file_id: string): Promise<void> => {
return new Promise((resolve, reject) => {
this.db.get(id, (result) => {
if (chrome.runtime.lastError) {
reject(chrome.runtime.lastError)
} else {
const data = result[id] as VectorData
data.vectors = data.vectors.filter((v) => v.file_id !== file_id)
this.db.set({ [id]: data }, () => {
if (chrome.runtime.lastError) {
reject(chrome.runtime.lastError)
} else {
resolve()
}
})
}
})
})
}
getVector = async (id: string): Promise<VectorData> => {
return new Promise((resolve, reject) => {
this.db.get(id, (result) => {
if (chrome.runtime.lastError) {
reject(chrome.runtime.lastError)
} else {
resolve(result[id] as VectorData)
}
})
})
}
}
export const insertVector = async (
id: string,
vector: PageAssistVector[]
): Promise<void> => {
const db = new PageAssistVectorDb()
return db.insertVector(id, vector)
}
export const getVector = async (id: string): Promise<VectorData> => {
const db = new PageAssistVectorDb()
return db.getVector(id)
}
export const deleteVector = async (id: string): Promise<void> => {
const db = new PageAssistVectorDb()
return db.deleteVector(id)
}
export const deleteVectorByFileId = async (
id: string,
file_id: string
): Promise<void> => {
const db = new PageAssistVectorDb()
return db.deleteVectorByFileId(id, file_id)
}
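The store keys every record as `vector:<knowledge id>` and concatenates onto the existing `vectors` array on repeated inserts. A small round-trip sketch (the embedding values are toy numbers for illustration):

import { insertVector, getVector, deleteVectorByFileId } from "@/db/vector"

const demo = async (knowledgeId: string) => {
  const key = `vector:${knowledgeId}`

  // Repeated inserts under the same key append to the stored vectors array.
  await insertVector(key, [
    {
      file_id: "file-1",
      content: "First chunk of the document",
      embedding: [0.12, -0.03, 0.58], // toy values; real embeddings come from Ollama
      metadata: { source: "handbook.pdf", page: 1 }
    }
  ])

  const data = await getVector(key)
  console.log(data.vectors.length)

  // Removing a single file keeps the rest of the knowledge base intact.
  await deleteVectorByFileId(key, "file-1")
}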

View File

@ -1,5 +1,6 @@
import { getOllamaURL, isOllamaRunning } from "../services/ollama"
import { Storage } from "@plasmohq/storage"
const progressHuman = (completed: number, total: number) => {
return ((completed / total) * 100).toFixed(0) + "%"
}
@ -76,15 +77,20 @@ const streamDownload = async (url: string, model: string) => {
}
export default defineBackground({
main() {
const storage = new Storage()
chrome.runtime.onMessage.addListener(async (message) => {
if (message.type === "sidepanel") {
chrome.tabs.query({ active: true, currentWindow: true }, async (tabs) => {
const tab = tabs[0]
chrome.sidePanel.open({
// tabId: tab.id!,
windowId: tab.windowId!,
})
})
chrome.tabs.query(
{ active: true, currentWindow: true },
async (tabs) => {
const tab = tabs[0]
chrome.sidePanel.open({
// tabId: tab.id!,
windowId: tab.windowId!
})
}
)
} else if (message.type === "pull_model") {
const ollamaURL = await getOllamaURL()
@ -93,8 +99,7 @@ export default defineBackground({
if (!isRunning) {
chrome.action.setBadgeText({ text: "E" })
chrome.action.setBadgeBackgroundColor({ color: "#FF0000" })
chrome.action.setTitle({ title: "Ollama is not running"
})
chrome.action.setTitle({ title: "Ollama is not running" })
setTimeout(() => {
clearBadge()
}, 5000)
@ -111,12 +116,15 @@ export default defineBackground({
chrome.commands.onCommand.addListener((command) => {
switch (command) {
case "execute_side_panel":
chrome.tabs.query({ active: true, currentWindow: true }, async (tabs) => {
const tab = tabs[0]
chrome.sidePanel.open({
windowId: tab.windowId!
})
})
chrome.tabs.query(
{ active: true, currentWindow: true },
async (tabs) => {
const tab = tabs[0]
chrome.sidePanel.open({
windowId: tab.windowId!
})
}
)
break
default:
break
@ -131,14 +139,17 @@ export default defineBackground({
chrome.contextMenus.onClicked.addListener((info, tab) => {
if (info.menuItemId === "open-side-panel-pa") {
chrome.tabs.query({ active: true, currentWindow: true }, async (tabs) => {
const tab = tabs[0]
await chrome.sidePanel.open({
windowId: tab.windowId!,
})
})
chrome.tabs.query(
{ active: true, currentWindow: true },
async (tabs) => {
const tab = tabs[0]
chrome.sidePanel.open({
tabId: tab.id!
})
}
)
}
})
},
persistent: true
})
})

View File

@ -1,4 +1,4 @@
import { saveHistory, saveMessage } from "@/libs/db"
import { saveHistory, saveMessage } from "@/db"
import { ChatHistory } from "@/store/option"
export const saveMessageOnError = async ({

View File

@ -6,24 +6,39 @@ import {
promptForRag,
systemPromptForNonRag
} from "~/services/ollama"
import { useStoreMessage, type Message } from "~/store"
import { type Message } from "~/store/option"
import { useStoreMessage } from "~/store"
import { ChatOllama } from "@langchain/community/chat_models/ollama"
import { HumanMessage, SystemMessage } from "@langchain/core/messages"
import { getDataFromCurrentTab } from "~/libs/get-html"
import { OllamaEmbeddings } from "@langchain/community/embeddings/ollama"
import {
createChatWithWebsiteChain,
groupMessagesByConversation
} from "~/chain/chat-with-website"
import { MemoryVectorStore } from "langchain/vectorstores/memory"
import { memoryEmbedding } from "@/utils/memory-embeddings"
import { ChatHistory } from "@/store/option"
import { generateID } from "@/db"
import { saveMessageOnError, saveMessageOnSuccess } from "./chat-helper"
import { notification } from "antd"
import { useTranslation } from "react-i18next"
import { usePageAssist } from "@/context"
import { formatDocs } from "@/chain/chat-with-x"
import { OllamaEmbeddingsPageAssist } from "@/models/OllamaEmbedding"
import { useStorage } from "@plasmohq/storage/hook"
export const useMessage = () => {
const {
history,
controller: abortController,
setController: setAbortController,
messages,
setHistory,
setMessages,
embeddingController,
setEmbeddingController
} = usePageAssist()
const { t } = useTranslation("option")
const [selectedModel, setSelectedModel] = useStorage("selectedModel")
const {
history,
setHistory,
setStreaming,
streaming,
setIsFirstMessage,
@ -33,8 +48,6 @@ export const useMessage = () => {
setIsLoading,
isProcessing,
setIsProcessing,
selectedModel,
setSelectedModel,
chatMode,
setChatMode,
setIsEmbedding,
@ -45,8 +58,6 @@ export const useMessage = () => {
setCurrentURL
} = useStoreMessage()
const abortControllerRef = React.useRef<AbortController | null>(null)
const [keepTrackOfEmbedding, setKeepTrackOfEmbedding] = React.useState<{
[key: string]: MemoryVectorStore
}>({})
@ -62,57 +73,88 @@ export const useMessage = () => {
setStreaming(false)
}
const chatWithWebsiteMode = async (message: string) => {
try {
let isAlreadyExistEmbedding: MemoryVectorStore
let embedURL: string, embedHTML: string, embedType: string
let embedPDF: { content: string; page: number }[] = []
const chatWithWebsiteMode = async (
message: string,
image: string,
isRegenerate: boolean,
messages: Message[],
history: ChatHistory,
signal: AbortSignal,
embeddingSignal: AbortSignal
) => {
setStreaming(true)
const url = await getOllamaURL()
if (messages.length === 0) {
const { content: html, url, type, pdf } = await getDataFromCurrentTab()
embedHTML = html
embedURL = url
embedType = type
embedPDF = pdf
setCurrentURL(url)
isAlreadyExistEmbedding = keepTrackOfEmbedding[currentURL]
} else {
isAlreadyExistEmbedding = keepTrackOfEmbedding[currentURL]
embedURL = currentURL
}
let newMessage: Message[] = [
const ollama = new ChatOllama({
model: selectedModel!,
baseUrl: cleanUrl(url)
})
let newMessage: Message[] = []
let generateMessageId = generateID()
if (!isRegenerate) {
newMessage = [
...messages,
{
isBot: false,
name: "You",
message,
sources: []
sources: [],
images: []
},
{
isBot: true,
name: selectedModel,
message: "▋",
sources: []
sources: [],
id: generateMessageId
}
]
} else {
newMessage = [
...messages,
{
isBot: true,
name: selectedModel,
message: "▋",
sources: [],
id: generateMessageId
}
]
}
setMessages(newMessage)
let fullText = ""
let contentToSave = ""
let isAlreadyExistEmbedding: MemoryVectorStore
let embedURL: string, embedHTML: string, embedType: string
let embedPDF: { content: string; page: number }[] = []
const appendingIndex = newMessage.length - 1
setMessages(newMessage)
const ollamaUrl = await getOllamaURL()
const embeddingModle = await defaultEmbeddingModelForRag()
if (messages.length === 0) {
const { content: html, url, type, pdf } = await getDataFromCurrentTab()
embedHTML = html
embedURL = url
embedType = type
embedPDF = pdf
setCurrentURL(url)
isAlreadyExistEmbedding = keepTrackOfEmbedding[currentURL]
} else {
isAlreadyExistEmbedding = keepTrackOfEmbedding[currentURL]
embedURL = currentURL
}
const ollamaEmbedding = new OllamaEmbeddings({
model: embeddingModle || selectedModel,
baseUrl: cleanUrl(ollamaUrl)
})
setMessages(newMessage)
const ollamaUrl = await getOllamaURL()
const embeddingModle = await defaultEmbeddingModelForRag()
const ollamaChat = new ChatOllama({
model: selectedModel,
baseUrl: cleanUrl(ollamaUrl)
})
let vectorstore: MemoryVectorStore
const ollamaEmbedding = new OllamaEmbeddingsPageAssist({
model: embeddingModle || selectedModel,
baseUrl: cleanUrl(ollamaUrl),
signal: embeddingSignal
})
let vectorstore: MemoryVectorStore
try {
if (isAlreadyExistEmbedding) {
vectorstore = isAlreadyExistEmbedding
} else {
@ -127,109 +169,207 @@ export const useMessage = () => {
url: embedURL
})
}
let query = message
const { ragPrompt: systemPrompt, ragQuestionPrompt: questionPrompt } =
await promptForRag()
const sanitizedQuestion = message.trim().replaceAll("\n", " ")
const chain = createChatWithWebsiteChain({
llm: ollamaChat,
question_llm: ollamaChat,
question_template: questionPrompt,
response_template: systemPrompt,
retriever: vectorstore.asRetriever()
})
const chunks = await chain.stream({
question: sanitizedQuestion,
chat_history: groupMessagesByConversation(history)
})
let count = 0
for await (const chunk of chunks) {
if (count === 0) {
setIsProcessing(true)
newMessage[appendingIndex].message = chunk + "▋"
setMessages(newMessage)
} else {
newMessage[appendingIndex].message =
newMessage[appendingIndex].message.slice(0, -1) + chunk + "▋"
setMessages(newMessage)
}
count++
if (newMessage.length > 2) {
const lastTenMessages = newMessage.slice(-10)
lastTenMessages.pop()
const chat_history = lastTenMessages
.map((message) => {
return `${message.isBot ? "Assistant: " : "Human: "}${message.message}`
})
.join("\n")
const promptForQuestion = questionPrompt
.replaceAll("{chat_history}", chat_history)
.replaceAll("{question}", message)
const questionOllama = new ChatOllama({
model: selectedModel!,
baseUrl: cleanUrl(url)
})
const response = await questionOllama.invoke(promptForQuestion)
query = response.content.toString()
}
newMessage[appendingIndex].message = newMessage[
appendingIndex
].message.slice(0, -1)
const docs = await vectorstore.similaritySearch(query, 4)
const context = formatDocs(docs)
const source = docs.map((doc) => {
return {
...doc,
name: doc?.metadata?.source || "untitled",
type: doc?.metadata?.type || "unknown",
mode: "chat",
url: ""
}
})
message = message.trim().replaceAll("\n", " ")
let humanMessage = new HumanMessage({
content: [
{
text: systemPrompt
.replace("{context}", context)
.replace("{question}", message),
type: "text"
}
]
})
const applicationChatHistory = generateHistory(history)
const chunks = await ollama.stream(
[...applicationChatHistory, humanMessage],
{
signal: signal
}
)
let count = 0
for await (const chunk of chunks) {
contentToSave += chunk.content
fullText += chunk.content
if (count === 0) {
setIsProcessing(true)
}
setMessages((prev) => {
return prev.map((message) => {
if (message.id === generateMessageId) {
return {
...message,
message: fullText.slice(0, -1) + "▋"
}
}
return message
})
})
count++
}
// update the message with the full text
setMessages((prev) => {
return prev.map((message) => {
if (message.id === generateMessageId) {
return {
...message,
message: fullText,
sources: source
}
}
return message
})
})
setHistory([
...history,
{
role: "user",
content: message
content: message,
image
},
{
role: "assistant",
content: newMessage[appendingIndex].message
content: fullText
}
])
setIsProcessing(false)
} catch (e) {
await saveMessageOnSuccess({
historyId,
setHistoryId,
isRegenerate,
selectedModel: selectedModel,
message,
image,
fullText,
source
})
setIsProcessing(false)
setStreaming(false)
} catch (e) {
const errorSave = await saveMessageOnError({
e,
botMessage: fullText,
history,
historyId,
image,
selectedModel,
setHistory,
setHistoryId,
userMessage: message,
isRegenerating: isRegenerate
})
setMessages([
...messages,
{
isBot: true,
name: selectedModel,
message: `Error in chat with website mode. Check out the following logs:
~~~
${e?.message}
~~~
`,
sources: []
}
])
if (!errorSave) {
notification.error({
message: t("error"),
description: e?.message || t("somethingWentWrong")
})
}
setIsProcessing(false)
setStreaming(false)
setIsProcessing(false)
setStreaming(false)
setIsEmbedding(false)
} finally {
setAbortController(null)
setEmbeddingController(null)
}
}
const normalChatMode = async (message: string, image: string) => {
const normalChatMode = async (
message: string,
image: string,
isRegenerate: boolean,
messages: Message[],
history: ChatHistory,
signal: AbortSignal
) => {
setStreaming(true)
const url = await getOllamaURL()
if (image.length > 0) {
image = `data:image/jpeg;base64,${image.split(",")[1]}`
}
abortControllerRef.current = new AbortController()
const ollama = new ChatOllama({
model: selectedModel,
model: selectedModel!,
baseUrl: cleanUrl(url)
})
let newMessage: Message[] = [
...messages,
{
isBot: false,
name: "You",
message,
sources: [],
images: [image]
},
{
isBot: true,
name: selectedModel,
message: "▋",
sources: []
}
]
let newMessage: Message[] = []
let generateMessageId = generateID()
const appendingIndex = newMessage.length - 1
if (!isRegenerate) {
newMessage = [
...messages,
{
isBot: false,
name: "You",
message,
sources: [],
images: [image]
},
{
isBot: true,
name: selectedModel,
message: "▋",
sources: [],
id: generateMessageId
}
]
} else {
newMessage = [
...messages,
{
isBot: true,
name: selectedModel,
message: "▋",
sources: [],
id: generateMessageId
}
]
}
setMessages(newMessage)
let fullText = ""
let contentToSave = ""
try {
const prompt = await systemPromptForNonRag()
@ -277,29 +417,41 @@ ${e?.message}
const chunks = await ollama.stream(
[...applicationChatHistory, humanMessage],
{
signal: abortControllerRef.current.signal
signal: signal
}
)
let count = 0
for await (const chunk of chunks) {
contentToSave += chunk.content
fullText += chunk.content
if (count === 0) {
setIsProcessing(true)
newMessage[appendingIndex].message = chunk.content + "▋"
setMessages(newMessage)
} else {
newMessage[appendingIndex].message =
newMessage[appendingIndex].message.slice(0, -1) +
chunk.content +
"▋"
setMessages(newMessage)
}
setMessages((prev) => {
return prev.map((message) => {
if (message.id === generateMessageId) {
return {
...message,
message: fullText.slice(0, -1) + "▋"
}
}
return message
})
})
count++
}
newMessage[appendingIndex].message = newMessage[
appendingIndex
].message.slice(0, -1)
setMessages((prev) => {
return prev.map((message) => {
if (message.id === generateMessageId) {
return {
...message,
message: fullText.slice(0, -1)
}
}
return message
})
})
setHistory([
...history,
@ -310,28 +462,49 @@ ${e?.message}
},
{
role: "assistant",
content: newMessage[appendingIndex].message
content: fullText
}
])
setIsProcessing(false)
} catch (e) {
await saveMessageOnSuccess({
historyId,
setHistoryId,
isRegenerate,
selectedModel: selectedModel,
message,
image,
fullText,
source: []
})
setIsProcessing(false)
setStreaming(false)
setIsProcessing(false)
setStreaming(false)
} catch (e) {
const errorSave = await saveMessageOnError({
e,
botMessage: fullText,
history,
historyId,
image,
selectedModel,
setHistory,
setHistoryId,
userMessage: message,
isRegenerating: isRegenerate
})
setMessages([
...messages,
{
isBot: true,
name: selectedModel,
message: `Something went wrong. Check out the following logs:
\`\`\`
${e?.message}
\`\`\`
`,
sources: []
}
])
if (!errorSave) {
notification.error({
message: t("error"),
description: e?.message || t("somethingWentWrong")
})
}
setIsProcessing(false)
setStreaming(false)
} finally {
setAbortController(null)
}
}
@ -342,20 +515,40 @@ ${e?.message}
message: string
image: string
}) => {
const newController = new AbortController()
let signal = newController.signal
setAbortController(newController)
if (chatMode === "normal") {
await normalChatMode(message, image)
await normalChatMode(message, image, false, messages, history, signal)
} else {
await chatWithWebsiteMode(message)
const newEmbeddingController = new AbortController()
let embeddingSignal = newEmbeddingController.signal
setEmbeddingController(newEmbeddingController)
await chatWithWebsiteMode(
message,
image,
false,
messages,
history,
signal,
embeddingSignal
)
}
}
const stopStreamingRequest = () => {
if (abortControllerRef.current) {
abortControllerRef.current.abort()
abortControllerRef.current = null
if (isEmbedding) {
if (embeddingController) {
embeddingController.abort()
setEmbeddingController(null)
}
}
if (abortController) {
abortController.abort()
setAbortController(null)
}
}
return {
messages,
setMessages,

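The hook now threads AbortSignals through both the embedding pass and the chat stream, so the stop button can cancel whichever phase is running. A stand-alone sketch of the streaming call with a signal, reduced to its essentials (model name and URL are placeholders):

import { ChatOllama } from "@langchain/community/chat_models/ollama"
import { HumanMessage } from "@langchain/core/messages"

// Stand-alone version of the streaming loop used above.
export const streamWithAbort = async (prompt: string, signal: AbortSignal) => {
  const ollama = new ChatOllama({
    model: "llama3", // placeholder
    baseUrl: "http://localhost:11434"
  })

  let fullText = ""
  // Aborting the signal (the "stop streaming" button) rejects this async iterator.
  const chunks = await ollama.stream([new HumanMessage(prompt)], { signal })
  for await (const chunk of chunks) {
    fullText += chunk.content
  }
  return fullText
}

// Caller side: one controller guards the chat stream; a second one would guard the embedding pass.
const controller = new AbortController()
streamWithAbort("Summarise this page", controller.signal).catch(() => {
  // aborted or failed
})
// controller.abort() cancels the in-flight request.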
View File

@ -1,8 +1,10 @@
import React from "react"
import { cleanUrl } from "~/libs/clean-url"
import {
defaultEmbeddingModelForRag,
geWebSearchFollowUpPrompt,
getOllamaURL,
promptForRag,
systemPromptForNonRagOption
} from "~/services/ollama"
import { type ChatHistory, type Message } from "~/store/option"
@ -15,7 +17,7 @@ import {
getPromptById,
removeMessageUsingHistoryId,
updateMessageByIndex
} from "~/libs/db"
} from "@/db"
import { useNavigate } from "react-router-dom"
import { notification } from "antd"
import { getSystemPromptForWeb } from "~/web/web"
@ -23,6 +25,12 @@ import { generateHistory } from "@/utils/generate-history"
import { useTranslation } from "react-i18next"
import { saveMessageOnError, saveMessageOnSuccess } from "./chat-helper"
import { usePageAssist } from "@/context"
import { OllamaEmbeddings } from "@langchain/community/embeddings/ollama"
import { PageAssistVectorStore } from "@/libs/PageAssistVectorStore"
import { formatDocs } from "@/chain/chat-with-x"
import { useWebUI } from "@/store/webui"
import { isTTSEnabled } from "@/services/tts"
import { useStorage } from "@plasmohq/storage/hook"
export const useMessageOption = () => {
const {
@ -43,8 +51,6 @@ export const useMessageOption = () => {
setIsLoading,
isProcessing,
setIsProcessing,
selectedModel,
setSelectedModel,
chatMode,
setChatMode,
speechToTextLanguage,
@ -56,14 +62,20 @@ export const useMessageOption = () => {
selectedQuickPrompt,
setSelectedQuickPrompt,
selectedSystemPrompt,
setSelectedSystemPrompt
setSelectedSystemPrompt,
selectedKnowledge,
setSelectedKnowledge
} = useStoreMessageOption()
const [selectedModel, setSelectedModel] = useStorage("selectedModel")
const { ttsEnabled } = useWebUI()
const { t } = useTranslation("option")
const navigate = useNavigate()
const textareaRef = React.useRef<HTMLTextAreaElement>(null)
const clearChat = () => {
navigate("/")
setMessages([])
@ -500,6 +512,215 @@ export const useMessageOption = () => {
}
}
const ragMode = async (
message: string,
image: string,
isRegenerate: boolean,
messages: Message[],
history: ChatHistory,
signal: AbortSignal
) => {
const url = await getOllamaURL()
const ollama = new ChatOllama({
model: selectedModel!,
baseUrl: cleanUrl(url)
})
let newMessage: Message[] = []
let generateMessageId = generateID()
if (!isRegenerate) {
newMessage = [
...messages,
{
isBot: false,
name: "You",
message,
sources: [],
images: []
},
{
isBot: true,
name: selectedModel,
message: "▋",
sources: [],
id: generateMessageId
}
]
} else {
newMessage = [
...messages,
{
isBot: true,
name: selectedModel,
message: "▋",
sources: [],
id: generateMessageId
}
]
}
setMessages(newMessage)
let fullText = ""
let contentToSave = ""
const embeddingModle = await defaultEmbeddingModelForRag()
const ollamaUrl = await getOllamaURL()
const ollamaEmbedding = new OllamaEmbeddings({
model: embeddingModle || selectedModel,
baseUrl: cleanUrl(ollamaUrl)
})
let vectorstore = await PageAssistVectorStore.fromExistingIndex(
ollamaEmbedding,
{
file_id: null,
knownledge_id: selectedKnowledge.id
}
)
try {
let query = message
const { ragPrompt: systemPrompt, ragQuestionPrompt: questionPrompt } =
await promptForRag()
if (newMessage.length > 2) {
const lastTenMessages = newMessage.slice(-10)
lastTenMessages.pop()
const chat_history = lastTenMessages
.map((message) => {
return `${message.isBot ? "Assistant: " : "Human: "}${message.message}`
})
.join("\n")
const promptForQuestion = questionPrompt
.replaceAll("{chat_history}", chat_history)
.replaceAll("{question}", message)
const questionOllama = new ChatOllama({
model: selectedModel!,
baseUrl: cleanUrl(url)
})
const response = await questionOllama.invoke(promptForQuestion)
query = response.content.toString()
}
const docs = await vectorstore.similaritySearch(query, 4)
const context = formatDocs(docs)
const source = docs.map((doc) => {
return {
...doc,
name: doc?.metadata?.source || "untitled",
type: doc?.metadata?.type || "unknown",
mode: "rag",
url: ""
}
})
message = message.trim().replaceAll("\n", " ")
let humanMessage = new HumanMessage({
content: [
{
text: systemPrompt
.replace("{context}", context)
.replace("{question}", message),
type: "text"
}
]
})
const applicationChatHistory = generateHistory(history)
const chunks = await ollama.stream(
[...applicationChatHistory, humanMessage],
{
signal: signal
}
)
let count = 0
for await (const chunk of chunks) {
contentToSave += chunk.content
fullText += chunk.content
if (count === 0) {
setIsProcessing(true)
}
setMessages((prev) => {
return prev.map((message) => {
if (message.id === generateMessageId) {
return {
...message,
message: fullText.slice(0, -1) + "▋"
}
}
return message
})
})
count++
}
// update the message with the full text
setMessages((prev) => {
return prev.map((message) => {
if (message.id === generateMessageId) {
return {
...message,
message: fullText,
sources: source
}
}
return message
})
})
setHistory([
...history,
{
role: "user",
content: message,
image
},
{
role: "assistant",
content: fullText
}
])
await saveMessageOnSuccess({
historyId,
setHistoryId,
isRegenerate,
selectedModel: selectedModel,
message,
image,
fullText,
source
})
setIsProcessing(false)
setStreaming(false)
} catch (e) {
const errorSave = await saveMessageOnError({
e,
botMessage: fullText,
history,
historyId,
image,
selectedModel,
setHistory,
setHistoryId,
userMessage: message,
isRegenerating: isRegenerate
})
if (!errorSave) {
notification.error({
message: t("error"),
description: e?.message || t("somethingWentWrong")
})
}
setIsProcessing(false)
setStreaming(false)
} finally {
setAbortController(null)
}
}
const onSubmit = async ({
message,
image,
@ -525,8 +746,8 @@ export const useMessageOption = () => {
setAbortController(controller)
signal = controller.signal
}
if (webSearch) {
await searchChatMode(
if (selectedKnowledge) {
await ragMode(
message,
image,
isRegenerate,
@ -535,14 +756,25 @@ export const useMessageOption = () => {
signal
)
} else {
await normalChatMode(
message,
image,
isRegenerate,
chatHistory || messages,
memory || history,
signal
)
if (webSearch) {
await searchChatMode(
message,
image,
isRegenerate,
chatHistory || messages,
memory || history,
signal
)
} else {
await normalChatMode(
message,
image,
isRegenerate,
chatHistory || messages,
memory || history,
signal
)
}
}
}
@ -608,7 +840,7 @@ export const useMessageOption = () => {
}
const currentHumanMessage = newMessages[index]
newMessages[index].message = message
const previousMessages = newMessages.slice(0, index + 1)
setMessages(previousMessages)
const previousHistory = newHistory.slice(0, index)
@ -664,6 +896,9 @@ export const useMessageOption = () => {
setSelectedQuickPrompt,
selectedSystemPrompt,
setSelectedSystemPrompt,
textareaRef
textareaRef,
selectedKnowledge,
setSelectedKnowledge,
ttsEnabled
}
}

54
src/hooks/useTTS.tsx Normal file
View File

@ -0,0 +1,54 @@
import { useEffect, useState } from "react"
import { notification } from "antd"
import { getVoice, isSSMLEnabled } from "@/services/tts"
import { markdownToSSML } from "@/utils/markdown-to-ssml"
type VoiceOptions = {
utterance: string
}
export const useTTS = () => {
const [isSpeaking, setIsSpeaking] = useState(false)
const speak = async ({ utterance }: VoiceOptions) => {
try {
const voice = await getVoice()
const isSSML = await isSSMLEnabled()
if (isSSML) {
utterance = markdownToSSML(utterance)
}
chrome.tts.speak(utterance, {
voiceName: voice,
onEvent(event) {
if (event.type === "start") {
setIsSpeaking(true)
} else if (event.type === "end") {
setIsSpeaking(false)
}
}
})
} catch (error) {
notification.error({
message: "Error",
description: "Something went wrong while trying to play the audio"
})
}
}
const cancel = () => {
chrome.tts.stop()
setIsSpeaking(false)
}
useEffect(() => {
return () => {
cancel()
}
}, [])
return {
speak,
cancel,
isSpeaking
}
}
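The hook wraps chrome.tts and is what a "Read aloud" control would call. A minimal consumer, assuming a plain button rather than the real message-component markup:

import { useTTS } from "@/hooks/useTTS"

// Hypothetical consumer; the actual "Read aloud" button lives in the message component.
export const ReadAloudButton = ({ text }: { text: string }) => {
  const { speak, cancel, isSpeaking } = useTTS()

  return (
    <button
      type="button"
      onClick={() => (isSpeaking ? cancel() : speak({ utterance: text }))}>
      {isSpeaking ? "Stop" : "Read aloud"}
    </button>
  )
}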

View File

@ -3,12 +3,13 @@ import playground from "@/assets/locale/en/playground.json";
import common from "@/assets/locale/en/common.json";
import sidepanel from "@/assets/locale/en/sidepanel.json";
import settings from "@/assets/locale/en/settings.json";
import knowledge from "@/assets/locale/en/knowledge.json";
export const en = {
option,
playground,
common,
sidepanel,
settings
settings,
knowledge
}

View File

@ -3,6 +3,7 @@ import playground from "@/assets/locale/ja-JP/playground.json";
import common from "@/assets/locale/ja-JP/common.json";
import sidepanel from "@/assets/locale/ja-JP/sidepanel.json";
import settings from "@/assets/locale/ja-JP/settings.json";
import knowledge from "@/assets/locale/ja-JP/knowledge.json";
export const ja = {
@ -10,5 +11,6 @@ export const ja = {
playground,
common,
sidepanel,
settings
settings,
knowledge
}

View File

@ -3,12 +3,13 @@ import playground from "@/assets/locale/ml/playground.json";
import common from "@/assets/locale/ml/common.json";
import sidepanel from "@/assets/locale/ml/sidepanel.json";
import settings from "@/assets/locale/ml/settings.json";
import knowledge from "@/assets/locale/ml/knowledge.json";
export const ml = {
option,
playground,
common,
sidepanel,
settings
settings,
knowledge
}

View File

@ -3,6 +3,7 @@ import playground from "@/assets/locale/zh/playground.json";
import common from "@/assets/locale/zh/common.json";
import sidepanel from "@/assets/locale/zh/sidepanel.json";
import settings from "@/assets/locale/zh/settings.json";
import knowledge from "@/assets/locale/zh/knowledge.json";
export const zh = {
@ -10,5 +11,6 @@ export const zh = {
playground,
common,
sidepanel,
settings
settings,
knowledge
}

View File

@ -0,0 +1,203 @@
import { similarity as ml_distance_similarity } from "ml-distance"
import { VectorStore } from "@langchain/core/vectorstores"
import type { EmbeddingsInterface } from "@langchain/core/embeddings"
import { Document } from "@langchain/core/documents"
import { getVector, insertVector } from "@/db/vector"
/**
* Interface representing a vector in memory. It includes the content
* (text), the corresponding embedding (vector), and any associated
* metadata.
*/
interface PageAssistVector {
content: string
embedding: number[]
metadata: Record<string, any>
}
/**
* Interface for the arguments that can be passed to the
* `MemoryVectorStore` constructor. It includes an optional `similarity`
* function.
*/
export interface MemoryVectorStoreArgs {
knownledge_id: string
file_id?: string
similarity?: typeof ml_distance_similarity.cosine
}
/**
* Class that extends `VectorStore` to store vectors in memory. Provides
* methods for adding documents, performing similarity searches, and
* creating instances from texts, documents, or an existing index.
*/
export class PageAssistVectorStore extends VectorStore {
declare FilterType: (doc: Document) => boolean
knownledge_id: string
file_id?: string
// memoryVectors: PageAssistVector[] = []
similarity: typeof ml_distance_similarity.cosine
_vectorstoreType(): string {
return "memory"
}
constructor(embeddings: EmbeddingsInterface, args: MemoryVectorStoreArgs) {
super(embeddings, args)
this.similarity = args?.similarity ?? ml_distance_similarity.cosine
this.knownledge_id = args?.knownledge_id!
this.file_id = args?.file_id
}
/**
* Method to add documents to the memory vector store. It extracts the
* text from each document, generates embeddings for them, and adds the
* resulting vectors to the store.
* @param documents Array of `Document` instances to be added to the store.
* @returns Promise that resolves when all documents have been added.
*/
async addDocuments(documents: Document[]): Promise<void> {
const texts = documents.map(({ pageContent }) => pageContent)
return this.addVectors(
await this.embeddings.embedDocuments(texts),
documents
)
}
/**
* Method to add vectors to the memory vector store. It creates
* `PageAssistVector` instances for each vector and document pair and adds
* them to the store.
* @param vectors Array of vectors to be added to the store.
* @param documents Array of `Document` instances corresponding to the vectors.
* @returns Promise that resolves when all vectors have been added.
*/
async addVectors(vectors: number[][], documents: Document[]): Promise<void> {
const memoryVectors = vectors.map((embedding, idx) => ({
content: documents[idx].pageContent,
embedding,
metadata: documents[idx].metadata,
file_id: this.file_id
}))
console.log(`vector:${this.knownledge_id}`)
await insertVector(`vector:${this.knownledge_id}`, memoryVectors)
}
/**
* Method to perform a similarity search in the memory vector store. It
* calculates the similarity between the query vector and each vector in
* the store, sorts the results by similarity, and returns the top `k`
* results along with their scores.
* @param query Query vector to compare against the vectors in the store.
* @param k Number of top results to return.
* @param filter Optional filter function to apply to the vectors before performing the search.
* @returns Promise that resolves with an array of tuples, each containing a `Document` and its similarity score.
*/
async similaritySearchVectorWithScore(
query: number[],
k: number,
filter?: this["FilterType"]
): Promise<[Document, number][]> {
const filterFunction = (memoryVector: PageAssistVector) => {
if (!filter) {
return true
}
const doc = new Document({
metadata: memoryVector.metadata,
pageContent: memoryVector.content
})
return filter(doc)
}
const data = await getVector(`vector:${this.knownledge_id}`)
const pgVector = [...data.vectors]
const filteredMemoryVectors = pgVector.filter(filterFunction)
console.log(filteredMemoryVectors)
const searches = filteredMemoryVectors
.map((vector, index) => ({
similarity: this.similarity(query, vector.embedding),
index
}))
.sort((a, b) => (a.similarity > b.similarity ? -1 : 0))
.slice(0, k)
console.log(searches)
const result: [Document, number][] = searches.map((search) => [
new Document({
metadata: filteredMemoryVectors[search.index].metadata,
pageContent: filteredMemoryVectors[search.index].content
}),
search.similarity
])
return result
}
/**
* Static method to create a `MemoryVectorStore` instance from an array of
* texts. It creates a `Document` for each text and metadata pair, and
* adds them to the store.
* @param texts Array of texts to be added to the store.
* @param metadatas Array or single object of metadata corresponding to the texts.
* @param embeddings `Embeddings` instance used to generate embeddings for the texts.
* @param dbConfig Optional `MemoryVectorStoreArgs` to configure the `MemoryVectorStore` instance.
* @returns Promise that resolves with a new `MemoryVectorStore` instance.
*/
static async fromTexts(
texts: string[],
metadatas: object[] | object,
embeddings: EmbeddingsInterface,
dbConfig?: MemoryVectorStoreArgs
): Promise<PageAssistVectorStore> {
const docs: Document[] = []
for (let i = 0; i < texts.length; i += 1) {
const metadata = Array.isArray(metadatas) ? metadatas[i] : metadatas
const newDoc = new Document({
pageContent: texts[i],
metadata
})
docs.push(newDoc)
}
return PageAssistVectorStore.fromDocuments(docs, embeddings, dbConfig)
}
/**
* Static method to create a `MemoryVectorStore` instance from an array of
* `Document` instances. It adds the documents to the store.
* @param docs Array of `Document` instances to be added to the store.
* @param embeddings `Embeddings` instance used to generate embeddings for the documents.
* @param dbConfig Optional `MemoryVectorStoreArgs` to configure the `MemoryVectorStore` instance.
* @returns Promise that resolves with a new `MemoryVectorStore` instance.
*/
static async fromDocuments(
docs: Document[],
embeddings: EmbeddingsInterface,
dbConfig?: MemoryVectorStoreArgs
): Promise<PageAssistVectorStore> {
const instance = new this(embeddings, dbConfig)
await instance.addDocuments(docs)
return instance
}
/**
* Static method to create a `MemoryVectorStore` instance from an existing
* index. It creates a new `MemoryVectorStore` instance without adding any
* documents or vectors.
* @param embeddings `Embeddings` instance used to generate embeddings for the documents.
* @param dbConfig Optional `MemoryVectorStoreArgs` to configure the `MemoryVectorStore` instance.
* @returns Promise that resolves with a new `MemoryVectorStore` instance.
*/
static async fromExistingIndex(
embeddings: EmbeddingsInterface,
dbConfig?: MemoryVectorStoreArgs
): Promise<PageAssistVectorStore> {
const instance = new this(embeddings, dbConfig)
return instance
}
}
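The store is used two ways in this commit: fromDocuments at ingest time (the knowledge-processing routine) and fromExistingIndex plus similaritySearch at query time (ragMode). A compact sketch of both, with the embedding model name and sample text as placeholders:

import { OllamaEmbeddings } from "@langchain/community/embeddings/ollama"
import { Document } from "@langchain/core/documents"
import { PageAssistVectorStore } from "@/libs/PageAssistVectorStore"

const demo = async (knowledgeId: string) => {
  const embeddings = new OllamaEmbeddings({
    model: "nomic-embed-text", // placeholder embedding model
    baseUrl: "http://localhost:11434"
  })

  // Ingest: embeds the chunks and appends them to the vector:<knowledgeId> record.
  await PageAssistVectorStore.fromDocuments(
    [
      new Document({
        pageContent: "Page Assist ships a knowledge base feature.",
        metadata: { source: "notes.txt" }
      })
    ],
    embeddings,
    { knownledge_id: knowledgeId, file_id: "file-1" }
  )

  // Query: nothing is re-embedded; this is a cosine search over the stored vectors.
  const store = await PageAssistVectorStore.fromExistingIndex(embeddings, {
    knownledge_id: knowledgeId,
    file_id: null
  })
  const docs = await store.similaritySearch("What does Page Assist ship?", 4)
  console.log(docs.map((d) => d.pageContent))
}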

View File

@ -1,41 +1,25 @@
import { pdfDist } from "./pdfjs"
import { defaultExtractContent } from "@/parser/default"
import { getPdf } from "./pdf"
import {
isTweet,
isTwitterTimeline,
parseTweet,
parseTwitterTimeline,
} from "@/parser/twitter"
import { isGoogleDocs, parseGoogleDocs } from "@/parser/google-docs"
import { cleanUnwantedUnicode } from "@/utils/clean"
export const getPdf = async (data: ArrayBuffer) => {
const pdf = pdfDist.getDocument({
data,
useWorkerFetch: false,
isEvalSupported: false,
useSystemFonts: true,
});
pdf.onPassword = (callback: any) => {
const password = prompt("Enter the password: ")
if (!password) {
throw new Error("Password required to open the PDF.");
}
callback(password);
};
const pdfDocument = await pdf.promise;
return pdfDocument
}
const _getHtml = async () => {
const _getHtml = () => {
const url = window.location.href
if (document.contentType === "application/pdf") {
return { url, content: "", type: "pdf" }
}
const html = Array.from(document.querySelectorAll("script")).reduce(
(acc, script) => {
return acc.replace(script.outerHTML, "")
},
document.documentElement.outerHTML
)
return { url, content: html, type: "html" }
return {
content: document.documentElement.outerHTML,
url,
type: "html"
}
}
export const getDataFromCurrentTab = async () => {
@ -58,7 +42,6 @@ export const getDataFromCurrentTab = async () => {
type: string
}>
const { content, type, url } = await result
if (type === "pdf") {
@ -71,31 +54,58 @@ export const getDataFromCurrentTab = async () => {
const pdf = await getPdf(data)
for (let i = 1; i <= pdf.numPages; i += 1) {
const page = await pdf.getPage(i);
const content = await page.getTextContent();
const page = await pdf.getPage(i)
const content = await page.getTextContent()
if (content?.items.length === 0) {
continue;
continue
}
const text = content?.items.map((item: any) => item.str).join("\n")
.replace(/\x00/g, "").trim();
const text = content?.items
.map((item: any) => item.str)
.join("\n")
.replace(/\x00/g, "")
.trim()
pdfHtml.push({
content: text,
page: i
})
}
return {
url,
content: "",
pdf: pdfHtml,
type: "pdf"
}
}
return { url, content, type, pdf: [] }
if (isTwitterTimeline(url)) {
const data = parseTwitterTimeline(content)
return {
url,
content: data,
type: "html",
pdf: []
}
} else if (isTweet(url)) {
const data = parseTweet(content)
return {
url,
content: data,
type: "html",
pdf: []
}
} else if (isGoogleDocs(url)) {
const data = await parseGoogleDocs()
if (data) {
return {
url,
content: cleanUnwantedUnicode(data),
type: "html",
pdf: []
}
}
}
const data = defaultExtractContent(content)
return { url, content: data, type, pdf: [] }
}

29
src/libs/pdf.ts Normal file
View File

@ -0,0 +1,29 @@
import { pdfDist } from "./pdfjs"
export const getPdf = async (data: ArrayBuffer) => {
const pdf = pdfDist.getDocument({
data,
useWorkerFetch: false,
isEvalSupported: false,
useSystemFonts: true
})
pdf.onPassword = (callback: any) => {
const password = prompt("Enter the password: ")
if (!password) {
throw new Error("Password required to open the PDF.")
}
callback(password)
}
const pdfDocument = await pdf.promise
return pdfDocument
}
export const processPdf = async (base64: string) => {
const res = await fetch(base64)
const data = await res.arrayBuffer()
const pdf = await getPdf(data)
return pdf
}
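
A usage sketch for `processPdf`, mirroring the per-page extraction loop used by the loaders in this change; the data URL is a placeholder.

import { processPdf } from "@/libs/pdf"

// Knowledge sources are stored as base64 data URLs, so that is what processPdf receives
const pdf = await processPdf("data:application/pdf;base64,....") // placeholder data URL
const page = await pdf.getPage(1)
const textContent = await page.getTextContent()
const text = textContent.items.map((item: any) => item.str).join("\n")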

View File

@ -0,0 +1,85 @@
import { getKnowledgeById, updateKnowledgeStatus } from "@/db/knowledge"
import { PageAssistPDFUrlLoader } from "@/loader/pdf-url"
import {
defaultEmbeddingChunkOverlap,
defaultEmbeddingChunkSize
} from "@/services/ollama"
import { OllamaEmbeddings } from "@langchain/community/embeddings/ollama"
import { RecursiveCharacterTextSplitter } from "langchain/text_splitter"
import { PageAssistVectorStore } from "./PageAssistVectorStore"
import { PageAssisCSVUrlLoader } from "@/loader/csv"
import { PageAssisTXTUrlLoader } from "@/loader/txt"
export const processKnowledge = async (msg: any, id: string): Promise<void> => {
console.log(`Processing knowledge with id: ${id}`)
try {
const knowledge = await getKnowledgeById(id)
if (!knowledge) {
console.error(`Knowledge with id ${id} not found`)
return
}
await updateKnowledgeStatus(id, "processing")
const ollamaEmbedding = new OllamaEmbeddings({
model: knowledge.embedding_model
})
const chunkSize = await defaultEmbeddingChunkSize()
const chunkOverlap = await defaultEmbeddingChunkOverlap()
const textSplitter = new RecursiveCharacterTextSplitter({
chunkSize,
chunkOverlap
})
for (const doc of knowledge.source) {
if (doc.type === "pdf" || doc.type === "application/pdf") {
const loader = new PageAssistPDFUrlLoader({
name: doc.filename,
url: doc.content
})
let docs = await loader.load()
const chunks = await textSplitter.splitDocuments(docs)
await PageAssistVectorStore.fromDocuments(chunks, ollamaEmbedding, {
knownledge_id: knowledge.id,
file_id: doc.source_id
})
} else if (doc.type === "csv" || doc.type === "text/csv") {
const loader = new PageAssisCSVUrlLoader({
name: doc.filename,
url: doc.content,
options: {}
})
let docs = await loader.load()
const chunks = await textSplitter.splitDocuments(docs)
await PageAssistVectorStore.fromDocuments(chunks, ollamaEmbedding, {
knownledge_id: knowledge.id,
file_id: doc.source_id
})
} else {
const loader = new PageAssisTXTUrlLoader({
name: doc.filename,
url: doc.content
})
let docs = await loader.load()
const chunks = await textSplitter.splitDocuments(docs)
await PageAssistVectorStore.fromDocuments(chunks, ollamaEmbedding, {
knownledge_id: knowledge.id,
file_id: doc.source_id
})
}
}
await updateKnowledgeStatus(id, "finished")
} catch (error) {
console.error(`Error processing knowledge with id: ${id}`, error)
await updateKnowledgeStatus(id, "failed")
} finally {
console.log(`Finished processing knowledge with id: ${id}`)
}
}
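
The loop above reads only a handful of fields from the knowledge record; a sketch of the implied shape, with illustrative names (the real definitions live in `@/db/knowledge`, which is outside this excerpt):

// Inferred from the fields processKnowledge reads; not the actual type declarations
interface KnowledgeSourceSketch {
  source_id: string
  filename: string
  type: string // e.g. "application/pdf", "text/csv", or a plain-text type
  content: string // base64 data URL, as produced by convertToSource in src/utils/to-source.ts
}

interface KnowledgeSketch {
  id: string
  embedding_model: string
  source: KnowledgeSourceSketch[]
}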

84
src/loader/csv.ts Normal file
View File

@ -0,0 +1,84 @@
import { dsvFormat } from "d3-dsv"
import { BaseDocumentLoader } from "langchain/document_loaders/base"
import { Document } from "@langchain/core/documents"
export interface WebLoaderParams {
url: string
name: string
options: {
column?: string
separator?: string
}
}
export class PageAssisCSVUrlLoader
extends BaseDocumentLoader
implements WebLoaderParams
{
pdf: { content: string; page: number }[]
url: string
name: string
options: { column?: string; separator?: string }
constructor({ url, name }: WebLoaderParams) {
super()
this.url = url
this.name = name
this.options = {}
}
public async parse(raw: string): Promise<string[]> {
const { column, separator = "," } = this.options
const psv = dsvFormat(separator)
let parsed = psv.parseRows(raw.trim())
if (column !== undefined) {
if (!parsed[0].includes(column)) {
throw new Error(`ColumnNotFoundError: Column ${column} not found`)
}
const columnIndex = parsed[0].indexOf(column)
return parsed.map((row) => row[columnIndex]!)
}
const headers = parsed[0]
parsed = parsed.slice(1)
return parsed.map((row) =>
row.map((value, index) => `${headers[index]}: ${value}`).join("\n")
)
}
async load(): Promise<Document<Record<string, any>>[]> {
const res = await fetch(this.url)
if (!res.ok) {
throw new Error(`Failed to fetch ${this.url}`)
}
const raw = await res.text()
const parsed = await this.parse(raw)
let metadata = { source: this.name, type: "csv" }
parsed.forEach((pageContent, i) => {
if (typeof pageContent !== "string") {
throw new Error(
`Expected string, at position ${i} got ${typeof pageContent}`
)
}
})
return parsed.map(
(pageContent, i) =>
new Document({
pageContent,
metadata:
parsed.length === 1
? metadata
: {
...metadata,
line: i + 1
}
})
)
}
}
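
A usage sketch for the CSV loader; the file name and data URL are placeholders.

const loader = new PageAssisCSVUrlLoader({
  name: "contacts.csv", // placeholder
  url: "data:text/csv;base64,....", // placeholder data URL
  options: {}
})
// Each Document's pageContent is the row rendered as "header: value" lines
const docs = await loader.load()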

View File

@ -3,6 +3,7 @@ import { Document } from "@langchain/core/documents"
import { compile } from "html-to-text"
import { chromeRunTime } from "~/libs/runtime"
import { YtTranscript } from "yt-transcript"
import { isWikipedia, parseWikipedia } from "@/parser/wiki"
const YT_REGEX =
/(?:https?:\/\/)?(?:www\.)?(?:youtube\.com|youtu\.be)\/(?:watch\?v=)?([a-zA-Z0-9_-]+)/
@ -16,7 +17,6 @@ const getTranscript = async (url: string) => {
return await ytTranscript.getTranscript()
}
export interface WebLoaderParams {
html: string
url: string
@ -24,7 +24,8 @@ export interface WebLoaderParams {
export class PageAssistHtmlLoader
extends BaseDocumentLoader
implements WebLoaderParams {
implements WebLoaderParams
{
html: string
url: string
@ -47,7 +48,6 @@ export class PageAssistHtmlLoader
text += item.text + " "
})
return [
{
metadata: {
@ -58,12 +58,25 @@ export class PageAssistHtmlLoader
}
]
}
const htmlCompiler = compile({
wordwrap: false
})
const text = htmlCompiler(this.html)
// let html = this.html
// if (isWikipedia(this.url)) {
// console.log("Wikipedia URL detected")
// html = parseWikipedia(html)
// }
// // else if (isTwitter(this.url)) {
// // console.log("Twitter URL detected")
// // html = parseTweet(html, this.url)
// // }
// const htmlCompiler = compile({
// wordwrap: false
// })
// const text = htmlCompiler(html)
const metadata = { source: this.url }
return [new Document({ pageContent: text, metadata })]
return [new Document({ pageContent: this.html, metadata })]
}
async loadByURL(): Promise<Document<Record<string, any>>[]> {
@ -79,7 +92,6 @@ export class PageAssistHtmlLoader
text += item.text + " "
})
return [
{
metadata: {
@ -92,7 +104,18 @@ export class PageAssistHtmlLoader
}
await chromeRunTime(this.url)
const fetchHTML = await fetch(this.url)
const html = await fetchHTML.text()
let html = await fetchHTML.text()
if (isWikipedia(this.url)) {
console.log("Wikipedia URL detected")
html = parseWikipedia(await fetchHTML.text())
}
// else if (isTwitter(this.url)) {
// console.log("Twitter URL detected")
// html = parseTweet(await fetchHTML.text(), this.url)
// }
const htmlCompiler = compile({
wordwrap: false,
selectors: [

49
src/loader/pdf-url.ts Normal file
View File

@ -0,0 +1,49 @@
import { BaseDocumentLoader } from "langchain/document_loaders/base"
import { Document } from "@langchain/core/documents"
import { processPdf } from "@/libs/pdf"
export interface WebLoaderParams {
url: string
name: string
}
export class PageAssistPDFUrlLoader
extends BaseDocumentLoader
implements WebLoaderParams
{
pdf: { content: string; page: number }[]
url: string
name: string
constructor({ url, name }: WebLoaderParams) {
super()
this.url = url
this.name = name
}
async load(): Promise<Document<Record<string, any>>[]> {
const documents: Document[] = []
const data = await processPdf(this.url)
for (let i = 1; i <= data.numPages; i += 1) {
const page = await data.getPage(i)
const content = await page.getTextContent()
if (content?.items.length === 0) {
continue
}
const text = content?.items
.map((item: any) => item.str)
.join("\n")
.replace(/\x00/g, "")
.trim()
documents.push({
pageContent: text,
metadata: { source: this.name, page: i, type: "pdf" }
})
}
return documents
}
}

View File

@ -1,37 +1,36 @@
import { BaseDocumentLoader } from "langchain/document_loaders/base"
import { Document } from "@langchain/core/documents"
export interface WebLoaderParams {
pdf: { content: string, page: number }[]
url: string
pdf: { content: string; page: number }[]
url: string
}
export class PageAssistPDFLoader
extends BaseDocumentLoader
implements WebLoaderParams {
pdf: { content: string, page: number }[]
url: string
extends BaseDocumentLoader
implements WebLoaderParams
{
pdf: { content: string; page: number }[]
url: string
constructor({ pdf, url }: WebLoaderParams) {
super()
this.pdf = pdf
this.url = url
constructor({ pdf, url }: WebLoaderParams) {
super()
this.pdf = pdf
this.url = url
}
async load(): Promise<Document<Record<string, any>>[]> {
const documents: Document[] = []
for (const page of this.pdf) {
const metadata = { source: this.url, page: page.page }
documents.push(new Document({ pageContent: page.content, metadata }))
}
async load(): Promise<Document<Record<string, any>>[]> {
const documents: Document[] = [];
for (const page of this.pdf) {
const metadata = { source: this.url, page: page.page }
documents.push(new Document({ pageContent: page.content, metadata }))
}
return [
new Document({
pageContent: documents.map((doc) => doc.pageContent).join("\n\n"),
metadata: documents.map((doc) => doc.metadata),
}),
];
}
return [
new Document({
pageContent: documents.map((doc) => doc.pageContent).join("\n\n"),
metadata: documents.map((doc) => doc.metadata)
})
]
}
}

57
src/loader/txt.ts Normal file
View File

@ -0,0 +1,57 @@
import { BaseDocumentLoader } from "langchain/document_loaders/base"
import { Document } from "@langchain/core/documents"
export interface WebLoaderParams {
url: string
name: string
}
export class PageAssisTXTUrlLoader
extends BaseDocumentLoader
implements WebLoaderParams
{
pdf: { content: string; page: number }[]
url: string
name: string
constructor({ url, name }: WebLoaderParams) {
super()
this.url = url
this.name = name
}
public async parse(raw: string): Promise<string[]> {
return [raw]
}
async load(): Promise<Document<Record<string, any>>[]> {
const res = await fetch(this.url)
if (!res.ok) {
throw new Error(`Failed to fetch ${this.url}`)
}
const raw = await res.text()
const parsed = await this.parse(raw)
let metadata = { source: this.name, type: "txt" }
parsed.forEach((pageContent, i) => {
if (typeof pageContent !== "string") {
throw new Error(
`Expected string, at position ${i} got ${typeof pageContent}`
)
}
})
return parsed.map(
(pageContent, i) =>
new Document({
pageContent,
metadata:
parsed.length === 1
? metadata
: {
...metadata,
line: i + 1
}
})
)
}
}

View File

@ -0,0 +1,255 @@
import { Embeddings, EmbeddingsParams } from "@langchain/core/embeddings"
import type { StringWithAutocomplete } from "@langchain/core/utils/types"
export interface OllamaInput {
embeddingOnly?: boolean
f16KV?: boolean
frequencyPenalty?: number
headers?: Record<string, string>
keepAlive?: string
logitsAll?: boolean
lowVram?: boolean
mainGpu?: number
model?: string
baseUrl?: string
mirostat?: number
mirostatEta?: number
mirostatTau?: number
numBatch?: number
numCtx?: number
numGpu?: number
numGqa?: number
numKeep?: number
numPredict?: number
numThread?: number
penalizeNewline?: boolean
presencePenalty?: number
repeatLastN?: number
repeatPenalty?: number
ropeFrequencyBase?: number
ropeFrequencyScale?: number
temperature?: number
stop?: string[]
tfsZ?: number
topK?: number
topP?: number
typicalP?: number
useMLock?: boolean
useMMap?: boolean
vocabOnly?: boolean
format?: StringWithAutocomplete<"json">
}
export interface OllamaRequestParams {
model: string
format?: StringWithAutocomplete<"json">
images?: string[]
options: {
embedding_only?: boolean
f16_kv?: boolean
frequency_penalty?: number
logits_all?: boolean
low_vram?: boolean
main_gpu?: number
mirostat?: number
mirostat_eta?: number
mirostat_tau?: number
num_batch?: number
num_ctx?: number
num_gpu?: number
num_gqa?: number
num_keep?: number
num_thread?: number
num_predict?: number
penalize_newline?: boolean
presence_penalty?: number
repeat_last_n?: number
repeat_penalty?: number
rope_frequency_base?: number
rope_frequency_scale?: number
temperature?: number
stop?: string[]
tfs_z?: number
top_k?: number
top_p?: number
typical_p?: number
use_mlock?: boolean
use_mmap?: boolean
vocab_only?: boolean
}
}
type CamelCasedRequestOptions = Omit<
OllamaInput,
"baseUrl" | "model" | "format" | "headers"
>
/**
* Interface for OllamaEmbeddings parameters. Extends EmbeddingsParams and
* defines additional parameters specific to the OllamaEmbeddings class.
*/
interface OllamaEmbeddingsParams extends EmbeddingsParams {
/** The Ollama model to use, e.g: "llama2:13b" */
model?: string
/** Base URL of the Ollama server, defaults to "http://localhost:11434" */
baseUrl?: string
/** Extra headers to include in the Ollama API request */
headers?: Record<string, string>
/** Defaults to "5m" */
keepAlive?: string
/** Advanced Ollama API request parameters in camelCase, see
* https://github.com/jmorganca/ollama/blob/main/docs/modelfile.md#valid-parameters-and-values
* for details of the available parameters.
*/
requestOptions?: CamelCasedRequestOptions
signal?: AbortSignal
}
export class OllamaEmbeddingsPageAssist extends Embeddings {
model = "llama2"
baseUrl = "http://localhost:11434"
headers?: Record<string, string>
keepAlive = "5m"
requestOptions?: OllamaRequestParams["options"]
signal?: AbortSignal
constructor(params?: OllamaEmbeddingsParams) {
super({ maxConcurrency: 1, ...params })
if (params?.model) {
this.model = params.model
}
if (params?.baseUrl) {
this.baseUrl = params.baseUrl
}
if (params?.headers) {
this.headers = params.headers
}
if (params?.keepAlive) {
this.keepAlive = params.keepAlive
}
if (params?.requestOptions) {
this.requestOptions = this._convertOptions(params.requestOptions)
}
if (params?.signal) {
this.signal = params.signal
}
}
/** convert camelCased Ollama request options like "useMMap" to
* the snake_cased equivalent which the ollama API actually uses.
* Used only for consistency with the llms/Ollama and chatModels/Ollama classes
*/
_convertOptions(requestOptions: CamelCasedRequestOptions) {
const snakeCasedOptions: Record<string, unknown> = {}
const mapping: Record<keyof CamelCasedRequestOptions, string> = {
embeddingOnly: "embedding_only",
f16KV: "f16_kv",
frequencyPenalty: "frequency_penalty",
keepAlive: "keep_alive",
logitsAll: "logits_all",
lowVram: "low_vram",
mainGpu: "main_gpu",
mirostat: "mirostat",
mirostatEta: "mirostat_eta",
mirostatTau: "mirostat_tau",
numBatch: "num_batch",
numCtx: "num_ctx",
numGpu: "num_gpu",
numGqa: "num_gqa",
numKeep: "num_keep",
numPredict: "num_predict",
numThread: "num_thread",
penalizeNewline: "penalize_newline",
presencePenalty: "presence_penalty",
repeatLastN: "repeat_last_n",
repeatPenalty: "repeat_penalty",
ropeFrequencyBase: "rope_frequency_base",
ropeFrequencyScale: "rope_frequency_scale",
temperature: "temperature",
stop: "stop",
tfsZ: "tfs_z",
topK: "top_k",
topP: "top_p",
typicalP: "typical_p",
useMLock: "use_mlock",
useMMap: "use_mmap",
vocabOnly: "vocab_only"
}
for (const [key, value] of Object.entries(requestOptions)) {
const snakeCasedOption = mapping[key as keyof CamelCasedRequestOptions]
if (snakeCasedOption) {
snakeCasedOptions[snakeCasedOption] = value
}
}
return snakeCasedOptions
}
async _request(prompt: string): Promise<number[]> {
const { model, baseUrl, keepAlive, requestOptions } = this
let formattedBaseUrl = baseUrl
if (formattedBaseUrl.startsWith("http://localhost:")) {
// Node 18 has issues with resolving "localhost"
// See https://github.com/node-fetch/node-fetch/issues/1624
formattedBaseUrl = formattedBaseUrl.replace(
"http://localhost:",
"http://127.0.0.1:"
)
}
const response = await fetch(`${formattedBaseUrl}/api/embeddings`, {
method: "POST",
headers: {
"Content-Type": "application/json",
...this.headers
},
body: JSON.stringify({
prompt,
model,
keep_alive: keepAlive,
options: requestOptions
}),
signal: this.signal
})
if (!response.ok) {
throw new Error(
`Request to Ollama server failed: ${response.status} ${response.statusText}`
)
}
const json = await response.json()
return json.embedding
}
async _embed(texts: string[]): Promise<number[][]> {
const embeddings: number[][] = await Promise.all(
texts.map((text) => this.caller.call(() => this._request(text)))
)
return embeddings
}
async embedDocuments(documents: string[]) {
return this._embed(documents)
}
async embedQuery(document: string) {
return (await this.embedDocuments([document]))[0]
}
}
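
A usage sketch for the embeddings client above; the model name and request options are illustrative.

const embedder = new OllamaEmbeddingsPageAssist({
  model: "nomic-embed-text", // placeholder model name
  baseUrl: "http://localhost:11434",
  requestOptions: { numCtx: 2048 } // camelCase here, converted to num_ctx by _convertOptions
})
const vectors = await embedder.embedDocuments(["first chunk", "second chunk"])
const queryVector = await embedder.embedQuery("a question about the chunks")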

10
src/parser/default.ts Normal file
View File

@ -0,0 +1,10 @@
import * as cheerio from "cheerio"
import TurndownService from "turndown"
let turndownService = new TurndownService()
export const defaultExtractContent = (html: string) => {
const $ = cheerio.load(html)
const mainContent = $('[role="main"]').html() || $("main").html() || $.html()
const markdown = turndownService.turndown(mainContent)
return markdown
}
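
A usage sketch for the extractor above; the HTML string is a placeholder and the exact markdown depends on Turndown's default rules.

import { defaultExtractContent } from "@/parser/default"

// Prefers the page's main landmark, then <main>, then the whole document
const markdown = defaultExtractContent(
  "<html><body><main><h1>Hello</h1><p>World</p></main></body></html>"
)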

119
src/parser/google-docs.ts Normal file
View File

@ -0,0 +1,119 @@
export const isGoogleDocs = (url: string) => {
const GOOGLE_DOCS_REGEX = /docs\.google\.com\/document/g
return GOOGLE_DOCS_REGEX.test(url)
}
const getGoogleDocs = () => {
try {
function traverse(
obj: { [x: string]: any },
predicate: { (_: any, value: any): boolean; (arg0: any, arg1: any): any },
maxDepth: number,
propNames = Object.getOwnPropertyNames(obj)
) {
const visited = new Set()
const results = []
let iterations = 0
const traverseObj = (
name: string,
value: unknown,
path: any[],
depth = 0
) => {
iterations++
if (name === "prototype" || value instanceof Window || depth > maxDepth)
return
const currentPath = [...path, name]
try {
if (predicate(name, value)) {
results.push({ path: currentPath, value })
return
}
} catch (error) {}
if (value != null && !visited.has(value)) {
visited.add(value)
if (Array.isArray(value)) {
value.forEach((val, index) => {
try {
traverseObj(index.toString(), val, currentPath, depth + 1)
} catch (error) {}
})
} else if (value instanceof Object) {
const propNamesForValue =
value &&
// @ts-ignore
value.nodeType === 1 &&
// @ts-ignore
typeof value.nodeName === "string"
? Object.getOwnPropertyNames(obj)
: Object.getOwnPropertyNames(value)
propNamesForValue.forEach((prop) => {
try {
traverseObj(prop, value[prop], currentPath, depth + 1)
} catch (error) {}
})
}
}
}
propNames.forEach((prop) => {
try {
traverseObj(prop, obj[prop], [])
} catch (error) {}
})
return { results, iterations }
}
const result = traverse(
// @ts-ignore
window.KX_kixApp,
(_: any, value: { toString: () => string }) =>
value && "\x03" === value.toString().charAt(0),
5
)
if (result.results?.[0]?.value) {
return {
content: result.results[0].value
}
}
return {
content: null
}
} catch (error) {
return {
content: null
}
}
}
export const parseGoogleDocs = async () => {
const result = new Promise((resolve) => {
chrome.tabs.query({ active: true, currentWindow: true }, async (tabs) => {
const tab = tabs[0]
const data = await chrome.scripting.executeScript({
target: { tabId: tab.id },
world: "MAIN",
func: getGoogleDocs
})
if (data.length > 0) {
resolve(data[0].result)
}
})
}) as Promise<{
content?: string
}>
const { content } = await result
return content
}

View File

@ -0,0 +1,5 @@
import * as cheerio from 'cheerio';
export const parseGoogleSheets = (html: string) => {
const $ = cheerio.load(html);
};

102
src/parser/twitter.ts Normal file
View File

@ -0,0 +1,102 @@
import * as cheerio from "cheerio"
export const isTweet = (url: string) => {
const TWEET_REGEX = /twitter\.com\/[a-zA-Z0-9_]+\/status\/[0-9]+/g
const X_REGEX = /x\.com\/[a-zA-Z0-9_]+\/status\/[0-9]+/g
return TWEET_REGEX.test(url) || X_REGEX.test(url)
}
export const isTwitterTimeline = (url: string) => {
return url === "https://twitter.com/home" || url === "https://x.com/home"
}
export const isTwitterProfile = (url: string) => {
const PROFILE_REGEX = /twitter\.com\/[a-zA-Z0-9_]+/g
const X_REGEX = /x\.com\/[a-zA-Z0-9_]+/g
return PROFILE_REGEX.test(url) || X_REGEX.test(url)
}
export const parseTwitterTimeline = (html: string) => {
const $ = cheerio.load(html)
const postElements = $("[data-testid=tweetText]")
const authorElements = $("[data-testid=User-Name]")
const posts = postElements
.map((index, element) => {
const post = $(element).text()
const author = $(authorElements[index]).text()
return {
author,
post
}
})
.get()
return posts
.map((post) => {
return `## Author: ${post.author}\n\n${post.post}\n\n---\n\n`
})
.filter((value, index, self) => self.indexOf(value) === index)
.join("\n")
}
export const parseTweet = (html: string) => {
const $ = cheerio.load(html)
const postElements = $("[data-testid=tweetText]")
const authorElements = $("[data-testid=User-Name]")
const posts = postElements
.map((index, element) => {
const post = $(element).text()
const author = $(authorElements[index]).text()
return {
author,
post,
isReply: index !== 0
}
})
.get()
return posts
.map((post) => {
return `##Author: ${post.author}\n\n${post.isReply ? "Reply:" : "Post:"} ${post.post}\n\n---\n\n`
})
.join("\n")
}
export const parseTweetProfile = (html: string) => {
const $ = cheerio.load(html)
const profileName = $("[data-testid=UserProfileHeader_Items]")
.find("h1")
.text()
const profileBio = $("[data-testid=UserProfileHeader_Items]").find("p").text()
const profileLocation = $("[data-testid=UserProfileHeader_Items]")
.find("span")
.text()
const profileJoinDate = $("[data-testid=UserProfileHeader_Items]")
.find("span")
.text()
const profileFollowers = $(
"[data-testid=UserProfileHeader_Items] span"
).text()
const profileFollowing = $(
"[data-testid=UserProfileHeader_Items] span"
).text()
const postElements = $("[data-testid=tweetText]")
const authorElements = $("[data-testid=User-Name]")
const posts = postElements
.map((index, element) => {
const post = $(element).text()
const author = $(authorElements[index]).text()
return {
author,
post
}
})
.get()
return `## Profile: ${profileName}\n\nBio: ${profileBio}\n\nLocation: ${profileLocation}\n\nJoin Date: ${profileJoinDate}\n\nFollowers: ${profileFollowers}\n\nFollowing: ${profileFollowing}\n\nPosts: ${posts.map((post) => `Author: ${post.author}\n\nPost: ${post.post}\n\n---\n\n`).join("\n")}`
}

28
src/parser/wiki.ts Normal file
View File

@ -0,0 +1,28 @@
import * as cheerio from "cheerio"
export const isWikipedia = (url: string) => {
const WIKI_REGEX = /wikipedia\.org\/wiki\//g
return WIKI_REGEX.test(url)
}
export const parseWikipedia = (html: string) => {
if (!html) {
return ""
}
const $ = cheerio.load(html)
const title = $("h1#firstHeading")
const content = $("#mw-content-text")
content?.find("sup.reference")?.remove()
content?.find("div.thumb")?.remove()
content?.find("div.reflist")?.remove()
content?.find("div.navbox")?.remove()
content?.find("table.infobox")?.remove()
content?.find("div.sister-wikipedia")?.remove()
content?.find("div.sister-projects")?.remove()
content?.find("div.metadata")?.remove()
content?.find("div.vertical-navbox")?.remove()
content?.find("div.toc")?.remove()
const newHtml = content?.html()
return `<div>TITLE: ${title?.text()}</div><div>${newHtml}</div>`
}

View File

@ -6,6 +6,6 @@
"message": "Use your locally running AI models to assist you in your web browsing."
},
"openSidePanelToChat": {
"message": "Open Side Panel to Chat"
"message": "Open Copilot to Chat"
}
}

View File

@ -1,11 +1,11 @@
{
"extName": {
"message": "Page Assist - ローカルAIモデル用のWeb UI"
},
"extDescription": {
"message": "ローカルで実行中のAIモデルを使って、Webブラウジングをアシストします。"
},
"openSidePanelToChat": {
"message": "サイドパネルを開いてチャット"
}
}
"extName": {
"message": "Page Assist - ローカルAIモデル用のWeb UI"
},
"extDescription": {
"message": "ローカルで実行中のAIモデルを使って、Webブラウジングをアシストします。"
},
"openSidePanelToChat": {
"message": "チャットするためにCopilotを開く"
}
}

View File

@ -6,6 +6,6 @@
"message": "使用本地运行的 AI 模型来辅助您的网络浏览。"
},
"openSidePanelToChat": {
"message": "打开侧边栏进行聊天"
"message": "打开Copilot进行聊天"
}
}
}

6
src/queue/index.ts Normal file
View File

@ -0,0 +1,6 @@
import { processKnowledge } from "@/libs/process-knowledge"
import PubSub from "pubsub-js"
export const KNOWLEDGE_QUEUE = Symbol("queue")
PubSub.subscribe(KNOWLEDGE_QUEUE, processKnowledge)
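
Enqueuing work is then a single publish call; a sketch in which the knowledge id is a placeholder. pubsub-js passes the topic as the first callback argument, which is why processKnowledge takes `(msg, id)`.

import PubSub from "pubsub-js"
import { KNOWLEDGE_QUEUE } from "@/queue"

// Kick off background processing for a newly added knowledge entry
PubSub.publish(KNOWLEDGE_QUEUE, "example-knowledge-id") // placeholder id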

View File

@ -1,11 +1,12 @@
import { SettingsLayout } from "~/components/Layouts/SettingsOptionLayout"
import OptionLayout from "~/components/Layouts/Layout"
import { KnowledgeSettings } from "@/components/Option/Knowledge"
export const OptionKnowledgeBase = () => {
return (
<OptionLayout>
<SettingsLayout>
hey
<KnowledgeSettings />
</SettingsLayout>
</OptionLayout>
)

View File

@ -11,8 +11,7 @@ const DEFAULT_PAGE_SHARE_URL = "https://pageassist.xyz"
const DEFAULT_RAG_QUESTION_PROMPT =
"Given the following conversation and a follow up question, rephrase the follow up question to be a standalone question. Chat History: {chat_history} Follow Up Input: {question} Standalone question:"
const DEFAUTL_RAG_SYSTEM_PROMPT = `You are a helpful AI assistant. Use the following pieces of context to answer the question at the end. If you don't know the answer, just say you don't know. DO NOT try to make up an answer. If the question is not related to the context, politely respond that you are tuned to only answer questions that are related to the context. {context} Question: {question} Helpful answer in markdown:`
const DEFAUTL_RAG_SYSTEM_PROMPT = `You are a helpful AI assistant. Use the following pieces of context to answer the question at the end. If you don't know the answer, just say you don't know. DO NOT try to make up an answer. If the question is not related to the context, politely respond that you are tuned to only answer questions that are related to the context. {context} Question: {question} Helpful answer:`
const DEFAULT_WEBSEARCH_PROMP = `You are a helpful assistant that can answer any questions. You can use the following search results in case you want to answer questions about anything in real-time. The current date and time are {current_date_time}.
@ -61,7 +60,11 @@ export const isOllamaRunning = async () => {
}
}
export const getAllModels = async ({ returnEmpty = false }: { returnEmpty?: boolean }) => {
export const getAllModels = async ({
returnEmpty = false
}: {
returnEmpty?: boolean
}) => {
try {
const baseUrl = await getOllamaURL()
const response = await fetch(`${cleanUrl(baseUrl)}/api/tags`)
@ -110,11 +113,18 @@ export const deleteModel = async (model: string) => {
return response.json()
}
export const fetchChatModels = async () => {
export const fetchChatModels = async ({
returnEmpty = false
}: {
returnEmpty?: boolean
}) => {
try {
const baseUrl = await getOllamaURL()
const response = await fetch(`${cleanUrl(baseUrl)}/api/tags`)
if (!response.ok) {
if (returnEmpty) {
return []
}
throw new Error(response.statusText)
}
const json = await response.json()
@ -124,8 +134,8 @@ export const fetchChatModels = async () => {
modified_at: string
size: number
digest: string
details: {
parent_model: string
details?: {
parent_model?: string
format: string
family: string
families: string[]
@ -133,7 +143,7 @@ export const fetchChatModels = async () => {
quantization_level: string
}
}[]
return models.filter((model) => {
return models?.filter((model) => {
return (
!model?.details?.families?.includes("bert") &&
!model?.details?.families?.includes("nomic-bert")
@ -141,7 +151,7 @@ export const fetchChatModels = async () => {
})
} catch (e) {
console.error(e)
return []
return await getAllModels({ returnEmpty })
}
}
@ -264,7 +274,6 @@ export const saveForRag = async (
await setDefaultEmbeddingChunkOverlap(overlap)
}
export const getWebSearchPrompt = async () => {
const prompt = await storage.get("webSearchPrompt")
if (!prompt || prompt.length === 0) {
@ -280,23 +289,20 @@ export const setWebSearchPrompt = async (prompt: string) => {
export const geWebSearchFollowUpPrompt = async () => {
const prompt = await storage.get("webSearchFollowUpPrompt")
if (!prompt || prompt.length === 0) {
return DEFAULT_RAG_QUESTION_PROMPT;
return DEFAULT_RAG_QUESTION_PROMPT
}
return prompt
}
export const setWebSearchFollowUpPrompt = async (prompt: string) => {
await storage.set("webSearchFollowUpPrompt", prompt)
}
export const setWebPrompts = async (prompt: string, followUpPrompt: string) => {
await setWebSearchPrompt(prompt)
await setWebSearchFollowUpPrompt(followUpPrompt)
}
export const getPageShareUrl = async () => {
const pageShareUrl = await storage.get("pageShareUrl")
if (!pageShareUrl || pageShareUrl.length === 0) {
@ -305,7 +311,7 @@ export const getPageShareUrl = async () => {
return pageShareUrl
}
export const setPageShareUrl = async (pageShareUrl: string) => {
await storage.set("pageShareUrl", pageShareUrl)
}
}

94
src/services/tts.ts Normal file
View File

@ -0,0 +1,94 @@
import { Storage } from "@plasmohq/storage"
const storage = new Storage()
const DEFAULT_TTS_PROVIDER = "browser"
const AVAILABLE_TTS_PROVIDERS = ["browser"] as const
export const getTTSProvider = async (): Promise<
(typeof AVAILABLE_TTS_PROVIDERS)[number]
> => {
const ttsProvider = await storage.get("ttsProvider")
if (!ttsProvider || ttsProvider.length === 0) {
return DEFAULT_TTS_PROVIDER
}
return ttsProvider as (typeof AVAILABLE_TTS_PROVIDERS)[number]
}
export const setTTSProvider = async (ttsProvider: string) => {
await storage.set("ttsProvider", ttsProvider)
}
export const getBrowserTTSVoices = async () => {
const tts = await chrome.tts.getVoices()
return tts
}
export const getVoice = async () => {
const voice = await storage.get("voice")
return voice
}
export const setVoice = async (voice: string) => {
await storage.set("voice", voice)
}
export const isTTSEnabled = async () => {
const data = await storage.get("isTTSEnabled")
if (!data || data.length === 0) {
return true
}
return data === "true"
}
export const setTTSEnabled = async (isTTSEnabled: boolean) => {
await storage.set("isTTSEnabled", isTTSEnabled.toString())
}
export const isSSMLEnabled = async () => {
const data = await storage.get("isSSMLEnabled")
return data === "true"
}
export const setSSMLEnabled = async (isSSMLEnabled: boolean) => {
await storage.set("isSSMLEnabled", isSSMLEnabled.toString())
}
export const getTTSSettings = async () => {
const [ttsEnabled, ttsProvider, browserTTSVoices, voice, ssmlEnabled] =
await Promise.all([
isTTSEnabled(),
getTTSProvider(),
getBrowserTTSVoices(),
getVoice(),
isSSMLEnabled()
])
return {
ttsEnabled,
ttsProvider,
browserTTSVoices,
voice,
ssmlEnabled
}
}
export const setTTSSettings = async ({
ttsEnabled,
ttsProvider,
voice,
ssmlEnabled
}: {
ttsEnabled: boolean
ttsProvider: string
voice: string
ssmlEnabled: boolean
}) => {
await Promise.all([
setTTSEnabled(ttsEnabled),
setTTSProvider(ttsProvider),
setVoice(voice),
setSSMLEnabled(ssmlEnabled)
])
}
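
A sketch of how the UI side might read these settings and speak a reply with the browser provider; chrome.tts.speak is the standard extension API (the "tts" permission is added to the manifest later in this change), and the reply text is a placeholder.

import { getTTSSettings } from "@/services/tts"

const { ttsEnabled, voice, ssmlEnabled } = await getTTSSettings()
if (ttsEnabled) {
  const reply = "Hello from Page Assist" // placeholder text to read aloud
  chrome.tts.speak(ssmlEnabled ? `<speak>${reply}</speak>` : reply, {
    voiceName: voice || undefined
  })
}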

View File

@ -1,3 +1,4 @@
import { Knowledge } from "@/db/knowledge"
import { create } from "zustand"
type WebSearch = {
@ -48,16 +49,19 @@ type State = {
setIsEmbedding: (isEmbedding: boolean) => void
speechToTextLanguage: string
setSpeechToTextLanguage: (language: string) => void
webSearch: boolean;
setWebSearch: (webSearch: boolean) => void;
isSearchingInternet: boolean;
setIsSearchingInternet: (isSearchingInternet: boolean) => void;
webSearch: boolean
setWebSearch: (webSearch: boolean) => void
isSearchingInternet: boolean
setIsSearchingInternet: (isSearchingInternet: boolean) => void
selectedSystemPrompt: string | null
setSelectedSystemPrompt: (selectedSystemPrompt: string) => void
selectedQuickPrompt: string | null
setSelectedQuickPrompt: (selectedQuickPrompt: string) => void
selectedKnowledge: Knowledge | null
setSelectedKnowledge: (selectedKnowledge: Knowledge) => void
}
export const useStoreMessageOption = create<State>((set) => ({
@ -89,7 +93,11 @@ export const useStoreMessageOption = create<State>((set) => ({
isSearchingInternet: false,
setIsSearchingInternet: (isSearchingInternet) => set({ isSearchingInternet }),
selectedSystemPrompt: null,
setSelectedSystemPrompt: (selectedSystemPrompt) => set({ selectedSystemPrompt }),
setSelectedSystemPrompt: (selectedSystemPrompt) =>
set({ selectedSystemPrompt }),
selectedQuickPrompt: null,
setSelectedQuickPrompt: (selectedQuickPrompt) => set({ selectedQuickPrompt }),
selectedKnowledge: null,
setSelectedKnowledge: (selectedKnowledge) => set({ selectedKnowledge })
}))

View File

@ -3,9 +3,15 @@ import { create } from "zustand"
type State = {
sendWhenEnter: boolean
setSendWhenEnter: (sendWhenEnter: boolean) => void
ttsEnabled: boolean
setTTSEnabled: (isTTSEnabled: boolean) => void
}
export const useWebUI = create<State>((set) => ({
sendWhenEnter: true,
setSendWhenEnter: (sendWhenEnter) => set({ sendWhenEnter })
setSendWhenEnter: (sendWhenEnter) => set({ sendWhenEnter }),
ttsEnabled: true,
setTTSEnabled: (ttsEnabled) => set({ ttsEnabled })
}))

4
src/utils/clean.ts Normal file
View File

@ -0,0 +1,4 @@
export const cleanUnwantedUnicode = (text: string) => {
const UNICODE_REGEX = /[\u200B-\u200D\uFEFF]/g
return text.replace(UNICODE_REGEX, "").trim()
}

View File

@ -0,0 +1,20 @@
export function markdownToSSML(markdown: string): string {
let ssml = markdown.replace(/\n/g, "<break/>")
ssml = ssml.replace(
/^(#{1,6}) (.*?)(?=\r?\n\s*?(?:\r?\n|$))/gm,
(match, hashes, heading) => {
const level = hashes.length
const rate = (level - 1) * 10 + 100
return `<prosody rate="${rate}%">${heading}</prosody>`
}
)
ssml = ssml.replace(/\*\*(.*?)\*\*/g, "<emphasis>$1</emphasis>")
ssml = ssml.replace(
    /\*(.*?)\*/g,
'<amazon:effect name="whispered">$1</amazon:effect>'
)
ssml = `<speak>${ssml}</speak>`
return `<?xml version="1.0"?>${ssml}`
}
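
A quick illustration of the emphasis handling, assuming the regexes reconstructed above; the expected output is a sketch, not part of the commit.

const ssml = markdownToSSML("This is **important** and *quiet*.")
// ssml === '<?xml version="1.0"?><speak>This is <emphasis>important</emphasis>' +
//   ' and <amazon:effect name="whispered">quiet</amazon:effect>.</speak>'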

View File

@ -2,62 +2,75 @@ import { PageAssistHtmlLoader } from "~/loader/html"
import { RecursiveCharacterTextSplitter } from "langchain/text_splitter"
import { MemoryVectorStore } from "langchain/vectorstores/memory"
import { OllamaEmbeddings } from "@langchain/community/embeddings/ollama"
import { defaultEmbeddingChunkOverlap, defaultEmbeddingChunkSize } from "@/services/ollama"
import {
defaultEmbeddingChunkOverlap,
defaultEmbeddingChunkSize
} from "@/services/ollama"
import { PageAssistPDFLoader } from "@/loader/pdf"
export const getLoader = ({ html, pdf, type, url }: {
url: string,
html: string,
type: string,
pdf: { content: string, page: number }[]
export const getLoader = ({
html,
pdf,
type,
url
}: {
url: string
html: string
type: string
pdf: { content: string; page: number }[]
}) => {
if (type === "pdf") {
return new PageAssistPDFLoader({
pdf,
url
})
} else {
return new PageAssistHtmlLoader({
html,
url
})
}
if (type === "pdf") {
return new PageAssistPDFLoader({
pdf,
url
})
} else {
return new PageAssistHtmlLoader({
html,
url
})
}
}
export const memoryEmbedding = async (
{ html,
keepTrackOfEmbedding, ollamaEmbedding, pdf, setIsEmbedding, setKeepTrackOfEmbedding, type, url }: {
url: string,
html: string,
type: string,
pdf: { content: string, page: number }[],
keepTrackOfEmbedding: Record<string, MemoryVectorStore>,
ollamaEmbedding: OllamaEmbeddings,
setIsEmbedding: (value: boolean) => void,
setKeepTrackOfEmbedding: (value: Record<string, MemoryVectorStore>) => void
}
) => {
setIsEmbedding(true)
export const memoryEmbedding = async ({
html,
keepTrackOfEmbedding,
ollamaEmbedding,
pdf,
setIsEmbedding,
setKeepTrackOfEmbedding,
type,
url
}: {
url: string
html: string
type: string
pdf: { content: string; page: number }[]
keepTrackOfEmbedding: Record<string, MemoryVectorStore>
ollamaEmbedding: OllamaEmbeddings
setIsEmbedding: (value: boolean) => void
setKeepTrackOfEmbedding: (value: Record<string, MemoryVectorStore>) => void
}) => {
setIsEmbedding(true)
const loader = getLoader({ html, pdf, type, url })
const docs = await loader.load()
const chunkSize = await defaultEmbeddingChunkSize()
const chunkOverlap = await defaultEmbeddingChunkOverlap()
const textSplitter = new RecursiveCharacterTextSplitter({
chunkSize,
chunkOverlap
})
const loader = getLoader({ html, pdf, type, url })
const docs = await loader.load()
const chunkSize = await defaultEmbeddingChunkSize()
const chunkOverlap = await defaultEmbeddingChunkOverlap()
const textSplitter = new RecursiveCharacterTextSplitter({
chunkSize,
chunkOverlap
})
const chunks = await textSplitter.splitDocuments(docs)
const chunks = await textSplitter.splitDocuments(docs)
const store = new MemoryVectorStore(ollamaEmbedding)
const store = new MemoryVectorStore(ollamaEmbedding)
await store.addDocuments(chunks)
setKeepTrackOfEmbedding({
...keepTrackOfEmbedding,
[url]: store
})
setIsEmbedding(false)
return store
}
await store.addDocuments(chunks)
setKeepTrackOfEmbedding({
...keepTrackOfEmbedding,
[url]: store
})
setIsEmbedding(false)
return store
}

32
src/utils/to-source.ts Normal file
View File

@ -0,0 +1,32 @@
import { Source } from "@/db/knowledge"
import { UploadFile } from "antd"
export const toBase64 = (file: File | Blob): Promise<string> => {
return new Promise((resolve, reject) => {
const reader = new FileReader()
reader.readAsDataURL(file)
reader.onload = () => resolve(reader.result as string)
reader.onerror = (error) => reject(error)
})
}
export const toArrayBufferFromBase64 = async (base64: string) => {
const res = await fetch(base64)
const blob = await res.blob()
return await blob.arrayBuffer()
}
export const generateSourceId = () => {
return "XXXXXXXX-XXXX-4XXX-YXXX-XXXXXXXXXXXX".replace(/[XY]/g, (c) => {
const r = (Math.random() * 16) | 0
const v = c === "X" ? r : (r & 0x3) | 0x8
return v.toString(16)
})
}
export const convertToSource = async (file: UploadFile): Promise<Source> => {
let type = file.type
let filename = file.name
const content = await toBase64(file.originFileObj)
return { content, type, filename, source_id: generateSourceId() }
}
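
A sketch of the upload path these helpers support, assuming an antd Upload handler provides the file; names are illustrative.

import type { UploadFile } from "antd"
import { convertToSource, toArrayBufferFromBase64 } from "@/utils/to-source"

// file comes from an antd <Upload> onChange handler with originFileObj set
const handleFile = async (file: UploadFile) => {
  const source = await convertToSource(file) // { content, type, filename, source_id }
  const buffer = await toArrayBufferFromBase64(source.content) // back to raw bytes when needed
  return { source, byteLength: buffer.byteLength }
}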

View File

@ -41,6 +41,8 @@ export const localDuckDuckGoSearch = async (query: string) => {
.find("a.result__snippet")
.attr("href")
.replace("//duckduckgo.com/l/?uddg=", "")
.replace(/&rut=.*/, "")
const content = $(result).find("a.result__snippet").text()
const decodedLink = decodeURIComponent(link)
return { title, link: decodedLink, content }
@ -93,7 +95,7 @@ export const webDuckDuckGoSearch = async (query: string) => {
const textSplitter = new RecursiveCharacterTextSplitter({
chunkSize,
chunkOverlap
})
})
const chunks = await textSplitter.splitDocuments(docs)

View File

@ -24,7 +24,7 @@ export default defineConfig({
srcDir: "src",
outDir: "build",
manifest: {
version: "1.1.1",
version: "1.1.3",
name: '__MSG_extName__',
description: '__MSG_extDescription__',
default_locale: 'en',
@ -52,7 +52,8 @@ export default defineConfig({
"declarativeNetRequest",
"action",
"unlimitedStorage",
"contextMenus"
"contextMenus",
"tts"
]
}
})