Webui chat with x added

parent aae2ddecbc
commit dd496b7b98

src/chain/chat-with-x.ts (new file, +154)
@@ -0,0 +1,154 @@
+import { BaseLanguageModel } from "@langchain/core/language_models/base"
+import { Document } from "@langchain/core/documents"
+import {
+  ChatPromptTemplate,
+  MessagesPlaceholder,
+  PromptTemplate
+} from "@langchain/core/prompts"
+import { AIMessage, BaseMessage, HumanMessage } from "@langchain/core/messages"
+import { StringOutputParser } from "@langchain/core/output_parsers"
+import {
+  Runnable,
+  RunnableBranch,
+  RunnableLambda,
+  RunnableMap,
+  RunnableSequence
+} from "@langchain/core/runnables"
+
+type RetrievalChainInput = {
+  chat_history: string
+  question: string
+}
+
+const formatChatHistoryAsString = (history: BaseMessage[]) => {
+  return history
+    .map((message) => `${message._getType()}: ${message.content}`)
+    .join("\n")
+}
+
+export const formatDocs = (docs: Document[]) => {
+  return docs
+    .map((doc, i) => `<doc id='${i}'>${doc.pageContent}</doc>`)
+    .join("\n")
+}
+
+const serializeHistory = (input: any) => {
+  const chatHistory = input.chat_history || []
+  const convertedChatHistory = []
+  for (const message of chatHistory) {
+    if (message.human !== undefined) {
+      convertedChatHistory.push(new HumanMessage({ content: message.human }))
+    }
+    if (message["ai"] !== undefined) {
+      convertedChatHistory.push(new AIMessage({ content: message.ai }))
+    }
+  }
+  return convertedChatHistory
+}
+
+const createRetrieverChain = (
+  llm: BaseLanguageModel,
+  retriever: Runnable,
+  question_template: string
+) => {
+  const CONDENSE_QUESTION_PROMPT =
+    PromptTemplate.fromTemplate(question_template)
+  const condenseQuestionChain = RunnableSequence.from([
+    CONDENSE_QUESTION_PROMPT,
+    llm,
+    new StringOutputParser()
+  ]).withConfig({
+    runName: "CondenseQuestion"
+  })
+  const hasHistoryCheckFn = RunnableLambda.from(
+    (input: RetrievalChainInput) => input.chat_history.length > 0
+  ).withConfig({ runName: "HasChatHistoryCheck" })
+  const conversationChain = condenseQuestionChain.pipe(retriever).withConfig({
+    runName: "RetrievalChainWithHistory"
+  })
+  const basicRetrievalChain = RunnableLambda.from(
+    (input: RetrievalChainInput) => input.question
+  )
+    .withConfig({
+      runName: "Itemgetter:question"
+    })
+    .pipe(retriever)
+    .withConfig({ runName: "RetrievalChainWithNoHistory" })
+
+  return RunnableBranch.from([
+    [hasHistoryCheckFn, conversationChain],
+    basicRetrievalChain
+  ]).withConfig({
+    runName: "FindDocs"
+  })
+}
+
+export const createChatWithXChain = ({
+  llm,
+  question_template,
+  question_llm,
+  retriever,
+  response_template
+}: {
+  llm: BaseLanguageModel
+  question_llm: BaseLanguageModel
+  retriever: Runnable
+  question_template: string
+  response_template: string
+}) => {
+  const retrieverChain = createRetrieverChain(
+    question_llm,
+    retriever,
+    question_template
+  )
+  const context = RunnableMap.from({
+    context: RunnableSequence.from([
+      ({ question, chat_history }) => {
+        return {
+          question: question,
+          chat_history: formatChatHistoryAsString(chat_history)
+        }
+      },
+      retrieverChain,
+      RunnableLambda.from(formatDocs).withConfig({
+        runName: "FormatDocumentChunks"
+      })
+    ]),
+    question: RunnableLambda.from(
+      (input: RetrievalChainInput) => input.question
+    ).withConfig({
+      runName: "Itemgetter:question"
+    }),
+    chat_history: RunnableLambda.from(
+      (input: RetrievalChainInput) => input.chat_history
+    ).withConfig({
+      runName: "Itemgetter:chat_history"
+    })
+  }).withConfig({ tags: ["RetrieveDocs"] })
+  const prompt = ChatPromptTemplate.fromMessages([
+    ["system", response_template],
+    new MessagesPlaceholder("chat_history"),
+    ["human", "{question}"]
+  ])
+
+  const responseSynthesizerChain = RunnableSequence.from([
+    prompt,
+    llm,
+    new StringOutputParser()
+  ]).withConfig({
+    tags: ["GenerateResponse"]
+  })
+  return RunnableSequence.from([
+    {
+      question: RunnableLambda.from(
+        (input: RetrievalChainInput) => input.question
+      ).withConfig({
+        runName: "Itemgetter:question"
+      }),
+      chat_history: RunnableLambda.from(serializeHistory).withConfig({
+        runName: "SerializeHistory"
+      })
+    },
+    context,
+    responseSynthesizerChain
+  ])
+}
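For orientation, a minimal usage sketch of createChatWithXChain follows. It is not part of the commit: the in-memory store, model names, base URL, and template wording are assumptions for illustration only.

// Hypothetical usage sketch (assumes ESM top-level await); nothing here is from the commit.
import { ChatOllama } from "@langchain/community/chat_models/ollama"
import { OllamaEmbeddings } from "@langchain/community/embeddings/ollama"
import { MemoryVectorStore } from "langchain/vectorstores/memory"
import { createChatWithXChain } from "@/chain/chat-with-x"

const llm = new ChatOllama({ model: "llama2", baseUrl: "http://localhost:11434" })
const embeddings = new OllamaEmbeddings({ model: "llama2", baseUrl: "http://localhost:11434" })

// A tiny in-memory index standing in for the extension's own vector store.
const store = await MemoryVectorStore.fromTexts(
  ["Pricing starts at $10/month.", "The license is MIT."],
  [{ source: "doc.pdf" }, { source: "doc.pdf" }],
  embeddings
)

const chain = createChatWithXChain({
  llm,
  question_llm: llm, // a smaller model could handle question condensing
  retriever: store.asRetriever(),
  question_template:
    "Chat history:\n{chat_history}\nRewrite the follow-up as a standalone question: {question}",
  response_template: "Answer using only this context:\n{context}"
})

// chat_history entries use the { human, ai } shape that serializeHistory expects.
const answer = await chain.invoke({
  question: "How much does it cost?",
  chat_history: [{ human: "What is this doc about?", ai: "Pricing and licensing." }]
})
console.log(answer)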
@@ -12,7 +12,7 @@ export const SelectedKnowledge = ({ knowledge, onClose }: Props) => {
     <div className="mb-3 border flex justify-between items-center rounded-md p-2 dark:border-gray-600">
       <div className="flex flex-col gap-2">
         <div>
-          <h3 className="text-lg font-semibold dark:text-gray-100">
+          <h3 className="text-sm font-semibold dark:text-gray-100">
             {knowledge.title}
           </h3>
         </div>
@@ -20,7 +20,7 @@ export const SelectedKnowledge = ({ knowledge, onClose }: Props) => {
           {knowledge.source.map((source, index) => (
             <div
               key={index}
-              className="inline-flex gap-2 border rounded-md p-1 dark:border-gray-600 dark:text-gray-100">
+              className="inline-flex gap-2 text-xs border rounded-md p-1 dark:border-gray-600 dark:text-gray-100">
               <KnowledgeIcon type={source.type} className="w-4 h-4" />
               {source.filename}
             </div>
@@ -158,14 +158,7 @@ export const PlaygroundForm = ({ dropedFile }: Props) => {
   }
   return (
     <div className="px-3 pt-3 md:px-6 md:pt-6 bg-gray-50 dark:bg-[#262626] border rounded-t-xl dark:border-gray-600">
-      {selectedKnowledge && (
-        <SelectedKnowledge
-          onClose={() => {
-            setSelectedKnowledge(null)
-          }}
-          knowledge={selectedKnowledge}
-        />
-      )}
       <div
         className={`h-full rounded-md shadow relative ${
           form.values.image.length === 0 ? "hidden" : "block"
@@ -28,8 +28,8 @@ export class PageAssistVectorDb {
       } else {
         const data = result[id] as VectorData
         if (!data) {
-          console.log("Creating new vector")
-          this.db.set({ [id]: { id, vectors: [vector] } }, () => {
+          console.log("Creating new vector", vector)
+          this.db.set({ [id]: { id, vectors: vector } }, () => {
             if (chrome.runtime.lastError) {
               reject(chrome.runtime.lastError)
             } else {
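The vectors change above suggests the incoming vector argument is already an array of entries, so wrapping it as [vector] nested it one level too deep for readers that spread data.vectors (see the PageAssistVectorStore hunk further down). A sketch of the stored shape this implies; the field names are inferred from the surrounding code, not authoritative:

// Inferred shapes, for illustration only.
type VectorEntry = {
  embedding: number[]               // consumed by similarity(query, vector.embedding)
  content: string                   // page text used to rebuild Documents
  metadata: Record<string, unknown> // e.g. { source, page, type }
}

type VectorData = {
  id: string
  vectors: VectorEntry[] // vectors: vector stores the flat array;
                         // [vector] would have persisted VectorEntry[][]
}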
@@ -1,8 +1,10 @@
 import React from "react"
 import { cleanUrl } from "~/libs/clean-url"
 import {
+  defaultEmbeddingModelForRag,
   geWebSearchFollowUpPrompt,
   getOllamaURL,
+  promptForRag,
   systemPromptForNonRagOption
 } from "~/services/ollama"
 import { type ChatHistory, type Message } from "~/store/option"
@@ -23,13 +25,16 @@ import { generateHistory } from "@/utils/generate-history"
 import { useTranslation } from "react-i18next"
 import { saveMessageOnError, saveMessageOnSuccess } from "./chat-helper"
 import { usePageAssist } from "@/context"
+import { OllamaEmbeddings } from "@langchain/community/embeddings/ollama"
+import { PageAssistVectorStore } from "@/libs/PageAssistVectorStore"
+import { formatDocs } from "@/chain/chat-with-x"
 
 export const useMessageOption = () => {
   const {
     controller: abortController,
     setController: setAbortController,
     messages,
-    setMessages,
+    setMessages
   } = usePageAssist()
   const {
     history,
@@ -502,6 +507,213 @@ export const useMessageOption = () => {
       }
     }
   }
 
+  const ragMode = async (
+    message: string,
+    image: string,
+    isRegenerate: boolean,
+    messages: Message[],
+    history: ChatHistory,
+    signal: AbortSignal
+  ) => {
+    const url = await getOllamaURL()
+
+    const ollama = new ChatOllama({
+      model: selectedModel!,
+      baseUrl: cleanUrl(url)
+    })
+
+    let newMessage: Message[] = []
+    let generateMessageId = generateID()
+
+    if (!isRegenerate) {
+      newMessage = [
+        ...messages,
+        {
+          isBot: false,
+          name: "You",
+          message,
+          sources: [],
+          images: []
+        },
+        {
+          isBot: true,
+          name: selectedModel,
+          message: "▋",
+          sources: [],
+          id: generateMessageId
+        }
+      ]
+    } else {
+      newMessage = [
+        ...messages,
+        {
+          isBot: true,
+          name: selectedModel,
+          message: "▋",
+          sources: [],
+          id: generateMessageId
+        }
+      ]
+    }
+    setMessages(newMessage)
+    let fullText = ""
+    let contentToSave = ""
+
+    const embeddingModle = await defaultEmbeddingModelForRag()
+    const ollamaUrl = await getOllamaURL()
+    const ollamaEmbedding = new OllamaEmbeddings({
+      model: embeddingModle || selectedModel,
+      baseUrl: cleanUrl(ollamaUrl)
+    })
+
+    let vectorstore = await PageAssistVectorStore.fromExistingIndex(
+      ollamaEmbedding,
+      {
+        file_id: null,
+        knownledge_id: selectedKnowledge.id
+      }
+    )
+
+    try {
+      let query = message
+      const { ragPrompt: systemPrompt, ragQuestionPrompt: questionPrompt } =
+        await promptForRag()
+      if (newMessage.length > 2) {
+        const lastTenMessages = newMessage.slice(-10)
+        lastTenMessages.pop()
+        const chat_history = lastTenMessages
+          .map((message) => {
+            return `${message.isBot ? "Assistant: " : "Human: "}${message.message}`
+          })
+          .join("\n")
+        const promptForQuestion = questionPrompt
+          .replaceAll("{chat_history}", chat_history)
+          .replaceAll("{question}", message)
+        const questionOllama = new ChatOllama({
+          model: selectedModel!,
+          baseUrl: cleanUrl(url)
+        })
+        const response = await questionOllama.invoke(promptForQuestion)
+        query = response.content.toString()
+      }
+
+      const docs = await vectorstore.similaritySearch(query, 4)
+      const context = formatDocs(docs)
+      const source = docs.map((doc) => {
+        return {
+          name: doc?.metadata?.source || "untitled",
+          type: doc?.metadata?.type || "unknown",
+          url: ""
+        }
+      })
+      message = message.trim().replaceAll("\n", " ")
+
+      let humanMessage = new HumanMessage({
+        content: [
+          {
+            text: systemPrompt
+              .replace("{context}", context)
+              .replace("{question}", message),
+            type: "text"
+          }
+        ]
+      })
+
+      const applicationChatHistory = generateHistory(history)
+
+      const chunks = await ollama.stream(
+        [...applicationChatHistory, humanMessage],
+        {
+          signal: signal
+        }
+      )
+      let count = 0
+      for await (const chunk of chunks) {
+        contentToSave += chunk.content
+        fullText += chunk.content
+        if (count === 0) {
+          setIsProcessing(true)
+        }
+        setMessages((prev) => {
+          return prev.map((message) => {
+            if (message.id === generateMessageId) {
+              return {
+                ...message,
+                message: fullText.slice(0, -1) + "▋"
+              }
+            }
+            return message
+          })
+        })
+        count++
+      }
+      // update the message with the full text
+      setMessages((prev) => {
+        return prev.map((message) => {
+          if (message.id === generateMessageId) {
+            return {
+              ...message,
+              message: fullText,
+              sources: source
+            }
+          }
+          return message
+        })
+      })
+
+      setHistory([
+        ...history,
+        {
+          role: "user",
+          content: message,
+          image
+        },
+        {
+          role: "assistant",
+          content: fullText
+        }
+      ])
+
+      await saveMessageOnSuccess({
+        historyId,
+        setHistoryId,
+        isRegenerate,
+        selectedModel: selectedModel,
+        message,
+        image,
+        fullText,
+        source
+      })
+
+      setIsProcessing(false)
+      setStreaming(false)
+    } catch (e) {
+      const errorSave = await saveMessageOnError({
+        e,
+        botMessage: fullText,
+        history,
+        historyId,
+        image,
+        selectedModel,
+        setHistory,
+        setHistoryId,
+        userMessage: message,
+        isRegenerating: isRegenerate
+      })
+
+      if (!errorSave) {
+        notification.error({
+          message: t("error"),
+          description: e?.message || t("somethingWentWrong")
+        })
+      }
+      setIsProcessing(false)
+      setStreaming(false)
+    } finally {
+      setAbortController(null)
+    }
+  }
+
   const onSubmit = async ({
     message,
     image,
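ragMode assumes the two templates returned by promptForRag() expose specific placeholders: the question prompt needs {chat_history} and {question} for the replaceAll calls, and the system prompt needs {context} and {question} for the replace calls. A sketch of template strings that would satisfy that contract (the wording is illustrative, not the shipped defaults):

// Illustrative templates; the real strings come from promptForRag().
const ragQuestionPrompt =
  "Given the conversation below, rewrite the follow-up as a standalone question.\n" +
  "Conversation:\n{chat_history}\n" +
  "Follow-up: {question}"

const ragPrompt =
  "Answer the question using only the context below.\n" +
  "Context:\n{context}\n" +
  "Question: {question}"

Note that String.prototype.replace with a string pattern substitutes only the first occurrence, so a system prompt that repeats {context} or {question} would be filled only once.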
@@ -527,6 +739,16 @@ export const useMessageOption = () => {
       setAbortController(controller)
       signal = controller.signal
     }
+    if (selectedKnowledge) {
+      await ragMode(
+        message,
+        image,
+        isRegenerate,
+        chatHistory || messages,
+        memory || history,
+        signal
+      )
+    } else {
       if (webSearch) {
         await searchChatMode(
           message,
@@ -547,6 +769,7 @@ export const useMessageOption = () => {
         )
       }
     }
+    }
 
   const regenerateLastMessage = async () => {
     const isOk = validateBeforeSubmit()
@@ -3,6 +3,7 @@ import { VectorStore } from "@langchain/core/vectorstores"
 import type { EmbeddingsInterface } from "@langchain/core/embeddings"
 import { Document } from "@langchain/core/documents"
 import { getVector, insertVector } from "@/db/vector"
+import { cp } from "fs"
 
 /**
  * Interface representing a vector in memory. It includes the content
@@ -116,8 +117,10 @@ export class PageAssistVectorStore extends VectorStore {
       })
       return filter(doc)
     }
-    const pgVector = await getVector(`vector:${this.knownledge_id}`)
-    const filteredMemoryVectors = pgVector.vectors.filter(filterFunction)
+    const data = await getVector(`vector:${this.knownledge_id}`)
+    const pgVector = [...data.vectors]
+    const filteredMemoryVectors = pgVector.filter(filterFunction)
+    console.log(filteredMemoryVectors)
     const searches = filteredMemoryVectors
       .map((vector, index) => ({
         similarity: this.similarity(query, vector.embedding),
@@ -125,7 +128,7 @@ export class PageAssistVectorStore extends VectorStore {
       }))
       .sort((a, b) => (a.similarity > b.similarity ? -1 : 0))
       .slice(0, k)
+    console.log(searches)
     const result: [Document, number][] = searches.map((search) => [
       new Document({
         metadata: filteredMemoryVectors[search.index].metadata,
@@ -133,7 +136,7 @@ export class PageAssistVectorStore extends VectorStore {
       }),
       search.similarity
     ])
+    console.log(result)
     return result
   }
 
@@ -40,7 +40,7 @@ export class PageAssistPDFUrlLoader
         .trim()
       documents.push({
         pageContent: text,
-        metadata: { source: this.name, page: i }
+        metadata: { source: this.name, page: i, type: "pdf" }
       })
     }
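Tagging PDF pages with type: "pdf" matters downstream: ragMode builds its source entries from doc?.metadata?.type, so documents indexed without the tag fall back to "unknown". A minimal sketch of that mapping; the document values here are illustrative:

// Illustrative: how ragMode derives a source entry from a retrieved doc.
import { Document } from "@langchain/core/documents"

const doc = new Document({
  pageContent: "Quarterly revenue grew 12%.",
  metadata: { source: "report.pdf", page: 3, type: "pdf" }
})

const source = {
  name: doc?.metadata?.source || "untitled",
  type: doc?.metadata?.type || "unknown", // now "pdf" rather than "unknown"
  url: ""
}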