fix: bug fix
commit 51804bc9ae
parent 7b6b6751cc
@@ -120,20 +120,30 @@ export const Header: React.FC<Props> = ({
           {"/"}
         </span>
         <div className="hidden lg:block">
-          <PageAssistSelect
+          <Select
             className="w-80"
             placeholder={t("common:selectAModel")}
-            loadingText={t("common:selectAModel")}
+            // loadingText={t("common:selectAModel")}
             value={selectedModel}
             onChange={(e) => {
-              setSelectedModel(e.value)
-              localStorage.setItem("selectedModel", e.value)
+              setSelectedModel(e)
+              localStorage.setItem("selectedModel", e)
             }}
-            isLoading={isModelsLoading}
+            filterOption={(input, option) => {
+              //@ts-ignore
+              return (
+                option?.label?.props["data-title"]
+                  ?.toLowerCase()
+                  ?.indexOf(input.toLowerCase()) >= 0
+              )
+            }}
+            showSearch
+            loading={isModelsLoading}
             options={models?.map((model) => ({
               label: (
                 <span
                   key={model.model}
+                  data-title={model.name}
                   className="flex flex-row gap-3 items-center ">
                   <ProviderIcons
                     provider={model?.provider}
@@ -144,9 +154,10 @@ export const Header: React.FC<Props> = ({
               ),
               value: model.model
             }))}
-            onRefresh={() => {
-              refetch()
-            }}
+            size="large"
+            // onRefresh={() => {
+            // refetch()
+            // }}
           />
         </div>
         <div className="lg:hidden">
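
The new Select is searchable: showSearch plus the filterOption above match the typed text against the option label's data-title attribute, which is why data-title={model.name} is added to each rendered label. A minimal self-contained sketch of that matching logic (names and types here are illustrative, not the project's code):

// Assumed standalone equivalent of the filterOption added above:
// case-insensitive match of the search input against the label's data-title.
const matchesModelName = (
  input: string,
  option?: { label?: { props?: Record<string, string | undefined> } }
): boolean =>
  (option?.label?.props?.["data-title"] ?? "")
    .toLowerCase()
    .includes(input.toLowerCase())
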
@@ -122,7 +122,6 @@ const generateChatImage = async (messages: Message[]) => {
   })

   canvas.height = totalHeight
-  console.log(totalHeight)

   ctx.fillStyle = "#ffffff"
   ctx.fillRect(0, 0, canvas.width, canvas.height)
@@ -18,7 +18,7 @@ export const KnowledgeSettings = () => {
   const { data, status } = useQuery({
     queryKey: ["fetchAllKnowledge"],
     queryFn: () => getAllKnowledge(),
-    refetchInterval: 1000
+    refetchInterval: 1000,
   })

   const { mutate: deleteKnowledgeMutation, isPending: isDeleting } =
@@ -104,7 +104,6 @@ export const Playground = () => {
       const lastUsedPrompt = await getLastUsedChatSystemPrompt(
         recentChat.history.id
       )
-      console.log("lastUsedPrompt", lastUsedPrompt)
       if (lastUsedPrompt) {
         if (lastUsedPrompt.prompt_id) {
           const prompt = await getPromptById(lastUsedPrompt.prompt_id)
@@ -291,7 +291,7 @@ export const GeneralSettings = () => {
             await browser.storage.local.clear()
             await browser.storage.session.clear()
           } catch (e) {
-            console.log("Error clearing storage:", e)
+            console.error("Error clearing storage:", e)
           }
         }
       }}
@@ -46,7 +46,7 @@ export const TTSModeSettings = ({ hideBorder }: { hideBorder?: boolean }) => {
         return { voices, models }
       }
     } catch (e) {
-      console.log(e)
+      console.error(e)
       message.error("Error fetching ElevenLabs data")
     }
     return null
@@ -373,7 +373,6 @@ export const deleteChatForEdit = async (history_id: string, index: number) => {
   const db = new PageAssitDatabase()
   const chatHistory = (await db.getChatHistory(history_id)).reverse()
   const previousHistory = chatHistory.slice(0, index + 1)
-  // console.log(previousHistory)
   await db.db.set({ [history_id]: previousHistory.reverse() })
 }

@@ -163,8 +163,8 @@ export const getAllKnowledge = async (status?: string) => {

   if (status) {
     return data
-      .filter((d) => d.db_type === "knowledge")
-      .filter((d) => d.status === status)
+      .filter((d) => d?.db_type === "knowledge")
+      .filter((d) => d?.status === status)
       .map((d) => {
         d.source.forEach((s) => {
           delete s.content
@@ -175,9 +175,9 @@ export const getAllKnowledge = async (status?: string) => {
   }

   return data
-    .filter((d) => d.db_type === "knowledge")
+    .filter((d) => d?.db_type === "knowledge")
     .map((d) => {
-      d.source.forEach((s) => {
+      d?.source.forEach((s) => {
         delete s.content
       })
       return d
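
The recurring change in this commit is adding optional chaining to these filters so an undefined or malformed record is skipped instead of throwing. A small self-contained illustration of the guard (the Row type and sample data are made up for the example):

// Without the "?", accessing db_type on the undefined entry would throw.
type Row = { db_type?: string; status?: string } | undefined

const rows: Row[] = [{ db_type: "knowledge", status: "finished" }, undefined]

const finished = rows
  .filter((d) => d?.db_type === "knowledge")
  .filter((d) => d?.status === "finished")

console.log(finished.length) // 1
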
@@ -293,7 +293,7 @@ export const getModelInfo = async (id: string) => {
 export const getAllCustomModels = async () => {
   const db = new ModelDb()
   const models = (await db.getAll()).filter(
-    (model) => model.db_type === "openai_model"
+    (model) => model?.db_type === "openai_model"
   )
   const modelsWithProvider = await Promise.all(
     models.map(async (model) => {
@@ -324,7 +324,7 @@ export const deleteAllModelsByProviderId = async (provider_id: string) => {
 export const isLookupExist = async (lookup: string) => {
   const db = new ModelDb()
   const models = await db.getAll()
-  const model = models.find((model) => model.lookup === lookup)
+  const model = models.find((model) => model?.lookup === lookup)
   return model ? true : false
 }

@@ -394,6 +394,7 @@ export const dynamicFetchLlamafile = async ({
 export const ollamaFormatAllCustomModels = async (
   modelType: "all" | "chat" | "embedding" = "all"
 ) => {
+  try {
     const [allModles, allProviders] = await Promise.all([
       getAllCustomModels(),
       getAllOpenAIConfig()
@@ -475,4 +476,8 @@ export const ollamaFormatAllCustomModels = async (
     })

     return ollamaModels
+  } catch(e) {
+    console.error(e)
+    return []
+  }
 }
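
Wrapping the whole model-formatting body in try/catch means a failing custom-model or provider lookup now yields an empty list instead of a rejected promise. A generic, self-contained sketch of that pattern (not the project's actual function):

// Run an async loader and fall back to [] so callers never have to handle a throw.
async function listOrEmpty<T>(load: () => Promise<T[]>): Promise<T[]> {
  try {
    return await load()
  } catch (e) {
    console.error(e)
    return []
  }
}

// Usage sketch: const models = await listOrEmpty(() => getAllCustomModels())
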
@@ -114,7 +114,7 @@ export const addOpenAICofig = async ({ name, baseUrl, apiKey, provider }: { name
 export const getAllOpenAIConfig = async () => {
   const openaiDb = new OpenAIModelDb()
   const configs = await openaiDb.getAll()
-  return configs.filter(config => config.db_type === "openai")
+  return configs.filter(config => config?.db_type === "openai")
 }

 export const updateOpenAIConfig = async ({ id, name, baseUrl, apiKey }: { id: string, name: string, baseUrl: string, apiKey: string }) => {
@@ -41,7 +41,6 @@ export default defineBackground({
     })
   } else {
     browser.browserAction.onClicked.addListener((tab) => {
-      console.log("browser.browserAction.onClicked.addListener")
       browser.tabs.create({ url: browser.runtime.getURL("/options.html") })
     })
   }
@@ -41,7 +41,6 @@ export default defineBackground({
     })
   } else {
     browser.browserAction.onClicked.addListener((tab) => {
-      console.log("browser.browserAction.onClicked.addListener")
       browser.tabs.create({ url: browser.runtime.getURL("/options.html") })
     })
   }
@@ -402,7 +402,7 @@ export const useMessage = () => {
           try {
             generationInfo = output?.generations?.[0][0]?.generationInfo
           } catch (e) {
-            console.log("handleLLMEnd error", e)
+            console.error("handleLLMEnd error", e)
           }
         }
       }
@@ -618,11 +618,7 @@ export const useMessage = () => {
       const applicationChatHistory = []

       const data = await getScreenshotFromCurrentTab()
-      console.log(
-        data?.success
-          ? `[PageAssist] Screenshot is taken`
-          : `[PageAssist] Screenshot is not taken`
-      )
       const visionImage = data?.screenshot || ""

       if (visionImage === "") {
@@ -673,7 +669,7 @@ export const useMessage = () => {
           try {
             generationInfo = output?.generations?.[0][0]?.generationInfo
           } catch (e) {
-            console.log("handleLLMEnd error", e)
+            console.error("handleLLMEnd error", e)
           }
         }
       }
@@ -943,7 +939,7 @@ export const useMessage = () => {
           try {
             generationInfo = output?.generations?.[0][0]?.generationInfo
           } catch (e) {
-            console.log("handleLLMEnd error", e)
+            console.error("handleLLMEnd error", e)
           }
         }
       }
@@ -1272,7 +1268,7 @@ export const useMessage = () => {
           try {
             generationInfo = output?.generations?.[0][0]?.generationInfo
           } catch (e) {
-            console.log("handleLLMEnd error", e)
+            console.error("handleLLMEnd error", e)
           }
         }
       }
@@ -1521,7 +1517,7 @@ export const useMessage = () => {
           try {
             generationInfo = output?.generations?.[0][0]?.generationInfo
           } catch (e) {
-            console.log("handleLLMEnd error", e)
+            console.error("handleLLMEnd error", e)
           }
         }
       }
@@ -321,7 +321,7 @@ export const useMessageOption = () => {
           try {
             generationInfo = output?.generations?.[0][0]?.generationInfo
           } catch (e) {
-            console.log("handleLLMEnd error", e)
+            console.error("handleLLMEnd error", e)
           }
         }
       }
@@ -637,7 +637,7 @@ export const useMessageOption = () => {
           try {
             generationInfo = output?.generations?.[0][0]?.generationInfo
           } catch (e) {
-            console.log("handleLLMEnd error", e)
+            console.error("handleLLMEnd error", e)
           }
         }
       }
@@ -972,7 +972,7 @@ export const useMessageOption = () => {
           try {
             generationInfo = output?.generations?.[0][0]?.generationInfo
           } catch (e) {
-            console.log("handleLLMEnd error", e)
+            console.error("handleLLMEnd error", e)
           }
         }
       }
@@ -85,7 +85,6 @@ export class PageAssistVectorStore extends VectorStore {
       metadata: documents[idx].metadata,
       file_id: this.file_id
     }))
-    console.log(`vector:${this.knownledge_id}`)
     await insertVector(`vector:${this.knownledge_id}`, memoryVectors)
   }

@@ -118,7 +117,6 @@ export class PageAssistVectorStore extends VectorStore {
     const data = await getVector(`vector:${this.knownledge_id}`)
     const pgVector = [...data.vectors]
     const filteredMemoryVectors = pgVector.filter(filterFunction)
-    console.log(filteredMemoryVectors)
     const searches = filteredMemoryVectors
       .map((vector, index) => ({
         similarity: this.similarity(query, vector.embedding),
@@ -126,7 +124,6 @@ export class PageAssistVectorStore extends VectorStore {
       }))
       .sort((a, b) => (a.similarity > b.similarity ? -1 : 0))
       .slice(0, k)
-    console.log(searches)
     const result: [Document, number][] = searches.map((search) => [
       new Document({
         metadata: filteredMemoryVectors[search.index].metadata,
@@ -52,7 +52,7 @@ export const getDataFromCurrentTab = async () => {
         resolve(data[0].result)
       }
     } catch (e) {
-      console.log("error", e)
+      console.error("error", e)
       // this is a weird method but it works
       if (import.meta.env.BROWSER === "firefox") {
         // all I need is to get the pdf url but somehow
@@ -41,9 +41,9 @@ export const getAllOpenAIModels = async (baseUrl: string, apiKey?: string) => {
     return data.data
   } catch (e) {
     if (e instanceof DOMException && e.name === 'AbortError') {
-      console.log('Request timed out')
+      console.error('Request timed out')
     } else {
-      console.log(e)
+      console.error(e)
     }
     return []
   }
@@ -47,7 +47,7 @@ export function parseReasoning(text: string): { type: 'reasoning' | 'text', cont

     return result
   } catch (e) {
-    console.log(`Error parsing reasoning: ${e}`)
+    console.error(`Error parsing reasoning: ${e}`)
     return [
       {
         type: 'text',
@@ -7,14 +7,12 @@ export const sendNotification = async (title: string, message: string) => {
       "sendNotificationAfterIndexing"
     )
     if (sendNotificationAfterIndexing) {
-      console.log("Sending notification")
       browser.notifications.create({
         type: "basic",
         iconUrl: browser.runtime.getURL("/icon/128.png"),
         title,
         message
       })
-      console.log("Notification sent")
     }
   } catch (error) {
     console.error(error)
@@ -89,7 +89,6 @@ export class PageAssistHtmlLoader
     await urlRewriteRuntime(this.url, "web")
     let text = "";
     if (isWikipedia(this.url)) {
-      console.log("Wikipedia URL detected")
       const fetchHTML = await fetch(this.url)
       text = parseWikipedia(await fetchHTML.text())
     } else {
@@ -108,7 +108,7 @@ export class ChatChromeAI extends SimpleChatModel<ChromeAICallOptions> {
   */
   destroy() {
     if (!this.session) {
-      return console.log("No session found. Returning.")
+      return console.error("No session found. Returning.")
     }
     this.session.destroy()
   }
@@ -88,7 +88,6 @@ export const pageAssistModel = async ({
       }
     }) as any
   }
-  console.log('useMlock', useMlock)
   return new ChatOllama({
     baseUrl,
     keepAlive,
@@ -184,7 +184,6 @@ export const fetchChatModels = async ({
   try {

     const models = await getAllModels({ returnEmpty })
-
     const chatModels = models
       ?.filter((model) => {
         return (
@@ -3,6 +3,7 @@ import { Storage } from "@plasmohq/storage"
 import { getOllamaURL } from "./ollama"
 import { cleanUrl } from "@/libs/clean-url"
 import { HumanMessage } from "langchain/schema"
+import { removeReasoning } from "@/libs/reasoning"
 const storage = new Storage()

 // this prompt is copied from the OpenWebUI codebase
@@ -64,9 +65,9 @@ export const generateTitle = async (model: string, query: string, fallBackTitle:
     })
   ])

-    return title.content.toString()
+    return removeReasoning(title.content.toString())
   } catch (error) {
-    console.log(`Error generating title: ${error}`)
+    console.error(`Error generating title: ${error}`)
     return fallBackTitle
   }
 }
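
The newly imported removeReasoning is applied to the generated title before it is returned. Judging by the import path @/libs/reasoning, it presumably strips the reasoning block that thinking-style models can prepend to their output; a hypothetical sketch of such a helper (assumed behavior, not the project's actual implementation):

// Hypothetical helper, assumed behavior only: drop <think>...</think> blocks
// so only the final answer text (here, the chat title) is kept.
const removeReasoningSketch = (text: string): string =>
  text.replace(/<think>[\s\S]*?<\/think>/g, "").trim()
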
@@ -34,7 +34,6 @@ export const rerankDocs = async ({
     }
   })

-  console.log("similarity", similarity)
   const sortedDocs = similarity
     .sort((a, b) => b.similarity - a.similarity)
     .filter((sim) => sim.similarity > 0.5)
@@ -17,7 +17,6 @@ export const getPageAssistTextSplitter = async () => {

   switch (splittingStrategy) {
     case "CharacterTextSplitter":
-      console.log("Using CharacterTextSplitter")
       const splittingSeparator = await defaultSsplttingSeparator()
       const processedSeparator = splittingSeparator
         .replace(/\\n/g, "\n")
@@ -42,7 +42,6 @@ export const localBraveSearch = async (query: string) => {
     return { title, link, content }
   }).filter((result) => result.link && result.title && result.content)

-  console.log(searchResults)

   return searchResults
 }
@@ -91,7 +91,7 @@ export default defineConfig({
           {
             extension_pages:
               "script-src 'self' 'wasm-unsafe-eval'; object-src 'self';"
-          } : undefined,
+          } : "script-src 'self' 'wasm-unsafe-eval' blob:; object-src 'self'; worker-src 'self' blob:;",
         permissions:
           process.env.TARGET === "firefox"
             ? firefoxMV2Permissions