feat: add OpenAI model support
Adds support for OpenAI models, allowing users to run OpenAI and OpenAI-compatible models directly from the application. Custom models are stored with their own provider configuration (API key and base URL), listed in the model selector alongside Ollama models, and resolved to a ChatOpenAI instance at chat time.
parent 2a2610afb8
commit c8620637f8
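For orientation, a rough TypeScript sketch of how the new pieces fit together. It is not part of the commit: the import paths, the example model IDs, and the assumption that pageAssistModel's other options are optional are illustrative guesses; isCustomModel, ollamaFormatAllCustomModels, and pageAssistModel themselves come from the diff below.

// Usage sketch (assumed import paths and made-up example IDs; not part of the commit).
import { isCustomModel, ollamaFormatAllCustomModels } from "@/db/models"
import { pageAssistModel } from "@/models"

const demo = async () => {
  // Custom OpenAI models are saved with a "_model-xxxx-xxxx-xxx-xxxx" suffix,
  // which is what the new isCustomModel() regex detects.
  console.log(isCustomModel("llama3"))                          // false
  console.log(isCustomModel("gpt-4o_model-ab12-cd34-ef5-6789")) // true

  // Custom models are exposed in the same shape as Ollama models, tagged with
  // provider "custom" so the UI can render the CloudCog icon for them.
  const customModels = await ollamaFormatAllCustomModels()

  // For a custom model id, pageAssistModel() looks up the stored provider
  // config (API key + base URL) and returns a ChatOpenAI instance; any other
  // model name still goes through ChatOllama.
  const llm = await pageAssistModel({
    model: customModels[0]?.model ?? "llama3",
    baseUrl: "http://localhost:11434"
  })
  console.log(llm)
}

demo()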
@@ -19,6 +19,7 @@
     "@headlessui/react": "^1.7.18",
     "@heroicons/react": "^2.1.1",
     "@langchain/community": "^0.0.41",
+    "@langchain/openai": "0.0.24",
     "@mantine/form": "^7.5.0",
     "@mantine/hooks": "^7.5.3",
     "@mozilla/readability": "^0.5.0",
@@ -39,6 +40,7 @@
     "lucide-react": "^0.350.0",
     "mammoth": "^1.7.2",
     "ml-distance": "^4.0.1",
+    "openai": "^4.65.0",
     "pdfjs-dist": "4.0.379",
     "property-information": "^6.4.1",
     "pubsub-js": "^1.9.4",
@@ -38,10 +38,10 @@ export const ModelSelect: React.FC = () => {
           </div>
         ),
         onClick: () => {
-          if (selectedModel === d.name) {
+          if (selectedModel === d.model) {
            setSelectedModel(null)
          } else {
-            setSelectedModel(d.name)
+            setSelectedModel(d.model)
          }
        }
      })) || [],
@@ -1,4 +1,4 @@
-import { ChromeIcon } from "lucide-react"
+import { ChromeIcon, CloudCog } from "lucide-react"
 import { OllamaIcon } from "../Icons/Ollama"

 export const ProviderIcons = ({

@@ -11,6 +11,8 @@ export const ProviderIcons = ({
   switch (provider) {
     case "chrome":
       return <ChromeIcon className={className} />
+    case "custom":
+      return <CloudCog className={className} />
     default:
       return <OllamaIcon className={className} />
   }
@@ -11,7 +11,6 @@ import {
 } from "lucide-react"
 import { useTranslation } from "react-i18next"
 import { useLocation, NavLink } from "react-router-dom"
-import { OllamaIcon } from "../Icons/Ollama"
 import { SelectedKnowledge } from "../Option/Knowledge/SelectedKnwledge"
 import { ModelSelect } from "../Common/ModelSelect"
 import { PromptSelect } from "../Common/PromptSelect"
@@ -1,9 +1,5 @@
-import { useMutation, useQuery, useQueryClient } from "@tanstack/react-query"
+import { useMutation, } from "@tanstack/react-query"
 import {
   Skeleton,
   Table,
   Tag,
   Tooltip,
   notification,
   Modal,
   Input,
@@ -18,6 +18,11 @@ export const generateID = () => {
 export const removeModelPrefix = (id: string) => {
   return id.replace(/^model-/, "")
 }
+
+export const isCustomModel = (model: string) => {
+  const customModelRegex = /_model-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{3,4}-[a-f0-9]{4}/
+  return customModelRegex.test(model)
+}
 export class ModelDb {
   db: chrome.storage.StorageArea

@@ -174,3 +179,30 @@ export const isLookupExist = async (lookup: string) => {
   const model = models.find((model) => model.lookup === lookup)
   return model ? true : false
 }
+
+
+export const ollamaFormatAllCustomModels = async () => {
+
+  const allModles = await getAllCustomModels()
+
+  const ollamaModels = allModles.map((model) => {
+    return {
+      name: model.name,
+      model: model.id,
+      modified_at: "",
+      provider: "custom",
+      size: 0,
+      digest: "",
+      details: {
+        parent_model: "",
+        format: "",
+        family: "",
+        families: [],
+        parameter_size: "",
+        quantization_level: ""
+      }
+    }
+  })
+
+  return ollamaModels
+}
@@ -1,5 +1,8 @@
+import { getModelInfo, isCustomModel } from "@/db/models"
 import { ChatChromeAI } from "./ChatChromeAi"
 import { ChatOllama } from "./ChatOllama"
+import { getOpenAIConfigById } from "@/db/openai"
+import { ChatOpenAI } from "@langchain/openai"

 export const pageAssistModel = async ({
   model,

@@ -22,13 +25,37 @@ export const pageAssistModel = async ({
   seed?: number
   numGpu?: number
 }) => {
-  switch (model) {
-    case "chrome::gemini-nano::page-assist":
+
+  if (model === "chrome::gemini-nano::page-assist") {
     return new ChatChromeAI({
       temperature,
       topK
     })
-    default:
+  }
+
+
+  const isCustom = isCustomModel(model)
+
+  console.log("isCustom", isCustom, model)
+
+  if (isCustom) {
+    const modelInfo = await getModelInfo(model)
+    const providerInfo = await getOpenAIConfigById(modelInfo.provider_id)
+
+    return new ChatOpenAI({
+      modelName: modelInfo.model_id,
+      openAIApiKey: providerInfo.apiKey || "",
+      temperature,
+      topP,
+      configuration: {
+        apiKey: providerInfo.apiKey || "",
+        baseURL: providerInfo.baseUrl || "",
+      }
+    }) as any
+  }
+
+
+
   return new ChatOllama({
     baseUrl,
     keepAlive,

@@ -40,5 +67,7 @@ export const pageAssistModel = async ({
     model,
     numGpu
   })
-  }
+
+
+
 }
@@ -4,6 +4,7 @@ import { urlRewriteRuntime } from "../libs/runtime"
 import { getChromeAIModel } from "./chrome"
 import { setNoOfRetrievedDocs, setTotalFilePerKB } from "./app"
 import fetcher from "@/libs/fetcher"
+import { ollamaFormatAllCustomModels } from "@/db/models"


 const storage = new Storage()

@@ -193,9 +194,13 @@ export const fetchChatModels = async ({
       }
     })
     const chromeModel = await getChromeAIModel()
+
+    const customModels = await ollamaFormatAllCustomModels()
+
     return [
       ...chatModels,
-      ...chromeModel
+      ...chromeModel,
+      ...customModels
     ]
   } catch (e) {
     console.error(e)

@@ -207,10 +212,11 @@ export const fetchChatModels = async ({
       }
     })
     const chromeModel = await getChromeAIModel()
-
+    const customModels = await ollamaFormatAllCustomModels()
     return [
       ...models,
-      ...chromeModel
+      ...chromeModel,
+      ...customModels
     ]
   }
 }