feat: add OpenAI model support

Adds support for OpenAI models, allowing users to run OpenAI and OpenAI-compatible models directly from the application. This includes custom models and per-provider configuration (API key and base URL).
Author: n4ze3m
Date: 2024-09-29 19:57:26 +05:30
parent 2a2610afb8
commit c8620637f8
9 changed files with 97 additions and 31 deletions
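For context, a minimal usage sketch of the factory this diff touches. The import path and the custom-model id format are assumptions for illustration; only `pageAssistModel` and its options come from the diff below.

```ts
// Hypothetical usage sketch: "@/models" and the model id are assumed,
// not taken from this commit.
import { pageAssistModel } from "@/models"

const chat = await pageAssistModel({
  model: "gpt-4o-mini_custom-provider", // assumed id that isCustomModel() would recognize
  baseUrl: "http://localhost:11434",    // Ollama endpoint used by the fallback branch
  temperature: 0.7,
  topP: 0.9
})

// All three branches return a LangChain chat model, so callers can
// invoke them uniformly:
const res = await chat.invoke("Hello!")
console.log(res.content)
```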

@@ -1,5 +1,8 @@
 import { getModelInfo, isCustomModel } from "@/db/models"
 import { ChatChromeAI } from "./ChatChromeAi"
 import { ChatOllama } from "./ChatOllama"
+import { getOpenAIConfigById } from "@/db/openai"
+import { ChatOpenAI } from "@langchain/openai"
+
 export const pageAssistModel = async ({
   model,
@@ -22,23 +25,49 @@ export const pageAssistModel = async ({
   seed?: number
   numGpu?: number
 }) => {
-  switch (model) {
-    case "chrome::gemini-nano::page-assist":
-      return new ChatChromeAI({
-        temperature,
-        topK
-      })
-    default:
-      return new ChatOllama({
-        baseUrl,
-        keepAlive,
-        temperature,
-        topK,
-        topP,
-        numCtx,
-        seed,
-        model,
-        numGpu
-      })
-  }
+  if (model === "chrome::gemini-nano::page-assist") {
+    return new ChatChromeAI({
+      temperature,
+      topK
+    })
+  }
+
+  const isCustom = isCustomModel(model)
+  console.log("isCustom", isCustom, model)
+
+  if (isCustom) {
+    const modelInfo = await getModelInfo(model)
+    const providerInfo = await getOpenAIConfigById(modelInfo.provider_id)
+    return new ChatOpenAI({
+      modelName: modelInfo.model_id,
+      openAIApiKey: providerInfo.apiKey || "",
+      temperature,
+      topP,
+      configuration: {
+        apiKey: providerInfo.apiKey || "",
+        baseURL: providerInfo.baseUrl || "",
+      }
+    }) as any
+  }
+
+  return new ChatOllama({
+    baseUrl,
+    keepAlive,
+    temperature,
+    topK,
+    topP,
+    numCtx,
+    seed,
+    model,
+    numGpu
+  })
 }
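The fields the new branch reads imply a provider record roughly shaped like the sketch below. This is an assumption pieced together from the diff; the real declaration lives in `@/db/openai` and is not shown in this commit.

```ts
// Assumed shape of the record returned by getOpenAIConfigById, inferred
// from the fields this diff accesses; not the actual declaration.
type OpenAIProviderConfig = {
  id: string       // matched against modelInfo.provider_id
  apiKey?: string  // falls back to "" when unset
  baseUrl?: string // lets a provider point at any OpenAI-compatible endpoint
}
```

Passing `baseURL` through `configuration` is what makes custom providers work against any OpenAI-compatible server, while the `as any` cast papers over the return-type union with `ChatOllama` and `ChatChromeAI`.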