feat: Improve model selection and embedding
Refactor embedding models and their handling to simplify the code and improve performance. Add a new model selection mechanism and a clearer, more user-friendly UI for choosing embedding models. Refactor embeddings so that page assist and RAG share a common model, further streamlining the workflow.
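As a rough consumption sketch (illustrative only, not part of this diff): with the shared factory added in src/models/embedding.ts below, both the page assist chat flow and the RAG pipeline can request the same embedding model. The model name, base URL, and input strings are placeholder assumptions.

import { pageAssistEmbeddingModel } from "@/models/embedding"

// Placeholder settings; real callers would read these from the extension's storage.
const embeddings = await pageAssistEmbeddingModel({
  model: "nomic-embed-text:latest",
  baseUrl: "http://localhost:11434"
})

// Standard LangChain Embeddings calls, usable by chat-with-page and RAG alike.
const queryVector = await embeddings.embedQuery("What does this page say about pricing?")
const chunkVectors = await embeddings.embedDocuments(["first chunk", "second chunk"])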
@@ -44,21 +44,6 @@ export interface OpenAIEmbeddingsParams extends EmbeddingsParams {
  signal?: AbortSignal
}

/**
 * Class for generating embeddings using the OpenAI API. Extends the
 * Embeddings class and implements OpenAIEmbeddingsParams and
 * AzureOpenAIInput.
 * @example
 * ```typescript
 * // Embed a query using OpenAIEmbeddings to generate embeddings for a given text
 * const model = new OpenAIEmbeddings();
 * const res = await model.embedQuery(
 *   "What would be a good company name for a company that makes colorful socks?",
 * );
 * console.log({ res });
 *
 * ```
 */
export class OAIEmbedding
  extends Embeddings
  implements OpenAIEmbeddingsParams {
@@ -96,6 +81,7 @@ export class OAIEmbedding
  protected client: OpenAIClient

  protected clientConfig: ClientOptions

  signal?: AbortSignal

  constructor(
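The signal field on OAIEmbedding above, which the new factory below also accepts, lets callers cancel an in-flight embedding request. A minimal sketch, assuming the wrappers forward the signal to the underlying HTTP call; the model name and URL are placeholders:

import { pageAssistEmbeddingModel } from "@/models/embedding"

// One controller per request, so the embedding call can be cancelled when the
// user closes the sidebar or starts a new query.
const controller = new AbortController()

const embeddings = await pageAssistEmbeddingModel({
  model: "nomic-embed-text:latest",   // placeholder Ollama model
  baseUrl: "http://localhost:11434",  // placeholder Ollama URL
  signal: controller.signal
})

// Cancelling should make the pending request reject with an AbortError.
setTimeout(() => controller.abort(), 10_000)
const vectors = await embeddings.embedDocuments(["some chunk of page text"])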
src/models/embedding.ts (new file, 36 lines added)
@@ -0,0 +1,36 @@
import { getModelInfo, isCustomModel } from "@/db/models"
import { OllamaEmbeddingsPageAssist } from "./OllamaEmbedding"
import { OAIEmbedding } from "./OAIEmbedding"
import { getOpenAIConfigById } from "@/db/openai"

type EmbeddingModel = {
  model: string
  baseUrl: string
  signal?: AbortSignal
  keepAlive?: string
}

export const pageAssistEmbeddingModel = async ({ baseUrl, model, keepAlive, signal }: EmbeddingModel) => {
  const isCustom = isCustomModel(model)
  if (isCustom) {
    const modelInfo = await getModelInfo(model)
    const providerInfo = await getOpenAIConfigById(modelInfo.provider_id)
    return new OAIEmbedding({
      modelName: modelInfo.model_id,
      model: modelInfo.model_id,
      signal,
      openAIApiKey: providerInfo.apiKey || "temp",
      configuration: {
        apiKey: providerInfo.apiKey || "temp",
        baseURL: providerInfo.baseUrl || "",
      }
    }) as any
  }

  return new OllamaEmbeddingsPageAssist({
    model,
    baseUrl,
    keepAlive,
    signal
  })
}
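For the RAG side, the returned model behaves as an ordinary LangChain Embeddings instance, so it can back a vector store. A minimal sketch using LangChain's in-memory store (whether the extension actually uses MemoryVectorStore is an assumption here; the model name and texts are placeholders):

import { MemoryVectorStore } from "langchain/vectorstores/memory"
import { pageAssistEmbeddingModel } from "@/models/embedding"

const embeddings = await pageAssistEmbeddingModel({
  model: "nomic-embed-text:latest",   // placeholder; a custom provider model id would route to OAIEmbedding instead
  baseUrl: "http://localhost:11434"
})

// Index a few page chunks, then retrieve the closest ones for a question.
const store = await MemoryVectorStore.fromTexts(
  ["chunk about installation", "chunk about embedding settings"],
  [{ id: 1 }, { id: 2 }],
  embeddings
)
const hits = await store.similaritySearch("how do I change the embedding model?", 2)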
@@ -40,7 +40,7 @@ export const pageAssistModel = async ({
  if (isCustom) {
    const modelInfo = await getModelInfo(model)
    const providerInfo = await getOpenAIConfigById(modelInfo.provider_id)
    console.log(modelInfo, providerInfo)

    return new ChatOpenAI({
      modelName: modelInfo.model_id,
      openAIApiKey: providerInfo.apiKey || "temp",