refactor: Parse keepAlive value in OllamaEmbeddingsPageAssist

This commit is contained in:
n4ze3m 2024-05-23 11:22:46 +05:30
parent b3a455382c
commit fadf736f70
3 changed files with 15 additions and 5 deletions

View File

@@ -15,6 +15,7 @@ import type { StringWithAutocomplete } from "@langchain/core/utils/types";
import {
createOllamaChatStream,
createOllamaGenerateStream,
parseKeepAlive,
type OllamaInput,
type OllamaMessage,
} from "./utils/ollama";
@@ -112,7 +113,7 @@ export class ChatOllama
this.baseUrl = fields.baseUrl?.endsWith("/")
? fields.baseUrl.slice(0, -1)
: fields.baseUrl ?? this.baseUrl;
this.keepAlive = fields.keepAlive ?? this.keepAlive;
this.keepAlive = parseKeepAlive(fields.keepAlive) ?? this.keepAlive;
this.embeddingOnly = fields.embeddingOnly;
this.f16KV = fields.f16KV;
this.frequencyPenalty = fields.frequencyPenalty;

View File

@@ -1,12 +1,13 @@
import { Embeddings, EmbeddingsParams } from "@langchain/core/embeddings"
import type { StringWithAutocomplete } from "@langchain/core/utils/types"
import { parseKeepAlive } from "./utils/ollama"
export interface OllamaInput {
embeddingOnly?: boolean
f16KV?: boolean
frequencyPenalty?: number
headers?: Record<string, string>
keepAlive?: string
keepAlive?: any
logitsAll?: boolean
lowVram?: boolean
mainGpu?: number
@@ -98,7 +99,7 @@ interface OllamaEmbeddingsParams extends EmbeddingsParams {
headers?: Record<string, string>
/** Defaults to "5m" */
keepAlive?: string
keepAlive?: any
/** Advanced Ollama API request parameters in camelCase, see
* https://github.com/jmorganca/ollama/blob/main/docs/modelfile.md#valid-parameters-and-values
@@ -138,7 +139,7 @@ export class OllamaEmbeddingsPageAssist extends Embeddings {
}
if (params?.keepAlive) {
this.keepAlive = params.keepAlive
this.keepAlive = parseKeepAlive(params.keepAlive)
}
if (params?.requestOptions) {

View File

@@ -7,7 +7,7 @@ export interface OllamaInput {
f16KV?: boolean;
frequencyPenalty?: number;
headers?: Record<string, string>;
keepAlive?: string;
keepAlive?: any;
logitsAll?: boolean;
lowVram?: boolean;
mainGpu?: number;
@@ -198,4 +198,12 @@ export async function* createOllamaChatStream(
options: OllamaCallOptions
): AsyncGenerator<OllamaChatGenerationChunk> {
yield* createOllamaStream(`${baseUrl}/api/chat`, params, options);
}
/**
 * Normalizes a user-supplied `keepAlive` value: the string "-1" is converted
 * to the number -1, and every other value (e.g. "5m", a number, or undefined)
 * is returned unchanged.
 *
 * @param keepAlive - The raw keepAlive setting from user configuration.
 * @returns The number -1 when the input is the string "-1"; otherwise the
 *   input value as-is.
 */
export const parseKeepAlive = (keepAlive: any) => {
  // Only the exact string "-1" is rewritten; everything else passes through.
  const isStringMinusOne = keepAlive === "-1"
  return isStringMinusOne ? -1 : keepAlive
}