fix: Improve ChromeAI model parameters

n4ze3m 2024-08-31 20:17:10 +05:30
parent 26e799b246
commit 9bd3193ad9
2 changed files with 10 additions and 14 deletions

@@ -11,11 +11,6 @@ import { BaseMessage, AIMessageChunk } from "@langchain/core/messages"
 import { ChatGenerationChunk } from "@langchain/core/outputs"
 import { IterableReadableStream } from "@langchain/core/utils/stream"
-export interface AI {
-  canCreateTextSession(): Promise<AIModelAvailability>
-  createTextSession(options?: AITextSessionOptions): Promise<AITextSession>
-  defaultTextSessionOptions(): Promise<AITextSessionOptions>
-}
 export interface AITextSession {
   prompt(input: string): Promise<string>
@@ -88,9 +83,9 @@ function formatPrompt(messages: BaseMessage[]): string {
 export class ChatChromeAI extends SimpleChatModel<ChromeAICallOptions> {
   session?: AITextSession
-  temperature = 0.5
+  temperature = 0.8
-  topK = 40
+  topK = 120
   promptFormatter: (messages: BaseMessage[]) => string
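
These are only the class defaults; a caller can still override them at construction time. A minimal usage sketch, assuming the class accepts LangChain-style constructor fields for temperature and topK (not shown in this hunk) and using an illustrative import path:

// Hypothetical import path, for illustration only.
import { ChatChromeAI } from "./models/ChatChromeAi"

// Picks up the new defaults: temperature 0.8, topK 120.
const model = new ChatChromeAI({})

// Or pin sampling explicitly instead of relying on the defaults.
const focused = new ChatChromeAI({
  temperature: 0.2, // lower randomness
  topK: 10 // sample from only the 10 most likely tokens
})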
@@ -121,16 +116,17 @@ export class ChatChromeAI extends SimpleChatModel<ChromeAICallOptions> {
   }
   const { ai } = window as any
-  const canCreateTextSession = await ai.canCreateTextSession()
+  const capabilities = await ai?.assistant?.capabilities()
+  const canCreateTextSession = capabilities?.available
   if (canCreateTextSession === AIModelAvailability.No) {
     throw new Error("The AI model is not available.")
   } else if (canCreateTextSession === AIModelAvailability.AfterDownload) {
     throw new Error("The AI model is not yet downloaded.")
   }
-  this.session = await ai.createTextSession({
-    topK: this.topK,
-    temperature: this.temperature
+  this.session = await ai?.assistant?.create({
+    temperature: this.temperature,
+    topK: this.topK
   })
 }
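
Put together outside the class, the checks this hunk switches to look roughly like the sketch below. It assumes Chrome's experimental window.ai.assistant (Prompt API) surface as used in the diff; the optional chaining matters because the object is absent in browsers without the feature.

// Sketch only: window.ai.assistant was an experimental, flag-gated API.
const { ai } = window as any

const capabilities = await ai?.assistant?.capabilities()
// "readily" | "after-download" | "no" is the expected shape of `available`,
// mirroring the AIModelAvailability checks above.
if (capabilities?.available !== "readily") {
  throw new Error("The AI model is not available or not yet downloaded.")
}

// Create a session with the same sampling parameters the class passes.
const session = await ai.assistant.create({
  temperature: 0.8,
  topK: 120
})

const reply = await session.prompt("Summarize this page in one sentence.")
console.log(reply)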

@@ -12,8 +12,8 @@ export const getChromeAISupported = async () => {
   }
   //@ts-ignore
-  const createSession = await ai?.canCreateGenericSession()
-  if (createSession !== "readily") {
+  const capabilities = await ai?.assistant?.capabilities()
+  if (capabilities?.available !== "readily") {
     return "ai_not_ready"
   }
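
A standalone version of the same feature check, for context. Only the "readily" comparison and the "ai_not_ready" string come from the diff; the helper name and the other return values are illustrative.

// Illustrative helper; not the project's actual getChromeAISupported implementation.
const checkChromeAI = async (): Promise<"ready" | "ai_not_supported" | "ai_not_ready"> => {
  const ai = (window as any).ai
  if (!ai?.assistant?.capabilities) {
    // Browser does not expose the experimental Prompt API at all.
    return "ai_not_supported"
  }
  const capabilities = await ai.assistant.capabilities()
  if (capabilities?.available !== "readily") {
    // Model exists but is not downloaded or not usable yet.
    return "ai_not_ready"
  }
  return "ready"
}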