feat: Add useMMap option to model settings
parent e5e04c3674
commit 6d80798da9
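Background, for readers unfamiliar with the flag: `useMMap` presumably maps onto Ollama's `use_mmap` load option, which controls whether model weights are memory-mapped from disk. With it enabled (Ollama's default) the OS pages weights in on demand; disabling it forces the whole model to be read into RAM up front.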
@@ -87,6 +87,9 @@
       "label": "Top P",
       "placeholder": "Enter Top P value (e.g. 0.9, 0.95)"
     },
+    "useMMap": {
+      "label": "useMmap"
+    },
     "numGpu": {
       "label": "Num GPU",
       "placeholder": "Enter number of layers to send to GPU(s)"
@@ -10,7 +10,8 @@ import {
   Input,
   InputNumber,
   Modal,
-  Skeleton
+  Skeleton,
+  Switch
 } from "antd"
 import React from "react"
 import { useTranslation } from "react-i18next"
@@ -35,7 +36,7 @@ export const CurrentChatModelSettings = ({
     queryFn: async () => {
       const data = await getAllModelSettings()

-      let tempSystemPrompt = "";
+      let tempSystemPrompt = ""

       // i hate this method but i need this feature so badly that i need to do this
       if (selectedSystemPrompt) {
@@ -52,7 +53,8 @@ export const CurrentChatModelSettings = ({
         seed: cUserSettings.seed,
         numGpu: cUserSettings.numGpu ?? data.numGpu,
         numPredict: cUserSettings.numPredict ?? data.numPredict,
-        systemPrompt: cUserSettings.systemPrompt ?? tempSystemPrompt
+        systemPrompt: cUserSettings.systemPrompt ?? tempSystemPrompt,
+        useMMap: cUserSettings.useMMap ?? data.useMMap
       })
       return data
     },
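A side note on the merge above: nullish coalescing matters here precisely because `useMMap` is a boolean. `??` falls back to the stored default only when the per-chat value is `null` or `undefined`, so a user's explicit `false` survives, whereas an `||` chain would silently discard it. A minimal TypeScript sketch with hypothetical values:

// Sketch: per-chat override falls back to the saved default.
type Settings = { useMMap?: boolean; numGpu?: number }

const saved: Settings = { useMMap: true, numGpu: 35 }
const perChat: Settings = { useMMap: false } // user disabled mmap for this chat

const effective = {
  useMMap: perChat.useMMap ?? saved.useMMap, // => false (`||` would wrongly give true)
  numGpu: perChat.numGpu ?? saved.numGpu // => 35, since there is no per-chat override
}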
@@ -61,7 +63,6 @@ export const CurrentChatModelSettings = ({
     refetchOnWindowFocus: false
   })

-
   const renderBody = () => {
     return (
       <>
@@ -176,6 +177,12 @@ export const CurrentChatModelSettings = ({
             )}
           />
         </Form.Item>
+
+        <Form.Item
+          name="useMMap"
+          label={t("modelSettings.form.useMMap.label")}>
+          <Switch />
+        </Form.Item>
       </React.Fragment>
     )
   }
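One antd detail worth knowing when reading this hunk: `Switch` reports its state through the `checked` prop rather than `value`, so if the form's initial values are meant to drive the toggle, the `Form.Item` is typically declared with `valuePropName="checked"`, roughly:

{/* Sketch: binding an antd Switch to form state via valuePropName. */}
<Form.Item
  name="useMMap"
  label={t("modelSettings.form.useMMap.label")}
  valuePropName="checked">
  <Switch />
</Form.Item>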
@@ -2,7 +2,7 @@ import { BetaTag } from "@/components/Common/Beta"
 import { SaveButton } from "@/components/Common/SaveButton"
 import { getAllModelSettings, setModelSetting } from "@/services/model-settings"
 import { useQuery, useQueryClient } from "@tanstack/react-query"
-import { Form, Skeleton, Input, InputNumber, Collapse } from "antd"
+import { Form, Skeleton, Input, InputNumber, Collapse, Switch } from "antd"
 import React from "react"
 import { useTranslation } from "react-i18next"

@@ -119,11 +119,14 @@ export const ModelSettings = () => {
           <InputNumber
             style={{ width: "100%" }}
             size="large"
-            placeholder={t(
-              "modelSettings.form.numGpu.placeholder"
-            )}
+            placeholder={t("modelSettings.form.numGpu.placeholder")}
           />
         </Form.Item>
+        <Form.Item
+          name="useMMap"
+          label={t("modelSettings.form.useMMap.label")}>
+          <Switch />
+        </Form.Item>
       </React.Fragment>
     )
   }
@@ -139,7 +139,9 @@ export const useMessage = () => {
           currentChatModelSettings?.numGpu ?? userDefaultModelSettings?.numGpu,
         numPredict:
           currentChatModelSettings?.numPredict ??
-          userDefaultModelSettings?.numPredict
+          userDefaultModelSettings?.numPredict,
+        useMMap:
+          currentChatModelSettings?.useMMap ?? userDefaultModelSettings?.useMMap
       })

       let newMessage: Message[] = []
@@ -271,7 +273,10 @@ export const useMessage = () => {
             userDefaultModelSettings?.numGpu,
           numPredict:
             currentChatModelSettings?.numPredict ??
-            userDefaultModelSettings?.numPredict
+            userDefaultModelSettings?.numPredict,
+          useMMap:
+            currentChatModelSettings?.useMMap ??
+            userDefaultModelSettings?.useMMap
         })
         const response = await questionOllama.invoke(promptForQuestion)
         query = response.content.toString()
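The same three-line fallback recurs at every call site in this hook, and again in `useMessageOption` below; a small helper could compute the merged options once. A hypothetical refactor sketch, not part of this commit:

// Hypothetical helper: merge per-chat overrides onto user defaults.
type MergeableSettings = {
  numGpu?: number
  numPredict?: number
  useMMap?: boolean
}

const mergeSettings = (
  chat?: MergeableSettings,
  defaults?: MergeableSettings
): MergeableSettings => ({
  numGpu: chat?.numGpu ?? defaults?.numGpu,
  numPredict: chat?.numPredict ?? defaults?.numPredict,
  useMMap: chat?.useMMap ?? defaults?.useMMap
})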
@@ -482,7 +487,9 @@ export const useMessage = () => {
           currentChatModelSettings?.numGpu ?? userDefaultModelSettings?.numGpu,
         numPredict:
           currentChatModelSettings?.numPredict ??
-          userDefaultModelSettings?.numPredict
+          userDefaultModelSettings?.numPredict,
+        useMMap:
+          currentChatModelSettings?.useMMap ?? userDefaultModelSettings?.useMMap
       })

       let newMessage: Message[] = []
@@ -716,7 +723,9 @@ export const useMessage = () => {
           currentChatModelSettings?.numGpu ?? userDefaultModelSettings?.numGpu,
         numPredict:
           currentChatModelSettings?.numPredict ??
-          userDefaultModelSettings?.numPredict
+          userDefaultModelSettings?.numPredict,
+        useMMap:
+          currentChatModelSettings?.useMMap ?? userDefaultModelSettings?.useMMap
       })

       let newMessage: Message[] = []
@@ -944,7 +953,9 @@ export const useMessage = () => {
           currentChatModelSettings?.numGpu ?? userDefaultModelSettings?.numGpu,
         numPredict:
           currentChatModelSettings?.numPredict ??
-          userDefaultModelSettings?.numPredict
+          userDefaultModelSettings?.numPredict,
+        useMMap:
+          currentChatModelSettings?.useMMap ?? userDefaultModelSettings?.useMMap
       })

       let newMessage: Message[] = []
@@ -1023,7 +1034,10 @@ export const useMessage = () => {
             userDefaultModelSettings?.numGpu,
           numPredict:
             currentChatModelSettings?.numPredict ??
-            userDefaultModelSettings?.numPredict
+            userDefaultModelSettings?.numPredict,
+          useMMap:
+            currentChatModelSettings?.useMMap ??
+            userDefaultModelSettings?.useMMap
         })
         const response = await questionOllama.invoke(promptForQuestion)
         query = response.content.toString()
@@ -1211,7 +1225,9 @@ export const useMessage = () => {
           currentChatModelSettings?.numGpu ?? userDefaultModelSettings?.numGpu,
         numPredict:
           currentChatModelSettings?.numPredict ??
-          userDefaultModelSettings?.numPredict
+          userDefaultModelSettings?.numPredict,
+        useMMap:
+          currentChatModelSettings?.useMMap ?? userDefaultModelSettings?.useMMap
       })

       let newMessage: Message[] = []
@@ -130,7 +130,9 @@ export const useMessageOption = () => {
           currentChatModelSettings?.numGpu ?? userDefaultModelSettings?.numGpu,
         numPredict:
           currentChatModelSettings?.numPredict ??
-          userDefaultModelSettings?.numPredict
+          userDefaultModelSettings?.numPredict,
+        useMMap:
+          currentChatModelSettings?.useMMap ?? userDefaultModelSettings?.useMMap
       })

       let newMessage: Message[] = []
@@ -209,7 +211,10 @@ export const useMessageOption = () => {
             userDefaultModelSettings?.numGpu,
           numPredict:
             currentChatModelSettings?.numPredict ??
-            userDefaultModelSettings?.numPredict
+            userDefaultModelSettings?.numPredict,
+          useMMap:
+            currentChatModelSettings?.useMMap ??
+            userDefaultModelSettings?.useMMap
         })
         const response = await questionOllama.invoke(promptForQuestion)
         query = response.content.toString()
@@ -429,7 +434,9 @@ export const useMessageOption = () => {
           currentChatModelSettings?.numGpu ?? userDefaultModelSettings?.numGpu,
         numPredict:
           currentChatModelSettings?.numPredict ??
-          userDefaultModelSettings?.numPredict
+          userDefaultModelSettings?.numPredict,
+        useMMap:
+          currentChatModelSettings?.useMMap ?? userDefaultModelSettings?.useMMap
       })

       let newMessage: Message[] = []
@@ -667,7 +674,9 @@ export const useMessageOption = () => {
           currentChatModelSettings?.numGpu ?? userDefaultModelSettings?.numGpu,
         numPredict:
           currentChatModelSettings?.numPredict ??
-          userDefaultModelSettings?.numPredict
+          userDefaultModelSettings?.numPredict,
+        useMMap:
+          currentChatModelSettings?.useMMap ?? userDefaultModelSettings?.useMMap
       })

       let newMessage: Message[] = []
@@ -762,7 +771,10 @@ export const useMessageOption = () => {
             userDefaultModelSettings?.numGpu,
           numPredict:
             currentChatModelSettings?.numPredict ??
-            userDefaultModelSettings?.numPredict
+            userDefaultModelSettings?.numPredict,
+          useMMap:
+            currentChatModelSettings?.useMMap ??
+            userDefaultModelSettings?.useMMap
         })
         const response = await questionOllama.invoke(promptForQuestion)
         query = response.content.toString()
@@ -16,6 +16,7 @@ export const pageAssistModel = async ({
   seed,
   numGpu,
   numPredict,
+  useMMap
 }: {
   model: string
   baseUrl: string
@@ -27,6 +28,7 @@ export const pageAssistModel = async ({
   seed?: number
   numGpu?: number
   numPredict?: number
+  useMMap?: boolean
 }) => {

   if (model === "chrome::gemini-nano::page-assist") {
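With the signature extended, callers can thread the flag through, for example (illustrative values, assuming the remaining fields of the options object are optional):

const chatModel = await pageAssistModel({
  model: "llama3:8b", // illustrative model name
  baseUrl: "http://localhost:11434",
  numGpu: 35,
  numPredict: 512,
  useMMap: false // new in this commit: disable memory-mapping
})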
@@ -73,7 +75,8 @@ export const pageAssistModel = async ({
       seed,
       model,
       numGpu,
-      numPredict
+      numPredict,
+      useMMap,
     })

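Downstream of this constructor the flag presumably reaches Ollama as the snake_case `use_mmap` option on the request payload, alongside the other load options; something like the following mapping inside the `ChatOllama`-style wrapper, which is assumed here and not part of this diff:

// Assumed translation layer (not shown in this diff):
// camelCase constructor fields -> Ollama API option names.
const ollamaOptions = {
  num_gpu: numGpu,
  num_predict: numPredict,
  use_mmap: useMMap // Ollama's name for the memory-mapping toggle
}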