feat: Add useMMap option to model settings
parent e5e04c3674
commit 6d80798da9
@@ -87,6 +87,9 @@
       "label": "Top P",
       "placeholder": "Enter Top P value (e.g. 0.9, 0.95)"
     },
+    "useMMap": {
+      "label": "useMmap"
+    },
     "numGpu": {
       "label": "Num GPU",
      "placeholder": "Enter number of layers to send to GPU(s)"
@@ -10,7 +10,8 @@ import {
   Input,
   InputNumber,
   Modal,
-  Skeleton
+  Skeleton,
+  Switch
 } from "antd"
 import React from "react"
 import { useTranslation } from "react-i18next"
@@ -35,7 +36,7 @@ export const CurrentChatModelSettings = ({
     queryFn: async () => {
       const data = await getAllModelSettings()

-      let tempSystemPrompt = "";
+      let tempSystemPrompt = ""

       // i hate this method but i need this feature so badly that i need to do this
       if (selectedSystemPrompt) {
@@ -52,7 +53,8 @@ export const CurrentChatModelSettings = ({
         seed: cUserSettings.seed,
         numGpu: cUserSettings.numGpu ?? data.numGpu,
         numPredict: cUserSettings.numPredict ?? data.numPredict,
-        systemPrompt: cUserSettings.systemPrompt ?? tempSystemPrompt
+        systemPrompt: cUserSettings.systemPrompt ?? tempSystemPrompt,
+        useMMap: cUserSettings.useMMap ?? data.useMMap
       })
       return data
     },
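A note on the merge above: the nullish-coalescing operator (??) falls back to the stored default only when the per-chat value is null or undefined, so an explicit useMMap: false set for the current chat still wins. A minimal sketch of why ?? and not || (variable names hypothetical, not from the commit):

// ?? treats false as a real value; || would silently discard it.
const chatSetting: boolean | undefined = false // user turned mmap off for this chat
const defaultSetting = true                    // global default from getAllModelSettings()

const merged = chatSetting ?? defaultSetting   // false — the user's choice survives
const broken = chatSetting || defaultSetting   // true  — would re-enable mmap by accident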
@@ -61,7 +63,6 @@ export const CurrentChatModelSettings = ({
     refetchOnWindowFocus: false
   })

-
   const renderBody = () => {
     return (
       <>
@@ -176,6 +177,12 @@ export const CurrentChatModelSettings = ({
               )}
             />
           </Form.Item>
+
+          <Form.Item
+            name="useMMap"
+            label={t("modelSettings.form.useMMap.label")}>
+            <Switch />
+          </Form.Item>
         </React.Fragment>
       )
     }
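One antd detail worth flagging in the hunk above: Switch reports its state through the checked prop rather than value, and Form.Item binds to value by default. antd's documented pattern adds valuePropName="checked" so the toggle reads and writes the stored setting; a sketch of that conventional wiring (not what this commit does):

{/* antd convention: tell Form.Item which prop carries the Switch state */}
<Form.Item
  name="useMMap"
  label={t("modelSettings.form.useMMap.label")}
  valuePropName="checked">
  <Switch />
</Form.Item>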
@@ -2,7 +2,7 @@ import { BetaTag } from "@/components/Common/Beta"
 import { SaveButton } from "@/components/Common/SaveButton"
 import { getAllModelSettings, setModelSetting } from "@/services/model-settings"
 import { useQuery, useQueryClient } from "@tanstack/react-query"
-import { Form, Skeleton, Input, InputNumber, Collapse } from "antd"
+import { Form, Skeleton, Input, InputNumber, Collapse, Switch } from "antd"
 import React from "react"
 import { useTranslation } from "react-i18next"

@@ -119,11 +119,14 @@ export const ModelSettings = () => {
             <InputNumber
               style={{ width: "100%" }}
               size="large"
-              placeholder={t(
-                "modelSettings.form.numGpu.placeholder"
-              )}
+              placeholder={t("modelSettings.form.numGpu.placeholder")}
             />
           </Form.Item>
+          <Form.Item
+            name="useMMap"
+            label={t("modelSettings.form.useMMap.label")}>
+            <Switch />
+          </Form.Item>
         </React.Fragment>
       )
     }
@@ -139,7 +139,9 @@ export const useMessage = () => {
         currentChatModelSettings?.numGpu ?? userDefaultModelSettings?.numGpu,
       numPredict:
         currentChatModelSettings?.numPredict ??
-        userDefaultModelSettings?.numPredict
+        userDefaultModelSettings?.numPredict,
+      useMMap:
+        currentChatModelSettings?.useMMap ?? userDefaultModelSettings?.useMMap
     })

     let newMessage: Message[] = []
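The same three-line fallback now recurs in every pageAssistModel call site below. A tiny helper (hypothetical; the commit pastes the expression inline at each site instead) shows the shape each hunk computes:

// Hypothetical helper: the per-chat setting wins, else the user default.
const resolveUseMMap = (
  chat?: { useMMap?: boolean },
  defaults?: { useMMap?: boolean }
): boolean | undefined => chat?.useMMap ?? defaults?.useMMap

// e.g. useMMap: resolveUseMMap(currentChatModelSettings, userDefaultModelSettings)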
@@ -271,7 +273,10 @@ export const useMessage = () => {
             userDefaultModelSettings?.numGpu,
           numPredict:
             currentChatModelSettings?.numPredict ??
-            userDefaultModelSettings?.numPredict
+            userDefaultModelSettings?.numPredict,
+          useMMap:
+            currentChatModelSettings?.useMMap ??
+            userDefaultModelSettings?.useMMap
         })
         const response = await questionOllama.invoke(promptForQuestion)
         query = response.content.toString()
@@ -482,7 +487,9 @@ export const useMessage = () => {
         currentChatModelSettings?.numGpu ?? userDefaultModelSettings?.numGpu,
       numPredict:
         currentChatModelSettings?.numPredict ??
-        userDefaultModelSettings?.numPredict
+        userDefaultModelSettings?.numPredict,
+      useMMap:
+        currentChatModelSettings?.useMMap ?? userDefaultModelSettings?.useMMap
     })

     let newMessage: Message[] = []
@@ -716,7 +723,9 @@ export const useMessage = () => {
         currentChatModelSettings?.numGpu ?? userDefaultModelSettings?.numGpu,
       numPredict:
         currentChatModelSettings?.numPredict ??
-        userDefaultModelSettings?.numPredict
+        userDefaultModelSettings?.numPredict,
+      useMMap:
+        currentChatModelSettings?.useMMap ?? userDefaultModelSettings?.useMMap
     })

     let newMessage: Message[] = []
@@ -944,7 +953,9 @@ export const useMessage = () => {
         currentChatModelSettings?.numGpu ?? userDefaultModelSettings?.numGpu,
       numPredict:
         currentChatModelSettings?.numPredict ??
-        userDefaultModelSettings?.numPredict
+        userDefaultModelSettings?.numPredict,
+      useMMap:
+        currentChatModelSettings?.useMMap ?? userDefaultModelSettings?.useMMap
     })

     let newMessage: Message[] = []
@@ -1023,7 +1034,10 @@ export const useMessage = () => {
             userDefaultModelSettings?.numGpu,
           numPredict:
             currentChatModelSettings?.numPredict ??
-            userDefaultModelSettings?.numPredict
+            userDefaultModelSettings?.numPredict,
+          useMMap:
+            currentChatModelSettings?.useMMap ??
+            userDefaultModelSettings?.useMMap
         })
         const response = await questionOllama.invoke(promptForQuestion)
         query = response.content.toString()
@@ -1211,7 +1225,9 @@ export const useMessage = () => {
         currentChatModelSettings?.numGpu ?? userDefaultModelSettings?.numGpu,
       numPredict:
         currentChatModelSettings?.numPredict ??
-        userDefaultModelSettings?.numPredict
+        userDefaultModelSettings?.numPredict,
+      useMMap:
+        currentChatModelSettings?.useMMap ?? userDefaultModelSettings?.useMMap
     })

     let newMessage: Message[] = []

@@ -130,7 +130,9 @@ export const useMessageOption = () => {
         currentChatModelSettings?.numGpu ?? userDefaultModelSettings?.numGpu,
       numPredict:
         currentChatModelSettings?.numPredict ??
-        userDefaultModelSettings?.numPredict
+        userDefaultModelSettings?.numPredict,
+      useMMap:
+        currentChatModelSettings?.useMMap ?? userDefaultModelSettings?.useMMap
     })

     let newMessage: Message[] = []
@@ -209,7 +211,10 @@ export const useMessageOption = () => {
             userDefaultModelSettings?.numGpu,
           numPredict:
             currentChatModelSettings?.numPredict ??
-            userDefaultModelSettings?.numPredict
+            userDefaultModelSettings?.numPredict,
+          useMMap:
+            currentChatModelSettings?.useMMap ??
+            userDefaultModelSettings?.useMMap
         })
         const response = await questionOllama.invoke(promptForQuestion)
         query = response.content.toString()
@@ -429,7 +434,9 @@ export const useMessageOption = () => {
         currentChatModelSettings?.numGpu ?? userDefaultModelSettings?.numGpu,
       numPredict:
         currentChatModelSettings?.numPredict ??
-        userDefaultModelSettings?.numPredict
+        userDefaultModelSettings?.numPredict,
+      useMMap:
+        currentChatModelSettings?.useMMap ?? userDefaultModelSettings?.useMMap
     })

     let newMessage: Message[] = []
@@ -667,7 +674,9 @@ export const useMessageOption = () => {
         currentChatModelSettings?.numGpu ?? userDefaultModelSettings?.numGpu,
       numPredict:
         currentChatModelSettings?.numPredict ??
-        userDefaultModelSettings?.numPredict
+        userDefaultModelSettings?.numPredict,
+      useMMap:
+        currentChatModelSettings?.useMMap ?? userDefaultModelSettings?.useMMap
     })

     let newMessage: Message[] = []
@@ -762,7 +771,10 @@ export const useMessageOption = () => {
             userDefaultModelSettings?.numGpu,
           numPredict:
             currentChatModelSettings?.numPredict ??
-            userDefaultModelSettings?.numPredict
+            userDefaultModelSettings?.numPredict,
+          useMMap:
+            currentChatModelSettings?.useMMap ??
+            userDefaultModelSettings?.useMMap
         })
         const response = await questionOllama.invoke(promptForQuestion)
        query = response.content.toString()
@@ -16,6 +16,7 @@ export const pageAssistModel = async ({
   seed,
   numGpu,
   numPredict,
+  useMMap
 }: {
   model: string
   baseUrl: string

@@ -27,6 +28,7 @@ export const pageAssistModel = async ({
   seed?: number
   numGpu?: number
   numPredict?: number
+  useMMap?: boolean
 }) => {

   if (model === "chrome::gemini-nano::page-assist") {

@@ -73,7 +75,8 @@ export const pageAssistModel = async ({
       seed,
       model,
       numGpu,
-      numPredict
+      numPredict,
+      useMMap,
     })
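For context on where the flag ends up: pageAssistModel constructs the chat client, and if that client follows LangChain's ChatOllama interface (an assumption; the constructor call itself is outside this diff), useMMap is forwarded to Ollama as its use_mmap option, which controls whether model weights are memory-mapped from disk rather than loaded fully into RAM. A minimal sketch under that assumption:

// Sketch only: assumes LangChain's ChatOllama, which accepts useMMap
// and passes it to Ollama as the use_mmap model option.
import { ChatOllama } from "@langchain/community/chat_models/ollama"

const chat = new ChatOllama({
  baseUrl: "http://localhost:11434",
  model: "llama3",
  useMMap: false // load weights into RAM instead of mmap-ing the file
})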