feat: Add OpenAI Provider Selection
Add a provider selection dropdown to the OpenAI settings, letting users choose from pre-configured providers such as LM Studio, OpenAI, Fireworks, Groq, Together, or Custom. Selecting a provider automatically pre-populates the base URL and name fields (sketched below), streamlining setup and giving more flexibility when configuring OpenAI-compatible API endpoints.
parent 3d8c8671e1
commit ff371d6eef
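A minimal sketch of the pre-fill behaviour described above, using the OAI_API_PROVIDERS list added in this commit. The standalone helper name onProviderSelect is illustrative only; in the component the lookup runs inside the Select's onSelect handler and feeds form.setFieldsValue.

    import { OAI_API_PROVIDERS } from "@/utils/oai-api-providers"

    // Look up the chosen provider entry and return the values used to pre-fill the form fields.
    const onProviderSelect = (selected: string) => {
      const provider = OAI_API_PROVIDERS.find((item) => item.value === selected)
      return { baseUrl: provider?.baseUrl ?? "", name: provider?.label ?? "" }
    }

    // onProviderSelect("lmstudio") -> { baseUrl: "http://localhost:1234/v1", name: "LM Studio" }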
@@ -17,7 +17,7 @@
   },
   "baseUrl": {
     "label": "Base URL",
-    "help": "The base URL of the OpenAI API provider. eg (http://loocalhost:8080/v1)",
+    "help": "The base URL of the OpenAI API provider. eg (http://localhost:1234/v1)",
     "required": "Base URL is required.",
     "placeholder": "Enter base URL"
   },
@@ -58,5 +58,6 @@
     "confirm": {
       "delete": "Are you sure you want to delete this model?"
     }
-  }
+  },
+  "noModelFound": "No model found. Make sure you have added correct provider with base URL and API key."
 }
@@ -15,6 +15,7 @@ import { useTranslation } from "react-i18next"
 import { MessageSource } from "./MessageSource"
 import { useTTS } from "@/hooks/useTTS"
 import { tagColors } from "@/utils/color"
+import { removeModelSuffix } from "@/db/models"
 
 type Props = {
   message: string
@@ -69,7 +70,9 @@ export const PlaygroundMessage = (props: Props) => {
         {props.isBot
           ? props.name === "chrome::gemini-nano::page-assist"
             ? "Gemini Nano"
-            : props.name
+            : removeModelSuffix(
+                props.name?.replaceAll(/accounts\/[^\/]+\/models\//g, "")
+              )
           : "You"}
       </span>
@@ -135,7 +138,7 @@ export const PlaygroundMessage = (props: Props) => {
     key: "1",
     label: (
       <div className="italic text-gray-500 dark:text-gray-400">
-        {t('citations')}
+        {t("citations")}
       </div>
     ),
     children: (
@@ -1,4 +1,4 @@
-import { ChromeIcon, CloudCog } from "lucide-react"
+import { ChromeIcon, CpuIcon } from "lucide-react"
 import { OllamaIcon } from "../Icons/Ollama"
 
 export const ProviderIcons = ({
@@ -12,7 +12,7 @@ export const ProviderIcons = ({
     case "chrome":
       return <ChromeIcon className={className} />
     case "custom":
-      return <CloudCog className={className} />
+      return <CpuIcon className={className} />
     default:
       return <OllamaIcon className={className} />
   }
@@ -7,7 +7,7 @@ import {
   InfoIcon,
   CombineIcon,
   ChromeIcon,
-  CloudCogIcon
+  CpuIcon
 } from "lucide-react"
 import { useTranslation } from "react-i18next"
 import { Link, useLocation } from "react-router-dom"
@@ -93,7 +93,7 @@ export const SettingsLayout = ({ children }: { children: React.ReactNode }) => {
       <LinkComponent
         href="/settings/openai"
         name={t("openai:settings")}
-        icon={CloudCogIcon}
+        icon={CpuIcon}
         current={location.pathname}
         beta
       />
@@ -79,11 +79,15 @@ export const OpenAIFetchModel = ({ openaiId, setOpenModelModal }: Props) => {
   if (status === "pending") {
     return <Spin />
   }
 
   if (status === "error" || !data || data.length === 0) {
-    return <div>{t("noModelFound")}</div>
+    return (
+      <div className="flex items-center justify-center h-40">
+        <p className="text-md text-center text-gray-600 dark:text-gray-300">
+          {t("noModelFound")}
+        </p>
+      </div>
+    )
   }
 
   return (
     <div className="space-y-4">
       <p className="text-sm text-gray-500 dark:text-gray-400">
@@ -116,7 +120,12 @@ export const OpenAIFetchModel = ({ openaiId, setOpenModelModal }: Props) => {
             key={model.id}
             checked={selectedModels.includes(model.id)}
             onChange={(e) => handleModelSelect(model.id, e.target.checked)}>
-            {model?.name || model.id}
+            <div className="max-w-[200px] truncate">
+              {`${model?.name || model.id}`.replaceAll(
+                /accounts\/[^\/]+\/models\//g,
+                ""
+              )}
+            </div>
           </Checkbox>
         ))}
       </div>
@@ -1,4 +1,4 @@
-import { Form, Input, Modal, Table, message, Tooltip } from "antd"
+import { Form, Input, Modal, Table, message, Tooltip, Select } from "antd"
 import { useState } from "react"
 import { useTranslation } from "react-i18next"
 import {
@@ -10,6 +10,7 @@ import {
 import { useMutation, useQuery, useQueryClient } from "@tanstack/react-query"
 import { Pencil, Trash2, RotateCwIcon } from "lucide-react"
 import { OpenAIFetchModel } from "./openai-fetch-model"
+import { OAI_API_PROVIDERS } from "@/utils/oai-api-providers"
 
 export const OpenAIApp = () => {
   const { t } = useTranslation("openai")
@@ -182,11 +183,25 @@
           form.resetFields()
         }}
         footer={null}>
+        {!editingConfig && (
+          <Select
+            defaultValue="custom"
+            onSelect={(e) => {
+              const value = OAI_API_PROVIDERS.find((item) => item.value === e)
+              form.setFieldsValue({
+                baseUrl: value?.baseUrl,
+                name: value?.label
+              })
+            }}
+            className="w-full !mb-4"
+            options={OAI_API_PROVIDERS}
+          />
+        )}
         <Form
           form={form}
           layout="vertical"
           onFinish={handleSubmit}
-          initialValues={editingConfig}>
+          initialValues={{ ...editingConfig }}>
           <Form.Item
             name="name"
             label={t("modal.name.label")}
@@ -15,8 +15,8 @@ export const generateID = () => {
   })
 }
 
-export const removeModelPrefix = (id: string) => {
-  return id.replace(/^model-/, "")
+export const removeModelSuffix = (id: string) => {
+  return id.replace(/_model-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{3,4}-[a-f0-9]{4}/, "")
 }
 
 export const isCustomModel = (model: string) => {
@@ -114,7 +114,8 @@ export const createManyModels = async (
       ...item,
       lookup: `${item.model_id}_${item.provider_id}`,
       id: `${item.model_id}_${generateID()}`,
-      db_type: "openai_model"
+      db_type: "openai_model",
+      name: item.name.replaceAll(/accounts\/[^\/]+\/models\//g, "")
     }
   })
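To illustrate the rename above: the createManyModels hunk builds ids of the form `${item.model_id}_${generateID()}`, and removeModelSuffix strips that generated suffix back off for display. The id below is a made-up example chosen to match the regex, not a real record:

    // Hypothetical id; the hex groups are invented purely to match the suffix pattern.
    removeModelSuffix("llama-3_model-a1b2-c3d4-e5f-6789")
    // -> "llama-3"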
@@ -6,6 +6,7 @@ type OpenAIModelConfig = {
   baseUrl: string
   apiKey?: string
   createdAt: number
+  provider?: string
   db_type: string
 }
 
 export const generateID = () => {
@@ -532,24 +532,14 @@ export const useMessage = () => {
     if (prompt && !selectedPrompt) {
       applicationChatHistory.unshift(
         new SystemMessage({
-          content: [
-            {
-              text: prompt,
-              type: "text"
-            }
-          ]
+          content: prompt
         })
       )
     }
     if (selectedPrompt) {
       applicationChatHistory.unshift(
         new SystemMessage({
-          content: [
-            {
-              text: selectedPrompt.content,
-              type: "text"
-            }
-          ]
+          content: selectedPrompt.content
         })
       )
     }
@@ -794,12 +784,7 @@
     if (prompt) {
       applicationChatHistory.unshift(
         new SystemMessage({
-          content: [
-            {
-              text: prompt,
-              type: "text"
-            }
-          ]
+          content: prompt
         })
       )
     }
@@ -238,12 +238,7 @@ export const useMessageOption = () => {
     if (prompt) {
       applicationChatHistory.unshift(
         new SystemMessage({
-          content: [
-            {
-              text: prompt,
-              type: "text"
-            }
-          ]
+          content: prompt
         })
       )
     }
@@ -445,12 +440,7 @@
     if (prompt && !selectedPrompt) {
       applicationChatHistory.unshift(
         new SystemMessage({
-          content: [
-            {
-              text: prompt,
-              type: "text"
-            }
-          ]
+          content: prompt
         })
       )
     }
@@ -462,12 +452,7 @@
     if (!isTempSystemprompt && selectedPrompt) {
       applicationChatHistory.unshift(
         new SystemMessage({
-          content: [
-            {
-              text: selectedPrompt.content,
-              type: "text"
-            }
-          ]
+          content: selectedPrompt.content
         })
       )
     }
@@ -475,12 +460,7 @@
     if (isTempSystemprompt) {
       applicationChatHistory.unshift(
         new SystemMessage({
-          content: [
-            {
-              text: currentChatModelSettings.systemPrompt,
-              type: "text"
-            }
-          ]
+          content: currentChatModelSettings.systemPrompt
         })
       )
     }
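For reference, a small sketch of the two SystemMessage content shapes the hunks above switch between; the import path is an assumption (the diff does not show it), and both forms construct a valid system message in LangChain JS:

    import { SystemMessage } from "@langchain/core/messages"

    // Old shape: content as an array of typed text parts
    const before = new SystemMessage({
      content: [{ type: "text", text: "You are a helpful assistant." }]
    })

    // New shape: content as a plain string
    const after = new SystemMessage({ content: "You are a helpful assistant." })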
src/utils/oai-api-providers.ts (new file, 32 lines)
@@ -0,0 +1,32 @@
+export const OAI_API_PROVIDERS = [
+  {
+    label: "LM Studio",
+    value: "lmstudio",
+    baseUrl: "http://localhost:1234/v1"
+  },
+  {
+    label: "OpenAI",
+    value: "openai",
+    baseUrl: "https://api.openai.com/v1"
+  },
+  {
+    label: "Fireworks",
+    value: "fireworks",
+    baseUrl: "https://api.fireworks.ai/inference/v1"
+  },
+  {
+    label: "Groq",
+    value: "groq",
+    baseUrl: "https://api.groq.com/openai/v1"
+  },
+  {
+    label: "Together",
+    value: "together",
+    baseUrl: "https://api.together.xyz/v1"
+  },
+  {
+    label: "Custom",
+    value: "custom",
+    baseUrl: ""
+  }
+]