Add support for LlamaFile, a new model provider that allows users to interact with models stored in LlamaFile format. This includes:

- Adding an icon for LlamaFile in the provider selection menu.
- Updating the model provider selection to include LlamaFile.
- Updating the model handling logic to properly identify and process LlamaFile models (see the sketch after this list).
- Updating the API providers list to include LlamaFile.

This enables users to leverage the capabilities of LlamaFile models within the application.
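A minimal sketch of how the model handling logic might branch on the new provider value. The isLlamaFileModel helper and the OAIModel shape are assumptions for illustration, not the application's actual API:

// Hypothetical model shape; field names are assumptions for illustration.
interface OAIModel {
  id: string
  provider: string // e.g. "lmstudio", "llamafile", "openai"
  baseUrl: string
}

// A LlamaFile model is identified by its provider value,
// mirroring how the other OpenAI-compatible providers are handled.
export const isLlamaFileModel = (model: OAIModel): boolean =>
  model.provider === "llamafile"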
export const OAI_API_PROVIDERS = [
  {
    label: "LM Studio",
    value: "lmstudio",
    baseUrl: "http://localhost:1234/v1"
  },
  {
    label: "LlamaFile",
    value: "llamafile",
    baseUrl: "http://127.0.0.1:8080/v1"
  },
  {
    label: "OpenAI",
    value: "openai",
    baseUrl: "https://api.openai.com/v1"
  },
  {
    label: "Fireworks",
    value: "fireworks",
    baseUrl: "https://api.fireworks.ai/inference/v1"
  },
  {
    label: "Groq",
    value: "groq",
    baseUrl: "https://api.groq.com/openai/v1"
  },
  {
    label: "Together",
    value: "together",
    baseUrl: "https://api.together.xyz/v1"
  },
  {
    label: "OpenRouter",
    value: "openrouter",
    baseUrl: "https://openrouter.ai/api/v1"
  },
  {
    label: "Custom",
    value: "custom",
    baseUrl: ""
  }
]
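For context, an entry from this list would typically be used to point an OpenAI-compatible client at the selected provider's endpoint. The snippet below is a hedged illustration using the openai npm package; the getProvider helper and the placeholder API key are assumptions, not code from this change:

import OpenAI from "openai"

// Hypothetical helper: look up a provider entry by its value.
const getProvider = (value: string) =>
  OAI_API_PROVIDERS.find((p) => p.value === value)

// LlamaFile serves an OpenAI-compatible API locally, so a real key is not
// required; the placeholder key is an assumption for this sketch.
const llamafile = getProvider("llamafile")
const client = new OpenAI({
  baseURL: llamafile?.baseUrl,
  apiKey: "sk-no-key-required"
})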