// OpenAI-compatible API providers and their default base URLs.
// "Custom" leaves baseUrl empty so the user can supply their own endpoint.
export const OAI_API_PROVIDERS = [
  { label: "LM Studio", value: "lmstudio", baseUrl: "http://localhost:1234/v1" },
  { label: "LlamaFile", value: "llamafile", baseUrl: "http://127.0.0.1:8080/v1" },
  { label: "OpenAI", value: "openai", baseUrl: "https://api.openai.com/v1" },
  { label: "Fireworks", value: "fireworks", baseUrl: "https://api.fireworks.ai/inference/v1" },
  { label: "Groq", value: "groq", baseUrl: "https://api.groq.com/openai/v1" },
  { label: "Together", value: "together", baseUrl: "https://api.together.xyz/v1" },
  { label: "OpenRouter", value: "openrouter", baseUrl: "https://openrouter.ai/api/v1" },
  { label: "Custom", value: "custom", baseUrl: "" }
];
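
// Minimal usage sketch (illustrative, not part of the original file): resolving a
// provider's base URL from its `value` key. The helper name `resolveBaseUrl` and the
// `customUrl` parameter are assumptions for demonstration, not an existing API here.
export function resolveBaseUrl(value: string, customUrl?: string): string | undefined {
  const provider = OAI_API_PROVIDERS.find((p) => p.value === value);
  if (!provider) return undefined;
  // "custom" ships with an empty baseUrl, so fall back to a caller-supplied URL.
  return provider.baseUrl || customUrl;
}

// Example: resolveBaseUrl("groq") returns "https://api.groq.com/openai/v1".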