feat(ask_sb): OpenAI compatible language models (#424)

* wip

* docs + add option for api key

* feedback

* nit
Brendan Kellam 2025-08-04 17:25:54 -07:00 committed by GitHub
parent 68107da7dc
commit 65d3cd9dc6
13 changed files with 708 additions and 25 deletions

View file

@ -7,6 +7,9 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
## [Unreleased]
### Added
- [ask sb] Added OpenAI Compatible Language Provider. [#424](https://github.com/sourcebot-dev/sourcebot/pull/424)
## [4.6.2] - 2025-07-31
### Changed

View file

@ -3,6 +3,12 @@ title: Language Model Providers
sidebarTitle: Language model providers
---
import LanguageModelSchema from '/snippets/schemas/v3/languageModel.schema.mdx'
<Note>
Looking to self-host your own model? Check out the [OpenAI Compatible](#openai-compatible) provider.
</Note>
To use [Ask Sourcebot](/docs/features/ask) you must define at least one Language Model Provider. These providers are defined within the [config file](/docs/configuration/config-file) you
provide Sourcebot.
@ -45,7 +51,7 @@ For a detailed description of all the providers, please refer to the [schema](ht
### Amazon Bedrock
[Vercel AI SDK Amazon Bedrock Docs](https://ai-sdk.dev/providers/ai-sdk-providers/amazon-bedrock)
```json wrap icon="code" Example config with Amazon Bedrock provider
{
@ -70,7 +76,7 @@ For a detailed description of all the providers, please refer to the [schema](ht
### Anthropic
[Vercel AI SDK Anthropic Docs](https://ai-sdk.dev/providers/ai-sdk-providers/anthropic)
```json wrap icon="code" Example config with Anthropic provider
{
@ -91,7 +97,7 @@ For a detailed description of all the providers, please refer to the [schema](ht
### Azure OpenAI
[Vercel AI SDK Azure OpenAI Docs](https://ai-sdk.dev/providers/ai-sdk-providers/azure)
```json wrap icon="code" Example config with Azure AI provider
{
@ -114,7 +120,7 @@ For a detailed description of all the providers, please refer to the [schema](ht
### Deepseek
[Vercel AI SDK Deepseek Docs](https://ai-sdk.dev/providers/ai-sdk-providers/deepseek)
```json wrap icon="code" Example config with Deepseek provider
{
@ -135,7 +141,7 @@ For a detailed description of all the providers, please refer to the [schema](ht
### Google Generative AI
[Vercel AI SDK Google Generative AI Docs](https://ai-sdk.dev/providers/ai-sdk-providers/google-generative-ai)
```json wrap icon="code" Example config with Google Generative AI provider
{
@ -159,7 +165,7 @@ For a detailed description of all the providers, please refer to the [schema](ht
<Note>If you're using an Anthropic model on Google Vertex, you must define a [Google Vertex Anthropic](#google-vertex-anthropic) provider instead</Note>
<Note>The `credentials` parameter here expects a **path** to a [credentials](https://console.cloud.google.com/apis/credentials) file. This file **must be in a volume mounted by Sourcebot** for it to be readable.</Note>
[Vercel AI SDK Google Vertex AI Docs](https://ai-sdk.dev/providers/ai-sdk-providers/google-vertex)
```json wrap icon="code" Example config with Google Vertex provider
{
@ -185,7 +191,7 @@ For a detailed description of all the providers, please refer to the [schema](ht
<Note>The `credentials` parameter here expects a **path** to a [credentials](https://console.cloud.google.com/apis/credentials) file. This file **must be in a volume mounted by Sourcebot** for it to be readable.</Note>
[Vercel AI SDK Google Vertex Anthropic Docs](https://ai-sdk.dev/providers/ai-sdk-providers/google-vertex#google-vertex-anthropic-provider-usage)
```json wrap icon="code" Example config with Google Vertex Anthropic provider
{
@ -208,7 +214,7 @@ For a detailed description of all the providers, please refer to the [schema](ht
### Mistral
[Vercel AI SDK Mistral Docs](https://ai-sdk.dev/providers/ai-sdk-providers/mistral)
```json wrap icon="code" Example config with Mistral provider
{
@ -229,7 +235,7 @@ For a detailed description of all the providers, please refer to the [schema](ht
### OpenAI
[Vercel AI SDK OpenAI Docs](https://ai-sdk.dev/providers/ai-sdk-providers/openai)
```json wrap icon="code" Example config with OpenAI provider
{
@ -248,9 +254,36 @@ For a detailed description of all the providers, please refer to the [schema](ht
}
```
### OpenAI Compatible
[Vercel AI SDK OpenAI Compatible Docs](https://ai-sdk.dev/providers/openai-compatible-providers)
The OpenAI compatible provider allows you to use any model that is compatible with the OpenAI [Chat Completions API](https://github.com/ollama/ollama/blob/main/docs/openai.md). This includes self-hosted tools like [Ollama](https://ollama.ai/) and [llama.cpp](https://github.com/ggerganov/llama.cpp).
```json wrap icon="code" Example config with OpenAI Compatible provider
{
"$schema": "https://raw.githubusercontent.com/sourcebot-dev/sourcebot/main/schemas/v3/index.json",
"models": [
{
"provider": "openai-compatible",
"baseUrl": "BASE_URL_HERE",
"model": "YOUR_MODEL_HERE",
"displayName": "OPTIONAL_DISPLAY_NAME",
"token": {
"env": "OPTIONAL_API_KEY"
}
}
]
}
```
<Accordion title="Troubleshooting">
- When using [llama.cpp](https://github.com/ggml-org/llama.cpp), if you hit "Failed after 3 attempts. Last error: tools param requires --jinja flag", add the `--jinja` flag to your `llama-server` command.
</Accordion>
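For example, here is a minimal sketch of a config that points Ask Sourcebot at a local [Ollama](https://ollama.ai/) instance. It assumes Ollama's default OpenAI-compatible endpoint at `http://localhost:11434/v1` and a locally pulled model named `llama3.1`; substitute your own host and model name. No `token` is set because Ollama does not require an API key by default.
```json wrap icon="code" Example config with a local Ollama instance
{
  "$schema": "https://raw.githubusercontent.com/sourcebot-dev/sourcebot/main/schemas/v3/index.json",
  "models": [
    {
      "provider": "openai-compatible",
      "baseUrl": "http://localhost:11434/v1",
      "model": "llama3.1",
      "displayName": "Llama 3.1 (Ollama)"
    }
  ]
}
```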
### OpenRouter
[Vercel AI SDK OpenRouter Docs](https://ai-sdk.dev/providers/community-providers/openrouter)
```json wrap icon="code" Example config with OpenRouter provider
{
@ -271,7 +304,7 @@ For a detailed description of all the providers, please refer to the [schema](ht
### xAI
[Vercel AI SDK xAI Docs](https://ai-sdk.dev/providers/ai-sdk-providers/xai)
```json wrap icon="code" Example config with xAI provider
{
@ -288,4 +321,14 @@ For a detailed description of all the providers, please refer to the [schema](ht
}
]
}
```
## Schema reference
<Accordion title="Reference">
[schemas/v3/languageModel.json](https://github.com/sourcebot-dev/sourcebot/blob/main/schemas/v3/languageModel.json)
<LanguageModelSchema />
</Accordion>

View file

@ -1785,6 +1785,69 @@
],
"additionalProperties": false
},
"OpenAICompatibleLanguageModel": {
"type": "object",
"properties": {
"provider": {
"const": "openai-compatible",
"description": "OpenAI Compatible Configuration"
},
"model": {
"type": "string",
"description": "The name of the language model."
},
"displayName": {
"type": "string",
"description": "Optional display name."
},
"token": {
"description": "Optional API key. If specified, adds an `Authorization` header to request headers with the value Bearer <token>.",
"anyOf": [
{
"type": "object",
"properties": {
"secret": {
"type": "string",
"description": "The name of the secret that contains the token."
}
},
"required": [
"secret"
],
"additionalProperties": false
},
{
"type": "object",
"properties": {
"env": {
"type": "string",
"description": "The name of the environment variable that contains the token. Only supported in declarative connection configs."
}
},
"required": [
"env"
],
"additionalProperties": false
}
]
},
"baseUrl": {
"type": "string",
"format": "url",
"pattern": "^https?:\\/\\/[^\\s/$.?#].[^\\s]*$",
"description": "Base URL of the OpenAI-compatible chat completions API endpoint.",
"examples": [
"http://localhost:8080/v1"
]
}
},
"required": [
"provider",
"model",
"baseUrl"
],
"additionalProperties": false
},
"OpenRouterLanguageModel": { "OpenRouterLanguageModel": {
"type": "object", "type": "object",
"properties": { "properties": {
@ -2528,6 +2591,69 @@
],
"additionalProperties": false
},
{
"type": "object",
"properties": {
"provider": {
"const": "openai-compatible",
"description": "OpenAI Compatible Configuration"
},
"model": {
"type": "string",
"description": "The name of the language model."
},
"displayName": {
"type": "string",
"description": "Optional display name."
},
"token": {
"description": "Optional API key. If specified, adds an `Authorization` header to request headers with the value Bearer <token>.",
"anyOf": [
{
"type": "object",
"properties": {
"secret": {
"type": "string",
"description": "The name of the secret that contains the token."
}
},
"required": [
"secret"
],
"additionalProperties": false
},
{
"type": "object",
"properties": {
"env": {
"type": "string",
"description": "The name of the environment variable that contains the token. Only supported in declarative connection configs."
}
},
"required": [
"env"
],
"additionalProperties": false
}
]
},
"baseUrl": {
"type": "string",
"format": "url",
"pattern": "^https?:\\/\\/[^\\s/$.?#].[^\\s]*$",
"description": "Base URL of the OpenAI-compatible chat completions API endpoint.",
"examples": [
"http://localhost:8080/v1"
]
}
},
"required": [
"provider",
"model",
"baseUrl"
],
"additionalProperties": false
},
{
"type": "object",
"properties": {

View file

@ -623,6 +623,69 @@
],
"additionalProperties": false
},
"OpenAICompatibleLanguageModel": {
"type": "object",
"properties": {
"provider": {
"const": "openai-compatible",
"description": "OpenAI Compatible Configuration"
},
"model": {
"type": "string",
"description": "The name of the language model."
},
"displayName": {
"type": "string",
"description": "Optional display name."
},
"token": {
"description": "Optional API key. If specified, adds an `Authorization` header to request headers with the value Bearer <token>.",
"anyOf": [
{
"type": "object",
"properties": {
"secret": {
"type": "string",
"description": "The name of the secret that contains the token."
}
},
"required": [
"secret"
],
"additionalProperties": false
},
{
"type": "object",
"properties": {
"env": {
"type": "string",
"description": "The name of the environment variable that contains the token. Only supported in declarative connection configs."
}
},
"required": [
"env"
],
"additionalProperties": false
}
]
},
"baseUrl": {
"type": "string",
"format": "url",
"pattern": "^https?:\\/\\/[^\\s/$.?#].[^\\s]*$",
"description": "Base URL of the OpenAI-compatible chat completions API endpoint.",
"examples": [
"http://localhost:8080/v1"
]
}
},
"required": [
"provider",
"model",
"baseUrl"
],
"additionalProperties": false
},
"OpenRouterLanguageModel": { "OpenRouterLanguageModel": {
"type": "object", "type": "object",
"properties": { "properties": {
@ -1366,6 +1429,69 @@
],
"additionalProperties": false
},
{
"type": "object",
"properties": {
"provider": {
"const": "openai-compatible",
"description": "OpenAI Compatible Configuration"
},
"model": {
"type": "string",
"description": "The name of the language model."
},
"displayName": {
"type": "string",
"description": "Optional display name."
},
"token": {
"description": "Optional API key. If specified, adds an `Authorization` header to request headers with the value Bearer <token>.",
"anyOf": [
{
"type": "object",
"properties": {
"secret": {
"type": "string",
"description": "The name of the secret that contains the token."
}
},
"required": [
"secret"
],
"additionalProperties": false
},
{
"type": "object",
"properties": {
"env": {
"type": "string",
"description": "The name of the environment variable that contains the token. Only supported in declarative connection configs."
}
},
"required": [
"env"
],
"additionalProperties": false
}
]
},
"baseUrl": {
"type": "string",
"format": "url",
"pattern": "^https?:\\/\\/[^\\s/$.?#].[^\\s]*$",
"description": "Base URL of the OpenAI-compatible chat completions API endpoint.",
"examples": [
"http://localhost:8080/v1"
]
}
},
"required": [
"provider",
"model",
"baseUrl"
],
"additionalProperties": false
},
{
"type": "object",
"properties": {

View file

@ -1784,6 +1784,69 @@ const schema = {
],
"additionalProperties": false
},
"OpenAICompatibleLanguageModel": {
"type": "object",
"properties": {
"provider": {
"const": "openai-compatible",
"description": "OpenAI Compatible Configuration"
},
"model": {
"type": "string",
"description": "The name of the language model."
},
"displayName": {
"type": "string",
"description": "Optional display name."
},
"token": {
"description": "Optional API key. If specified, adds an `Authorization` header to request headers with the value Bearer <token>.",
"anyOf": [
{
"type": "object",
"properties": {
"secret": {
"type": "string",
"description": "The name of the secret that contains the token."
}
},
"required": [
"secret"
],
"additionalProperties": false
},
{
"type": "object",
"properties": {
"env": {
"type": "string",
"description": "The name of the environment variable that contains the token. Only supported in declarative connection configs."
}
},
"required": [
"env"
],
"additionalProperties": false
}
]
},
"baseUrl": {
"type": "string",
"format": "url",
"pattern": "^https?:\\/\\/[^\\s/$.?#].[^\\s]*$",
"description": "Base URL of the OpenAI-compatible chat completions API endpoint.",
"examples": [
"http://localhost:8080/v1"
]
}
},
"required": [
"provider",
"model",
"baseUrl"
],
"additionalProperties": false
},
"OpenRouterLanguageModel": { "OpenRouterLanguageModel": {
"type": "object", "type": "object",
"properties": { "properties": {
@ -2527,6 +2590,69 @@ const schema = {
],
"additionalProperties": false
},
{
"type": "object",
"properties": {
"provider": {
"const": "openai-compatible",
"description": "OpenAI Compatible Configuration"
},
"model": {
"type": "string",
"description": "The name of the language model."
},
"displayName": {
"type": "string",
"description": "Optional display name."
},
"token": {
"description": "Optional API key. If specified, adds an `Authorization` header to request headers with the value Bearer <token>.",
"anyOf": [
{
"type": "object",
"properties": {
"secret": {
"type": "string",
"description": "The name of the secret that contains the token."
}
},
"required": [
"secret"
],
"additionalProperties": false
},
{
"type": "object",
"properties": {
"env": {
"type": "string",
"description": "The name of the environment variable that contains the token. Only supported in declarative connection configs."
}
},
"required": [
"env"
],
"additionalProperties": false
}
]
},
"baseUrl": {
"type": "string",
"format": "url",
"pattern": "^https?:\\/\\/[^\\s/$.?#].[^\\s]*$",
"description": "Base URL of the OpenAI-compatible chat completions API endpoint.",
"examples": [
"http://localhost:8080/v1"
]
}
},
"required": [
"provider",
"model",
"baseUrl"
],
"additionalProperties": false
},
{
"type": "object",
"properties": {

View file

@ -21,6 +21,7 @@ export type LanguageModel =
| GoogleVertexLanguageModel
| MistralLanguageModel
| OpenAILanguageModel
| OpenAICompatibleLanguageModel
| OpenRouterLanguageModel
| XaiLanguageModel;
@ -791,6 +792,40 @@ export interface OpenAILanguageModel {
*/
baseUrl?: string;
}
export interface OpenAICompatibleLanguageModel {
/**
* OpenAI Compatible Configuration
*/
provider: "openai-compatible";
/**
* The name of the language model.
*/
model: string;
/**
* Optional display name.
*/
displayName?: string;
/**
* Optional API key. If specified, adds an `Authorization` header to request headers with the value Bearer <token>.
*/
token?:
| {
/**
* The name of the secret that contains the token.
*/
secret: string;
}
| {
/**
* The name of the environment variable that contains the token. Only supported in declarative connection configs.
*/
env: string;
};
/**
* Base URL of the OpenAI-compatible chat completions API endpoint.
*/
baseUrl: string;
}
export interface OpenRouterLanguageModel {
/**
* OpenRouter Configuration

View file

@ -622,6 +622,69 @@ const schema = {
],
"additionalProperties": false
},
"OpenAICompatibleLanguageModel": {
"type": "object",
"properties": {
"provider": {
"const": "openai-compatible",
"description": "OpenAI Compatible Configuration"
},
"model": {
"type": "string",
"description": "The name of the language model."
},
"displayName": {
"type": "string",
"description": "Optional display name."
},
"token": {
"description": "Optional API key. If specified, adds an `Authorization` header to request headers with the value Bearer <token>.",
"anyOf": [
{
"type": "object",
"properties": {
"secret": {
"type": "string",
"description": "The name of the secret that contains the token."
}
},
"required": [
"secret"
],
"additionalProperties": false
},
{
"type": "object",
"properties": {
"env": {
"type": "string",
"description": "The name of the environment variable that contains the token. Only supported in declarative connection configs."
}
},
"required": [
"env"
],
"additionalProperties": false
}
]
},
"baseUrl": {
"type": "string",
"format": "url",
"pattern": "^https?:\\/\\/[^\\s/$.?#].[^\\s]*$",
"description": "Base URL of the OpenAI-compatible chat completions API endpoint.",
"examples": [
"http://localhost:8080/v1"
]
}
},
"required": [
"provider",
"model",
"baseUrl"
],
"additionalProperties": false
},
"OpenRouterLanguageModel": { "OpenRouterLanguageModel": {
"type": "object", "type": "object",
"properties": { "properties": {
@ -1365,6 +1428,69 @@ const schema = {
],
"additionalProperties": false
},
{
"type": "object",
"properties": {
"provider": {
"const": "openai-compatible",
"description": "OpenAI Compatible Configuration"
},
"model": {
"type": "string",
"description": "The name of the language model."
},
"displayName": {
"type": "string",
"description": "Optional display name."
},
"token": {
"description": "Optional API key. If specified, adds an `Authorization` header to request headers with the value Bearer <token>.",
"anyOf": [
{
"type": "object",
"properties": {
"secret": {
"type": "string",
"description": "The name of the secret that contains the token."
}
},
"required": [
"secret"
],
"additionalProperties": false
},
{
"type": "object",
"properties": {
"env": {
"type": "string",
"description": "The name of the environment variable that contains the token. Only supported in declarative connection configs."
}
},
"required": [
"env"
],
"additionalProperties": false
}
]
},
"baseUrl": {
"type": "string",
"format": "url",
"pattern": "^https?:\\/\\/[^\\s/$.?#].[^\\s]*$",
"description": "Base URL of the OpenAI-compatible chat completions API endpoint.",
"examples": [
"http://localhost:8080/v1"
]
}
},
"required": [
"provider",
"model",
"baseUrl"
],
"additionalProperties": false
},
{
"type": "object",
"properties": {

View file

@ -10,6 +10,7 @@ export type LanguageModel =
| GoogleVertexLanguageModel
| MistralLanguageModel
| OpenAILanguageModel
| OpenAICompatibleLanguageModel
| OpenRouterLanguageModel
| XaiLanguageModel;
@ -363,6 +364,40 @@ export interface OpenAILanguageModel {
*/
baseUrl?: string;
}
export interface OpenAICompatibleLanguageModel {
/**
* OpenAI Compatible Configuration
*/
provider: "openai-compatible";
/**
* The name of the language model.
*/
model: string;
/**
* Optional display name.
*/
displayName?: string;
/**
* Optional API key. If specified, adds an `Authorization` header to request headers with the value Bearer <token>.
*/
token?:
| {
/**
* The name of the secret that contains the token.
*/
secret: string;
}
| {
/**
* The name of the environment variable that contains the token. Only supported in declarative connection configs.
*/
env: string;
};
/**
* Base URL of the OpenAI-compatible chat completions API endpoint.
*/
baseUrl: string;
}
export interface OpenRouterLanguageModel {
/**
* OpenRouter Configuration

View file

@ -20,6 +20,7 @@
"@ai-sdk/google-vertex": "3.0.0", "@ai-sdk/google-vertex": "3.0.0",
"@ai-sdk/mistral": "2.0.0", "@ai-sdk/mistral": "2.0.0",
"@ai-sdk/openai": "2.0.0", "@ai-sdk/openai": "2.0.0",
"@ai-sdk/openai-compatible": "^1.0.0",
"@ai-sdk/react": "2.0.0", "@ai-sdk/react": "2.0.0",
"@ai-sdk/xai": "2.0.0", "@ai-sdk/xai": "2.0.0",
"@auth/prisma-adapter": "^2.7.4", "@auth/prisma-adapter": "^2.7.4",

View file

@ -17,6 +17,7 @@ import { createVertex } from '@ai-sdk/google-vertex';
import { createVertexAnthropic } from '@ai-sdk/google-vertex/anthropic';
import { createMistral } from '@ai-sdk/mistral';
import { createOpenAI, OpenAIResponsesProviderOptions } from "@ai-sdk/openai";
import { createOpenAICompatible } from "@ai-sdk/openai-compatible";
import { LanguageModelV2 as AISDKLanguageModelV2 } from "@ai-sdk/provider";
import { createXai } from '@ai-sdk/xai';
import { createOpenRouter } from '@openrouter/ai-sdk-provider';
@ -444,6 +445,19 @@ const getAISDKLanguageModelAndOptions = async (config: LanguageModel, orgId: num
},
};
}
case 'openai-compatible': {
const openai = createOpenAICompatible({
baseURL: config.baseUrl,
name: config.displayName ?? modelId,
apiKey: config.token
? await getTokenFromConfig(config.token, orgId, prisma)
: undefined,
});
return {
model: openai.chatModel(modelId),
}
}
case 'openrouter': {
const openrouter = createOpenRouter({
baseURL: config.baseUrl,

View file

@ -13,6 +13,7 @@ import deepseekLogo from "@/public/deepseek.svg";
import mistralLogo from "@/public/mistral.svg";
import openrouterLogo from "@/public/openrouter.svg";
import xaiLogo from "@/public/xai.svg";
import { Box, LucideIcon } from "lucide-react";
interface ModelProviderLogoProps {
provider: LanguageModelProvider;
@ -23,12 +24,12 @@ export const ModelProviderLogo = ({
provider,
className,
}: ModelProviderLogoProps) => {
const { src, Icon, className: logoClassName } = useMemo((): { src?: string, Icon?: LucideIcon, className?: string } => {
switch (provider) {
case 'amazon-bedrock':
return {
src: bedrockLogo,
className: 'dark:invert'
};
case 'anthropic':
return {
@ -38,23 +39,20 @@ export const ModelProviderLogo = ({
case 'azure':
return {
src: azureAiLogo,
className: 'w-3.5 h-3.5'
};
case 'deepseek':
return {
src: deepseekLogo,
className: 'w-3.5 h-3.5'
};
case 'openai':
return {
src: openaiLogo,
className: 'dark:invert'
};
case 'google-generative-ai':
case 'google-vertex':
return {
src: geminiLogo,
className: 'w-3.5 h-3.5'
};
case 'google-vertex-anthropic':
return {
@ -64,30 +62,40 @@ export const ModelProviderLogo = ({
case 'mistral':
return {
src: mistralLogo,
className: 'w-3.5 h-3.5'
};
case 'openrouter':
return {
src: openrouterLogo,
className: 'dark:invert'
};
case 'xai':
return {
src: xaiLogo,
className: 'dark:invert'
};
case 'openai-compatible':
return {
Icon: Box,
className: 'text-muted-foreground'
}; };
} }
}, [provider]);
return src ? (
<Image
src={src}
alt={provider}
className={cn(
'w-3.5 h-3.5',
logoClassName,
className
)}
/>
) : Icon ? (
<Icon className={cn(
'w-3.5 h-3.5',
logoClassName,
className
)} />
) : null;
}

View file

@ -351,6 +351,42 @@
],
"additionalProperties": false
},
"OpenAICompatibleLanguageModel": {
"type": "object",
"properties": {
"provider": {
"const": "openai-compatible",
"description": "OpenAI Compatible Configuration"
},
"model": {
"type": "string",
"description": "The name of the language model."
},
"displayName": {
"type": "string",
"description": "Optional display name."
},
"token": {
"$ref": "./shared.json#/definitions/Token",
"description": "Optional API key. If specified, adds an `Authorization` header to request headers with the value Bearer <token>."
},
"baseUrl": {
"type": "string",
"format": "url",
"pattern": "^https?:\\/\\/[^\\s/$.?#].[^\\s]*$",
"description": "Base URL of the OpenAI-compatible chat completions API endpoint.",
"examples": [
"http://localhost:8080/v1"
]
}
},
"required": [
"provider",
"model",
"baseUrl"
],
"additionalProperties": false
},
"OpenRouterLanguageModel": { "OpenRouterLanguageModel": {
"type": "object", "type": "object",
"properties": { "properties": {
@ -448,6 +484,9 @@
{
"$ref": "#/definitions/OpenAILanguageModel"
},
{
"$ref": "#/definitions/OpenAICompatibleLanguageModel"
},
{
"$ref": "#/definitions/OpenRouterLanguageModel"
},

View file

@ -110,7 +110,7 @@ __metadata:
languageName: node
linkType: hard
"@ai-sdk/openai-compatible@npm:1.0.0, @ai-sdk/openai-compatible@npm:^1.0.0":
version: 1.0.0
resolution: "@ai-sdk/openai-compatible@npm:1.0.0"
dependencies:
@ -6506,6 +6506,7 @@ __metadata:
"@ai-sdk/google-vertex": "npm:3.0.0" "@ai-sdk/google-vertex": "npm:3.0.0"
"@ai-sdk/mistral": "npm:2.0.0" "@ai-sdk/mistral": "npm:2.0.0"
"@ai-sdk/openai": "npm:2.0.0" "@ai-sdk/openai": "npm:2.0.0"
"@ai-sdk/openai-compatible": "npm:^1.0.0"
"@ai-sdk/react": "npm:2.0.0" "@ai-sdk/react": "npm:2.0.0"
"@ai-sdk/xai": "npm:2.0.0" "@ai-sdk/xai": "npm:2.0.0"
"@auth/prisma-adapter": "npm:^2.7.4" "@auth/prisma-adapter": "npm:^2.7.4"