---
title: Language Model Providers
sidebarTitle: Language model providers
---

import LanguageModelSchema from '/snippets/schemas/v3/languageModel.schema.mdx'

<Note>
Looking to self-host your own model? Check out the [OpenAI Compatible](#openai-compatible) provider.
</Note>

To use [Ask Sourcebot](/docs/features/ask) you must define at least one Language Model Provider. These providers are defined within the [config file](/docs/configuration/config-file) you provide Sourcebot.
```json wrap icon="code" Example config with language model provider
{
  "$schema": "https://raw.githubusercontent.com/sourcebot-dev/sourcebot/main/schemas/v3/index.json",
  "models": [
    // 1. Google Vertex config for Gemini 2.5 Pro
    {
      "provider": "google-vertex",
      "model": "gemini-2.5-pro",
      "displayName": "Gemini 2.5 Pro",
      "project": "sourcebot",
      "credentials": {
        "env": "GOOGLE_APPLICATION_CREDENTIALS"
      }
    },
    // 2. OpenAI config for o3
    {
      "provider": "openai",
      "model": "o3",
      "displayName": "o3",
      "token": {
        "env": "OPENAI_API_KEY"
      }
    }
  ]
}
```
# Supported Providers

Sourcebot uses the [Vercel AI SDK](https://ai-sdk.dev/docs/introduction), so it can integrate with any provider the SDK supports. If you don't see your provider below, please submit a [feature request](https://github.com/sourcebot-dev/sourcebot/issues/new?template=feature_request.md).

For a detailed description of all the providers, please refer to the [schema](https://github.com/sourcebot-dev/sourcebot/blob/main/schemas/v3/languageModel.json).

<Note>Any parameter defined using `env` will read the value from the corresponding environment variable you provide Sourcebot.</Note>
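For example, in the [Amazon Bedrock](#amazon-bedrock) provider below, `region` is a literal value read directly from the config, while `accessKeySecret` is resolved at runtime from the `AWS_SECRET_ACCESS_KEY` environment variable in Sourcebot's environment (the region shown here is purely illustrative):

```json wrap icon="code" Literal vs. env-backed values
{
  // Fragment of an "amazon-bedrock" model entry (see the full example below).
  // Literal value, read directly from the config file:
  "region": "us-east-1",

  // Env-backed value: Sourcebot reads AWS_SECRET_ACCESS_KEY from the
  // environment it was started with:
  "accessKeySecret": {
    "env": "AWS_SECRET_ACCESS_KEY"
  }
}
```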
### Amazon Bedrock

[Vercel AI SDK Amazon Bedrock Docs](https://ai-sdk.dev/providers/ai-sdk-providers/amazon-bedrock)

```json wrap icon="code" Example config with Amazon Bedrock provider
{
  "$schema": "https://raw.githubusercontent.com/sourcebot-dev/sourcebot/main/schemas/v3/index.json",
  "models": [
    {
      "provider": "amazon-bedrock",
      "model": "YOUR_MODEL_HERE",
      "displayName": "OPTIONAL_DISPLAY_NAME",
      "accessKeyId": {
        "env": "AWS_ACCESS_KEY_ID"
      },
      "accessKeySecret": {
        "env": "AWS_SECRET_ACCESS_KEY"
      },
      "sessionToken": {
        "env": "AWS_SESSION_TOKEN"
      },
      "region": "YOUR_REGION_HERE", // defaults to the AWS_REGION env var if not set
      "baseUrl": "OPTIONAL_BASE_URL"
    }
  ]
}
```
### Anthropic

[Vercel AI SDK Anthropic Docs](https://ai-sdk.dev/providers/ai-sdk-providers/anthropic)

```json wrap icon="code" Example config with Anthropic provider
{
  "$schema": "https://raw.githubusercontent.com/sourcebot-dev/sourcebot/main/schemas/v3/index.json",
  "models": [
    {
      "provider": "anthropic",
      "model": "YOUR_MODEL_HERE",
      "displayName": "OPTIONAL_DISPLAY_NAME",
      "token": {
        "env": "ANTHROPIC_API_KEY"
      },
      "baseUrl": "OPTIONAL_BASE_URL"
    }
  ]
}
```
### Azure OpenAI

[Vercel AI SDK Azure OpenAI Docs](https://ai-sdk.dev/providers/ai-sdk-providers/azure)

```json wrap icon="code" Example config with Azure OpenAI provider
{
  "$schema": "https://raw.githubusercontent.com/sourcebot-dev/sourcebot/main/schemas/v3/index.json",
  "models": [
    {
      "provider": "azure",
      "model": "YOUR_MODEL_HERE",
      "displayName": "OPTIONAL_DISPLAY_NAME",
      "resourceName": "YOUR_RESOURCE_NAME", // defaults to the AZURE_RESOURCE_NAME env var if not set
      "apiVersion": "OPTIONAL_API_VERSION", // defaults to 'preview' if not set
      "token": {
        "env": "AZURE_API_KEY"
      },
      "baseUrl": "OPTIONAL_BASE_URL"
    }
  ]
}
```
### DeepSeek

[Vercel AI SDK DeepSeek Docs](https://ai-sdk.dev/providers/ai-sdk-providers/deepseek)

```json wrap icon="code" Example config with DeepSeek provider
{
  "$schema": "https://raw.githubusercontent.com/sourcebot-dev/sourcebot/main/schemas/v3/index.json",
  "models": [
    {
      "provider": "deepseek",
      "model": "YOUR_MODEL_HERE",
      "displayName": "OPTIONAL_DISPLAY_NAME",
      "token": {
        "env": "DEEPSEEK_API_KEY"
      },
      "baseUrl": "OPTIONAL_BASE_URL"
    }
  ]
}
```
### Google Generative AI

[Vercel AI SDK Google Generative AI Docs](https://ai-sdk.dev/providers/ai-sdk-providers/google-generative-ai)

```json wrap icon="code" Example config with Google Generative AI provider
{
  "$schema": "https://raw.githubusercontent.com/sourcebot-dev/sourcebot/main/schemas/v3/index.json",
  "models": [
    {
      "provider": "google-generative-ai",
      "model": "YOUR_MODEL_HERE",
      "displayName": "OPTIONAL_DISPLAY_NAME",
      "token": {
        "env": "GOOGLE_GENERATIVE_AI_API_KEY"
      },
      "baseUrl": "OPTIONAL_BASE_URL"
    }
  ]
}
```
### Google Vertex

<Note>If you're using an Anthropic model on Google Vertex, you must define a [Google Vertex Anthropic](#google-vertex-anthropic) provider instead.</Note>
<Note>The `credentials` parameter here expects a **path** to a [credentials](https://console.cloud.google.com/apis/credentials) file. This file **must be in a volume mounted by Sourcebot** for it to be readable.</Note>

[Vercel AI SDK Google Vertex AI Docs](https://ai-sdk.dev/providers/ai-sdk-providers/google-vertex)

```json wrap icon="code" Example config with Google Vertex provider
{
  "$schema": "https://raw.githubusercontent.com/sourcebot-dev/sourcebot/main/schemas/v3/index.json",
  "models": [
    {
      "provider": "google-vertex",
      "model": "YOUR_MODEL_HERE",
      "displayName": "OPTIONAL_DISPLAY_NAME",
      "project": "YOUR_PROJECT_ID", // defaults to the GOOGLE_VERTEX_PROJECT env var if not set
      "region": "YOUR_REGION_HERE", // defaults to the GOOGLE_VERTEX_REGION env var if not set
      "credentials": {
        "env": "GOOGLE_APPLICATION_CREDENTIALS"
      },
      "baseUrl": "OPTIONAL_BASE_URL"
    }
  ]
}
```
### Google Vertex Anthropic

<Note>The `credentials` parameter here expects a **path** to a [credentials](https://console.cloud.google.com/apis/credentials) file. This file **must be in a volume mounted by Sourcebot** for it to be readable.</Note>

[Vercel AI SDK Google Vertex Anthropic Docs](https://ai-sdk.dev/providers/ai-sdk-providers/google-vertex#google-vertex-anthropic-provider-usage)

```json wrap icon="code" Example config with Google Vertex Anthropic provider
{
  "$schema": "https://raw.githubusercontent.com/sourcebot-dev/sourcebot/main/schemas/v3/index.json",
  "models": [
    {
      "provider": "google-vertex-anthropic",
      "model": "YOUR_MODEL_HERE",
      "displayName": "OPTIONAL_DISPLAY_NAME",
      "project": "YOUR_PROJECT_ID", // defaults to the GOOGLE_VERTEX_PROJECT env var if not set
      "region": "YOUR_REGION_HERE", // defaults to the GOOGLE_VERTEX_REGION env var if not set
      "credentials": {
        "env": "GOOGLE_APPLICATION_CREDENTIALS"
      },
      "baseUrl": "OPTIONAL_BASE_URL"
    }
  ]
}
```
### Mistral

[Vercel AI SDK Mistral Docs](https://ai-sdk.dev/providers/ai-sdk-providers/mistral)

```json wrap icon="code" Example config with Mistral provider
{
  "$schema": "https://raw.githubusercontent.com/sourcebot-dev/sourcebot/main/schemas/v3/index.json",
  "models": [
    {
      "provider": "mistral",
      "model": "YOUR_MODEL_HERE",
      "displayName": "OPTIONAL_DISPLAY_NAME",
      "token": {
        "env": "MISTRAL_API_KEY"
      },
      "baseUrl": "OPTIONAL_BASE_URL"
    }
  ]
}
```
### OpenAI

[Vercel AI SDK OpenAI Docs](https://ai-sdk.dev/providers/ai-sdk-providers/openai)

```json wrap icon="code" Example config with OpenAI provider
{
  "$schema": "https://raw.githubusercontent.com/sourcebot-dev/sourcebot/main/schemas/v3/index.json",
  "models": [
    {
      "provider": "openai",
      "model": "YOUR_MODEL_HERE",
      "displayName": "OPTIONAL_DISPLAY_NAME",
      "token": {
        "env": "OPENAI_API_KEY"
      },
      "baseUrl": "OPTIONAL_BASE_URL",
      "reasoningEffort": "OPTIONAL_REASONING_EFFORT" // defaults to "medium"
    }
  ]
}
```
### OpenAI Compatible

[Vercel AI SDK OpenAI Compatible Docs](https://ai-sdk.dev/providers/openai-compatible-providers)

The OpenAI Compatible provider allows you to use any model that is compatible with the OpenAI [Chat Completions API](https://github.com/ollama/ollama/blob/main/docs/openai.md). This includes self-hosted tools like [Ollama](https://ollama.ai/) and [llama.cpp](https://github.com/ggerganov/llama.cpp).

```json wrap icon="code" Example config with OpenAI Compatible provider
{
  "$schema": "https://raw.githubusercontent.com/sourcebot-dev/sourcebot/main/schemas/v3/index.json",
  "models": [
    {
      "provider": "openai-compatible",
      "baseUrl": "BASE_URL_HERE",
      "model": "YOUR_MODEL_HERE",
      "displayName": "OPTIONAL_DISPLAY_NAME",
      "token": {
        "env": "OPTIONAL_API_KEY"
      },
      // Optional query parameters can be passed in the request URL as:
      "queryParams": {
        // raw string values
        "optional-query-param": "foo",
        // or as environment variables
        "optional-query-param-secret": {
          "env": "MY_SECRET_ENV_VAR"
        }
      }
    }
  ]
}
```
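For instance, a config pointing Sourcebot at a locally hosted Ollama server might look like the sketch below. The `baseUrl` and model name are illustrative assumptions; use whatever host, port, and model your Ollama instance actually serves, and if your server requires an API key, add a `token` entry as in the example above.

```json wrap icon="code" Example config pointing at a local Ollama server
{
  "$schema": "https://raw.githubusercontent.com/sourcebot-dev/sourcebot/main/schemas/v3/index.json",
  "models": [
    {
      "provider": "openai-compatible",
      // Assumes Ollama is listening on its default port (11434) and is
      // reachable from the Sourcebot container; adjust for your deployment.
      "baseUrl": "http://localhost:11434/v1",
      "model": "llama3.1",
      "displayName": "Llama 3.1 (Ollama)"
    }
  ]
}
```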
<Accordion title="Troubleshooting">
- When using [llama.cpp](https://github.com/ggml-org/llama.cpp), if you hit "Failed after 3 attempts. Last error: tools param requires --jinja flag", add the `--jinja` flag to your `llama-server` command.
</Accordion>
### OpenRouter

[Vercel AI SDK OpenRouter Docs](https://ai-sdk.dev/providers/community-providers/openrouter)

```json wrap icon="code" Example config with OpenRouter provider
{
  "$schema": "https://raw.githubusercontent.com/sourcebot-dev/sourcebot/main/schemas/v3/index.json",
  "models": [
    {
      "provider": "openrouter",
      "model": "YOUR_MODEL_HERE",
      "displayName": "OPTIONAL_DISPLAY_NAME",
      "token": {
        "env": "OPENROUTER_API_KEY"
      },
      "baseUrl": "OPTIONAL_BASE_URL"
    }
  ]
}
```
### xAI

[Vercel AI SDK xAI Docs](https://ai-sdk.dev/providers/ai-sdk-providers/xai)

```json wrap icon="code" Example config with xAI provider
{
  "$schema": "https://raw.githubusercontent.com/sourcebot-dev/sourcebot/main/schemas/v3/index.json",
  "models": [
    {
      "provider": "xai",
      "model": "YOUR_MODEL_HERE",
      "displayName": "OPTIONAL_DISPLAY_NAME",
      "token": {
        "env": "XAI_API_KEY"
      },
      "baseUrl": "OPTIONAL_BASE_URL"
    }
  ]
}
```
# Custom headers

You can pass custom headers to the language model provider by using the `headers` parameter. Header values can be either a string or an environment variable. Headers are supported for all providers.

```json wrap icon="code" Example config with custom headers
{
  "$schema": "https://raw.githubusercontent.com/sourcebot-dev/sourcebot/main/schemas/v3/index.json",
  "models": [
    {
      // ... provider, model, displayName, etc...

      // Key-value pairs of headers
      "headers": {
        // Header values can be passed as an environment variable...
        "my-secret-header": {
          "env": "MY_SECRET_HEADER_ENV_VAR"
        },

        // ... or directly as a string.
        "my-non-secret-header": "plaintextvalue"
      }
    }
  ]
}
```
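Putting this together, a hypothetical Anthropic entry that forwards an extra header might look like the following (the header name and its environment variable are placeholders for illustration):

```json wrap icon="code" Example Anthropic provider with a custom header
{
  "$schema": "https://raw.githubusercontent.com/sourcebot-dev/sourcebot/main/schemas/v3/index.json",
  "models": [
    {
      "provider": "anthropic",
      "model": "YOUR_MODEL_HERE",
      "displayName": "OPTIONAL_DISPLAY_NAME",
      "token": {
        "env": "ANTHROPIC_API_KEY"
      },
      "headers": {
        // Placeholder header, e.g. for an API gateway sitting in front of the provider.
        "x-my-gateway-token": {
          "env": "MY_GATEWAY_TOKEN_ENV_VAR"
        }
      }
    }
  ]
}
```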
# Schema reference

<Accordion title="Reference">
[schemas/v3/languageModel.json](https://github.com/sourcebot-dev/sourcebot/blob/main/schemas/v3/languageModel.json)

<LanguageModelSchema />
</Accordion>