Skip to content

Commit 980cb9b

Browse files
committed
feat: add Ollama Turbo support with API key authentication
- Add ollamaApiKey field to provider settings schema
- Update Ollama providers to use Bearer token when API key is provided
- Add API key input field to Ollama settings UI with description
- Pass API key to all Ollama API calls for authentication
- Update translation strings for new API key field

This enables users to use Ollama Turbo and other authenticated Ollama services by providing an API key that will be sent as a Bearer token in the Authorization header.

Fixes #7147
1 parent 2a974e8 commit 980cb9b

File tree

9 files changed

+47
-11
lines changed

9 files changed

+47
-11
lines changed

packages/types/src/provider-settings.ts

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -165,6 +165,7 @@ const openAiSchema = baseProviderSettingsSchema.extend({
165165
const ollamaSchema = baseProviderSettingsSchema.extend({
166166
ollamaModelId: z.string().optional(),
167167
ollamaBaseUrl: z.string().optional(),
168+
ollamaApiKey: z.string().optional(),
168169
})
169170

170171
const vsCodeLmSchema = baseProviderSettingsSchema.extend({

src/api/providers/fetchers/modelCache.ts

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -73,7 +73,7 @@ export const getModels = async (options: GetModelsOptions): Promise<ModelRecord>
7373
models = await getLiteLLMModels(options.apiKey, options.baseUrl)
7474
break
7575
case "ollama":
76-
models = await getOllamaModels(options.baseUrl)
76+
models = await getOllamaModels(options.baseUrl, options.apiKey)
7777
break
7878
case "lmstudio":
7979
models = await getLMStudioModels(options.baseUrl)

src/api/providers/fetchers/ollama.ts

Lines changed: 17 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -54,7 +54,10 @@ export const parseOllamaModel = (rawModel: OllamaModelInfoResponse): ModelInfo =
5454
return modelInfo
5555
}
5656

57-
export async function getOllamaModels(baseUrl = "http://localhost:11434"): Promise<Record<string, ModelInfo>> {
57+
export async function getOllamaModels(
58+
baseUrl = "http://localhost:11434",
59+
apiKey?: string,
60+
): Promise<Record<string, ModelInfo>> {
5861
const models: Record<string, ModelInfo> = {}
5962

6063
// clearing the input can leave an empty string; use the default in that case
@@ -65,17 +68,26 @@ export async function getOllamaModels(baseUrl = "http://localhost:11434"): Promi
6568
return models
6669
}
6770

68-
const response = await axios.get<OllamaModelsResponse>(`${baseUrl}/api/tags`)
71+
const headers: Record<string, string> = {}
72+
if (apiKey) {
73+
headers["Authorization"] = `Bearer ${apiKey}`
74+
}
75+
76+
const response = await axios.get<OllamaModelsResponse>(`${baseUrl}/api/tags`, { headers })
6977
const parsedResponse = OllamaModelsResponseSchema.safeParse(response.data)
7078
let modelInfoPromises = []
7179

7280
if (parsedResponse.success) {
7381
for (const ollamaModel of parsedResponse.data.models) {
7482
modelInfoPromises.push(
7583
axios
76-
.post<OllamaModelInfoResponse>(`${baseUrl}/api/show`, {
77-
model: ollamaModel.model,
78-
})
84+
.post<OllamaModelInfoResponse>(
85+
`${baseUrl}/api/show`,
86+
{
87+
model: ollamaModel.model,
88+
},
89+
{ headers },
90+
)
7991
.then((ollamaModelInfo) => {
8092
models[ollamaModel.name] = parseOllamaModel(ollamaModelInfo.data)
8193
}),

src/api/providers/native-ollama.ts

Lines changed: 12 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -140,10 +140,19 @@ export class NativeOllamaHandler extends BaseProvider implements SingleCompletio
140140
private ensureClient(): Ollama {
141141
if (!this.client) {
142142
try {
143-
this.client = new Ollama({
143+
const clientOptions: any = {
144144
host: this.options.ollamaBaseUrl || "http://localhost:11434",
145145
// Note: The ollama npm package handles timeouts internally
146-
})
146+
}
147+
148+
// Add authorization header if API key is provided
149+
if (this.options.ollamaApiKey) {
150+
clientOptions.headers = {
151+
Authorization: `Bearer ${this.options.ollamaApiKey}`,
152+
}
153+
}
154+
155+
this.client = new Ollama(clientOptions)
147156
} catch (error: any) {
148157
throw new Error(`Error creating Ollama client: ${error.message}`)
149158
}
@@ -247,7 +256,7 @@ export class NativeOllamaHandler extends BaseProvider implements SingleCompletio
247256
}
248257

249258
async fetchModel() {
250-
this.models = await getOllamaModels(this.options.ollamaBaseUrl)
259+
this.models = await getOllamaModels(this.options.ollamaBaseUrl, this.options.ollamaApiKey)
251260
return this.getModel()
252261
}
253262

src/api/providers/ollama.ts

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -27,7 +27,7 @@ export class OllamaHandler extends BaseProvider implements SingleCompletionHandl
2727

2828
this.client = new OpenAI({
2929
baseURL: (this.options.ollamaBaseUrl || "http://localhost:11434") + "/v1",
30-
apiKey: "ollama",
30+
apiKey: this.options.ollamaApiKey || "ollama",
3131
timeout: getApiRequestTimeout(),
3232
})
3333
}

src/core/webview/webviewMessageHandler.ts

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -638,6 +638,7 @@ export const webviewMessageHandler = async (
638638
const ollamaModels = await getModels({
639639
provider: "ollama",
640640
baseUrl: ollamaApiConfig.ollamaBaseUrl,
641+
apiKey: ollamaApiConfig.ollamaApiKey,
641642
})
642643

643644
if (Object.keys(ollamaModels).length > 0) {

src/shared/api.ts

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -138,6 +138,6 @@ export type GetModelsOptions =
138138
| { provider: "requesty"; apiKey?: string }
139139
| { provider: "unbound"; apiKey?: string }
140140
| { provider: "litellm"; apiKey: string; baseUrl: string }
141-
| { provider: "ollama"; baseUrl?: string }
141+
| { provider: "ollama"; baseUrl?: string; apiKey?: string }
142142
| { provider: "lmstudio"; baseUrl?: string }
143143
| { provider: "io-intelligence"; apiKey: string }

webview-ui/src/components/settings/providers/Ollama.tsx

Lines changed: 11 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -86,6 +86,17 @@ export const Ollama = ({ apiConfiguration, setApiConfigurationField }: OllamaPro
8686
className="w-full">
8787
<label className="block font-medium mb-1">{t("settings:providers.ollama.baseUrl")}</label>
8888
</VSCodeTextField>
89+
<VSCodeTextField
90+
value={apiConfiguration?.ollamaApiKey || ""}
91+
type="password"
92+
onInput={handleInputChange("ollamaApiKey")}
93+
placeholder={t("settings:placeholders.apiKey")}
94+
className="w-full">
95+
<label className="block font-medium mb-1">{t("settings:providers.ollama.apiKey")}</label>
96+
</VSCodeTextField>
97+
<div className="text-sm text-vscode-descriptionForeground -mt-2">
98+
{t("settings:providers.ollama.apiKeyDescription")}
99+
</div>
89100
<VSCodeTextField
90101
value={apiConfiguration?.ollamaModelId || ""}
91102
onInput={handleInputChange("ollamaModelId")}

webview-ui/src/i18n/locales/en/settings.json

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -364,6 +364,8 @@
364364
},
365365
"ollama": {
366366
"baseUrl": "Base URL (optional)",
367+
"apiKey": "Ollama API Key (optional)",
368+
"apiKeyDescription": "Optional API key for authenticated Ollama instances or cloud services. Leave empty for local installations.",
367369
"modelId": "Model ID",
368370
"description": "Ollama allows you to run models locally on your computer. For instructions on how to get started, see their quickstart guide.",
369371
"warning": "Note: Roo Code uses complex prompts and works best with Claude models. Less capable models may not work as expected."

0 commit comments

Comments (0)