Skip to content

Commit 180fbd5

Browse files
authored
feat: add LiteLLM API provider support (RooCodeInc#1618)
1 parent 42924c9 commit 180fbd5

File tree

5 files changed

+137
-1
lines changed

5 files changed

+137
-1
lines changed

src/api/index.ts

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -13,6 +13,7 @@ import { ApiStream } from "./transform/stream"
1313
import { DeepSeekHandler } from "./providers/deepseek"
1414
import { MistralHandler } from "./providers/mistral"
1515
import { VsCodeLmHandler } from "./providers/vscode-lm"
16+
import { LiteLlmHandler } from "./providers/litellm"
1617

1718
export interface ApiHandler {
1819
createMessage(systemPrompt: string, messages: Anthropic.Messages.MessageParam[]): ApiStream
@@ -50,6 +51,8 @@ export function buildApiHandler(configuration: ApiConfiguration): ApiHandler {
5051
return new MistralHandler(options)
5152
case "vscode-lm":
5253
return new VsCodeLmHandler(options)
54+
case "litellm":
55+
return new LiteLlmHandler(options)
5356
default:
5457
return new AnthropicHandler(options)
5558
}

src/api/providers/litellm.ts

Lines changed: 60 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,60 @@
1+
import { Anthropic } from "@anthropic-ai/sdk"
2+
import OpenAI from "openai"
3+
import { ApiHandlerOptions, liteLlmDefaultModelId, liteLlmModelInfoSaneDefaults } from "../../shared/api"
4+
import { ApiHandler } from ".."
5+
import { ApiStream } from "../transform/stream"
6+
import { convertToOpenAiMessages } from "../transform/openai-format"
7+
8+
export class LiteLlmHandler implements ApiHandler {
9+
private options: ApiHandlerOptions
10+
private client: OpenAI
11+
12+
constructor(options: ApiHandlerOptions) {
13+
this.options = options
14+
this.client = new OpenAI({
15+
baseURL: this.options.liteLlmBaseUrl || "http://localhost:4000",
16+
apiKey: "not-needed",
17+
})
18+
}
19+
20+
async *createMessage(systemPrompt: string, messages: Anthropic.Messages.MessageParam[]): ApiStream {
21+
const formattedMessages = convertToOpenAiMessages(messages)
22+
const systemMessage: OpenAI.Chat.ChatCompletionSystemMessageParam = {
23+
role: "system",
24+
content: systemPrompt,
25+
}
26+
27+
const stream = await this.client.chat.completions.create({
28+
model: this.options.liteLlmModelId || liteLlmDefaultModelId,
29+
messages: [systemMessage, ...formattedMessages],
30+
temperature: 0,
31+
stream: true,
32+
stream_options: { include_usage: true },
33+
})
34+
35+
for await (const chunk of stream) {
36+
const delta = chunk.choices[0]?.delta
37+
if (delta?.content) {
38+
yield {
39+
type: "text",
40+
text: delta.content,
41+
}
42+
}
43+
44+
if (chunk.usage) {
45+
yield {
46+
type: "usage",
47+
inputTokens: chunk.usage.prompt_tokens || 0,
48+
outputTokens: chunk.usage.completion_tokens || 0,
49+
}
50+
}
51+
}
52+
}
53+
54+
getModel() {
55+
return {
56+
id: this.options.liteLlmModelId || liteLlmDefaultModelId,
57+
info: liteLlmModelInfoSaneDefaults,
58+
}
59+
}
60+
}

src/core/webview/ClineProvider.ts

Lines changed: 18 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -76,6 +76,8 @@ type GlobalStateKey =
7676
| "previousModeApiProvider"
7777
| "previousModeModelId"
7878
| "previousModeModelInfo"
79+
| "liteLlmBaseUrl"
80+
| "liteLlmModelId"
7981

8082
export const GlobalFileNames = {
8183
apiConversationHistory: "api_conversation_history.json",
@@ -443,6 +445,8 @@ export class ClineProvider implements vscode.WebviewViewProvider {
443445
openRouterModelId,
444446
openRouterModelInfo,
445447
vsCodeLmModelSelector,
448+
liteLlmBaseUrl,
449+
liteLlmModelId,
446450
} = message.apiConfiguration
447451
await this.updateGlobalState("apiProvider", apiProvider)
448452
await this.updateGlobalState("apiModelId", apiModelId)
@@ -471,6 +475,8 @@ export class ClineProvider implements vscode.WebviewViewProvider {
471475
await this.updateGlobalState("openRouterModelId", openRouterModelId)
472476
await this.updateGlobalState("openRouterModelInfo", openRouterModelInfo)
473477
await this.updateGlobalState("vsCodeLmModelSelector", vsCodeLmModelSelector)
478+
await this.updateGlobalState("liteLlmBaseUrl", liteLlmBaseUrl)
479+
await this.updateGlobalState("liteLlmModelId", liteLlmModelId)
474480
if (this.cline) {
475481
this.cline.api = buildApiHandler(message.apiConfiguration)
476482
}
@@ -535,6 +541,9 @@ export class ClineProvider implements vscode.WebviewViewProvider {
535541
case "lmstudio":
536542
await this.updateGlobalState("previousModeModelId", apiConfiguration.lmStudioModelId)
537543
break
544+
case "litellm":
545+
await this.updateGlobalState("previousModeModelId", apiConfiguration.liteLlmModelId)
546+
break
538547
}
539548

540549
// Restore the model used in previous mode
@@ -563,6 +572,9 @@ export class ClineProvider implements vscode.WebviewViewProvider {
563572
case "lmstudio":
564573
await this.updateGlobalState("lmStudioModelId", newModelId)
565574
break
575+
case "litellm":
576+
await this.updateGlobalState("liteLlmModelId", newModelId)
577+
break
566578
}
567579

568580
if (this.cline) {
@@ -1364,6 +1376,8 @@ export class ClineProvider implements vscode.WebviewViewProvider {
13641376
browserSettings,
13651377
chatSettings,
13661378
vsCodeLmModelSelector,
1379+
liteLlmBaseUrl,
1380+
liteLlmModelId,
13671381
userInfo,
13681382
authToken,
13691383
previousModeApiProvider,
@@ -1403,6 +1417,8 @@ export class ClineProvider implements vscode.WebviewViewProvider {
14031417
this.getGlobalState("browserSettings") as Promise<BrowserSettings | undefined>,
14041418
this.getGlobalState("chatSettings") as Promise<ChatSettings | undefined>,
14051419
this.getGlobalState("vsCodeLmModelSelector") as Promise<vscode.LanguageModelChatSelector | undefined>,
1420+
this.getGlobalState("liteLlmBaseUrl") as Promise<string | undefined>,
1421+
this.getGlobalState("liteLlmModelId") as Promise<string | undefined>,
14061422
this.getGlobalState("userInfo") as Promise<UserInfo | undefined>,
14071423
this.getSecret("authToken") as Promise<string | undefined>,
14081424
this.getGlobalState("previousModeApiProvider") as Promise<ApiProvider | undefined>,
@@ -1453,6 +1469,8 @@ export class ClineProvider implements vscode.WebviewViewProvider {
14531469
openRouterModelId,
14541470
openRouterModelInfo,
14551471
vsCodeLmModelSelector,
1472+
liteLlmBaseUrl,
1473+
liteLlmModelId,
14561474
},
14571475
lastShownAnnouncementId,
14581476
customInstructions,

src/shared/api.ts

Lines changed: 16 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -11,10 +11,13 @@ export type ApiProvider =
1111
| "deepseek"
1212
| "mistral"
1313
| "vscode-lm"
14+
| "litellm"
1415

1516
export interface ApiHandlerOptions {
1617
apiModelId?: string
1718
apiKey?: string // anthropic
19+
liteLlmBaseUrl?: string
20+
liteLlmModelId?: string
1821
anthropicBaseUrl?: string
1922
openRouterApiKey?: string
2023
openRouterModelId?: string
@@ -419,3 +422,16 @@ export const mistralModels = {
419422
outputPrice: 0.9,
420423
},
421424
} as const satisfies Record<string, ModelInfo>
425+
426+
// LiteLLM
// https://docs.litellm.ai/docs/
// LiteLLM proxies arbitrary upstream providers, so model IDs are free-form strings
// rather than a fixed union of known models.
export type LiteLLMModelId = string
export const liteLlmDefaultModelId = "gpt-3.5-turbo"
// Conservative fallback metadata used when the proxied model's real
// limits/pricing are unknown to the extension.
export const liteLlmModelInfoSaneDefaults: ModelInfo = {
	maxTokens: 4096,
	contextWindow: 8192,
	supportsImages: false,
	supportsPromptCache: false,
	inputPrice: 0,
	outputPrice: 0,
}

webview-ui/src/components/settings/ApiOptions.tsx

Lines changed: 40 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -133,7 +133,7 @@ const ApiOptions = ({ showModelOptions, apiErrorMessage, modelIdErrorMessage, is
133133
VSCodeDropdown has an open bug where dynamically rendered options don't auto select the provided value prop. You can see this for yourself by comparing it with normal select/option elements, which work as expected.
134134
https://github.com/microsoft/vscode-webview-ui-toolkit/issues/433
135135
136-
In our case, when the user switches between providers, we recalculate the selectedModelId depending on the provider, the default model for that provider, and a modelId that the user may have selected. Unfortunately, the VSCodeDropdown component wouldn't select this calculated value, and would default to the first "Select a model..." option instead, which makes it seem like the model was cleared out when it wasn't.
136+
In our case, when the user switches between providers, we recalculate the selectedModelId depending on the provider, the default model for that provider, and a modelId that the user may have selected. Unfortunately, the VSCodeDropdown component wouldn't select this calculated value, and would default to the first "Select a model..." option instead, which makes it seem like the model was cleared out when it wasn't.
137137
138138
As a workaround, we create separate instances of the dropdown for each provider, and then conditionally render the one that matches the current provider.
139139
*/
@@ -187,6 +187,7 @@ const ApiOptions = ({ showModelOptions, apiErrorMessage, modelIdErrorMessage, is
187187
<VSCodeOption value="vscode-lm">VS Code LM API</VSCodeOption>
188188
<VSCodeOption value="lmstudio">LM Studio</VSCodeOption>
189189
<VSCodeOption value="ollama">Ollama</VSCodeOption>
190+
<VSCodeOption value="litellm">LiteLLM</VSCodeOption>
190191
</VSCodeDropdown>
191192
</DropdownContainer>
192193

@@ -739,6 +740,38 @@ const ApiOptions = ({ showModelOptions, apiErrorMessage, modelIdErrorMessage, is
739740
</div>
740741
)}
741742

743+
{selectedProvider === "litellm" && (
744+
<div>
745+
<VSCodeTextField
746+
value={apiConfiguration?.liteLlmBaseUrl || ""}
747+
style={{ width: "100%" }}
748+
type="url"
749+
onInput={handleInputChange("liteLlmBaseUrl")}
750+
placeholder={"Default: http://localhost:4000"}>
751+
<span style={{ fontWeight: 500 }}>Base URL (optional)</span>
752+
</VSCodeTextField>
753+
<VSCodeTextField
754+
value={apiConfiguration?.liteLlmModelId || ""}
755+
style={{ width: "100%" }}
756+
onInput={handleInputChange("liteLlmModelId")}
757+
placeholder={"e.g. gpt-4"}>
758+
<span style={{ fontWeight: 500 }}>Model ID</span>
759+
</VSCodeTextField>
760+
<p
761+
style={{
762+
fontSize: "12px",
763+
marginTop: "5px",
764+
color: "var(--vscode-descriptionForeground)",
765+
}}>
766+
LiteLLM provides a unified interface to access various LLM providers' models. See their{" "}
767+
<VSCodeLink href="https://docs.litellm.ai/docs/" style={{ display: "inline", fontSize: "inherit" }}>
768+
quickstart guide
769+
</VSCodeLink>{" "}
770+
for more information.
771+
</p>
772+
</div>
773+
)}
774+
742775
{selectedProvider === "ollama" && (
743776
<div>
744777
<VSCodeTextField
@@ -1072,6 +1105,12 @@ export function normalizeApiConfiguration(apiConfiguration?: ApiConfiguration):
10721105
supportsImages: false, // VSCode LM API currently doesn't support images
10731106
},
10741107
}
1108+
case "litellm":
1109+
return {
1110+
selectedProvider: provider,
1111+
selectedModelId: apiConfiguration?.liteLlmModelId || "",
1112+
selectedModelInfo: openAiModelInfoSaneDefaults,
1113+
}
10751114
default:
10761115
return getProviderData(anthropicModels, anthropicDefaultModelId)
10771116
}

0 commit comments

Comments
 (0)