
Commit 9d62a7b (parent 5e099e2)

feat(vscode-lm): implement VS Code Language Models provider
File tree: 14 files changed, +2473 −153 lines

docs/vscode_lm_api_docs.md

Lines changed: 1319 additions & 0 deletions
Large diffs are not rendered by default.
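
The new 1319-line reference is not rendered in this view. For orientation, the public vscode.lm API it documents is used roughly as in the sketch below; this is a minimal, illustrative example, and the vendor/family values are not taken from the commit.

```typescript
import * as vscode from "vscode"

// Minimal sketch of the VS Code Language Model API (vscode.lm).
// The selector values ("copilot", "gpt-4o") are illustrative examples.
async function askModel(prompt: string, token: vscode.CancellationToken): Promise<string> {
    // Pick a chat model by vendor/family; an empty result means no model is
    // available (e.g. Copilot not installed, or consent not yet granted).
    const [model] = await vscode.lm.selectChatModels({ vendor: "copilot", family: "gpt-4o" })
    if (!model) {
        throw new Error("No matching VS Code language model is available.")
    }

    const messages = [vscode.LanguageModelChatMessage.User(prompt)]
    const response = await model.sendRequest(messages, {}, token)

    // The reply arrives as a stream of text fragments.
    let result = ""
    for await (const fragment of response.text) {
        result += fragment
    }
    return result
}
```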

package.json

Lines changed: 18 additions & 1 deletion
@@ -42,7 +42,10 @@
     "ai",
     "llama"
   ],
-  "activationEvents": [],
+  "activationEvents": [
+    "onLanguage",
+    "onStartupFinished"
+  ],
   "main": "./dist/extension.js",
   "contributes": {
     "viewsContainers": {
@@ -141,6 +144,20 @@
           "git show"
         ],
         "description": "Commands that can be auto-executed when 'Always approve execute operations' is enabled"
+      },
+      "roo-cline.vsCodeLmModelSelector": {
+        "type": "object",
+        "properties": {
+          "vendor": {
+            "type": "string",
+            "description": "The vendor of the language model (e.g. copilot)"
+          },
+          "family": {
+            "type": "string",
+            "description": "The family of the language model (e.g. gpt-4)"
+          }
+        },
+        "description": "Settings for VSCode Language Model API"
       }
     }
   }
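
The new vsCodeLmModelSelector setting mirrors the shape of VS Code's LanguageModelChatSelector, so it can be handed straight to the model lookup. A minimal sketch of how such a setting could be consumed (not code from this commit):

```typescript
import * as vscode from "vscode"

// Sketch (not from this commit): read the roo-cline.vsCodeLmModelSelector
// setting and use it to pick a chat model. Its { vendor, family } shape
// matches vscode.LanguageModelChatSelector, so it passes through as-is.
async function getConfiguredModel(): Promise<vscode.LanguageModelChat | undefined> {
    const selector =
        vscode.workspace
            .getConfiguration("roo-cline")
            .get<vscode.LanguageModelChatSelector>("vsCodeLmModelSelector") ?? {}

    // e.g. { "vendor": "copilot", "family": "gpt-4" } narrows the candidate list.
    const models = await vscode.lm.selectChatModels(selector)
    return models[0]
}
```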

src/api/index.ts

Lines changed: 32 additions & 28 deletions
Aside from the new import and the added "vscode-lm" case, this hunk appears to only re-indent existing lines.

@@ -10,41 +10,45 @@ import { LmStudioHandler } from "./providers/lmstudio"
 import { GeminiHandler } from "./providers/gemini"
 import { OpenAiNativeHandler } from "./providers/openai-native"
 import { DeepSeekHandler } from "./providers/deepseek"
+import { VsCodeLmHandler } from "./providers/vscode-lm"
 import { ApiStream } from "./transform/stream"
 
 export interface SingleCompletionHandler {
-    completePrompt(prompt: string): Promise<string>
+    completePrompt(prompt: string): Promise<string>
 }
 
 export interface ApiHandler {
-    createMessage(systemPrompt: string, messages: Anthropic.Messages.MessageParam[]): ApiStream
-    getModel(): { id: string; info: ModelInfo }
+    createMessage(systemPrompt: string, messages: Anthropic.Messages.MessageParam[]): ApiStream
+    getModel(): { id: string; info: ModelInfo }
 }
 
 export function buildApiHandler(configuration: ApiConfiguration): ApiHandler {
-    const { apiProvider, ...options } = configuration
-    switch (apiProvider) {
-        case "anthropic":
-            return new AnthropicHandler(options)
-        case "openrouter":
-            return new OpenRouterHandler(options)
-        case "bedrock":
-            return new AwsBedrockHandler(options)
-        case "vertex":
-            return new VertexHandler(options)
-        case "openai":
-            return new OpenAiHandler(options)
-        case "ollama":
-            return new OllamaHandler(options)
-        case "lmstudio":
-            return new LmStudioHandler(options)
-        case "gemini":
-            return new GeminiHandler(options)
-        case "openai-native":
-            return new OpenAiNativeHandler(options)
-        case "deepseek":
-            return new DeepSeekHandler(options)
-        default:
-            return new AnthropicHandler(options)
-    }
+    const { apiProvider, ...options } = configuration
+
+    switch (apiProvider) {
+        case "anthropic":
+            return new AnthropicHandler(options)
+        case "openrouter":
+            return new OpenRouterHandler(options)
+        case "bedrock":
+            return new AwsBedrockHandler(options)
+        case "vertex":
+            return new VertexHandler(options)
+        case "openai":
+            return new OpenAiHandler(options)
+        case "ollama":
+            return new OllamaHandler(options)
+        case "lmstudio":
+            return new LmStudioHandler(options)
+        case "gemini":
+            return new GeminiHandler(options)
+        case "openai-native":
+            return new OpenAiNativeHandler(options)
+        case "deepseek":
+            return new DeepSeekHandler(options)
+        case "vscode-lm":
+            return new VsCodeLmHandler(options)
+        default:
+            return new AnthropicHandler(options)
+    }
 }
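
With the new case in place, callers reach the provider through the ordinary configuration object. A hypothetical usage sketch follows; the vsCodeLmModelSelector field is assumed to exist on ApiConfiguration (mirroring the package.json setting above) and is not shown in this diff.

```typescript
import { buildApiHandler } from "./api"

// Hypothetical usage of the new provider. apiProvider: "vscode-lm" matches the
// switch above; vsCodeLmModelSelector is an assumed configuration field.
async function demo(): Promise<void> {
    const handler = buildApiHandler({
        apiProvider: "vscode-lm",
        vsCodeLmModelSelector: { vendor: "copilot", family: "gpt-4" },
    })

    // createMessage streams the reply as ApiStream chunks.
    const stream = handler.createMessage("You are a concise assistant.", [
        { role: "user", content: "Summarize the latest change." },
    ])

    for await (const chunk of stream) {
        console.log(chunk)
    }
}
```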
