
Commit 94cd26c

Merge pull request #9 from RooVetGit/merge/latestCline
2 parents: 40abb7c + fb22258

File tree: 17 files changed (+226, -12 lines)


.github/ISSUE_TEMPLATE/bug_report.yml

Lines changed: 1 addition & 0 deletions

```diff
@@ -15,6 +15,7 @@ body:
         - AWS Bedrock
         - OpenAI
         - OpenAI Compatible
+        - LM Studio
         - Ollama
     validations:
       required: true
```

CHANGELOG.md

Lines changed: 8 additions & 0 deletions

```diff
@@ -1,5 +1,13 @@
 # Change Log
 
+## [2.1.6]
+
+- Add LM Studio as an API provider option (make sure to start the LM Studio server to use it with the extension!)
+
+## [2.1.5]
+
+- Add support for prompt caching for new Claude model IDs on OpenRouter (e.g. `anthropic/claude-3.5-sonnet-20240620`)
+
 ## [2.1.4]
 
 - AWS Bedrock fixes (add missing regions, support for cross-region inference, and older Sonnet model for regions where new model is not available)
```
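The 2.1.6 entry assumes a running LM Studio server. A quick way to confirm the server is reachable before selecting the provider, sketched in TypeScript (the default port matches the handler added in this commit; any HTTP client would do):

```ts
// Sanity check: LM Studio exposes an OpenAI-compatible API, so a running
// server answers GET /v1/models with the IDs of the loaded models.
const response = await fetch("http://localhost:1234/v1/models")
const body: { data?: { id: string }[] } = await response.json()
console.log(body.data?.map((model) => model.id) ?? [])
```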

README.md

Lines changed: 2 additions & 2 deletions

````diff
@@ -78,7 +78,7 @@ Thanks to [Claude 3.5 Sonnet's agentic coding capabilities](https://www-cdn.ant
 
 ### Use any API and Model
 
-Cline supports API providers like OpenRouter, Anthropic, OpenAI, Google Gemini, AWS Bedrock, Azure, and GCP Vertex. You can also configure any OpenAI compatible API, or use a local model through Ollama. If you're using OpenRouter, the extension fetches their latest model list, allowing you to use the newest models as soon as they're available.
+Cline supports API providers like OpenRouter, Anthropic, OpenAI, Google Gemini, AWS Bedrock, Azure, and GCP Vertex. You can also configure any OpenAI compatible API, or use a local model through LM Studio/Ollama. If you're using OpenRouter, the extension fetches their latest model list, allowing you to use the newest models as soon as they're available.
 
 The extension also keeps track of total tokens and API usage cost for the entire task loop and individual requests, keeping you informed of spend every step of the way.
 
@@ -138,7 +138,7 @@ To contribute to the project, start by exploring [open issues](https://github.co
 <details>
 <summary>Local Development Instructions</summary>
 
-1. Clone the repository:
+1. Clone the repository _(Requires [git-lfs](https://git-lfs.com/))_:
    ```bash
    git clone https://github.com/cline/cline.git
    ```
````

bin/roo-cline-2.0.1.vsix

23.8 MB (binary file not shown)

package-lock.json

Lines changed: 2 additions & 2 deletions (generated file; diff not rendered)

package.json

Lines changed: 1 addition & 1 deletion

```diff
@@ -2,7 +2,7 @@
 	"name": "roo-cline",
 	"displayName": "Roo Cline",
 	"description": "Autonomous coding agent right in your IDE, capable of creating/editing files, running commands, using the browser, and more with your permission every step of the way.",
-	"version": "2.0.0",
+	"version": "2.0.1",
 	"icon": "assets/icons/icon.png",
 	"galleryBanner": {
 		"color": "#617A91",
```

src/api/index.ts

Lines changed: 3 additions & 0 deletions

```diff
@@ -6,6 +6,7 @@ import { OpenRouterHandler } from "./providers/openrouter"
 import { VertexHandler } from "./providers/vertex"
 import { OpenAiHandler } from "./providers/openai"
 import { OllamaHandler } from "./providers/ollama"
+import { LmStudioHandler } from "./providers/lmstudio"
 import { GeminiHandler } from "./providers/gemini"
 import { OpenAiNativeHandler } from "./providers/openai-native"
 import { ApiStream } from "./transform/stream"
@@ -30,6 +31,8 @@ export function buildApiHandler(configuration: ApiConfiguration): ApiHandler {
 			return new OpenAiHandler(options)
 		case "ollama":
 			return new OllamaHandler(options)
+		case "lmstudio":
+			return new LmStudioHandler(options)
 		case "gemini":
 			return new GeminiHandler(options)
 		case "openai-native":
```
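With the factory case in place, selecting LM Studio becomes a configuration change. A minimal sketch (the `lmStudioBaseUrl`/`lmStudioModelId` field names come from this diff; treat `apiProvider` and the exact `ApiConfiguration` shape as assumptions, since the full type is not shown here):

```ts
import { buildApiHandler } from "./src/api"

// Hypothetical configuration object; only the LM Studio-related fields
// are taken from this diff.
const handler = buildApiHandler({
	apiProvider: "lmstudio",
	lmStudioBaseUrl: "http://localhost:1234", // the handler's default if omitted
	lmStudioModelId: "my-loaded-model", // placeholder: an ID reported by /v1/models
})
```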

src/api/providers/lmstudio.ts

Lines changed: 56 additions & 0 deletions (new file)

```ts
import { Anthropic } from "@anthropic-ai/sdk"
import OpenAI from "openai"
import { ApiHandler } from "../"
import { ApiHandlerOptions, ModelInfo, openAiModelInfoSaneDefaults } from "../../shared/api"
import { convertToOpenAiMessages } from "../transform/openai-format"
import { ApiStream } from "../transform/stream"

export class LmStudioHandler implements ApiHandler {
	private options: ApiHandlerOptions
	private client: OpenAI

	constructor(options: ApiHandlerOptions) {
		this.options = options
		this.client = new OpenAI({
			baseURL: (this.options.lmStudioBaseUrl || "http://localhost:1234") + "/v1",
			apiKey: "noop",
		})
	}

	async *createMessage(systemPrompt: string, messages: Anthropic.Messages.MessageParam[]): ApiStream {
		const openAiMessages: OpenAI.Chat.ChatCompletionMessageParam[] = [
			{ role: "system", content: systemPrompt },
			...convertToOpenAiMessages(messages),
		]

		try {
			const stream = await this.client.chat.completions.create({
				model: this.getModel().id,
				messages: openAiMessages,
				temperature: 0,
				stream: true,
			})
			for await (const chunk of stream) {
				const delta = chunk.choices[0]?.delta
				if (delta?.content) {
					yield {
						type: "text",
						text: delta.content,
					}
				}
			}
		} catch (error) {
			// LM Studio doesn't return an error code/body for now
			throw new Error(
				"Please check the LM Studio developer logs to debug what went wrong. You may need to load the model with a larger context length to work with Cline's prompts."
			)
		}
	}

	getModel(): { id: string; info: ModelInfo } {
		return {
			id: this.options.lmStudioModelId || "",
			info: openAiModelInfoSaneDefaults,
		}
	}
}
```
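For orientation, this is how the new handler's streaming interface might be exercised on its own, as a sketch (the option fields and chunk shape come from the file above; the model ID is a placeholder, and an LM Studio server is assumed to be running with a model loaded):

```ts
import { LmStudioHandler } from "./src/api/providers/lmstudio"

async function demo() {
	const handler = new LmStudioHandler({
		lmStudioBaseUrl: "http://localhost:1234",
		lmStudioModelId: "my-loaded-model", // placeholder
	})

	// createMessage is an async generator that yields text chunks as the
	// local server streams them back.
	for await (const chunk of handler.createMessage("You are a helpful assistant.", [
		{ role: "user", content: "Say hello." },
	])) {
		if (chunk.type === "text") {
			process.stdout.write(chunk.text)
		}
	}
}

demo()
```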

src/api/providers/openrouter.ts

Lines changed: 16 additions & 0 deletions

```diff
@@ -31,9 +31,19 @@ export class OpenRouterHandler implements ApiHandler {
 		]
 
 		// prompt caching: https://openrouter.ai/docs/prompt-caching
+		// this is specifically for claude models (some models may 'support prompt caching' automatically without this)
 		switch (this.getModel().id) {
+			case "anthropic/claude-3.5-sonnet":
 			case "anthropic/claude-3.5-sonnet:beta":
+			case "anthropic/claude-3.5-sonnet-20240620":
+			case "anthropic/claude-3.5-sonnet-20240620:beta":
+			case "anthropic/claude-3-5-haiku":
+			case "anthropic/claude-3-5-haiku:beta":
+			case "anthropic/claude-3-5-haiku-20241022":
+			case "anthropic/claude-3-5-haiku-20241022:beta":
+			case "anthropic/claude-3-haiku":
 			case "anthropic/claude-3-haiku:beta":
+			case "anthropic/claude-3-opus":
 			case "anthropic/claude-3-opus:beta":
 				openAiMessages[0] = {
 					role: "system",
@@ -76,6 +86,12 @@
 		switch (this.getModel().id) {
 			case "anthropic/claude-3.5-sonnet":
 			case "anthropic/claude-3.5-sonnet:beta":
+			case "anthropic/claude-3.5-sonnet-20240620":
+			case "anthropic/claude-3.5-sonnet-20240620:beta":
+			case "anthropic/claude-3-5-haiku":
+			case "anthropic/claude-3-5-haiku:beta":
+			case "anthropic/claude-3-5-haiku-20241022":
+			case "anthropic/claude-3-5-haiku-20241022:beta":
 				maxTokens = 8_192
 				break
 		}
```
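The body of the caching branch is truncated in this view; per the OpenRouter documentation linked in the comment, caching for Claude models works by attaching `cache_control` breakpoints to message content blocks. A sketch of that message shape (not the exact transformation in this commit; `systemPrompt` is assumed in scope in the real handler):

```ts
// Shape from OpenRouter's prompt-caching docs: the system message content
// becomes an array of blocks, with cache_control marking the breakpoint.
const systemPrompt = "..." // assumed in scope in the real handler
const cachedSystemMessage = {
	role: "system" as const,
	content: [
		{
			type: "text" as const,
			text: systemPrompt,
			cache_control: { type: "ephemeral" as const },
		},
	],
}
```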

src/core/webview/ClineProvider.ts

Lines changed: 35 additions & 0 deletions

```diff
@@ -53,6 +53,8 @@ type GlobalStateKey =
 	| "openAiModelId"
 	| "ollamaModelId"
 	| "ollamaBaseUrl"
+	| "lmStudioModelId"
+	| "lmStudioBaseUrl"
 	| "anthropicBaseUrl"
 	| "azureApiVersion"
 	| "openRouterModelId"
@@ -363,6 +365,8 @@ export class ClineProvider implements vscode.WebviewViewProvider {
 			openAiModelId,
 			ollamaModelId,
 			ollamaBaseUrl,
+			lmStudioModelId,
+			lmStudioBaseUrl,
 			anthropicBaseUrl,
 			geminiApiKey,
 			openAiNativeApiKey,
@@ -386,6 +390,8 @@
 		await this.updateGlobalState("openAiModelId", openAiModelId)
 		await this.updateGlobalState("ollamaModelId", ollamaModelId)
 		await this.updateGlobalState("ollamaBaseUrl", ollamaBaseUrl)
+		await this.updateGlobalState("lmStudioModelId", lmStudioModelId)
+		await this.updateGlobalState("lmStudioBaseUrl", lmStudioBaseUrl)
 		await this.updateGlobalState("anthropicBaseUrl", anthropicBaseUrl)
 		await this.storeSecret("geminiApiKey", geminiApiKey)
 		await this.storeSecret("openAiNativeApiKey", openAiNativeApiKey)
@@ -460,6 +466,10 @@
 				const ollamaModels = await this.getOllamaModels(message.text)
 				this.postMessageToWebview({ type: "ollamaModels", ollamaModels })
 				break
+			case "requestLmStudioModels":
+				const lmStudioModels = await this.getLmStudioModels(message.text)
+				this.postMessageToWebview({ type: "lmStudioModels", lmStudioModels })
+				break
 			case "refreshOpenRouterModels":
 				await this.refreshOpenRouterModels()
 				break
@@ -527,6 +537,25 @@
 		}
 	}
 
+	// LM Studio
+
+	async getLmStudioModels(baseUrl?: string) {
+		try {
+			if (!baseUrl) {
+				baseUrl = "http://localhost:1234"
+			}
+			if (!URL.canParse(baseUrl)) {
+				return []
+			}
+			const response = await axios.get(`${baseUrl}/v1/models`)
+			const modelsArray = response.data?.data?.map((model: any) => model.id) || []
+			const models = [...new Set<string>(modelsArray)]
+			return models
+		} catch (error) {
+			return []
+		}
+	}
+
 	// OpenRouter
 
 	async handleOpenRouterCallback(code: string) {
@@ -855,6 +884,8 @@
 			openAiModelId,
 			ollamaModelId,
 			ollamaBaseUrl,
+			lmStudioModelId,
+			lmStudioBaseUrl,
 			anthropicBaseUrl,
 			geminiApiKey,
 			openAiNativeApiKey,
@@ -884,6 +915,8 @@
 			this.getGlobalState("openAiModelId") as Promise<string | undefined>,
 			this.getGlobalState("ollamaModelId") as Promise<string | undefined>,
 			this.getGlobalState("ollamaBaseUrl") as Promise<string | undefined>,
+			this.getGlobalState("lmStudioModelId") as Promise<string | undefined>,
+			this.getGlobalState("lmStudioBaseUrl") as Promise<string | undefined>,
 			this.getGlobalState("anthropicBaseUrl") as Promise<string | undefined>,
 			this.getSecret("geminiApiKey") as Promise<string | undefined>,
 			this.getSecret("openAiNativeApiKey") as Promise<string | undefined>,
@@ -930,6 +963,8 @@
 			openAiModelId,
 			ollamaModelId,
 			ollamaBaseUrl,
+			lmStudioModelId,
+			lmStudioBaseUrl,
 			anthropicBaseUrl,
 			geminiApiKey,
 			openAiNativeApiKey,
```
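The new `requestLmStudioModels` case completes a request/response round-trip with the webview. A sketch of the webview side of that exchange (the message type names and fields come from this diff; `acquireVsCodeApi` is the standard VS Code webview bridge, declared here so the snippet stands alone):

```ts
// Webview-side sketch of the model-list round-trip.
declare function acquireVsCodeApi(): { postMessage(message: unknown): void }
const vscode = acquireVsCodeApi()

// Ask the extension host to query the LM Studio server at this base URL.
vscode.postMessage({ type: "requestLmStudioModels", text: "http://localhost:1234" })

// The host replies with a "lmStudioModels" message carrying the model IDs.
window.addEventListener("message", (event: MessageEvent) => {
	const message = event.data
	if (message.type === "lmStudioModels") {
		console.log("Models reported by LM Studio:", message.lmStudioModels)
	}
})
```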
