Skip to content

Commit 58532bc

Browse files
authored
Merge pull request #1 from cannuri/update-from-upstream
Update from upstream
2 parents 9fcf69c + cff4577 commit 58532bc

File tree

23 files changed

+323
-52
lines changed

23 files changed

+323
-52
lines changed

.changeset/empty-bees-suffer.md

Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,5 @@
1+
---
2+
"roo-cline": patch
3+
---
4+
5+
Revert tool progress for now

.changeset/sixty-ants-begin.md

Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,5 @@
1+
---
2+
"roo-cline": patch
3+
---
4+
5+
v3.8.4

CHANGELOG.md

Lines changed: 12 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,17 @@
11
# Roo Code Changelog
22

3+
## [3.8.3] - 2025-03-09
4+
5+
- Fix VS Code LM API model picker truncation issue
6+
7+
## [3.8.2] - 2025-03-08
8+
9+
- Create an auto-approval toggle for subtask creation and completion (thanks @shaybc!)
10+
- Show a progress indicator when using the multi-diff editing strategy (thanks @qdaxb!)
11+
- Add o3-mini support to the OpenAI-compatible provider (thanks @yt3trees!)
12+
- Fix encoding issue where unreadable characters were sometimes getting added to the beginning of files
13+
- Fix issue where settings dropdowns were getting truncated in some cases
14+
315
## [3.8.1] - 2025-03-07
416

517
- Show the reserved output tokens in the context window visualization

jest.config.js

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -30,9 +30,10 @@ module.exports = {
3030
"^strip-ansi$": "<rootDir>/src/__mocks__/strip-ansi.js",
3131
"^default-shell$": "<rootDir>/src/__mocks__/default-shell.js",
3232
"^os-name$": "<rootDir>/src/__mocks__/os-name.js",
33+
"^strip-bom$": "<rootDir>/src/__mocks__/strip-bom.js",
3334
},
3435
transformIgnorePatterns: [
35-
"node_modules/(?!(@modelcontextprotocol|delay|p-wait-for|globby|serialize-error|strip-ansi|default-shell|os-name)/)",
36+
"node_modules/(?!(@modelcontextprotocol|delay|p-wait-for|globby|serialize-error|strip-ansi|default-shell|os-name|strip-bom)/)",
3637
],
3738
roots: ["<rootDir>/src", "<rootDir>/webview-ui/src"],
3839
modulePathIgnorePatterns: [".vscode-test"],

package-lock.json

Lines changed: 19 additions & 7 deletions
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

package.json

Lines changed: 3 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -3,7 +3,7 @@
33
"displayName": "Roo Code (prev. Roo Cline)",
44
"description": "A whole dev team of AI agents in your editor.",
55
"publisher": "RooVeterinaryInc",
6-
"version": "3.8.1",
6+
"version": "3.8.3",
77
"icon": "assets/icons/rocket.png",
88
"galleryBanner": {
99
"color": "#617A91",
@@ -265,8 +265,8 @@
265265
"@anthropic-ai/sdk": "^0.37.0",
266266
"@anthropic-ai/vertex-sdk": "^0.7.0",
267267
"@aws-sdk/client-bedrock-runtime": "^3.706.0",
268-
"@google/generative-ai": "^0.18.0",
269268
"@google-cloud/vertexai": "^1.9.3",
269+
"@google/generative-ai": "^0.18.0",
270270
"@mistralai/mistralai": "^1.3.6",
271271
"@modelcontextprotocol/sdk": "^1.0.1",
272272
"@types/clone-deep": "^4.0.4",
@@ -304,6 +304,7 @@
304304
"sound-play": "^1.1.0",
305305
"string-similarity": "^4.0.4",
306306
"strip-ansi": "^7.1.0",
307+
"strip-bom": "^5.0.0",
307308
"tmp": "^0.2.3",
308309
"tree-sitter-wasms": "^0.1.11",
309310
"turndown": "^7.2.0",

src/__mocks__/strip-bom.js

Lines changed: 13 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,13 @@
1+
// Mock implementation of strip-bom
2+
module.exports = function stripBom(string) {
3+
if (typeof string !== "string") {
4+
throw new TypeError("Expected a string")
5+
}
6+
7+
// Removes UTF-8 BOM
8+
if (string.charCodeAt(0) === 0xfeff) {
9+
return string.slice(1)
10+
}
11+
12+
return string
13+
}

src/api/providers/openai.ts

Lines changed: 68 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -66,6 +66,11 @@ export class OpenAiHandler extends BaseProvider implements SingleCompletionHandl
6666
const deepseekReasoner = modelId.includes("deepseek-reasoner")
6767
const ark = modelUrl.includes(".volces.com")
6868

69+
if (modelId.startsWith("o3-mini")) {
70+
yield* this.handleO3FamilyMessage(modelId, systemPrompt, messages)
71+
return
72+
}
73+
6974
if (this.options.openAiStreamingEnabled ?? true) {
7075
const systemMessage: OpenAI.Chat.ChatCompletionSystemMessageParam = {
7176
role: "system",
@@ -169,6 +174,69 @@ export class OpenAiHandler extends BaseProvider implements SingleCompletionHandl
169174
throw error
170175
}
171176
}
177+
178+
private async *handleO3FamilyMessage(
179+
modelId: string,
180+
systemPrompt: string,
181+
messages: Anthropic.Messages.MessageParam[],
182+
): ApiStream {
183+
if (this.options.openAiStreamingEnabled ?? true) {
184+
const stream = await this.client.chat.completions.create({
185+
model: "o3-mini",
186+
messages: [
187+
{
188+
role: "developer",
189+
content: `Formatting re-enabled\n${systemPrompt}`,
190+
},
191+
...convertToOpenAiMessages(messages),
192+
],
193+
stream: true,
194+
stream_options: { include_usage: true },
195+
reasoning_effort: this.getModel().info.reasoningEffort,
196+
})
197+
198+
yield* this.handleStreamResponse(stream)
199+
} else {
200+
const requestOptions: OpenAI.Chat.Completions.ChatCompletionCreateParamsNonStreaming = {
201+
model: modelId,
202+
messages: [
203+
{
204+
role: "developer",
205+
content: `Formatting re-enabled\n${systemPrompt}`,
206+
},
207+
...convertToOpenAiMessages(messages),
208+
],
209+
}
210+
211+
const response = await this.client.chat.completions.create(requestOptions)
212+
213+
yield {
214+
type: "text",
215+
text: response.choices[0]?.message.content || "",
216+
}
217+
yield this.processUsageMetrics(response.usage)
218+
}
219+
}
220+
221+
private async *handleStreamResponse(stream: AsyncIterable<OpenAI.Chat.Completions.ChatCompletionChunk>): ApiStream {
222+
for await (const chunk of stream) {
223+
const delta = chunk.choices[0]?.delta
224+
if (delta?.content) {
225+
yield {
226+
type: "text",
227+
text: delta.content,
228+
}
229+
}
230+
231+
if (chunk.usage) {
232+
yield {
233+
type: "usage",
234+
inputTokens: chunk.usage.prompt_tokens || 0,
235+
outputTokens: chunk.usage.completion_tokens || 0,
236+
}
237+
}
238+
}
239+
}
172240
}
173241

174242
export async function getOpenAiModels(baseUrl?: string, apiKey?: string) {

src/core/Cline.ts

Lines changed: 21 additions & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -1417,6 +1417,18 @@ export class Cline {
14171417
return true
14181418
}
14191419

1420+
const askFinishSubTaskApproval = async () => {
1421+
// ask the user to approve that this task has completed and that they have reviewed it, so we can declare the task finished
1422+
// and return control to the parent task to continue running the rest of the sub-tasks
1423+
const toolMessage = JSON.stringify({
1424+
tool: "finishTask",
1425+
content:
1426+
"Subtask completed! You can review the results and suggest any corrections or next steps. If everything looks good, confirm to return the result to the parent task.",
1427+
})
1428+
1429+
return await askApproval("tool", toolMessage)
1430+
}
1431+
14201432
const handleError = async (action: string, error: Error) => {
14211433
const errorString = `Error ${action}: ${JSON.stringify(serializeError(error))}`
14221434
await this.say(
@@ -2945,13 +2957,6 @@ export class Cline {
29452957
// haven't sent a command message yet, so first send completion_result, then command
29462958
await this.say("completion_result", result, undefined, false)
29472959
telemetryService.captureTaskCompleted(this.taskId)
2948-
if (this.isSubTask) {
2949-
// tell the provider to remove the current subtask and resume the previous task in the stack
2950-
await this.providerRef
2951-
.deref()
2952-
?.finishSubTask(`Task complete: ${lastMessage?.text}`)
2953-
break
2954-
}
29552960
}
29562961

29572962
// complete command message
@@ -2970,13 +2975,17 @@ export class Cline {
29702975
} else {
29712976
await this.say("completion_result", result, undefined, false)
29722977
telemetryService.captureTaskCompleted(this.taskId)
2973-
if (this.isSubTask) {
2974-
// tell the provider to remove the current subtask and resume the previous task in the stack
2975-
await this.providerRef
2976-
.deref()
2977-
?.finishSubTask(`Task complete: ${lastMessage?.text}`)
2978+
}
2979+
2980+
if (this.isSubTask) {
2981+
const didApprove = await askFinishSubTaskApproval()
2982+
if (!didApprove) {
29782983
break
29792984
}
2985+
2986+
// tell the provider to remove the current subtask and resume the previous task in the stack
2987+
await this.providerRef.deref()?.finishSubTask(`Task complete: ${lastMessage?.text}`)
2988+
break
29802989
}
29812990

29822991
// we already sent completion_result says, an empty string asks relinquishes control over button and field

src/core/prompts/sections/modes.ts

Lines changed: 12 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -11,12 +11,19 @@ export async function getModesSection(context: vscode.ExtensionContext): Promise
1111
// Get all modes with their overrides from extension state
1212
const allModes = await getAllModesWithPrompts(context)
1313

14-
return `====
14+
// Get enableCustomModeCreation setting from extension state
15+
const shouldEnableCustomModeCreation = await context.globalState.get<boolean>("enableCustomModeCreation") ?? true
16+
17+
let modesContent = `====
1518
1619
MODES
1720
1821
- These are the currently available modes:
19-
${allModes.map((mode: ModeConfig) => ` * "${mode.name}" mode (${mode.slug}) - ${mode.roleDefinition.split(".")[0]}`).join("\n")}
22+
${allModes.map((mode: ModeConfig) => ` * "${mode.name}" mode (${mode.slug}) - ${mode.roleDefinition.split(".")[0]}`).join("\n")}`
23+
24+
// Only include custom modes documentation if the feature is enabled
25+
if (shouldEnableCustomModeCreation) {
26+
modesContent += `
2027
2128
- Custom modes can be configured in two ways:
2229
1. Globally via '${customModesPath}' (created automatically on startup)
@@ -56,4 +63,7 @@ Both files should follow this structure:
5663
}
5764
]
5865
}`
66+
}
67+
68+
return modesContent
5969
}

0 commit comments

Comments
 (0)