Commit 51a59a1

Merge pull request #14 from RooVetGit/main
updating from main.
2 parents 5b070bb + bc3af52 commit 51a59a1


55 files changed: +1982 -451 lines

.changeset/clever-news-arrive.md

Lines changed: 0 additions & 5 deletions

This file was deleted. (With the changesets workflow, pending changeset files are removed when a release is versioned; their notes land in the CHANGELOG entry below.)

CHANGELOG.md

Lines changed: 8 additions & 0 deletions
@@ -1,5 +1,13 @@
 # Roo Code Changelog

+## [3.3.20]
+
+- Support project-specific custom modes in a .roomodes file
+- Add more Mistral models (thanks @d-oit and @bramburn!)
+- By popular request, make it so Ask mode can't write to Markdown files and is purely for chatting with
+- Add a setting to control the number of open editor tabs to tell the model about (665 is probably too many!)
+- Fix race condition bug with entering API key on the welcome screen
+
 ## [3.3.19]

 - Fix a bug where aborting in the middle of file writes would not revert the write
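
Note on the .roomodes feature above: custom modes can now be defined per project. As a rough sketch only, a script generating such a file might look like the following; every field name here (customModes, slug, name, roleDefinition, groups) is an assumption for illustration, not taken from this commit, so check https://docs.roocode.com for the real schema.

// Hypothetical sketch of writing a project-level .roomodes file.
// All field names are assumptions for illustration; consult the
// Roo Code documentation for the actual format.
import { writeFileSync } from "fs"

const roomodes = {
	customModes: [
		{
			slug: "docs-writer",
			name: "Docs Writer",
			roleDefinition: "You write and maintain project documentation.",
			groups: ["read", "edit"],
		},
	],
}

writeFileSync(".roomodes", JSON.stringify(roomodes, null, 2))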

README.md

Lines changed: 2 additions & 0 deletions
@@ -15,6 +15,8 @@
 <a href="https://marketplace.visualstudio.com/items?itemName=RooVeterinaryInc.roo-cline" target="_blank"><img src="https://img.shields.io/badge/Download%20on%20VS%20Marketplace-blue?style=for-the-badge&logo=visualstudiocode&logoColor=white" alt="Download on VS Marketplace"></a>
 <a href="https://github.com/RooVetGit/Roo-Code/discussions/categories/feature-requests?discussions_q=is%3Aopen+category%3A%22Feature+Requests%22+sort%3Atop" target="_blank"><img src="https://img.shields.io/badge/Feature%20Requests-yellow?style=for-the-badge" alt="Feature Requests"></a>
 <a href="https://marketplace.visualstudio.com/items?itemName=RooVeterinaryInc.roo-cline&ssr=false#review-details" target="_blank"><img src="https://img.shields.io/badge/Rate%20%26%20Review-green?style=for-the-badge" alt="Rate & Review"></a>
+<a href="https://docs.roocode.com" target="_blank"><img src="https://img.shields.io/badge/Documentation-6B46C1?style=for-the-badge&logo=readthedocs&logoColor=white" alt="Documentation"></a>
+
 </div>

 **Roo Code** is an AI-powered **autonomous coding agent** that lives in your editor. It can:

package-lock.json

Lines changed: 2 additions & 2 deletions
Some generated files are not rendered by default.

package.json

Lines changed: 2 additions & 2 deletions
@@ -1,9 +1,9 @@
 {
 	"name": "roo-cline",
 	"displayName": "Roo Code (prev. Roo Cline)",
-	"description": "A VS Code plugin that enhances coding with AI-powered automation, multi-model support, and experimental features.",
+	"description": "An AI-powered autonomous coding agent that lives in your editor.",
 	"publisher": "RooVeterinaryInc",
-	"version": "3.3.19",
+	"version": "3.3.20",
 	"icon": "assets/icons/rocket.png",
 	"galleryBanner": {
 		"color": "#617A91",

src/__mocks__/get-folder-size.js

Lines changed: 7 additions & 0 deletions
@@ -4,3 +4,10 @@ module.exports = async function getFolderSize() {
 		errors: [],
 	}
 }
+
+module.exports.loose = async function getFolderSizeLoose() {
+	return {
+		size: 1000,
+		errors: [],
+	}
+}
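
The mock now exposes a loose variant alongside the default export, mirroring the entry points of the get-folder-size package that the code under test calls. A minimal sketch of exercising the mock from a Jest test, assuming Jest is configured to pick up this __mocks__ directory (the test body is illustrative, not part of this commit):

// Illustrative only: exercising the manual mock above from a Jest test.
jest.mock("get-folder-size")
const getFolderSize = require("get-folder-size")

test("loose variant resolves with the mocked shape", async () => {
	// Per the manual mock, .loose resolves to { size: 1000, errors: [] }.
	await expect(getFolderSize.loose("/any/path")).resolves.toEqual({ size: 1000, errors: [] })
})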
src/api/providers/__tests__/mistral.test.ts

Lines changed: 126 additions & 0 deletions

@@ -0,0 +1,126 @@
+import { MistralHandler } from "../mistral"
+import { ApiHandlerOptions, mistralDefaultModelId } from "../../../shared/api"
+import { Anthropic } from "@anthropic-ai/sdk"
+import { ApiStreamTextChunk } from "../../transform/stream"
+
+// Mock Mistral client
+const mockCreate = jest.fn()
+jest.mock("@mistralai/mistralai", () => {
+	return {
+		Mistral: jest.fn().mockImplementation(() => ({
+			chat: {
+				stream: mockCreate.mockImplementation(async (options) => {
+					const stream = {
+						[Symbol.asyncIterator]: async function* () {
+							yield {
+								data: {
+									choices: [
+										{
+											delta: { content: "Test response" },
+											index: 0,
+										},
+									],
+								},
+							}
+						},
+					}
+					return stream
+				}),
+			},
+		})),
+	}
+})
+
+describe("MistralHandler", () => {
+	let handler: MistralHandler
+	let mockOptions: ApiHandlerOptions
+
+	beforeEach(() => {
+		mockOptions = {
+			apiModelId: "codestral-latest", // Update to match the actual model ID
+			mistralApiKey: "test-api-key",
+			includeMaxTokens: true,
+			modelTemperature: 0,
+		}
+		handler = new MistralHandler(mockOptions)
+		mockCreate.mockClear()
+	})
+
+	describe("constructor", () => {
+		it("should initialize with provided options", () => {
+			expect(handler).toBeInstanceOf(MistralHandler)
+			expect(handler.getModel().id).toBe(mockOptions.apiModelId)
+		})
+
+		it("should throw error if API key is missing", () => {
+			expect(() => {
+				new MistralHandler({
+					...mockOptions,
+					mistralApiKey: undefined,
+				})
+			}).toThrow("Mistral API key is required")
+		})
+
+		it("should use custom base URL if provided", () => {
+			const customBaseUrl = "https://custom.mistral.ai/v1"
+			const handlerWithCustomUrl = new MistralHandler({
+				...mockOptions,
+				mistralCodestralUrl: customBaseUrl,
+			})
+			expect(handlerWithCustomUrl).toBeInstanceOf(MistralHandler)
+		})
+	})
+
+	describe("getModel", () => {
+		it("should return correct model info", () => {
+			const model = handler.getModel()
+			expect(model.id).toBe(mockOptions.apiModelId)
+			expect(model.info).toBeDefined()
+			expect(model.info.supportsPromptCache).toBe(false)
+		})
+	})
+
+	describe("createMessage", () => {
+		const systemPrompt = "You are a helpful assistant."
+		const messages: Anthropic.Messages.MessageParam[] = [
+			{
+				role: "user",
+				content: [{ type: "text", text: "Hello!" }],
+			},
+		]
+
+		it("should create message successfully", async () => {
+			const iterator = handler.createMessage(systemPrompt, messages)
+			const result = await iterator.next()
+
+			expect(mockCreate).toHaveBeenCalledWith({
+				model: mockOptions.apiModelId,
+				messages: expect.any(Array),
+				maxTokens: expect.any(Number),
+				temperature: 0,
+			})
+
+			expect(result.value).toBeDefined()
+			expect(result.done).toBe(false)
+		})
+
+		it("should handle streaming response correctly", async () => {
+			const iterator = handler.createMessage(systemPrompt, messages)
+			const results: ApiStreamTextChunk[] = []
+
+			for await (const chunk of iterator) {
+				if ("text" in chunk) {
+					results.push(chunk as ApiStreamTextChunk)
+				}
+			}
+
+			expect(results.length).toBeGreaterThan(0)
+			expect(results[0].text).toBe("Test response")
+		})
+
+		it("should handle errors gracefully", async () => {
+			mockCreate.mockRejectedValueOnce(new Error("API Error"))
+			await expect(handler.createMessage(systemPrompt, messages).next()).rejects.toThrow("API Error")
+		})
+	})
+})
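
The mocked chat.stream above returns a plain object whose [Symbol.asyncIterator] is an async generator, which is all that for await...of in the handler needs. The pattern in isolation (the makeFakeStream helper is illustrative, not part of this commit):

// Minimal sketch of the async-iterable mock pattern used in the test above.
function makeFakeStream<T>(chunks: T[]) {
	return {
		[Symbol.asyncIterator]: async function* () {
			for (const chunk of chunks) {
				yield chunk
			}
		},
	}
}

async function demo() {
	// Consumes the fake stream exactly as the handler consumes the real SDK stream.
	for await (const chunk of makeFakeStream(["Hello", " world"])) {
		process.stdout.write(chunk)
	}
}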

src/api/providers/__tests__/openai-native.test.ts

Lines changed: 17 additions & 9 deletions
@@ -130,11 +130,20 @@ describe("OpenAiNativeHandler", () => {
 		})

 		mockCreate.mockResolvedValueOnce({
-			choices: [{ message: { content: null } }],
-			usage: {
-				prompt_tokens: 0,
-				completion_tokens: 0,
-				total_tokens: 0,
+			[Symbol.asyncIterator]: async function* () {
+				yield {
+					choices: [
+						{
+							delta: { content: null },
+							index: 0,
+						},
+					],
+					usage: {
+						prompt_tokens: 0,
+						completion_tokens: 0,
+						total_tokens: 0,
+					},
+				}
 			},
 		})

@@ -144,10 +153,7 @@ describe("OpenAiNativeHandler", () => {
 			results.push(result)
 		}

-		expect(results).toEqual([
-			{ type: "text", text: "" },
-			{ type: "usage", inputTokens: 0, outputTokens: 0 },
-		])
+		expect(results).toEqual([{ type: "usage", inputTokens: 0, outputTokens: 0 }])

 		// Verify developer role is used for system prompt with o1 model
 		expect(mockCreate).toHaveBeenCalledWith({
@@ -156,6 +162,8 @@ describe("OpenAiNativeHandler", () => {
 				{ role: "developer", content: "Formatting re-enabled\n" + systemPrompt },
 				{ role: "user", content: "Hello!" },
 			],
+			stream: true,
+			stream_options: { include_usage: true },
 		})
 	})

src/api/providers/mistral.ts

Lines changed: 48 additions & 8 deletions
@@ -21,23 +21,42 @@ export class MistralHandler implements ApiHandler {
 	private client: Mistral

 	constructor(options: ApiHandlerOptions) {
-		this.options = options
+		if (!options.mistralApiKey) {
+			throw new Error("Mistral API key is required")
+		}
+
+		// Set default model ID if not provided
+		this.options = {
+			...options,
+			apiModelId: options.apiModelId || mistralDefaultModelId,
+		}
+
+		const baseUrl = this.getBaseUrl()
+		console.debug(`[Roo Code] MistralHandler using baseUrl: ${baseUrl}`)
 		this.client = new Mistral({
-			serverURL: "https://codestral.mistral.ai",
+			serverURL: baseUrl,
 			apiKey: this.options.mistralApiKey,
 		})
 	}

+	private getBaseUrl(): string {
+		const modelId = this.options.apiModelId ?? mistralDefaultModelId
+		console.debug(`[Roo Code] MistralHandler using modelId: ${modelId}`)
+		if (modelId?.startsWith("codestral-")) {
+			return this.options.mistralCodestralUrl || "https://codestral.mistral.ai"
+		}
+		return "https://api.mistral.ai"
+	}
+
 	async *createMessage(systemPrompt: string, messages: Anthropic.Messages.MessageParam[]): ApiStream {
-		const stream = await this.client.chat.stream({
-			model: this.getModel().id,
-			// max_completion_tokens: this.getModel().info.maxTokens,
-			temperature: this.options.modelTemperature ?? MISTRAL_DEFAULT_TEMPERATURE,
+		const response = await this.client.chat.stream({
+			model: this.options.apiModelId || mistralDefaultModelId,
 			messages: [{ role: "system", content: systemPrompt }, ...convertToMistralMessages(messages)],
-			stream: true,
+			maxTokens: this.options.includeMaxTokens ? this.getModel().info.maxTokens : undefined,
+			temperature: this.options.modelTemperature ?? MISTRAL_DEFAULT_TEMPERATURE,
 		})

-		for await (const chunk of stream) {
+		for await (const chunk of response) {
 			const delta = chunk.data.choices[0]?.delta
 			if (delta?.content) {
 				let content: string = ""

@@ -73,4 +92,25 @@ export class MistralHandler implements ApiHandler {
 			info: mistralModels[mistralDefaultModelId],
 		}
 	}
+
+	async completePrompt(prompt: string): Promise<string> {
+		try {
+			const response = await this.client.chat.complete({
+				model: this.options.apiModelId || mistralDefaultModelId,
+				messages: [{ role: "user", content: prompt }],
+				temperature: this.options.modelTemperature ?? MISTRAL_DEFAULT_TEMPERATURE,
+			})
+
+			const content = response.choices?.[0]?.message.content
+			if (Array.isArray(content)) {
+				return content.map((c) => (c.type === "text" ? c.text : "")).join("")
+			}
+			return content || ""
+		} catch (error) {
+			if (error instanceof Error) {
+				throw new Error(`Mistral completion error: ${error.message}`)
+			}
+			throw error
+		}
+	}
 }
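
The key behavioral change here is the base-URL routing in getBaseUrl(): codestral-* models go to the dedicated Codestral endpoint (overridable via mistralCodestralUrl), while every other model goes to the general Mistral API. A standalone sketch of that rule; the function name and its second parameter are illustrative, not part of the diff:

// Standalone sketch of the routing rule introduced above.
function resolveMistralBaseUrl(modelId: string, codestralUrl?: string): string {
	if (modelId.startsWith("codestral-")) {
		// Codestral models use their own endpoint unless a custom URL is set.
		return codestralUrl || "https://codestral.mistral.ai"
	}
	return "https://api.mistral.ai"
}

console.log(resolveMistralBaseUrl("codestral-latest")) // https://codestral.mistral.ai
console.log(resolveMistralBaseUrl("mistral-large-latest")) // https://api.mistral.ai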

src/api/providers/openai-native.ts

Lines changed: 3 additions & 1 deletion
@@ -56,9 +56,11 @@ export class OpenAiNativeHandler implements ApiHandler, SingleCompletionHandler
 				},
 				...convertToOpenAiMessages(messages),
 			],
+			stream: true,
+			stream_options: { include_usage: true },
 		})

-		yield* this.yieldResponseData(response)
+		yield* this.handleStreamResponse(response)
 	}

 	private async *handleO3FamilyMessage(
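
With stream: true, usage figures no longer arrive on a single response object; stream_options: { include_usage: true } asks the API to append a final chunk that carries usage, which is why the test above now expects only a usage result when the delta content is null. A minimal consumption sketch, assuming the official openai Node SDK (the model name is illustrative):

// Minimal sketch of consuming a streamed completion with usage included,
// using the official "openai" Node SDK.
import OpenAI from "openai"

async function streamWithUsage(client: OpenAI, prompt: string) {
	const stream = await client.chat.completions.create({
		model: "gpt-4o", // illustrative model name
		messages: [{ role: "user", content: prompt }],
		stream: true,
		stream_options: { include_usage: true },
	})

	for await (const chunk of stream) {
		const delta = chunk.choices[0]?.delta?.content
		if (delta) {
			process.stdout.write(delta)
		}
		// The final chunk carries usage (its choices array is empty).
		if (chunk.usage) {
			console.log("\ntokens:", chunk.usage.prompt_tokens, chunk.usage.completion_tokens)
		}
	}
}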
