Commit b0b3a9e

Merge remote-tracking branch 'origin/main' into rus
2 parents: a1609c7 + b75379b


51 files changed (+561, -197 lines)

CHANGELOG.md

Lines changed: 5 additions & 1 deletion
@@ -1,5 +1,9 @@
 # Roo Code Changelog
 
+## [3.14.1] - 2025-04-24
+
+- Disable Gemini caching while we investigate issues reported by the community.
+
 ## [3.14.0] - 2025-04-23
 
 - Add prompt caching for `gemini-2.5-pro-preview-03-25` in the Gemini provider (Vertex and OpenRouter coming soon!)
@@ -8,7 +12,7 @@
 - Make the list_files tool more efficient and smarter about excluding directories like .git/
 - Fix file drag and drop on Windows and when using SSH tunnels (thanks @NyxJae!)
 - Correctly revert changes and suggest alternative tools when write_to_file fails on a missing line count
-- Allow interpolation of `workspace`, `mode`, `language`, `shell`, and `operatingSystem` into custom system prompt overrides (thanks @daniel-lxs!)
+- Allow interpolation of `workspace`, `mode`, `language`, `shell`, and `operatingSystem` into custom system prompt overrides (thanks @daniel-lxs!)
 - Fix interpolation bug in the “add to context” code action (thanks @elianiva!)
 - Preserve editor state and prevent tab unpinning during diffs (thanks @seedlord!)
 - Improvements to icon rendering on Linux (thanks @elianiva!)

package-lock.json

Lines changed: 99 additions & 2 deletions
Some generated files are not rendered by default.

package.json

Lines changed: 2 additions & 1 deletion
@@ -3,7 +3,7 @@
 "displayName": "%extension.displayName%",
 "description": "%extension.description%",
 "publisher": "RooVeterinaryInc",
-"version": "3.14.0",
+"version": "3.14.1",
 "icon": "assets/icons/icon.png",
 "galleryBanner": {
 "color": "#617A91",
@@ -485,6 +485,7 @@
 "knip": "^5.44.4",
 "lint-staged": "^15.2.11",
 "mkdirp": "^3.0.1",
+"nock": "^14.0.4",
 "npm-run-all": "^4.1.5",
 "prettier": "^3.4.2",
 "rimraf": "^6.0.1",

src/api/providers/__tests__/openai.test.ts

Lines changed: 0 additions & 1 deletion
@@ -1,7 +1,6 @@
 import { OpenAiHandler } from "../openai"
 import { ApiHandlerOptions } from "../../../shared/api"
 import { Anthropic } from "@anthropic-ai/sdk"
-import { DEEP_SEEK_DEFAULT_TEMPERATURE } from "../constants"
 
 // Mock OpenAI client
 const mockCreate = jest.fn()

src/api/providers/__tests__/openrouter.test.ts

Lines changed: 0 additions & 1 deletion
@@ -55,7 +55,6 @@ describe("OpenRouterHandler", () => {
 info: mockOptions.openRouterModelInfo,
 maxTokens: 1000,
 reasoning: undefined,
-supportsPromptCache: false,
 temperature: 0,
 thinking: undefined,
 topP: undefined,

src/api/providers/constants.ts

Lines changed: 0 additions & 4 deletions
@@ -6,7 +6,3 @@ export const DEFAULT_HEADERS = {
 export const ANTHROPIC_DEFAULT_MAX_TOKENS = 8192
 
 export const DEEP_SEEK_DEFAULT_TEMPERATURE = 0.6
-
-export const AZURE_AI_INFERENCE_PATH = "/models/chat/completions"
-
-export const REASONING_MODELS = new Set(["x-ai/grok-3-mini-beta", "grok-3-mini-beta", "grok-3-mini-fast-beta"])

src/api/providers/fetchers/__tests__/fixtures/openrouter-models.json

Lines changed: 25 additions & 0 deletions
Large diffs are not rendered by default.
src/api/providers/fetchers/__tests__/openrouter.test.ts

Lines changed: 72 additions & 0 deletions
@@ -0,0 +1,72 @@
+// npx jest src/api/providers/fetchers/__tests__/openrouter.test.ts
+
+import path from "path"
+
+import { back as nockBack } from "nock"
+
+import { PROMPT_CACHING_MODELS } from "../../../../shared/api"
+
+import { getOpenRouterModels } from "../openrouter"
+
+nockBack.fixtures = path.join(__dirname, "fixtures")
+nockBack.setMode("dryrun")
+
+describe("OpenRouter API", () => {
+	describe("getOpenRouterModels", () => {
+		it("fetches models and validates schema", async () => {
+			const { nockDone } = await nockBack("openrouter-models.json")
+
+			const models = await getOpenRouterModels()
+
+			expect(
+				Object.entries(models)
+					.filter(([_, model]) => model.supportsPromptCache)
+					.map(([id, _]) => id)
+					.sort(),
+			).toEqual(Array.from(PROMPT_CACHING_MODELS).sort())
+
+			expect(
+				Object.entries(models)
+					.filter(([_, model]) => model.supportsComputerUse)
+					.map(([id, _]) => id)
+					.sort(),
+			).toEqual([
+				"anthropic/claude-3.5-sonnet",
+				"anthropic/claude-3.5-sonnet:beta",
+				"anthropic/claude-3.7-sonnet",
+				"anthropic/claude-3.7-sonnet:beta",
+				"anthropic/claude-3.7-sonnet:thinking",
+			])
+
+			expect(models["anthropic/claude-3.7-sonnet"]).toEqual({
+				maxTokens: 8192,
+				contextWindow: 200000,
+				supportsImages: true,
+				supportsPromptCache: true,
+				inputPrice: 3,
+				outputPrice: 15,
+				cacheWritesPrice: 3.75,
+				cacheReadsPrice: 0.3,
+				description: expect.any(String),
+				thinking: false,
+				supportsComputerUse: true,
+			})
+
+			expect(models["anthropic/claude-3.7-sonnet:thinking"]).toEqual({
+				maxTokens: 128000,
+				contextWindow: 200000,
+				supportsImages: true,
+				supportsPromptCache: true,
+				inputPrice: 3,
+				outputPrice: 15,
+				cacheWritesPrice: 3.75,
+				cacheReadsPrice: 0.3,
+				description: expect.any(String),
+				thinking: true,
+				supportsComputerUse: true,
+			})
+
+			nockDone()
+		})
+	})
+})
