Skip to content

Commit 532728e

Browse files
cte, roomote, and mrubens
authored
Expose thinking tokens for roo/sonic (#7212)
Co-authored-by: Roo Code <[email protected]>
Co-authored-by: Matt Rubens <[email protected]>
1 parent fd3535c commit 532728e

File tree

5 files changed

+58
-9
lines changed

5 files changed

+58
-9
lines changed

.env.sample

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -3,3 +3,4 @@ POSTHOG_API_KEY=key-goes-here
33
# Roo Code Cloud / Local Development
44
CLERK_BASE_URL=https://epic-chamois-85.clerk.accounts.dev
55
ROO_CODE_API_URL=http://localhost:3000
6+
ROO_CODE_PROVIDER_URL=http://localhost:8080/proxy/v1

packages/types/src/providers/roo.ts

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -10,10 +10,10 @@ export const rooModels = {
1010
maxTokens: 8192,
1111
contextWindow: 262_144,
1212
supportsImages: false,
13-
supportsPromptCache: false,
13+
supportsPromptCache: true,
1414
inputPrice: 0,
1515
outputPrice: 0,
1616
description:
17-
"Stealth coding model with 262K context window, accessible for free through Roo Code Cloud for a limited time. (Note: prompts and completions are logged by the model creator and used to improve the model.)",
17+
"A stealth reasoning model that is blazing fast and excels at agentic coding, accessible for free through Roo Code Cloud for a limited time. (Note: prompts and completions are logged by the model creator and used to improve the model.)",
1818
},
1919
} as const satisfies Record<string, ModelInfo>

src/api/providers/__tests__/roo.spec.ts

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -331,7 +331,7 @@ describe("RooHandler", () => {
331331
expect(modelInfo.info.maxTokens).toBe(8192)
332332
expect(modelInfo.info.contextWindow).toBe(262_144)
333333
expect(modelInfo.info.supportsImages).toBe(false)
334-
expect(modelInfo.info.supportsPromptCache).toBe(false)
334+
expect(modelInfo.info.supportsPromptCache).toBe(true)
335335
expect(modelInfo.info.inputPrice).toBe(0)
336336
expect(modelInfo.info.outputPrice).toBe(0)
337337
})

src/api/providers/base-openai-compatible-provider.ts

Lines changed: 11 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -62,11 +62,11 @@ export abstract class BaseOpenAiCompatibleProvider<ModelName extends string>
6262
})
6363
}
6464

65-
override async *createMessage(
65+
protected createStream(
6666
systemPrompt: string,
6767
messages: Anthropic.Messages.MessageParam[],
6868
metadata?: ApiHandlerCreateMessageMetadata,
69-
): ApiStream {
69+
) {
7070
const {
7171
id: model,
7272
info: { maxTokens: max_tokens },
@@ -83,7 +83,15 @@ export abstract class BaseOpenAiCompatibleProvider<ModelName extends string>
8383
stream_options: { include_usage: true },
8484
}
8585

86-
const stream = await this.client.chat.completions.create(params)
86+
return this.client.chat.completions.create(params)
87+
}
88+
89+
override async *createMessage(
90+
systemPrompt: string,
91+
messages: Anthropic.Messages.MessageParam[],
92+
metadata?: ApiHandlerCreateMessageMetadata,
93+
): ApiStream {
94+
const stream = await this.createStream(systemPrompt, messages, metadata)
8795

8896
for await (const chunk of stream) {
8997
const delta = chunk.choices[0]?.delta

src/api/providers/roo.ts

Lines changed: 43 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -1,10 +1,14 @@
1+
import { Anthropic } from "@anthropic-ai/sdk"
12
import { rooDefaultModelId, rooModels, type RooModelId } from "@roo-code/types"
23
import { CloudService } from "@roo-code/cloud"
34

45
import type { ApiHandlerOptions } from "../../shared/api"
5-
import { BaseOpenAiCompatibleProvider } from "./base-openai-compatible-provider"
6+
import { ApiStream } from "../transform/stream"
67
import { t } from "../../i18n"
78

9+
import type { ApiHandlerCreateMessageMetadata } from "../index"
10+
import { BaseOpenAiCompatibleProvider } from "./base-openai-compatible-provider"
11+
812
export class RooHandler extends BaseOpenAiCompatibleProvider<RooModelId> {
913
constructor(options: ApiHandlerOptions) {
1014
// Check if CloudService is available and get the session token.
@@ -21,14 +25,50 @@ export class RooHandler extends BaseOpenAiCompatibleProvider<RooModelId> {
2125
super({
2226
...options,
2327
providerName: "Roo Code Cloud",
24-
baseURL: "https://api.roocode.com/proxy/v1",
28+
baseURL: process.env.ROO_CODE_PROVIDER_URL ?? "https://api.roocode.com/proxy/v1",
2529
apiKey: sessionToken,
2630
defaultProviderModelId: rooDefaultModelId,
2731
providerModels: rooModels,
2832
defaultTemperature: 0.7,
2933
})
3034
}
3135

36+
override async *createMessage(
37+
systemPrompt: string,
38+
messages: Anthropic.Messages.MessageParam[],
39+
metadata?: ApiHandlerCreateMessageMetadata,
40+
): ApiStream {
41+
const stream = await this.createStream(systemPrompt, messages, metadata)
42+
43+
for await (const chunk of stream) {
44+
const delta = chunk.choices[0]?.delta
45+
46+
if (delta) {
47+
if (delta.content) {
48+
yield {
49+
type: "text",
50+
text: delta.content,
51+
}
52+
}
53+
54+
if ("reasoning_content" in delta && typeof delta.reasoning_content === "string") {
55+
yield {
56+
type: "reasoning",
57+
text: delta.reasoning_content,
58+
}
59+
}
60+
}
61+
62+
if (chunk.usage) {
63+
yield {
64+
type: "usage",
65+
inputTokens: chunk.usage.prompt_tokens || 0,
66+
outputTokens: chunk.usage.completion_tokens || 0,
67+
}
68+
}
69+
}
70+
}
71+
3272
override getModel() {
3373
const modelId = this.options.apiModelId || rooDefaultModelId
3474
const modelInfo = this.providerModels[modelId as RooModelId] ?? this.providerModels[rooDefaultModelId]
@@ -44,7 +84,7 @@ export class RooHandler extends BaseOpenAiCompatibleProvider<RooModelId> {
4484
maxTokens: 8192,
4585
contextWindow: 262_144,
4686
supportsImages: false,
47-
supportsPromptCache: false,
87+
supportsPromptCache: true,
4888
inputPrice: 0,
4989
outputPrice: 0,
5090
},

0 commit comments

Comments (0)