Skip to content
Merged
Show file tree
Hide file tree
Changes from 1 commit
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions .env.sample
Original file line number Diff line number Diff line change
Expand Up @@ -3,3 +3,4 @@ POSTHOG_API_KEY=key-goes-here
# Roo Code Cloud / Local Development
CLERK_BASE_URL=https://epic-chamois-85.clerk.accounts.dev
ROO_CODE_API_URL=http://localhost:3000
ROO_CODE_PROVIDER_URL=http://localhost:8080/proxy/v1
14 changes: 11 additions & 3 deletions src/api/providers/base-openai-compatible-provider.ts
Original file line number Diff line number Diff line change
Expand Up @@ -62,11 +62,11 @@ export abstract class BaseOpenAiCompatibleProvider<ModelName extends string>
})
}

override async *createMessage(
protected createStream(
systemPrompt: string,
messages: Anthropic.Messages.MessageParam[],
metadata?: ApiHandlerCreateMessageMetadata,
): ApiStream {
) {
const {
id: model,
info: { maxTokens: max_tokens },
Expand All @@ -83,7 +83,15 @@ export abstract class BaseOpenAiCompatibleProvider<ModelName extends string>
stream_options: { include_usage: true },
}

const stream = await this.client.chat.completions.create(params)
return this.client.chat.completions.create(params)
}

override async *createMessage(
systemPrompt: string,
messages: Anthropic.Messages.MessageParam[],
metadata?: ApiHandlerCreateMessageMetadata,
): ApiStream {
const stream = await this.createStream(systemPrompt, messages, metadata)

for await (const chunk of stream) {
const delta = chunk.choices[0]?.delta
Expand Down
44 changes: 42 additions & 2 deletions src/api/providers/roo.ts
Original file line number Diff line number Diff line change
@@ -1,10 +1,14 @@
import { Anthropic } from "@anthropic-ai/sdk"
import { rooDefaultModelId, rooModels, type RooModelId } from "@roo-code/types"
import { CloudService } from "@roo-code/cloud"

import type { ApiHandlerOptions } from "../../shared/api"
import { BaseOpenAiCompatibleProvider } from "./base-openai-compatible-provider"
import { ApiStream } from "../transform/stream"
import { t } from "../../i18n"

import type { ApiHandlerCreateMessageMetadata } from "../index"
import { BaseOpenAiCompatibleProvider } from "./base-openai-compatible-provider"

export class RooHandler extends BaseOpenAiCompatibleProvider<RooModelId> {
constructor(options: ApiHandlerOptions) {
// Check if CloudService is available and get the session token.
Expand All @@ -21,14 +25,50 @@ export class RooHandler extends BaseOpenAiCompatibleProvider<RooModelId> {
super({
...options,
providerName: "Roo Code Cloud",
baseURL: "https://api.roocode.com/proxy/v1",
baseURL: process.env.ROO_CODE_PROVIDER_URL ?? "https://api.roocode.com/proxy/v1",
apiKey: sessionToken,
defaultProviderModelId: rooDefaultModelId,
providerModels: rooModels,
defaultTemperature: 0.7,
})
}

/**
 * Streams a chat completion from the Roo Code Cloud proxy and converts
 * OpenAI-style stream chunks into ApiStream events.
 *
 * Yields:
 * - `text` for each content delta,
 * - `reasoning` when the provider attaches a `reasoning_content` string
 *   (a non-standard field, hence the `in` check before use),
 * - `usage` whenever a chunk carries token counts (typically the final
 *   chunk when `include_usage` is enabled upstream).
 */
override async *createMessage(
	systemPrompt: string,
	messages: Anthropic.Messages.MessageParam[],
	metadata?: ApiHandlerCreateMessageMetadata,
): ApiStream {
	const stream = await this.createStream(systemPrompt, messages, metadata)

	for await (const chunk of stream) {
		const delta = chunk.choices[0]?.delta

		// Plain assistant text.
		if (delta?.content) {
			yield { type: "text", text: delta.content }
		}

		// Provider-specific reasoning trace; guard both presence and type
		// since the SDK's delta type does not declare this field.
		if (delta && "reasoning_content" in delta && typeof delta.reasoning_content === "string") {
			yield { type: "reasoning", text: delta.reasoning_content }
		}

		// Token accounting is reported independently of any delta.
		if (chunk.usage) {
			yield {
				type: "usage",
				inputTokens: chunk.usage.prompt_tokens || 0,
				outputTokens: chunk.usage.completion_tokens || 0,
			}
		}
	}
}

override getModel() {
const modelId = this.options.apiModelId || rooDefaultModelId
const modelInfo = this.providerModels[modelId as RooModelId] ?? this.providerModels[rooDefaultModelId]
Expand Down
Loading