Commit 73ac649

Merge remote-tracking branch 'origin/main' into human-relay
2 parents: d6ae95a + 957d022

49 files changed: +2419 -1816 lines

.changeset/wise-pears-join.md

Lines changed: 5 additions & 0 deletions
@@ -0,0 +1,5 @@
+---
+"roo-cline": patch
+---
+
+Improved observability of openai compatible APIs, by sending x-title and http-referer headers, as per Open Router standard.

package.json

Lines changed: 0 additions & 47 deletions
@@ -128,31 +128,6 @@
 				"command": "roo-cline.addToContext",
 				"title": "Roo Code: Add To Context",
 				"category": "Roo Code"
-			},
-			{
-				"command": "roo-cline.terminalAddToContext",
-				"title": "Roo Code: Add Terminal Content to Context",
-				"category": "Terminal"
-			},
-			{
-				"command": "roo-cline.terminalFixCommand",
-				"title": "Roo Code: Fix This Command",
-				"category": "Terminal"
-			},
-			{
-				"command": "roo-cline.terminalExplainCommand",
-				"title": "Roo Code: Explain This Command",
-				"category": "Terminal"
-			},
-			{
-				"command": "roo-cline.terminalFixCommandInCurrentTask",
-				"title": "Roo Code: Fix This Command (Current Task)",
-				"category": "Terminal"
-			},
-			{
-				"command": "roo-cline.terminalExplainCommandInCurrentTask",
-				"title": "Roo Code: Explain This Command (Current Task)",
-				"category": "Terminal"
			}
		],
		"menus": {
@@ -178,28 +153,6 @@
 					"group": "Roo Code@4"
 				}
 			],
-			"terminal/context": [
-				{
-					"command": "roo-cline.terminalAddToContext",
-					"group": "Roo Code@1"
-				},
-				{
-					"command": "roo-cline.terminalFixCommand",
-					"group": "Roo Code@2"
-				},
-				{
-					"command": "roo-cline.terminalExplainCommand",
-					"group": "Roo Code@3"
-				},
-				{
-					"command": "roo-cline.terminalFixCommandInCurrentTask",
-					"group": "Roo Code@5"
-				},
-				{
-					"command": "roo-cline.terminalExplainCommandInCurrentTask",
-					"group": "Roo Code@6"
-				}
-			],
 			"view/title": [
 				{
 					"command": "roo-cline.plusButtonClicked",

src/activate/index.ts

Lines changed: 0 additions & 1 deletion
@@ -1,4 +1,3 @@
 export { handleUri } from "./handleUri"
 export { registerCommands } from "./registerCommands"
 export { registerCodeActions } from "./registerCodeActions"
-export { registerTerminalActions } from "./registerTerminalActions"

src/activate/registerTerminalActions.ts

Lines changed: 0 additions & 81 deletions
This file was deleted.

src/api/providers/__tests__/openai.test.ts

Lines changed: 14 additions & 0 deletions
@@ -90,6 +90,20 @@ describe("OpenAiHandler", () => {
 		})
 		expect(handlerWithCustomUrl).toBeInstanceOf(OpenAiHandler)
 	})
+
+	it("should set default headers correctly", () => {
+		// Get the mock constructor from the jest mock system
+		const openAiMock = jest.requireMock("openai").default
+
+		expect(openAiMock).toHaveBeenCalledWith({
+			baseURL: expect.any(String),
+			apiKey: expect.any(String),
+			defaultHeaders: {
+				"HTTP-Referer": "https://github.com/RooVetGit/Roo-Cline",
+				"X-Title": "Roo Code",
+			},
+		})
+	})
 })

 describe("createMessage", () => {
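
The new test reads the mocked constructor back via jest.requireMock; a minimal sketch of the kind of module mock that assertion presupposes (the actual mock in the test file may differ):

	// Hypothetical jest module mock: a constructor spy exposing a minimal
	// chat.completions surface, enough for the defaultHeaders assertion above.
	jest.mock("openai", () => ({
		__esModule: true,
		default: jest.fn().mockImplementation(() => ({
			chat: { completions: { create: jest.fn() } },
		})),
	}))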

src/api/providers/lmstudio.ts

Lines changed: 24 additions & 5 deletions
@@ -30,13 +30,24 @@ export class LmStudioHandler extends BaseProvider implements SingleCompletionHan
 		]

 		try {
-			const stream = await this.client.chat.completions.create({
+			// Create params object with optional draft model
+			const params: any = {
 				model: this.getModel().id,
 				messages: openAiMessages,
 				temperature: this.options.modelTemperature ?? LMSTUDIO_DEFAULT_TEMPERATURE,
 				stream: true,
-			})
-			for await (const chunk of stream) {
+			}
+
+			// Add draft model if speculative decoding is enabled and a draft model is specified
+			if (this.options.lmStudioSpeculativeDecodingEnabled && this.options.lmStudioDraftModelId) {
+				params.draft_model = this.options.lmStudioDraftModelId
+			}
+
+			const results = await this.client.chat.completions.create(params)
+
+			// Stream handling
+			// @ts-ignore
+			for await (const chunk of results) {
 				const delta = chunk.choices[0]?.delta
 				if (delta?.content) {
 					yield {
@@ -62,12 +73,20 @@ export class LmStudioHandler extends BaseProvider implements SingleCompletionHan

 	async completePrompt(prompt: string): Promise<string> {
 		try {
-			const response = await this.client.chat.completions.create({
+			// Create params object with optional draft model
+			const params: any = {
 				model: this.getModel().id,
 				messages: [{ role: "user", content: prompt }],
 				temperature: this.options.modelTemperature ?? LMSTUDIO_DEFAULT_TEMPERATURE,
 				stream: false,
-			})
+			}
+
+			// Add draft model if speculative decoding is enabled and a draft model is specified
+			if (this.options.lmStudioSpeculativeDecodingEnabled && this.options.lmStudioDraftModelId) {
+				params.draft_model = this.options.lmStudioDraftModelId
+			}
+
+			const response = await this.client.chat.completions.create(params)
 			return response.choices[0]?.message.content || ""
 		} catch (error) {
 			throw new Error(
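
The params object is typed as any because draft_model is an LM Studio extension that the OpenAI SDK's request types do not include (hence the @ts-ignore on the stream loop). A standalone sketch of the same optional-parameter pattern, with illustrative names only:

	// Sketch only: `options` stands in for this.options in the handler above.
	function buildLmStudioParams(options: {
		lmStudioSpeculativeDecodingEnabled?: boolean
		lmStudioDraftModelId?: string
	}): Record<string, unknown> {
		const params: Record<string, unknown> = { stream: true }

		// draft_model drives speculative decoding in LM Studio; it is only sent
		// when the feature is enabled and a draft model id is configured.
		if (options.lmStudioSpeculativeDecodingEnabled && options.lmStudioDraftModelId) {
			params.draft_model = options.lmStudioDraftModelId
		}

		return params
	}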

src/api/providers/openai.ts

Lines changed: 8 additions & 3 deletions
@@ -16,10 +16,14 @@ import { ApiStream, ApiStreamUsageChunk } from "../transform/stream"
 import { BaseProvider } from "./base-provider"

 const DEEP_SEEK_DEFAULT_TEMPERATURE = 0.6
-export interface OpenAiHandlerOptions extends ApiHandlerOptions {
-	defaultHeaders?: Record<string, string>
+
+export const defaultHeaders = {
+	"HTTP-Referer": "https://github.com/RooVetGit/Roo-Cline",
+	"X-Title": "Roo Code",
 }

+export interface OpenAiHandlerOptions extends ApiHandlerOptions {}
+
 export class OpenAiHandler extends BaseProvider implements SingleCompletionHandler {
 	protected options: OpenAiHandlerOptions
 	private client: OpenAI
@@ -47,9 +51,10 @@ export class OpenAiHandler extends BaseProvider implements SingleCompletionHandl
 				baseURL,
 				apiKey,
 				apiVersion: this.options.azureApiVersion || azureOpenAiDefaultApiVersion,
+				defaultHeaders,
 			})
 		} else {
-			this.client = new OpenAI({ baseURL, apiKey, defaultHeaders })
+			this.client = new OpenAI({ baseURL, apiKey, defaultHeaders })
 		}
 	}
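
With defaultHeaders now exported from this module, any OpenAI-compatible client in the codebase can attach the same attribution headers; a minimal usage sketch (the baseURL below is illustrative):

	import OpenAI from "openai"
	import { defaultHeaders } from "./openai"

	// Every request made through this client carries the HTTP-Referer and
	// X-Title headers defined above, matching the OpenRouter convention
	// described in the changeset.
	const client = new OpenAI({
		baseURL: "https://openrouter.ai/api/v1", // illustrative endpoint
		apiKey: process.env.OPENAI_API_KEY ?? "not-provided",
		defaultHeaders,
	})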

src/api/providers/openrouter.ts

Lines changed: 1 addition & 5 deletions
@@ -13,6 +13,7 @@ import { convertToR1Format } from "../transform/r1-format"
 import { DEEP_SEEK_DEFAULT_TEMPERATURE } from "./constants"
 import { getModelParams, SingleCompletionHandler } from ".."
 import { BaseProvider } from "./base-provider"
+import { defaultHeaders } from "./openai"

 // Add custom interface for OpenRouter params.
 type OpenRouterChatCompletionParams = OpenAI.Chat.ChatCompletionCreateParams & {
@@ -37,11 +38,6 @@ export class OpenRouterHandler extends BaseProvider implements SingleCompletionH
 		const baseURL = this.options.openRouterBaseUrl || "https://openrouter.ai/api/v1"
 		const apiKey = this.options.openRouterApiKey ?? "not-provided"

-		const defaultHeaders = {
-			"HTTP-Referer": "https://github.com/RooVetGit/Roo-Cline",
-			"X-Title": "Roo Code",
-		}
-
 		this.client = new OpenAI({ baseURL, apiKey, defaultHeaders })
 	}

src/api/providers/requesty.ts

Lines changed: 0 additions & 4 deletions
@@ -16,10 +16,6 @@ export class RequestyHandler extends OpenAiHandler {
 			openAiModelId: options.requestyModelId ?? requestyDefaultModelId,
 			openAiBaseUrl: "https://router.requesty.ai/v1",
 			openAiCustomModelInfo: options.requestyModelInfo ?? requestyModelInfoSaneDefaults,
-			defaultHeaders: {
-				"HTTP-Referer": "https://github.com/RooVetGit/Roo-Cline",
-				"X-Title": "Roo Code",
-			},
 		})
 	}

src/core/Cline.ts

Lines changed: 4 additions & 2 deletions
@@ -10,6 +10,7 @@ import getFolderSize from "get-folder-size"
 import * as path from "path"
 import { serializeError } from "serialize-error"
 import * as vscode from "vscode"
+
 import { ApiHandler, buildApiHandler } from "../api"
 import { ApiStream } from "../api/transform/stream"
 import { DIFF_VIEW_URI_SCHEME, DiffViewProvider } from "../integrations/editor/DiffViewProvider"
@@ -31,6 +32,7 @@ import { UrlContentFetcher } from "../services/browser/UrlContentFetcher"
 import { listFiles } from "../services/glob/list-files"
 import { regexSearchFiles } from "../services/ripgrep"
 import { parseSourceCodeForDefinitionsTopLevel } from "../services/tree-sitter"
+import { CheckpointStorage } from "../shared/checkpoints"
 import { ApiConfiguration } from "../shared/api"
 import { findLastIndex } from "../shared/array"
 import { combineApiRequests } from "../shared/combineApiRequests"
@@ -81,7 +83,7 @@ export type ClineOptions = {
 	customInstructions?: string
 	enableDiff?: boolean
 	enableCheckpoints?: boolean
-	checkpointStorage?: "task" | "workspace"
+	checkpointStorage?: CheckpointStorage
 	fuzzyMatchThreshold?: number
 	task?: string
 	images?: string[]
@@ -121,7 +123,7 @@ export class Cline {

 	// checkpoints
 	private enableCheckpoints: boolean
-	private checkpointStorage: "task" | "workspace"
+	private checkpointStorage: CheckpointStorage
 	private checkpointService?: RepoPerTaskCheckpointService | RepoPerWorkspaceCheckpointService

 	// streaming
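
The CheckpointStorage alias is imported from src/shared/checkpoints, which is not shown in this commit; judging from the inline union it replaces, its shape is presumably:

	// Assumed definition in src/shared/checkpoints.ts, inferred from the
	// literal union ("task" | "workspace") that Cline.ts used previously.
	export type CheckpointStorage = "task" | "workspace"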
