
Commit 3cacd57

Add dedicated Requesty provider (RooCodeInc#1677)
* feat: Add dedicated Requesty provider
* Update ExtensionStateContext.tsx

---------

Co-authored-by: Saoud Rizwan <[email protected]>
1 parent 9bddd9a commit 3cacd57

File tree

9 files changed: +188 −0 lines changed


.changeset/big-plums-wave.md

Lines changed: 5 additions & 0 deletions

@@ -0,0 +1,5 @@
+---
+"claude-dev": minor
+---
+
+Adding Requesty API Provider

src/api/index.ts

Lines changed: 3 additions & 0 deletions

@@ -11,6 +11,7 @@ import { GeminiHandler } from "./providers/gemini"
 import { OpenAiNativeHandler } from "./providers/openai-native"
 import { ApiStream } from "./transform/stream"
 import { DeepSeekHandler } from "./providers/deepseek"
+import { RequestyHandler } from "./providers/requesty"
 import { QwenHandler } from "./providers/qwen"
 import { MistralHandler } from "./providers/mistral"
 import { VsCodeLmHandler } from "./providers/vscode-lm"

@@ -48,6 +49,8 @@ export function buildApiHandler(configuration: ApiConfiguration): ApiHandler {
             return new OpenAiNativeHandler(options)
         case "deepseek":
             return new DeepSeekHandler(options)
+        case "requesty":
+            return new RequestyHandler(options)
         case "qwen":
             return new QwenHandler(options)
         case "mistral":
src/api/providers/requesty.ts

Lines changed: 75 additions & 0 deletions

@@ -0,0 +1,75 @@
+import { Anthropic } from "@anthropic-ai/sdk"
+import OpenAI from "openai"
+import { withRetry } from "../retry"
+import { ApiHandlerOptions, ModelInfo, openAiModelInfoSaneDefaults } from "../../shared/api"
+import { ApiHandler } from "../index"
+import { convertToOpenAiMessages } from "../transform/openai-format"
+import { ApiStream } from "../transform/stream"
+import { convertToR1Format } from "../transform/r1-format"
+
+export class RequestyHandler implements ApiHandler {
+    private options: ApiHandlerOptions
+    private client: OpenAI
+
+    constructor(options: ApiHandlerOptions) {
+        this.options = options
+        this.client = new OpenAI({
+            baseURL: "https://router.requesty.ai/v1",
+            apiKey: this.options.requestyApiKey,
+        })
+    }
+
+    @withRetry()
+    async *createMessage(systemPrompt: string, messages: Anthropic.Messages.MessageParam[]): ApiStream {
+        const modelId = this.options.requestyModelId ?? ""
+        const isDeepseekReasoner = modelId.includes("deepseek-reasoner")
+
+        let openAiMessages: OpenAI.Chat.ChatCompletionMessageParam[] = [
+            { role: "system", content: systemPrompt },
+            ...convertToOpenAiMessages(messages),
+        ]
+
+        if (isDeepseekReasoner) {
+            openAiMessages = convertToR1Format([{ role: "user", content: systemPrompt }, ...messages])
+        }
+
+        const stream = await this.client.chat.completions.create({
+            model: modelId,
+            messages: openAiMessages,
+            temperature: 0,
+            stream: true,
+            stream_options: { include_usage: true },
+        })
+        for await (const chunk of stream) {
+            const delta = chunk.choices[0]?.delta
+            if (delta?.content) {
+                yield {
+                    type: "text",
+                    text: delta.content,
+                }
+            }
+
+            if (delta && "reasoning_content" in delta && delta.reasoning_content) {
+                yield {
+                    type: "reasoning",
+                    reasoning: (delta.reasoning_content as string | undefined) || "",
+                }
+            }
+
+            if (chunk.usage) {
+                yield {
+                    type: "usage",
+                    inputTokens: chunk.usage.prompt_tokens || 0,
+                    outputTokens: chunk.usage.completion_tokens || 0,
+                }
+            }
+        }
+    }
+
+    getModel(): { id: string; info: ModelInfo } {
+        return {
+            id: this.options.requestyModelId ?? "",
+            info: openAiModelInfoSaneDefaults,
+        }
+    }
+}
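
A rough usage sketch of draining the ApiStream the handler yields, not part of the commit: the API key source and model ID are placeholders, and the loop is assumed to run inside an async context.

// Sketch only: RequestyHandler is the class above; key, model ID, and runtime context are assumptions.
const handler = new RequestyHandler({
    requestyApiKey: process.env.REQUESTY_API_KEY, // assumed to be supplied via the environment here
    requestyModelId: "deepseek-reasoner", // exercises the convertToR1Format branch above
})

const stream = handler.createMessage("You are a concise assistant.", [{ role: "user", content: "Hello" }])

for await (const chunk of stream) {
    if (chunk.type === "text") process.stdout.write(chunk.text)
    if (chunk.type === "reasoning") console.error(chunk.reasoning)
    if (chunk.type === "usage") console.error(`tokens in=${chunk.inputTokens} out=${chunk.outputTokens}`)
}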

src/core/webview/ClineProvider.ts

Lines changed: 13 additions & 0 deletions

@@ -45,6 +45,7 @@ type SecretKey =
     | "geminiApiKey"
     | "openAiNativeApiKey"
     | "deepSeekApiKey"
+    | "requestyApiKey"
     | "qwenApiKey"
     | "mistralApiKey"
     | "authToken"

@@ -80,6 +81,7 @@ type GlobalStateKey =
     | "liteLlmBaseUrl"
     | "liteLlmModelId"
     | "qwenApiLine"
+    | "requestyModelId"

 export const GlobalFileNames = {
     apiConversationHistory: "api_conversation_history.json",

@@ -442,6 +444,8 @@ export class ClineProvider implements vscode.WebviewViewProvider {
             geminiApiKey,
             openAiNativeApiKey,
             deepSeekApiKey,
+            requestyApiKey,
+            requestyModelId,
             qwenApiKey,
             mistralApiKey,
             azureApiVersion,

@@ -474,6 +478,7 @@ export class ClineProvider implements vscode.WebviewViewProvider {
                 await this.storeSecret("geminiApiKey", geminiApiKey)
                 await this.storeSecret("openAiNativeApiKey", openAiNativeApiKey)
                 await this.storeSecret("deepSeekApiKey", deepSeekApiKey)
+                await this.storeSecret("requestyApiKey", requestyApiKey)
                 await this.storeSecret("qwenApiKey", qwenApiKey)
                 await this.storeSecret("mistralApiKey", mistralApiKey)
                 await this.updateGlobalState("azureApiVersion", azureApiVersion)

@@ -483,6 +488,7 @@ export class ClineProvider implements vscode.WebviewViewProvider {
                 await this.updateGlobalState("liteLlmBaseUrl", liteLlmBaseUrl)
                 await this.updateGlobalState("liteLlmModelId", liteLlmModelId)
                 await this.updateGlobalState("qwenApiLine", qwenApiLine)
+                await this.updateGlobalState("requestyModelId", requestyModelId)
                 if (this.cline) {
                     this.cline.api = buildApiHandler(message.apiConfiguration)
                 }

@@ -1371,6 +1377,8 @@ export class ClineProvider implements vscode.WebviewViewProvider {
             geminiApiKey,
             openAiNativeApiKey,
             deepSeekApiKey,
+            requestyApiKey,
+            requestyModelId,
             qwenApiKey,
             mistralApiKey,
             azureApiVersion,

@@ -1414,6 +1422,8 @@ export class ClineProvider implements vscode.WebviewViewProvider {
             this.getSecret("geminiApiKey") as Promise<string | undefined>,
             this.getSecret("openAiNativeApiKey") as Promise<string | undefined>,
             this.getSecret("deepSeekApiKey") as Promise<string | undefined>,
+            this.getSecret("requestyApiKey") as Promise<string | undefined>,
+            this.getGlobalState("requestyModelId") as Promise<string | undefined>,
             this.getSecret("qwenApiKey") as Promise<string | undefined>,
             this.getSecret("mistralApiKey") as Promise<string | undefined>,
             this.getGlobalState("azureApiVersion") as Promise<string | undefined>,

@@ -1474,6 +1484,8 @@ export class ClineProvider implements vscode.WebviewViewProvider {
             geminiApiKey,
             openAiNativeApiKey,
             deepSeekApiKey,
+            requestyApiKey,
+            requestyModelId,
             qwenApiKey,
             qwenApiLine,
             mistralApiKey,

@@ -1571,6 +1583,7 @@ export class ClineProvider implements vscode.WebviewViewProvider {
             "geminiApiKey",
             "openAiNativeApiKey",
             "deepSeekApiKey",
+            "requestyApiKey",
             "qwenApiKey",
             "mistralApiKey",
             "authToken",

src/shared/api.ts

Lines changed: 3 additions & 0 deletions

@@ -8,6 +8,7 @@ export type ApiProvider =
     | "lmstudio"
     | "gemini"
     | "openai-native"
+    | "requesty"
     | "deepseek"
     | "qwen"
     | "mistral"

@@ -40,6 +41,8 @@ export interface ApiHandlerOptions {
     geminiApiKey?: string
     openAiNativeApiKey?: string
     deepSeekApiKey?: string
+    requestyApiKey?: string
+    requestyModelId?: string
     qwenApiKey?: string
     mistralApiKey?: string
     azureApiVersion?: string

webview-ui/src/components/settings/ApiOptions.tsx

Lines changed: 32 additions & 0 deletions

@@ -187,6 +187,7 @@ const ApiOptions = ({ showModelOptions, apiErrorMessage, modelIdErrorMessage, is
                     <VSCodeOption value="bedrock">AWS Bedrock</VSCodeOption>
                     <VSCodeOption value="openai-native">OpenAI</VSCodeOption>
                     <VSCodeOption value="openai">OpenAI Compatible</VSCodeOption>
+                    <VSCodeOption value="requesty">Requesty</VSCodeOption>
                     <VSCodeOption value="vscode-lm">VS Code LM API</VSCodeOption>
                     <VSCodeOption value="lmstudio">LM Studio</VSCodeOption>
                     <VSCodeOption value="ollama">Ollama</VSCodeOption>

@@ -673,6 +674,37 @@ const ApiOptions = ({ showModelOptions, apiErrorMessage, modelIdErrorMessage, is
                 </div>
             )}

+            {selectedProvider === "requesty" && (
+                <div>
+                    <VSCodeTextField
+                        value={apiConfiguration?.requestyApiKey || ""}
+                        style={{ width: "100%" }}
+                        type="password"
+                        onInput={handleInputChange("requestyApiKey")}
+                        placeholder="Enter API Key...">
+                        <span style={{ fontWeight: 500 }}>API Key</span>
+                    </VSCodeTextField>
+                    <VSCodeTextField
+                        value={apiConfiguration?.requestyModelId || ""}
+                        style={{ width: "100%" }}
+                        onInput={handleInputChange("requestyModelId")}
+                        placeholder={"Enter Model ID..."}>
+                        <span style={{ fontWeight: 500 }}>Model ID</span>
+                    </VSCodeTextField>
+                    <p
+                        style={{
+                            fontSize: "12px",
+                            marginTop: 3,
+                            color: "var(--vscode-descriptionForeground)",
+                        }}>
+                        <span style={{ color: "var(--vscode-errorForeground)" }}>
+                            (<span style={{ fontWeight: 500 }}>Note:</span> Cline uses complex prompts and works best with Claude
+                            models. Less capable models may not work as expected.)
+                        </span>
+                    </p>
+                </div>
+            )}
+
             {selectedProvider === "vscode-lm" && (
                 <div>
                     <DropdownContainer zIndex={DROPDOWN_Z_INDEX - 2} className="dropdown-container">

Lines changed: 51 additions & 0 deletions

@@ -0,0 +1,51 @@
+import { render, screen, fireEvent } from "@testing-library/react"
+import { describe, it, expect, vi } from "vitest"
+import ApiOptions from "../ApiOptions"
+import { ExtensionStateContextProvider } from "../../../context/ExtensionStateContext"
+
+vi.mock("../../../context/ExtensionStateContext", async (importOriginal) => {
+    const actual = await importOriginal()
+    return {
+        ...actual,
+        // your mocked methods
+        useExtensionState: vi.fn(() => ({
+            apiConfiguration: {
+                apiProvider: "requesty",
+                requestyApiKey: "",
+                requestyModelId: "",
+            },
+            setApiConfiguration: vi.fn(),
+            uriScheme: "vscode",
+        })),
+    }
+})
+
+describe("ApiOptions Component", () => {
+    vi.clearAllMocks()
+    const mockPostMessage = vi.fn()
+    const mockSetApiConfiguration = vi.fn()
+
+    beforeEach(() => {
+        global.vscode = { postMessage: mockPostMessage } as any
+    })
+
+    it("renders Requesty API Key input", () => {
+        render(
+            <ExtensionStateContextProvider>
+                <ApiOptions showModelOptions={true} />
+            </ExtensionStateContextProvider>,
+        )
+        const apiKeyInput = screen.getByPlaceholderText("Enter API Key...")
+        expect(apiKeyInput).toBeInTheDocument()
+    })
+
+    it("renders Requesty Model ID input", () => {
+        render(
+            <ExtensionStateContextProvider>
+                <ApiOptions showModelOptions={true} />
+            </ExtensionStateContextProvider>,
+        )
+        const modelIdInput = screen.getByPlaceholderText("Enter Model ID...")
+        expect(modelIdInput).toBeInTheDocument()
+    })
+})

webview-ui/src/context/ExtensionStateContext.tsx

Lines changed: 1 addition & 0 deletions

@@ -67,6 +67,7 @@ export const ExtensionStateContextProvider: React.FC<{
             config.geminiApiKey,
             config.openAiNativeApiKey,
             config.deepSeekApiKey,
+            config.requestyApiKey,
             config.qwenApiKey,
             config.mistralApiKey,
             config.vsCodeLmModelSelector,

webview-ui/src/utils/validate.ts

Lines changed: 5 additions & 0 deletions

@@ -53,6 +53,11 @@ export function validateApiConfiguration(apiConfiguration?: ApiConfiguration): string | undefined {
                return "You must provide a valid base URL, API key, and model ID."
            }
            break
+        case "requesty":
+            if (!apiConfiguration.requestyApiKey || !apiConfiguration.requestyModelId) {
+                return "You must provide a valid API key or choose a different provider."
+            }
+            break
         case "ollama":
             if (!apiConfiguration.ollamaModelId) {
                 return "You must provide a valid model ID."
