Skip to content

Commit 2cdfff0

Browse files
committed
Streaming checkbox for OpenAI-compatible providers
1 parent 376ffa3 commit 2cdfff0

File tree

4 files changed

+62
-45
lines changed

4 files changed

+62
-45
lines changed

src/api/providers/openai.ts

Lines changed: 52 additions & 30 deletions
Original file line numberDiff line numberDiff line change
@@ -32,43 +32,65 @@ export class OpenAiHandler implements ApiHandler {
3232
}
3333
}
3434

35-
// Include stream_options for OpenAI Compatible providers if the checkbox is checked
3635
async *createMessage(systemPrompt: string, messages: Anthropic.Messages.MessageParam[]): ApiStream {
37-
const openAiMessages: OpenAI.Chat.ChatCompletionMessageParam[] = [
38-
{ role: "system", content: systemPrompt },
39-
...convertToOpenAiMessages(messages),
40-
]
4136
const modelInfo = this.getModel().info
42-
const requestOptions: OpenAI.Chat.ChatCompletionCreateParams = {
43-
model: this.options.openAiModelId ?? "",
44-
messages: openAiMessages,
45-
temperature: 0,
46-
stream: true,
47-
}
48-
if (this.options.includeMaxTokens) {
49-
requestOptions.max_tokens = modelInfo.maxTokens
50-
}
37+
const modelId = this.options.openAiModelId ?? ""
5138

52-
if (this.options.includeStreamOptions ?? true) {
53-
requestOptions.stream_options = { include_usage: true }
54-
}
39+
if (this.options.openAiStreamingEnabled ?? true) {
40+
const systemMessage: OpenAI.Chat.ChatCompletionSystemMessageParam = {
41+
role: "system",
42+
content: systemPrompt
43+
}
44+
const requestOptions: OpenAI.Chat.Completions.ChatCompletionCreateParamsStreaming = {
45+
model: modelId,
46+
temperature: 0,
47+
messages: [systemMessage, ...convertToOpenAiMessages(messages)],
48+
stream: true as const,
49+
stream_options: { include_usage: true },
50+
}
51+
if (this.options.includeMaxTokens) {
52+
requestOptions.max_tokens = modelInfo.maxTokens
53+
}
54+
55+
const stream = await this.client.chat.completions.create(requestOptions)
5556

56-
const stream = await this.client.chat.completions.create(requestOptions)
57-
for await (const chunk of stream) {
58-
const delta = chunk.choices[0]?.delta
59-
if (delta?.content) {
60-
yield {
61-
type: "text",
62-
text: delta.content,
57+
for await (const chunk of stream) {
58+
const delta = chunk.choices[0]?.delta
59+
if (delta?.content) {
60+
yield {
61+
type: "text",
62+
text: delta.content,
63+
}
6364
}
64-
}
65-
if (chunk.usage) {
66-
yield {
67-
type: "usage",
68-
inputTokens: chunk.usage.prompt_tokens || 0,
69-
outputTokens: chunk.usage.completion_tokens || 0,
65+
if (chunk.usage) {
66+
yield {
67+
type: "usage",
68+
inputTokens: chunk.usage.prompt_tokens || 0,
69+
outputTokens: chunk.usage.completion_tokens || 0,
70+
}
7071
}
7172
}
73+
} else {
74+
// o1 for instance doesnt support streaming, non-1 temp, or system prompt
75+
const systemMessage: OpenAI.Chat.ChatCompletionUserMessageParam = {
76+
role: "user",
77+
content: systemPrompt
78+
}
79+
const requestOptions: OpenAI.Chat.Completions.ChatCompletionCreateParamsNonStreaming = {
80+
model: modelId,
81+
messages: [systemMessage, ...convertToOpenAiMessages(messages)],
82+
}
83+
const response = await this.client.chat.completions.create(requestOptions)
84+
85+
yield {
86+
type: "text",
87+
text: response.choices[0]?.message.content || "",
88+
}
89+
yield {
90+
type: "usage",
91+
inputTokens: response.usage?.prompt_tokens || 0,
92+
outputTokens: response.usage?.completion_tokens || 0,
93+
}
7294
}
7395
}
7496

src/core/webview/ClineProvider.ts

Lines changed: 6 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -66,7 +66,7 @@ type GlobalStateKey =
6666
| "lmStudioBaseUrl"
6767
| "anthropicBaseUrl"
6868
| "azureApiVersion"
69-
| "includeStreamOptions"
69+
| "openAiStreamingEnabled"
7070
| "openRouterModelId"
7171
| "openRouterModelInfo"
7272
| "openRouterUseMiddleOutTransform"
@@ -447,7 +447,7 @@ export class ClineProvider implements vscode.WebviewViewProvider {
447447
geminiApiKey,
448448
openAiNativeApiKey,
449449
azureApiVersion,
450-
includeStreamOptions,
450+
openAiStreamingEnabled,
451451
openRouterModelId,
452452
openRouterModelInfo,
453453
openRouterUseMiddleOutTransform,
@@ -478,7 +478,7 @@ export class ClineProvider implements vscode.WebviewViewProvider {
478478
await this.storeSecret("openAiNativeApiKey", openAiNativeApiKey)
479479
await this.storeSecret("deepSeekApiKey", message.apiConfiguration.deepSeekApiKey)
480480
await this.updateGlobalState("azureApiVersion", azureApiVersion)
481-
await this.updateGlobalState("includeStreamOptions", includeStreamOptions)
481+
await this.updateGlobalState("openAiStreamingEnabled", openAiStreamingEnabled)
482482
await this.updateGlobalState("openRouterModelId", openRouterModelId)
483483
await this.updateGlobalState("openRouterModelInfo", openRouterModelInfo)
484484
await this.updateGlobalState("openRouterUseMiddleOutTransform", openRouterUseMiddleOutTransform)
@@ -1295,7 +1295,7 @@ export class ClineProvider implements vscode.WebviewViewProvider {
12951295
openAiNativeApiKey,
12961296
deepSeekApiKey,
12971297
azureApiVersion,
1298-
includeStreamOptions,
1298+
openAiStreamingEnabled,
12991299
openRouterModelId,
13001300
openRouterModelInfo,
13011301
openRouterUseMiddleOutTransform,
@@ -1345,7 +1345,7 @@ export class ClineProvider implements vscode.WebviewViewProvider {
13451345
this.getSecret("openAiNativeApiKey") as Promise<string | undefined>,
13461346
this.getSecret("deepSeekApiKey") as Promise<string | undefined>,
13471347
this.getGlobalState("azureApiVersion") as Promise<string | undefined>,
1348-
this.getGlobalState("includeStreamOptions") as Promise<boolean | undefined>,
1348+
this.getGlobalState("openAiStreamingEnabled") as Promise<boolean | undefined>,
13491349
this.getGlobalState("openRouterModelId") as Promise<string | undefined>,
13501350
this.getGlobalState("openRouterModelInfo") as Promise<ModelInfo | undefined>,
13511351
this.getGlobalState("openRouterUseMiddleOutTransform") as Promise<boolean | undefined>,
@@ -1412,7 +1412,7 @@ export class ClineProvider implements vscode.WebviewViewProvider {
14121412
openAiNativeApiKey,
14131413
deepSeekApiKey,
14141414
azureApiVersion,
1415-
includeStreamOptions,
1415+
openAiStreamingEnabled,
14161416
openRouterModelId,
14171417
openRouterModelInfo,
14181418
openRouterUseMiddleOutTransform,

src/shared/api.ts

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -41,7 +41,7 @@ export interface ApiHandlerOptions {
4141
openAiNativeApiKey?: string
4242
azureApiVersion?: string
4343
openRouterUseMiddleOutTransform?: boolean
44-
includeStreamOptions?: boolean
44+
openAiStreamingEnabled?: boolean
4545
setAzureApiVersion?: boolean
4646
deepSeekBaseUrl?: string
4747
deepSeekApiKey?: string

webview-ui/src/components/settings/ApiOptions.tsx

Lines changed: 3 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -477,21 +477,16 @@ const ApiOptions = ({ showModelOptions, apiErrorMessage, modelIdErrorMessage }:
477477
<OpenAiModelPicker />
478478
<div style={{ display: 'flex', alignItems: 'center' }}>
479479
<VSCodeCheckbox
480-
checked={apiConfiguration?.includeStreamOptions ?? true}
480+
checked={apiConfiguration?.openAiStreamingEnabled ?? true}
481481
onChange={(e: any) => {
482482
const isChecked = e.target.checked
483483
setApiConfiguration({
484484
...apiConfiguration,
485-
includeStreamOptions: isChecked
485+
openAiStreamingEnabled: isChecked
486486
})
487487
}}>
488-
Include stream options
488+
Enable streaming
489489
</VSCodeCheckbox>
490-
<span
491-
className="codicon codicon-info"
492-
title="Stream options are for { include_usage: true }. Some providers may not support this option."
493-
style={{ marginLeft: '5px', cursor: 'help' }}
494-
></span>
495490
</div>
496491
<VSCodeCheckbox
497492
checked={azureApiVersionSelected}

0 commit comments

Comments (0)