Skip to content

Commit aff72ef

Browse files
committed
Add seed config
1 parent 78b2083 commit aff72ef

File tree

9 files changed

+105
-2
lines changed

9 files changed

+105
-2
lines changed

src/api/index.ts

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -116,11 +116,13 @@ export function getModelParams({
116116
modelMaxTokens: customMaxTokens,
117117
modelMaxThinkingTokens: customMaxThinkingTokens,
118118
modelTemperature: customTemperature,
119+
modelSeed: customSeed,
119120
reasoningEffort: customReasoningEffort,
120121
} = options
121122

122123
let maxTokens = model.maxTokens ?? defaultMaxTokens
123124
let thinking: BetaThinkingConfigParam | undefined = undefined
125+
let seed = customSeed
124126
let temperature = customTemperature ?? defaultTemperature
125127
const reasoningEffort = customReasoningEffort ?? defaultReasoningEffort
126128

@@ -138,5 +140,5 @@ export function getModelParams({
138140
temperature = 1.0
139141
}
140142

141-
return { maxTokens, thinking, temperature, reasoningEffort }
143+
return { maxTokens, thinking, temperature, reasoningEffort, seed }
142144
}

src/api/providers/base-openai-compatible-provider.ts

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -72,6 +72,7 @@ export abstract class BaseOpenAiCompatibleProvider<ModelName extends string>
7272
model,
7373
max_tokens,
7474
temperature,
75+
seed: this.options.modelSeed,
7576
messages: [{ role: "system", content: systemPrompt }, ...convertToOpenAiMessages(messages)],
7677
stream: true,
7778
stream_options: { include_usage: true },

src/api/providers/openai-native.ts

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -230,6 +230,7 @@ export class OpenAiNativeHandler extends BaseProvider implements SingleCompletio
230230
model: model.id,
231231
messages: [{ role: "user", content: prompt }],
232232
temperature: this.options.modelTemperature ?? OPENAI_NATIVE_DEFAULT_TEMPERATURE,
233+
seed: this.options.modelSeed,
233234
}
234235
}
235236
}

src/api/providers/openai.ts

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -142,6 +142,7 @@ export class OpenAiHandler extends BaseProvider implements SingleCompletionHandl
142142
const requestOptions: OpenAI.Chat.Completions.ChatCompletionCreateParamsStreaming = {
143143
model: modelId,
144144
temperature: this.options.modelTemperature ?? (deepseekReasoner ? DEEP_SEEK_DEFAULT_TEMPERATURE : 0),
145+
seed: this.options.modelSeed,
145146
messages: convertedMessages,
146147
stream: true as const,
147148
...(isGrokXAI ? {} : { stream_options: { include_usage: true } }),

src/api/providers/openrouter.ts

Lines changed: 4 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -78,6 +78,7 @@ export class OpenRouterHandler extends BaseProvider implements SingleCompletionH
7878
maxTokens,
7979
thinking,
8080
temperature,
81+
seed,
8182
topP,
8283
reasoningEffort,
8384
promptCache,
@@ -108,6 +109,7 @@ export class OpenRouterHandler extends BaseProvider implements SingleCompletionH
108109
model: modelId,
109110
max_tokens: maxTokens,
110111
temperature,
112+
seed,
111113
thinking, // OpenRouter is temporarily supporting this.
112114
top_p: topP,
113115
messages: openAiMessages,
@@ -193,13 +195,14 @@ export class OpenRouterHandler extends BaseProvider implements SingleCompletionH
193195
}
194196

195197
async completePrompt(prompt: string) {
196-
let { id: modelId, maxTokens, thinking, temperature } = await this.fetchModel()
198+
let { id: modelId, maxTokens, thinking, temperature, seed } = await this.fetchModel()
197199

198200
const completionParams: OpenRouterChatCompletionParams = {
199201
model: modelId,
200202
max_tokens: maxTokens,
201203
thinking,
202204
temperature,
205+
seed,
203206
messages: [{ role: "user", content: prompt }],
204207
stream: false,
205208
}

src/schemas/index.ts

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -438,6 +438,7 @@ export const providerSettingsSchema = z.object({
438438
diffEnabled: z.boolean().optional(),
439439
fuzzyMatchThreshold: z.number().optional(),
440440
modelTemperature: z.number().nullish(),
441+
modelSeed: z.number().nullish(),
441442
rateLimitSeconds: z.number().optional(),
442443
// Fake AI
443444
fakeAi: z.unknown().optional(),
@@ -528,6 +529,7 @@ const providerSettingsRecord: ProviderSettingsRecord = {
528529
diffEnabled: undefined,
529530
fuzzyMatchThreshold: undefined,
530531
modelTemperature: undefined,
532+
modelSeed: undefined,
531533
rateLimitSeconds: undefined,
532534
// Fake AI
533535
fakeAi: undefined,

webview-ui/src/components/settings/ApiOptions.tsx

Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -50,6 +50,7 @@ import { DiffSettingsControl } from "./DiffSettingsControl"
5050
import { TemperatureControl } from "./TemperatureControl"
5151
import { RateLimitSecondsControl } from "./RateLimitSecondsControl"
5252
import { BedrockCustomArn } from "./providers/BedrockCustomArn"
53+
import { SeedControl } from "./SeedControl"
5354

5455
export interface ApiOptionsProps {
5556
uriScheme: string | undefined
@@ -490,6 +491,10 @@ const ApiOptions = ({
490491
value={apiConfiguration.rateLimitSeconds || 0}
491492
onChange={(value) => setApiConfigurationField("rateLimitSeconds", value)}
492493
/>
494+
<SeedControl
495+
value={apiConfiguration.modelSeed}
496+
onChange={handleInputChange("modelSeed", noTransform)}
497+
/>
493498
</>
494499
)}
495500
</div>
webview-ui/src/components/settings/SeedControl.tsx

Lines changed: 83 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,83 @@
import { VSCodeCheckbox, VSCodeTextField } from "@vscode/webview-ui-toolkit/react"
import { useEffect, useState } from "react"
import { useAppTranslation } from "@/i18n/TranslationContext"
import { useDebounce } from "react-use"

interface SeedControlProps {
	value: number | string | undefined | null
	onChange: (value: number | string | undefined | null) => void
}

// A seed counts as "set" only when it is neither undefined, null, nor the empty string.
const hasSeed = (v: number | string | undefined | null): boolean => v !== undefined && v !== null && v !== ""

/**
 * Settings control for an optional model seed.
 *
 * Renders a "use custom seed" checkbox; when ticked, a text field appears for
 * entering an integer seed. Edits are parsed with parseInt and propagated to
 * `onChange` after a short debounce; an empty field propagates `null`
 * (meaning "no seed / random"). The component re-syncs its local state from
 * `value` whenever the prop changes (e.g. when switching profiles).
 */
export const SeedControl = ({ value, onChange }: SeedControlProps) => {
	const { t } = useAppTranslation()
	// Checkbox state: does this profile carry an explicit seed?
	const [isCustomSeed, setIsCustomSeed] = useState(hasSeed(value))
	// Raw text-field contents; parsed to a number before being sent upward.
	const [inputValue, setInputValue] = useState<string>(value?.toString() ?? "")

	// Debounce keystrokes so we don't fire onChange on every character.
	// Non-numeric input is silently ignored; an empty field clears the seed.
	useDebounce(
		() => {
			if (inputValue === "") {
				onChange(null)
				return
			}
			const parsed = parseInt(inputValue, 10)
			if (!isNaN(parsed)) {
				onChange(parsed)
			}
		},
		50,
		[onChange, inputValue],
	)

	// Sync internal state with prop changes when switching profiles.
	useEffect(() => {
		setIsCustomSeed(hasSeed(value))
		setInputValue(value?.toString() ?? "")
	}, [value])

	const onCheckboxToggled = (e: React.ChangeEvent<HTMLInputElement>) => {
		const checked = e.target.checked
		setIsCustomSeed(checked)
		// Unchecking empties the field (the debounce then reports null);
		// re-checking restores whatever seed the profile last had.
		setInputValue(checked ? (value?.toString() ?? "") : "")
	}

	const onFieldInput = (e: React.ChangeEvent<HTMLInputElement>) => setInputValue(e.target.value)

	return (
		<>
			<div>
				<VSCodeCheckbox checked={isCustomSeed} onChange={onCheckboxToggled}>
					<label className="block font-medium mb-1">{t("settings:seed.useCustom")}</label>
				</VSCodeCheckbox>
				<div className="text-sm text-vscode-descriptionForeground mt-1">
					{t("settings:seed.description")}
				</div>
			</div>

			{isCustomSeed && (
				<div className="flex flex-col gap-3 pl-3 border-l-2 border-vscode-button-background">
					<div>
						<VSCodeTextField
							type="text"
							value={inputValue}
							onInput={onFieldInput}
							className="w-full"
						/>
						<div className="text-vscode-descriptionForeground text-sm mt-1">
							{t("settings:seed.label")}
						</div>
					</div>
				</div>
			)}
		</>
	)
}

webview-ui/src/i18n/locales/en/settings.json

Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -426,6 +426,11 @@
426426
"description": "Controls randomness in the model's responses.",
427427
"rangeDescription": "Higher values make output more random, lower values make it more deterministic."
428428
},
429+
"seed": {
430+
"useCustom": "Use custom seed",
431+
"description": "Controls the deterministic nature of the model's responses. Leave blank for random.",
432+
"label": "Enter a seed value"
433+
},
429434
"modelInfo": {
430435
"supportsImages": "Supports images",
431436
"noImages": "Does not support images",

0 commit comments

Comments
 (0)