diff --git a/src/platform/endpoint/test/node/openaiCompatibleEndpoint.ts b/src/platform/endpoint/test/node/openaiCompatibleEndpoint.ts
index a4c35d002..d711ce609 100644
--- a/src/platform/endpoint/test/node/openaiCompatibleEndpoint.ts
+++ b/src/platform/endpoint/test/node/openaiCompatibleEndpoint.ts
@@ -65,6 +65,7 @@ export type IModelConfig = {
 		max_tokens?: number | null;
 		max_completion_tokens?: number | null;
 		intent?: boolean | null;
+		reasoning_effort?: string | null;
 	};
 }
 
@@ -194,6 +195,12 @@ export class OpenAICompatibleTestEndpoint extends ChatEndpoint {
 			} else if (this.modelConfig.overrides.max_tokens) {
 				body.max_tokens = this.modelConfig.overrides.max_tokens;
 			}
+
+			if (this.modelConfig.overrides.reasoning_effort === null) {
+				delete body.reasoning_effort;
+			} else if (this.modelConfig.overrides.reasoning_effort) {
+				body.reasoning_effort = this.modelConfig.overrides.reasoning_effort;
+			}
 		}
 
 
diff --git a/src/platform/networking/common/networking.ts b/src/platform/networking/common/networking.ts
index b431493e5..4513eadb1 100644
--- a/src/platform/networking/common/networking.ts
+++ b/src/platform/networking/common/networking.ts
@@ -91,6 +91,7 @@ export interface IEndpointBody {
 	/** Code search: */
 	scoping_query?: string;
 	include_embeddings?: boolean;
+	reasoning_effort?: string;
 }
 
 export interface IEndpoint {
diff --git a/test/simulationMain.ts b/test/simulationMain.ts
index c78439cdd..2853dc79b 100644
--- a/test/simulationMain.ts
+++ b/test/simulationMain.ts
@@ -836,7 +836,8 @@ function parseModelConfigFile(modelConfigFilePath: string): IModelConfig[] {
 			"snippy"?: | null, // optional, if null removes from request body
 			"max_tokens"?: | null, // optional, if null removes from request body
 			"max_completion_tokens"?: | null, // optional, if null removes from request body
-			"intent"?: | null // optional, if null removes from request body
+			"intent"?: | null, // optional, if null removes from request body
+			"reasoning_effort"?: | null // optional, if null removes from request body
 		}
 	},
 	...
@@ -909,6 +910,7 @@ function parseModelConfigFile(modelConfigFilePath: string): IModelConfig[] {
 		checkProperty(overrides, 'intent', 'boolean', true, true);
 		checkProperty(overrides, 'max_tokens', 'number', true, true);
 		checkProperty(overrides, 'max_completion_tokens', 'number', true, true);
+		checkProperty(overrides, 'reasoning_effort', 'string', true, true);
 	}
 
 	modelConfigs.push({
@@ -945,6 +947,7 @@ function parseModelConfigFile(modelConfigFilePath: string): IModelConfig[] {
 			intent: model.overrides?.hasOwnProperty('intent') ? model.overrides.intent : undefined,
 			max_tokens: model.overrides?.hasOwnProperty('max_tokens') ? model.overrides.max_tokens : undefined,
 			max_completion_tokens: model.overrides?.hasOwnProperty('max_completion_tokens') ? model.overrides.max_completion_tokens : undefined,
+			reasoning_effort: model.overrides?.hasOwnProperty('reasoning_effort') ? model.overrides.reasoning_effort : undefined,
 		}
 	});
 }
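
Not part of the diff: the sketch below restates the override semantics the new reasoning_effort branch shares with the existing max_tokens/intent handling, so the three cases are visible at a glance: a null override removes the field from the request body, a non-empty string replaces it, and an absent key leaves the body untouched. The helper name, the minimal RequestBody shape, and the "high" value are illustrative only; the config itself keeps reasoning_effort as a free-form string (checkProperty only validates the type), driven by an entry such as "overrides": { "reasoning_effort": "high" }.

// Standalone TypeScript sketch of the override behavior (illustrative, not repository code).
interface RequestBody {
	reasoning_effort?: string;
}

function applyReasoningEffortOverride(body: RequestBody, override?: string | null): RequestBody {
	if (override === null) {
		// config had "reasoning_effort": null -> strip the field from the request body
		delete body.reasoning_effort;
	} else if (override) {
		// config had a non-empty string, e.g. "high" -> forward it verbatim
		body.reasoning_effort = override;
	}
	// key omitted (undefined) -> body is left unchanged
	return body;
}

// e.g. applyReasoningEffortOverride({ reasoning_effort: 'medium' }, null) returns {}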