132 changes: 132 additions & 0 deletions src/api/transform/__tests__/model-params.spec.ts
@@ -885,4 +885,136 @@ describe("getModelParams", () => {
expect(result.reasoningBudget).toBe(8192) // Default thinking tokens
})
})

describe("GPT-5 temperature hardcoding", () => {
Contributor Author

Great test coverage! Consider adding a test case for potential future GPT-5 variants (e.g., "gpt-5-turbo") to ensure the startsWith("gpt-5") logic handles all GPT-5 family models correctly.

Also, for consistency, the test descriptions could follow a uniform pattern like "hardcodes temperature to 1 for..." and "preserves user temperature for..."
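
A rough sketch of what that extra case could look like, reusing the existing baseModel fixture and getModelParams call shape; the "gpt-5-turbo" model ID is a hypothetical future variant, not a model that exists today:

it("hardcodes temperature to 1 for future gpt-5 variants", () => {
	const model: ModelInfo = {
		...baseModel,
	}

	const result = getModelParams({
		modelId: "gpt-5-turbo", // hypothetical future variant; exercises the startsWith("gpt-5") branch
		format: "openai" as const,
		settings: { modelTemperature: 0.5 }, // User setting should be overridden
		model,
	})

	expect(result.temperature).toBe(1)
})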

it("should hardcode temperature to 1 for gpt-5 models", () => {
const model: ModelInfo = {
...baseModel,
}

const result = getModelParams({
modelId: "gpt-5-2025-08-07",
format: "openai" as const,
settings: { modelTemperature: 0.5 }, // User setting should be overridden
model,
})

expect(result.temperature).toBe(1)
})

it("should hardcode temperature to 1 for gpt-5-mini models", () => {
const model: ModelInfo = {
...baseModel,
}

const result = getModelParams({
modelId: "gpt-5-mini-2025-08-07",
format: "openai" as const,
settings: { modelTemperature: 0.7 }, // User setting should be overridden
model,
})

expect(result.temperature).toBe(1)
})

it("should hardcode temperature to 1 for gpt-5-nano models", () => {
const model: ModelInfo = {
...baseModel,
}

const result = getModelParams({
modelId: "gpt-5-nano-2025-08-07",
format: "openai" as const,
settings: { modelTemperature: 0.3 }, // User setting should be overridden
model,
})

expect(result.temperature).toBe(1)
})

it("should hardcode temperature to 1 even when no temperature is specified for gpt-5", () => {
const model: ModelInfo = {
...baseModel,
}

const result = getModelParams({
modelId: "gpt-5-2025-08-07",
format: "openai" as const,
settings: {}, // No temperature specified
model,
defaultTemperature: 0.2, // Default should also be overridden
})

expect(result.temperature).toBe(1)
})

it("should not hardcode temperature for non-gpt-5 openai models", () => {
const model: ModelInfo = {
...baseModel,
}

const result = getModelParams({
modelId: "gpt-4-turbo",
format: "openai" as const,
settings: { modelTemperature: 0.5 },
model,
})

expect(result.temperature).toBe(0.5)
})

it("should not hardcode temperature for gpt-5 models in non-openai format", () => {
const model: ModelInfo = {
...baseModel,
}

const result = getModelParams({
modelId: "gpt-5-2025-08-07",
format: "openrouter" as const,
settings: { modelTemperature: 0.5 },
model,
})

expect(result.temperature).toBe(0.5) // Should not be hardcoded for openrouter
})

it("should hardcode temperature for gpt-5 with reasoning effort", () => {
const model: ModelInfo = {
...baseModel,
supportsReasoningEffort: true,
}

const result = getModelParams({
modelId: "gpt-5-2025-08-07",
format: "openai" as const,
settings: {
modelTemperature: 0.5,
reasoningEffort: "high",
},
model,
})

expect(result.temperature).toBe(1)
expect(result.reasoningEffort).toBe("high")
})

it("should hardcode temperature for gpt-5 with verbosity settings", () => {
const model: ModelInfo = {
...baseModel,
}

const result = getModelParams({
modelId: "gpt-5-2025-08-07",
format: "openai" as const,
settings: {
modelTemperature: 0.5,
verbosity: "high",
},
model,
})

expect(result.temperature).toBe(1)
expect(result.verbosity).toBe("high")
})
})
})
5 changes: 5 additions & 0 deletions src/api/transform/model-params.ts
@@ -146,6 +146,11 @@ export function getModelParams({
params.temperature = undefined
}

// Hardcode temperature to 1 for GPT-5 models
if (modelId.startsWith("gpt-5")) {
params.temperature = 1
Contributor Author

Is the placement after the o1/o3-mini handling intentional? It might be cleaner to group all OpenAI model-specific temperature handling together, perhaps in a separate helper function for better maintainability.

Also, could we add a comment explaining why temperature is hardcoded to 1 for GPT-5 models, similar to lines 143-144 for o1/o3-mini?
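
A sketch of what that grouping could look like; the helper name applyOpenAiTemperatureOverrides is made up, and the o1/o3-mini condition shown here is an assumption about the existing check rather than a copy of it:

// Sketch only, not part of this PR: groups the OpenAI-specific temperature overrides in one place.
function applyOpenAiTemperatureOverrides(modelId: string, params: { temperature?: number }): void {
	// Assumed shape of the existing o1/o3-mini rule: these models reject an explicit temperature.
	if (modelId.startsWith("o1") || modelId.startsWith("o3-mini")) {
		params.temperature = undefined
	}

	// GPT-5 models are assumed to accept only the default temperature of 1,
	// so both user-configured and default temperatures are overridden.
	if (modelId.startsWith("gpt-5")) {
		params.temperature = 1
	}
}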

}

return {
format,
...params,