
Commit a578952

feat: add openai/gpt-oss-120b model to Chutes provider
- Added openai/gpt-oss-120b to ChutesModelId type definition
- Added model configuration with 128k context window and 32k max tokens
- Added test coverage for the new model
- Fixes #6973
1 parent 5f3c67f commit a578952

File tree

2 files changed (+32, -0 lines changed)


packages/types/src/providers/chutes.ts

Lines changed: 10 additions & 0 deletions
@@ -29,6 +29,7 @@ export type ChutesModelId =
 	| "zai-org/GLM-4.5-Air"
 	| "zai-org/GLM-4.5-FP8"
 	| "moonshotai/Kimi-K2-Instruct-75k"
+	| "openai/gpt-oss-120b"
 
 export const chutesDefaultModelId: ChutesModelId = "deepseek-ai/DeepSeek-R1-0528"
 
@@ -278,4 +279,13 @@ export const chutesModels = {
 		outputPrice: 0.5926,
 		description: "Moonshot AI Kimi K2 Instruct model with 75k context window.",
 	},
+	"openai/gpt-oss-120b": {
+		maxTokens: 32768,
+		contextWindow: 131072,
+		supportsImages: false,
+		supportsPromptCache: false,
+		inputPrice: 0,
+		outputPrice: 0,
+		description: "OpenAI GPT OSS 120B model - latest open source coding model.",
+	},
 } as const satisfies Record<string, ModelInfo>
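
For context, a minimal sketch of how the new entry can be read from the chutesModels map extended above. This is not part of the commit; the import path is an assumption, while the model id and numbers come straight from the diff.

// Sketch only: look up the new model's metadata from the chutesModels map.
// The "@roo-code/types" import path is an assumption; adjust to wherever
// chutesModels is actually exported from.
import { chutesModels } from "@roo-code/types"

const info = chutesModels["openai/gpt-oss-120b"]

console.log(info.contextWindow) // 131072 -> the "128k context window" from the commit message
console.log(info.maxTokens)     // 32768  -> the "32k max tokens"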

src/api/providers/__tests__/chutes.spec.ts

Lines changed: 22 additions & 0 deletions
@@ -275,6 +275,28 @@ describe("ChutesHandler", () => {
 		)
 	})
 
+	it("should return openai/gpt-oss-120b model with correct configuration", () => {
+		const testModelId: ChutesModelId = "openai/gpt-oss-120b"
+		const handlerWithModel = new ChutesHandler({
+			apiModelId: testModelId,
+			chutesApiKey: "test-chutes-api-key",
+		})
+		const model = handlerWithModel.getModel()
+		expect(model.id).toBe(testModelId)
+		expect(model.info).toEqual(
+			expect.objectContaining({
+				maxTokens: 32768,
+				contextWindow: 131072,
+				supportsImages: false,
+				supportsPromptCache: false,
+				inputPrice: 0,
+				outputPrice: 0,
+				description: "OpenAI GPT OSS 120B model - latest open source coding model.",
+				temperature: 0.5, // Default temperature for non-DeepSeek models
+			}),
+		)
+	})
+
 	it("completePrompt method should return text from Chutes API", async () => {
 		const expectedResponse = "This is a test response from Chutes"
 		mockCreate.mockResolvedValueOnce({ choices: [{ message: { content: expectedResponse } }] })
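
As a usage sketch mirroring the test above (not taken from the commit itself), selecting the new model through ChutesHandler would look roughly like the following; the relative import path and the use of an environment variable for the API key are assumptions.

// Sketch only: constructor options (apiModelId, chutesApiKey) and getModel()
// are taken from the test above; the import path is an assumption.
import { ChutesHandler } from "../chutes"

const handler = new ChutesHandler({
	apiModelId: "openai/gpt-oss-120b",
	chutesApiKey: process.env.CHUTES_API_KEY ?? "",
})

const { id, info } = handler.getModel()
console.log(id, info.contextWindow, info.maxTokens) // openai/gpt-oss-120b 131072 32768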
