From a578952d01e42e471a0406c2756207b2f9adb9cd Mon Sep 17 00:00:00 2001
From: Roo Code
Date: Tue, 12 Aug 2025 06:51:22 +0000
Subject: [PATCH] feat: add openai/gpt-oss-120b model to Chutes provider

- Added openai/gpt-oss-120b to ChutesModelId type definition
- Added model configuration with 128k context window and 32k max tokens
- Added test coverage for the new model
- Fixes #6973
---
 packages/types/src/providers/chutes.ts     | 10 ++++++++++
 src/api/providers/__tests__/chutes.spec.ts | 22 ++++++++++++++++++++++
 2 files changed, 32 insertions(+)

diff --git a/packages/types/src/providers/chutes.ts b/packages/types/src/providers/chutes.ts
index 539d251f2a1..ff469036882 100644
--- a/packages/types/src/providers/chutes.ts
+++ b/packages/types/src/providers/chutes.ts
@@ -29,6 +29,7 @@ export type ChutesModelId =
 	| "zai-org/GLM-4.5-Air"
 	| "zai-org/GLM-4.5-FP8"
 	| "moonshotai/Kimi-K2-Instruct-75k"
+	| "openai/gpt-oss-120b"
 
 export const chutesDefaultModelId: ChutesModelId = "deepseek-ai/DeepSeek-R1-0528"
 
@@ -278,4 +279,13 @@ export const chutesModels = {
 		outputPrice: 0.5926,
 		description: "Moonshot AI Kimi K2 Instruct model with 75k context window.",
 	},
+	"openai/gpt-oss-120b": {
+		maxTokens: 32768,
+		contextWindow: 131072,
+		supportsImages: false,
+		supportsPromptCache: false,
+		inputPrice: 0,
+		outputPrice: 0,
+		description: "OpenAI GPT OSS 120B model - latest open source coding model.",
+	},
 } as const satisfies Record<ChutesModelId, ModelInfo>
diff --git a/src/api/providers/__tests__/chutes.spec.ts b/src/api/providers/__tests__/chutes.spec.ts
index 0596a911df6..c52d95a1289 100644
--- a/src/api/providers/__tests__/chutes.spec.ts
+++ b/src/api/providers/__tests__/chutes.spec.ts
@@ -275,6 +275,28 @@ describe("ChutesHandler", () => {
 		)
 	})
 
+	it("should return openai/gpt-oss-120b model with correct configuration", () => {
+		const testModelId: ChutesModelId = "openai/gpt-oss-120b"
+		const handlerWithModel = new ChutesHandler({
+			apiModelId: testModelId,
+			chutesApiKey: "test-chutes-api-key",
+		})
+		const model = handlerWithModel.getModel()
+		expect(model.id).toBe(testModelId)
+		expect(model.info).toEqual(
+			expect.objectContaining({
+				maxTokens: 32768,
+				contextWindow: 131072,
+				supportsImages: false,
+				supportsPromptCache: false,
+				inputPrice: 0,
+				outputPrice: 0,
+				description: "OpenAI GPT OSS 120B model - latest open source coding model.",
+				temperature: 0.5, // Default temperature for non-DeepSeek models
+			}),
+		)
+	})
+
 	it("completePrompt method should return text from Chutes API", async () => {
 		const expectedResponse = "This is a test response from Chutes"
 		mockCreate.mockResolvedValueOnce({ choices: [{ message: { content: expectedResponse } }] })