Skip to content

Commit 039154e

Browse files
feat: add zai-org/GLM-4.5-Air model to Chutes AI provider (#6377)
Co-authored-by: Roo Code <[email protected]>
1 parent 2c2a784 commit 039154e

File tree

2 files changed

+34
-0
lines changed

2 files changed

+34
-0
lines changed

packages/types/src/providers/chutes.ts

Lines changed: 11 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -25,6 +25,7 @@ export type ChutesModelId =
 	| "Qwen/Qwen3-8B"
 	| "microsoft/MAI-DS-R1-FP8"
 	| "tngtech/DeepSeek-R1T-Chimera"
+	| "zai-org/GLM-4.5-Air"

 export const chutesDefaultModelId: ChutesModelId = "deepseek-ai/DeepSeek-R1-0528"

@@ -236,4 +237,14 @@ export const chutesModels = {
 		outputPrice: 0,
 		description: "TNGTech DeepSeek R1T Chimera model.",
 	},
+	"zai-org/GLM-4.5-Air": {
+		maxTokens: 32768,
+		contextWindow: 151329,
+		supportsImages: false,
+		supportsPromptCache: false,
+		inputPrice: 0,
+		outputPrice: 0,
+		description:
+			"GLM-4.5-Air model with 151,329 token context window and 106B total parameters with 12B activated.",
+	},
 } as const satisfies Record<string, ModelInfo>

src/api/providers/__tests__/chutes.spec.ts

Lines changed: 23 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -185,6 +185,29 @@ describe("ChutesHandler", () => {
 		)
 	})

+	it("should return zai-org/GLM-4.5-Air model with correct configuration", () => {
+		const testModelId: ChutesModelId = "zai-org/GLM-4.5-Air"
+		const handlerWithModel = new ChutesHandler({
+			apiModelId: testModelId,
+			chutesApiKey: "test-chutes-api-key",
+		})
+		const model = handlerWithModel.getModel()
+		expect(model.id).toBe(testModelId)
+		expect(model.info).toEqual(
+			expect.objectContaining({
+				maxTokens: 32768,
+				contextWindow: 151329,
+				supportsImages: false,
+				supportsPromptCache: false,
+				inputPrice: 0,
+				outputPrice: 0,
+				description:
+					"GLM-4.5-Air model with 151,329 token context window and 106B total parameters with 12B activated.",
+				temperature: 0.5, // Default temperature for non-DeepSeek models
+			}),
+		)
+	})
+
 	it("completePrompt method should return text from Chutes API", async () => {
 		const expectedResponse = "This is a test response from Chutes"
 		mockCreate.mockResolvedValueOnce({ choices: [{ message: { content: expectedResponse } }] })

0 commit comments

Comments (0)