Skip to content

Commit 771d036

Browse files
roomote[bot] authored and roomote committed
feat: add zai-org/GLM-4.5-FP8 model to Chutes AI provider (RooCodeInc#6441)
Co-authored-by: Roo Code <[email protected]>
1 parent b63e51d commit 771d036

File tree

2 files changed

+34
-0
lines changed

2 files changed

+34
-0
lines changed

packages/types/src/providers/chutes.ts

Lines changed: 11 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -26,6 +26,7 @@ export type ChutesModelId =
2626
| "microsoft/MAI-DS-R1-FP8"
2727
| "tngtech/DeepSeek-R1T-Chimera"
2828
| "zai-org/GLM-4.5-Air"
29+
| "zai-org/GLM-4.5-FP8"
2930

3031
export const chutesDefaultModelId: ChutesModelId = "deepseek-ai/DeepSeek-R1-0528"
3132

@@ -247,4 +248,14 @@ export const chutesModels = {
247248
description:
248249
"GLM-4.5-Air model with 151,329 token context window and 106B total parameters with 12B activated.",
249250
},
251+
"zai-org/GLM-4.5-FP8": {
252+
maxTokens: 32768,
253+
contextWindow: 131072,
254+
supportsImages: false,
255+
supportsPromptCache: false,
256+
inputPrice: 0,
257+
outputPrice: 0,
258+
description:
259+
"GLM-4.5-FP8 model with 128k token context window, optimized for agent-based applications with MoE architecture.",
260+
},
250261
} as const satisfies Record<string, ModelInfo>

src/api/providers/__tests__/chutes.spec.ts

Lines changed: 23 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -208,6 +208,29 @@ describe("ChutesHandler", () => {
208208
)
209209
})
210210

211+
it("should return zai-org/GLM-4.5-FP8 model with correct configuration", () => {
212+
const testModelId: ChutesModelId = "zai-org/GLM-4.5-FP8"
213+
const handlerWithModel = new ChutesHandler({
214+
apiModelId: testModelId,
215+
chutesApiKey: "test-chutes-api-key",
216+
})
217+
const model = handlerWithModel.getModel()
218+
expect(model.id).toBe(testModelId)
219+
expect(model.info).toEqual(
220+
expect.objectContaining({
221+
maxTokens: 32768,
222+
contextWindow: 131072,
223+
supportsImages: false,
224+
supportsPromptCache: false,
225+
inputPrice: 0,
226+
outputPrice: 0,
227+
description:
228+
"GLM-4.5-FP8 model with 128k token context window, optimized for agent-based applications with MoE architecture.",
229+
temperature: 0.5, // Default temperature for non-DeepSeek models
230+
}),
231+
)
232+
})
233+
211234
it("completePrompt method should return text from Chutes API", async () => {
212235
const expectedResponse = "This is a test response from Chutes"
213236
mockCreate.mockResolvedValueOnce({ choices: [{ message: { content: expectedResponse } }] })

0 commit comments

Comments (0)