
Commit 9e98d1a

feat: add GLM-4.6 model support for z.ai provider (#8408)
Co-authored-by: Roo Code <[email protected]>
1 parent 702b269 commit 9e98d1a

2 files changed, +76 -0 lines changed


packages/types/src/providers/zai.ts

Lines changed: 50 additions & 0 deletions
@@ -32,6 +32,18 @@ export const internationalZAiModels = {
 		description:
 			"GLM-4.5-Air is the lightweight version of GLM-4.5. It balances performance and cost-effectiveness, and can flexibly switch to hybrid thinking models.",
 	},
+	"glm-4.6": {
+		maxTokens: 98_304,
+		contextWindow: 204_800,
+		supportsImages: false,
+		supportsPromptCache: true,
+		inputPrice: 0.6,
+		outputPrice: 2.2,
+		cacheWritesPrice: 0,
+		cacheReadsPrice: 0.11,
+		description:
+			"GLM-4.6 is Zhipu's newest model with an extended context window of up to 200k tokens, providing enhanced capabilities for processing longer documents and conversations.",
+	},
 } as const satisfies Record<string, ModelInfo>
 
 export type MainlandZAiModelId = keyof typeof mainlandZAiModels
@@ -101,6 +113,44 @@ export const mainlandZAiModels = {
 			},
 		],
 	},
+	"glm-4.6": {
+		maxTokens: 98_304,
+		contextWindow: 204_800,
+		supportsImages: false,
+		supportsPromptCache: true,
+		inputPrice: 0.29,
+		outputPrice: 1.14,
+		cacheWritesPrice: 0,
+		cacheReadsPrice: 0.057,
+		description:
+			"GLM-4.6 is Zhipu's newest model with an extended context window of up to 200k tokens, providing enhanced capabilities for processing longer documents and conversations.",
+		tiers: [
+			{
+				contextWindow: 32_000,
+				inputPrice: 0.21,
+				outputPrice: 1.0,
+				cacheReadsPrice: 0.043,
+			},
+			{
+				contextWindow: 128_000,
+				inputPrice: 0.29,
+				outputPrice: 1.14,
+				cacheReadsPrice: 0.057,
+			},
+			{
+				contextWindow: 200_000,
+				inputPrice: 0.29,
+				outputPrice: 1.14,
+				cacheReadsPrice: 0.057,
+			},
+			{
+				contextWindow: Infinity,
+				inputPrice: 0.29,
+				outputPrice: 1.14,
+				cacheReadsPrice: 0.057,
+			},
+		],
+	},
 } as const satisfies Record<string, ModelInfo>
 
 export const ZAI_DEFAULT_TEMPERATURE = 0
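Note on the tiers array in the mainland entry: it defines per-context-window pricing, where each tier's contextWindow is the upper bound (in tokens) at which that tier's per-million-token prices apply, ending with an Infinity catch-all. The sketch below illustrates one way such tiers could be resolved and used to estimate request cost; the PricingTier interface, resolveTier, and estimateCostUSD are illustrative assumptions for this note, not part of this commit or the Roo Code API.

// Illustrative sketch only; these helpers are not defined by the commit.
// Assumes the tier shape shown in the diff: contextWindow + USD-per-1M-token prices.
interface PricingTier {
	contextWindow: number
	inputPrice: number // USD per 1M input tokens
	outputPrice: number // USD per 1M output tokens
	cacheReadsPrice?: number // USD per 1M cached input tokens read
}

// Pick the first tier whose contextWindow covers the prompt size
// (tiers are listed in ascending order and end with Infinity).
function resolveTier(tiers: PricingTier[], promptTokens: number): PricingTier {
	return tiers.find((t) => promptTokens <= t.contextWindow) ?? tiers[tiers.length - 1]
}

// Rough per-request cost estimate under the resolved tier.
function estimateCostUSD(
	tiers: PricingTier[],
	promptTokens: number,
	outputTokens: number,
	cachedTokens = 0,
): number {
	const tier = resolveTier(tiers, promptTokens)
	const freshInputTokens = promptTokens - cachedTokens
	return (
		(freshInputTokens * tier.inputPrice +
			cachedTokens * (tier.cacheReadsPrice ?? tier.inputPrice) +
			outputTokens * tier.outputPrice) /
		1_000_000
	)
}

// Example: a 40_000-token prompt falls into the 128_000 tier, so it is priced
// at 0.29 USD / 1.14 USD per 1M input/output tokens under this sketch.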

src/api/providers/__tests__/zai.spec.ts

Lines changed: 26 additions & 0 deletions
@@ -71,6 +71,19 @@ describe("ZAiHandler", () => {
 			expect(model.id).toBe(testModelId)
 			expect(model.info).toEqual(internationalZAiModels[testModelId])
 		})
+
+		it("should return GLM-4.6 international model with correct configuration", () => {
+			const testModelId: InternationalZAiModelId = "glm-4.6"
+			const handlerWithModel = new ZAiHandler({
+				apiModelId: testModelId,
+				zaiApiKey: "test-zai-api-key",
+				zaiApiLine: "international",
+			})
+			const model = handlerWithModel.getModel()
+			expect(model.id).toBe(testModelId)
+			expect(model.info).toEqual(internationalZAiModels[testModelId])
+			expect(model.info.contextWindow).toBe(204_800)
+		})
 	})
 
 	describe("China Z AI", () => {
@@ -108,6 +121,19 @@ describe("ZAiHandler", () => {
 			expect(model.id).toBe(testModelId)
 			expect(model.info).toEqual(mainlandZAiModels[testModelId])
 		})
+
+		it("should return GLM-4.6 China model with correct configuration", () => {
+			const testModelId: MainlandZAiModelId = "glm-4.6"
+			const handlerWithModel = new ZAiHandler({
+				apiModelId: testModelId,
+				zaiApiKey: "test-zai-api-key",
+				zaiApiLine: "china",
+			})
+			const model = handlerWithModel.getModel()
+			expect(model.id).toBe(testModelId)
+			expect(model.info).toEqual(mainlandZAiModels[testModelId])
+			expect(model.info.contextWindow).toBe(204_800)
+		})
 	})
 
 	describe("Default behavior", () => {
