2 files changed: +32 −0 lines changed

packages/types/src/providers
src/api/providers/__tests__

@@ -29,6 +29,7 @@ export type ChutesModelId =
 	| "tngtech/DeepSeek-R1T-Chimera"
 	| "zai-org/GLM-4.5-Air"
 	| "zai-org/GLM-4.5-FP8"
+	| "zai-org/GLM-4.5-turbo"
 	| "moonshotai/Kimi-K2-Instruct-75k"
 	| "moonshotai/Kimi-K2-Instruct-0905"
 	| "Qwen/Qwen3-235B-A22B-Thinking-2507"
@@ -274,6 +275,15 @@ export const chutesModels = {
 		description:
 			"GLM-4.5-FP8 model with 128k token context window, optimized for agent-based applications with MoE architecture.",
 	},
+	"zai-org/GLM-4.5-turbo": {
+		maxTokens: 32768,
+		contextWindow: 131072,
+		supportsImages: false,
+		supportsPromptCache: false,
+		inputPrice: 1,
+		outputPrice: 3,
+		description: "GLM-4.5-turbo model with 128K token context window, optimized for fast inference.",
+	},
 	"Qwen/Qwen3-Coder-480B-A35B-Instruct-FP8": {
 		maxTokens: 32768,
 		contextWindow: 262144,
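As a quick sanity check on the numbers in the new entry (the pricing unit is an assumption — only the raw values appear in the diff):

```typescript
// 131072 is exactly 128 Ki tokens, matching the "128K token context window" in the description.
const contextWindow = 128 * 1024 // 131072

// Assuming inputPrice/outputPrice are USD per million tokens (not stated in this diff),
// a request with 10k input and 2k output tokens would cost roughly:
const estimatedCost = (10_000 / 1_000_000) * 1 + (2_000 / 1_000_000) * 3 // ≈ $0.016
```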
@@ -253,6 +253,28 @@ describe("ChutesHandler", () => {
 		)
 	})

+	it("should return zai-org/GLM-4.5-turbo model with correct configuration", () => {
+		const testModelId: ChutesModelId = "zai-org/GLM-4.5-turbo"
+		const handlerWithModel = new ChutesHandler({
+			apiModelId: testModelId,
+			chutesApiKey: "test-chutes-api-key",
+		})
+		const model = handlerWithModel.getModel()
+		expect(model.id).toBe(testModelId)
+		expect(model.info).toEqual(
+			expect.objectContaining({
+				maxTokens: 32768,
+				contextWindow: 131072,
+				supportsImages: false,
+				supportsPromptCache: false,
+				inputPrice: 1,
+				outputPrice: 3,
+				description: "GLM-4.5-turbo model with 128K token context window, optimized for fast inference.",
+				temperature: 0.5, // Default temperature for non-DeepSeek models
+			}),
+		)
+	})
+
 	it("should return Qwen/Qwen3-Coder-480B-A35B-Instruct-FP8 model with correct configuration", () => {
 		const testModelId: ChutesModelId = "Qwen/Qwen3-Coder-480B-A35B-Instruct-FP8"
 		const handlerWithModel = new ChutesHandler({
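The new test drives the handler the same way a caller would. A minimal, self-contained usage sketch under the same assumptions (the import path is hypothetical; this PR shows only the constructor options and `getModel()`):

```typescript
// Sketch only: the import path is a guess; the PR itself shows just the
// ChutesHandler constructor options and getModel().
import { ChutesHandler } from "../src/api/providers/chutes"

const handler = new ChutesHandler({
	apiModelId: "zai-org/GLM-4.5-turbo",
	chutesApiKey: process.env.CHUTES_API_KEY ?? "",
})

const { id, info } = handler.getModel()
console.log(id) // "zai-org/GLM-4.5-turbo"
console.log(info.contextWindow) // 131072
```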