@@ -80,6 +80,8 @@ def check_transformers_version(
         self,
         *,
         on_fail: Literal["error", "skip"],
+        check_min_version: bool = True,
+        check_max_version: bool = True,
     ) -> None:
         """
         If the installed transformers version does not meet the requirements,
@@ -96,9 +98,11 @@ def check_transformers_version(
         msg = f"`transformers=={current_version}` installed, but `transformers"
         # Only check the base version for the min/max version, otherwise preview
         # models cannot be run because `x.yy.0.dev0`<`x.yy.0`
-        if min_version and Version(cur_base_version) < Version(min_version):
+        if (check_min_version and min_version
+                and Version(cur_base_version) < Version(min_version)):
             msg += f">={min_version}` is required to run this model."
-        elif max_version and Version(cur_base_version) > Version(max_version):
+        elif (check_max_version and max_version
+              and Version(cur_base_version) > Version(max_version)):
             msg += f"<={max_version}` is required to run this model."
         else:
             return
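The new `check_min_version` / `check_max_version` flags let a caller relax one bound while still enforcing the other, and the comparison uses the base release so preview builds such as `4.54.0.dev0` are not rejected by a `4.54` minimum. A minimal, self-contained sketch of that logic (the function name `version_ok` and its signature are illustrative, not vLLM's API):

```python
from typing import Optional

from packaging.version import Version


def version_ok(current: str,
               min_version: Optional[str],
               max_version: Optional[str],
               *,
               check_min_version: bool = True,
               check_max_version: bool = True) -> bool:
    # Compare only the base release so that preview builds such as
    # "4.54.0.dev0" are not rejected against a "4.54" minimum.
    base = Version(Version(current).base_version)
    if check_min_version and min_version and base < Version(min_version):
        return False
    if check_max_version and max_version and base > Version(max_version):
        return False
    return True


assert version_ok("4.54.0.dev0", "4.54", None)       # preview build accepted
assert not version_ok("4.53.2", "4.54", None)        # below the minimum
assert version_ok("4.55.0", None, "4.54",
                  check_max_version=False)           # upper bound skipped
```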
@@ -185,6 +189,8 @@ def check_available_online(
                                              min_transformers_version="4.53"),
     "GlmForCausalLM": _HfExamplesInfo("THUDM/glm-4-9b-chat-hf"),
     "Glm4ForCausalLM": _HfExamplesInfo("THUDM/GLM-4-9B-0414"),
+    "Glm4MoeForCausalLM": _HfExamplesInfo("zai-org/GLM-4.5",
+                                          min_transformers_version="4.54"),  # noqa: E501
     "GPT2LMHeadModel": _HfExamplesInfo("openai-community/gpt2",
                                        {"alias": "gpt2"}),
     "GPTBigCodeForCausalLM": _HfExamplesInfo("bigcode/starcoder",
@@ -378,8 +384,6 @@ def check_available_online(
                                          trust_remote_code=True,
                                          hf_overrides={"architectures": ["GLM4VForCausalLM"]}),  # noqa: E501
     "Glm4vForConditionalGeneration": _HfExamplesInfo("THUDM/GLM-4.1V-9B-Thinking"),  # noqa: E501
-    "Glm4MoeForCausalLM": _HfExamplesInfo("zai-org/GLM-4.5",
-                                          min_transformers_version="4.54"),  # noqa: E501
     "Glm4v_moeForConditionalGeneration": _HfExamplesInfo("zai-org/GLM-4.5V-Air",
                                                          is_available_online=False),  # noqa: E501
     "H2OVLChatModel": _HfExamplesInfo("h2oai/h2ovl-mississippi-800m",