@@ -186,16 +186,22 @@ def __init__(
             scheduler=scheduler,
         )

-        self.vae_spatial_compression_ratio = self.vae.spatial_compression_ratio if hasattr(self, "vae") else 32
-        self.vae_temporal_compression_ratio = self.vae.temporal_compression_ratio if hasattr(self, "vae") else 8
-        self.transformer_spatial_patch_size = self.transformer.config.patch_size if hasattr(self, "transformer") else 1
+        self.vae_spatial_compression_ratio = (
+            self.vae.spatial_compression_ratio if getattr(self, "vae", None) is not None else 32
+        )
+        self.vae_temporal_compression_ratio = (
+            self.vae.temporal_compression_ratio if getattr(self, "vae", None) is not None else 8
+        )
+        self.transformer_spatial_patch_size = (
+            self.transformer.config.patch_size if getattr(self, "transformer", None) is not None else 1
+        )
         self.transformer_temporal_patch_size = (
-            self.transformer.config.patch_size_t if hasattr(self, "transformer") else 1
+            self.transformer.config.patch_size_t if getattr(self, "transformer", None) is not None else 1
         )

         self.video_processor = VideoProcessor(vae_scale_factor=self.vae_spatial_compression_ratio)
         self.tokenizer_max_length = (
-            self.tokenizer.model_max_length if hasattr(self, "tokenizer") and self.tokenizer is not None else 128
+            self.tokenizer.model_max_length if getattr(self, "tokenizer", None) is not None else 128
         )

     def _get_t5_prompt_embeds(
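
For context on the change above: `hasattr` returns `True` whenever the attribute exists, even if its value is `None`, so the fallback defaults (32, 8, 1, 128) were never applied for components registered as `None`. The `getattr(self, name, None) is not None` form covers both the missing-attribute and the `None` case. A minimal standalone sketch of the difference (the `Pipeline` class here is a hypothetical stand-in, not the diffusers class):

```python
class Pipeline:
    def __init__(self, vae=None):
        # The attribute is always assigned, so it exists even when the
        # component itself is None.
        self.vae = vae


pipe = Pipeline(vae=None)

# hasattr() only checks that the attribute exists, not that it is usable:
print(hasattr(pipe, "vae"))                    # True, even though vae is None
# getattr() with a default also catches the None case:
print(getattr(pipe, "vae", None) is not None)  # False, so the fallback (e.g. 32) is used
```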