@@ -186,16 +186,22 @@ def __init__(
             scheduler=scheduler,
         )

-        self.vae_spatial_compression_ratio = self.vae.spatial_compression_ratio if hasattr(self, "vae") else 32
-        self.vae_temporal_compression_ratio = self.vae.temporal_compression_ratio if hasattr(self, "vae") else 8
-        self.transformer_spatial_patch_size = self.transformer.config.patch_size if hasattr(self, "transformer") else 1
+        self.vae_spatial_compression_ratio = (
+            self.vae.spatial_compression_ratio if getattr(self, "vae", None) is not None else 32
+        )
+        self.vae_temporal_compression_ratio = (
+            self.vae.temporal_compression_ratio if getattr(self, "vae", None) is not None else 8
+        )
+        self.transformer_spatial_patch_size = (
+            self.transformer.config.patch_size if getattr(self, "transformer", None) is not None else 1
+        )
         self.transformer_temporal_patch_size = (
-            self.transformer.config.patch_size_t if hasattr(self, "transformer") else 1
+            self.transformer.config.patch_size_t if getattr(self, "transformer", None) is not None else 1
         )

         self.video_processor = VideoProcessor(vae_scale_factor=self.vae_spatial_compression_ratio)
         self.tokenizer_max_length = (
-            self.tokenizer.model_max_length if hasattr(self, "tokenizer") and self.tokenizer is not None else 128
+            self.tokenizer.model_max_length if getattr(self, "tokenizer", None) is not None else 128
         )

     def _get_t5_prompt_embeds(
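
For context, a minimal sketch of why `hasattr` is the wrong check here (the `Pipeline` class below is hypothetical, not the diffusers one): `register_modules`-style initialization sets the attribute even when the component is passed as `None`, so `hasattr(self, "vae")` is `True` while `self.vae` is unusable, and the old check would raise `AttributeError` instead of falling back to the default.

    # Minimal sketch, assuming a register_modules-style __init__ that
    # always sets the attribute (hypothetical Pipeline class):
    class Pipeline:
        def __init__(self, vae=None):
            self.vae = vae  # attribute exists even when no component is passed

    pipe = Pipeline(vae=None)

    hasattr(pipe, "vae")  # True: the old check passes, then accessing
                          # pipe.vae.spatial_compression_ratio would raise on None
    ratio = pipe.vae.spatial_compression_ratio if getattr(pipe, "vae", None) is not None else 32
    print(ratio)  # 32: the new check treats a missing or None component uniformly

The `getattr(self, "name", None) is not None` form collapses the "attribute missing" and "component registered as None" cases into one, which is what the tokenizer line already did with its extra `and self.tokenizer is not None` clause.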