@@ -50,7 +50,6 @@ def from_pretrained(*args, **kwargs):
 class CogView4LoRATests(unittest.TestCase, PeftLoraLoaderMixinTests):
     pipeline_class = CogView4Pipeline
     scheduler_cls = FlowMatchEulerDiscreteScheduler
-    scheduler_classes = [FlowMatchEulerDiscreteScheduler]
     scheduler_kwargs = {}
 
     transformer_kwargs = {
@@ -124,30 +123,29 @@ def test_simple_inference_save_pretrained(self):
124123 """
125124 Tests a simple usecase where users could use saving utilities for LoRA through save_pretrained
126125 """
-        for scheduler_cls in self.scheduler_classes:
-            components, _, _ = self.get_dummy_components(scheduler_cls)
-            pipe = self.pipeline_class(**components)
-            pipe = pipe.to(torch_device)
-            pipe.set_progress_bar_config(disable=None)
-            _, _, inputs = self.get_dummy_inputs(with_generator=False)
+        components, _, _ = self.get_dummy_components()
+        pipe = self.pipeline_class(**components)
+        pipe = pipe.to(torch_device)
+        pipe.set_progress_bar_config(disable=None)
+        _, _, inputs = self.get_dummy_inputs(with_generator=False)
 
-            output_no_lora = pipe(**inputs, generator=torch.manual_seed(0))[0]
-            self.assertTrue(output_no_lora.shape == self.output_shape)
+        output_no_lora = pipe(**inputs, generator=torch.manual_seed(0))[0]
+        self.assertTrue(output_no_lora.shape == self.output_shape)
 
-            images_lora = pipe(**inputs, generator=torch.manual_seed(0))[0]
+        images_lora = pipe(**inputs, generator=torch.manual_seed(0))[0]
 
-            with tempfile.TemporaryDirectory() as tmpdirname:
-                pipe.save_pretrained(tmpdirname)
+        with tempfile.TemporaryDirectory() as tmpdirname:
+            pipe.save_pretrained(tmpdirname)
 
-                pipe_from_pretrained = self.pipeline_class.from_pretrained(tmpdirname)
-                pipe_from_pretrained.to(torch_device)
+            pipe_from_pretrained = self.pipeline_class.from_pretrained(tmpdirname)
+            pipe_from_pretrained.to(torch_device)
 
-                images_lora_save_pretrained = pipe_from_pretrained(**inputs, generator=torch.manual_seed(0))[0]
+            images_lora_save_pretrained = pipe_from_pretrained(**inputs, generator=torch.manual_seed(0))[0]
 
-            self.assertTrue(
-                np.allclose(images_lora, images_lora_save_pretrained, atol=1e-3, rtol=1e-3),
-                "Loading from saved checkpoints should give same results.",
-            )
+        self.assertTrue(
+            np.allclose(images_lora, images_lora_save_pretrained, atol=1e-3, rtol=1e-3),
+            "Loading from saved checkpoints should give same results.",
+        )
 
     @parameterized.expand([("block_level", True), ("leaf_level", False)])
     @require_torch_accelerator
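
Note: the hunks above drop the per-test loop over `scheduler_classes` in favor of the single `scheduler_cls` class attribute, and `get_dummy_components()` is now called with no scheduler argument. This implies the shared `PeftLoraLoaderMixinTests.get_dummy_components` builds the scheduler from `scheduler_cls` itself. A minimal sketch of that assumed pattern follows; the mixin body shown here is illustrative, not the actual diffusers implementation:

class PeftLoraLoaderMixinTests:
    # Each concrete test class declares exactly one scheduler class,
    # replacing the old `scheduler_classes` list that tests looped over.
    scheduler_cls = None
    scheduler_kwargs = {}

    def get_dummy_components(self):
        # Assumed behavior: instantiate the scheduler from the class
        # attribute, so callers no longer pass a scheduler class in.
        scheduler = self.scheduler_cls(**self.scheduler_kwargs)
        components = {"scheduler": scheduler}
        # ... the remaining dummy components (transformer, text encoder,
        # VAE) would be constructed here from the subclass's *_kwargs.
        return components, None, None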