
Commit 09e777a

[tests] Single scheduler in lora tests (huggingface#12315)
* single scheduler please.
* up
* up
* up
1 parent a72bc0c · commit 09e777a

14 files changed: +1044 −1143 lines
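The commit drops the per-suite `scheduler_classes` list (and the loops over it) in favor of the single `scheduler_cls` attribute each test class already declares. A minimal sketch of the resulting pattern, assuming the mixin instantiates the scheduler from `scheduler_cls` plus `scheduler_kwargs` (the `SchedulerFromClassAttrMixin` and `make_scheduler` names below are hypothetical, not repository code):

import unittest

from diffusers import FlowMatchEulerDiscreteScheduler


class SchedulerFromClassAttrMixin:
    # One scheduler per suite: the class itself, not an instance.
    scheduler_cls = FlowMatchEulerDiscreteScheduler
    scheduler_kwargs = {}

    def make_scheduler(self):
        # Hypothetical helper: build the scheduler once from the two attributes.
        return self.scheduler_cls(**self.scheduler_kwargs)


class ExampleLoRATests(SchedulerFromClassAttrMixin, unittest.TestCase):
    def test_scheduler_built_from_class_attr(self):
        scheduler = self.make_scheduler()
        self.assertIsInstance(scheduler, FlowMatchEulerDiscreteScheduler)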

tests/lora/test_lora_layers_auraflow.py

Lines changed: 0 additions & 1 deletion
@@ -43,7 +43,6 @@
 class AuraFlowLoRATests(unittest.TestCase, PeftLoraLoaderMixinTests):
     pipeline_class = AuraFlowPipeline
     scheduler_cls = FlowMatchEulerDiscreteScheduler
-    scheduler_classes = [FlowMatchEulerDiscreteScheduler]
     scheduler_kwargs = {}

     transformer_kwargs = {
tests/lora/test_lora_layers_cogvideox.py

Lines changed: 0 additions & 2 deletions
@@ -21,7 +21,6 @@

 from diffusers import (
     AutoencoderKLCogVideoX,
-    CogVideoXDDIMScheduler,
     CogVideoXDPMScheduler,
     CogVideoXPipeline,
     CogVideoXTransformer3DModel,
@@ -44,7 +43,6 @@ class CogVideoXLoRATests(unittest.TestCase, PeftLoraLoaderMixinTests):
     pipeline_class = CogVideoXPipeline
     scheduler_cls = CogVideoXDPMScheduler
     scheduler_kwargs = {"timestep_spacing": "trailing"}
-    scheduler_classes = [CogVideoXDDIMScheduler, CogVideoXDPMScheduler]

     transformer_kwargs = {
         "num_attention_heads": 4,

tests/lora/test_lora_layers_cogview4.py

Lines changed: 17 additions & 19 deletions
@@ -50,7 +50,6 @@ def from_pretrained(*args, **kwargs):
 class CogView4LoRATests(unittest.TestCase, PeftLoraLoaderMixinTests):
     pipeline_class = CogView4Pipeline
     scheduler_cls = FlowMatchEulerDiscreteScheduler
-    scheduler_classes = [FlowMatchEulerDiscreteScheduler]
     scheduler_kwargs = {}

     transformer_kwargs = {
@@ -124,30 +123,29 @@ def test_simple_inference_save_pretrained(self):
         """
         Tests a simple usecase where users could use saving utilities for LoRA through save_pretrained
         """
-        for scheduler_cls in self.scheduler_classes:
-            components, _, _ = self.get_dummy_components(scheduler_cls)
-            pipe = self.pipeline_class(**components)
-            pipe = pipe.to(torch_device)
-            pipe.set_progress_bar_config(disable=None)
-            _, _, inputs = self.get_dummy_inputs(with_generator=False)
+        components, _, _ = self.get_dummy_components()
+        pipe = self.pipeline_class(**components)
+        pipe = pipe.to(torch_device)
+        pipe.set_progress_bar_config(disable=None)
+        _, _, inputs = self.get_dummy_inputs(with_generator=False)

-            output_no_lora = pipe(**inputs, generator=torch.manual_seed(0))[0]
-            self.assertTrue(output_no_lora.shape == self.output_shape)
+        output_no_lora = pipe(**inputs, generator=torch.manual_seed(0))[0]
+        self.assertTrue(output_no_lora.shape == self.output_shape)

-            images_lora = pipe(**inputs, generator=torch.manual_seed(0))[0]
+        images_lora = pipe(**inputs, generator=torch.manual_seed(0))[0]

-            with tempfile.TemporaryDirectory() as tmpdirname:
-                pipe.save_pretrained(tmpdirname)
+        with tempfile.TemporaryDirectory() as tmpdirname:
+            pipe.save_pretrained(tmpdirname)

-                pipe_from_pretrained = self.pipeline_class.from_pretrained(tmpdirname)
-                pipe_from_pretrained.to(torch_device)
+            pipe_from_pretrained = self.pipeline_class.from_pretrained(tmpdirname)
+            pipe_from_pretrained.to(torch_device)

-                images_lora_save_pretrained = pipe_from_pretrained(**inputs, generator=torch.manual_seed(0))[0]
+            images_lora_save_pretrained = pipe_from_pretrained(**inputs, generator=torch.manual_seed(0))[0]

-                self.assertTrue(
-                    np.allclose(images_lora, images_lora_save_pretrained, atol=1e-3, rtol=1e-3),
-                    "Loading from saved checkpoints should give same results.",
-                )
+            self.assertTrue(
+                np.allclose(images_lora, images_lora_save_pretrained, atol=1e-3, rtol=1e-3),
+                "Loading from saved checkpoints should give same results.",
+            )

     @parameterized.expand([("block_level", True), ("leaf_level", False)])
     @require_torch_accelerator
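Note that the rewritten test calls `get_dummy_components()` with no scheduler argument, so the mixin presumably falls back to the class-level `scheduler_cls` internally. A sketch of that fallback under that assumption (not the mixin's verbatim code; `_build_scheduler` is a hypothetical name):

def _build_scheduler(test_case, scheduler_cls=None):
    # Assumed post-refactor behavior: prefer an explicitly passed class,
    # otherwise use the suite's single `scheduler_cls` attribute.
    cls = scheduler_cls or test_case.scheduler_cls
    return cls(**test_case.scheduler_kwargs)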

tests/lora/test_lora_layers_flux.py

Lines changed: 2 additions & 4 deletions
@@ -55,9 +55,8 @@
 @require_peft_backend
 class FluxLoRATests(unittest.TestCase, PeftLoraLoaderMixinTests):
     pipeline_class = FluxPipeline
-    scheduler_cls = FlowMatchEulerDiscreteScheduler()
+    scheduler_cls = FlowMatchEulerDiscreteScheduler
     scheduler_kwargs = {}
-    scheduler_classes = [FlowMatchEulerDiscreteScheduler]
     transformer_kwargs = {
         "patch_size": 1,
         "in_channels": 4,
@@ -282,9 +281,8 @@ def test_simple_inference_with_text_denoiser_multi_adapter_block_lora(self):

 class FluxControlLoRATests(unittest.TestCase, PeftLoraLoaderMixinTests):
     pipeline_class = FluxControlPipeline
-    scheduler_cls = FlowMatchEulerDiscreteScheduler()
+    scheduler_cls = FlowMatchEulerDiscreteScheduler
     scheduler_kwargs = {}
-    scheduler_classes = [FlowMatchEulerDiscreteScheduler]
     transformer_kwargs = {
         "patch_size": 1,
         "in_channels": 8,

tests/lora/test_lora_layers_hunyuanvideo.py

Lines changed: 0 additions & 1 deletion
@@ -51,7 +51,6 @@
 class HunyuanVideoLoRATests(unittest.TestCase, PeftLoraLoaderMixinTests):
     pipeline_class = HunyuanVideoPipeline
     scheduler_cls = FlowMatchEulerDiscreteScheduler
-    scheduler_classes = [FlowMatchEulerDiscreteScheduler]
     scheduler_kwargs = {}

     transformer_kwargs = {

tests/lora/test_lora_layers_ltx_video.py

Lines changed: 0 additions & 1 deletion
@@ -37,7 +37,6 @@
 class LTXVideoLoRATests(unittest.TestCase, PeftLoraLoaderMixinTests):
     pipeline_class = LTXPipeline
     scheduler_cls = FlowMatchEulerDiscreteScheduler
-    scheduler_classes = [FlowMatchEulerDiscreteScheduler]
     scheduler_kwargs = {}

     transformer_kwargs = {

tests/lora/test_lora_layers_lumina2.py

Lines changed: 27 additions & 31 deletions
@@ -39,7 +39,6 @@
 class Lumina2LoRATests(unittest.TestCase, PeftLoraLoaderMixinTests):
     pipeline_class = Lumina2Pipeline
     scheduler_cls = FlowMatchEulerDiscreteScheduler
-    scheduler_classes = [FlowMatchEulerDiscreteScheduler]
     scheduler_kwargs = {}

     transformer_kwargs = {
@@ -141,33 +140,30 @@ def test_simple_inference_with_text_lora_save_load(self):
             strict=False,
         )
     def test_lora_fuse_nan(self):
-        for scheduler_cls in self.scheduler_classes:
-            components, text_lora_config, denoiser_lora_config = self.get_dummy_components(scheduler_cls)
-            pipe = self.pipeline_class(**components)
-            pipe = pipe.to(torch_device)
-            pipe.set_progress_bar_config(disable=None)
-            _, _, inputs = self.get_dummy_inputs(with_generator=False)
-
-            if "text_encoder" in self.pipeline_class._lora_loadable_modules:
-                pipe.text_encoder.add_adapter(text_lora_config, "adapter-1")
-                self.assertTrue(
-                    check_if_lora_correctly_set(pipe.text_encoder), "Lora not correctly set in text encoder"
-                )
-
-            denoiser = pipe.transformer if self.unet_kwargs is None else pipe.unet
-            denoiser.add_adapter(denoiser_lora_config, "adapter-1")
-            self.assertTrue(check_if_lora_correctly_set(denoiser), "Lora not correctly set in denoiser.")
-
-            # corrupt one LoRA weight with `inf` values
-            with torch.no_grad():
-                pipe.transformer.layers[0].attn.to_q.lora_A["adapter-1"].weight += float("inf")
-
-            # with `safe_fusing=True` we should see an Error
-            with self.assertRaises(ValueError):
-                pipe.fuse_lora(components=self.pipeline_class._lora_loadable_modules, safe_fusing=True)
-
-            # without we should not see an error, but every image will be black
-            pipe.fuse_lora(components=self.pipeline_class._lora_loadable_modules, safe_fusing=False)
-            out = pipe(**inputs)[0]
-
-            self.assertTrue(np.isnan(out).all())
+        components, text_lora_config, denoiser_lora_config = self.get_dummy_components()
+        pipe = self.pipeline_class(**components)
+        pipe = pipe.to(torch_device)
+        pipe.set_progress_bar_config(disable=None)
+        _, _, inputs = self.get_dummy_inputs(with_generator=False)
+
+        if "text_encoder" in self.pipeline_class._lora_loadable_modules:
+            pipe.text_encoder.add_adapter(text_lora_config, "adapter-1")
+            self.assertTrue(check_if_lora_correctly_set(pipe.text_encoder), "Lora not correctly set in text encoder")
+
+        denoiser = pipe.transformer if self.unet_kwargs is None else pipe.unet
+        denoiser.add_adapter(denoiser_lora_config, "adapter-1")
+        self.assertTrue(check_if_lora_correctly_set(denoiser), "Lora not correctly set in denoiser.")
+
+        # corrupt one LoRA weight with `inf` values
+        with torch.no_grad():
+            pipe.transformer.layers[0].attn.to_q.lora_A["adapter-1"].weight += float("inf")
+
+        # with `safe_fusing=True` we should see an Error
+        with self.assertRaises(ValueError):
+            pipe.fuse_lora(components=self.pipeline_class._lora_loadable_modules, safe_fusing=True)
+
+        # without we should not see an error, but every image will be black
+        pipe.fuse_lora(components=self.pipeline_class._lora_loadable_modules, safe_fusing=False)
+        out = pipe(**inputs)[0]
+
+        self.assertTrue(np.isnan(out).all())
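The closing assertion encodes why unguarded fusion is dangerous: an `inf` injected into one LoRA matrix is baked into the fused weight, propagates through the matmuls, and collapses to NaN after normalization, blanking every output image. A self-contained sketch of that propagation:

import torch

base = torch.ones(4, 4)
lora_delta = torch.full((4, 4), float("inf"))  # the corrupted adapter weight

fused = base + lora_delta             # fusion bakes the inf into the base weight
out = fused @ torch.ones(4, 1)        # activations are now inf
normalized = out - out.mean()         # inf - inf -> NaN, as the test asserts
assert torch.isnan(normalized).all()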

tests/lora/test_lora_layers_mochi.py

Lines changed: 0 additions & 1 deletion
@@ -37,7 +37,6 @@
 class MochiLoRATests(unittest.TestCase, PeftLoraLoaderMixinTests):
     pipeline_class = MochiPipeline
     scheduler_cls = FlowMatchEulerDiscreteScheduler
-    scheduler_classes = [FlowMatchEulerDiscreteScheduler]
     scheduler_kwargs = {}

     transformer_kwargs = {

tests/lora/test_lora_layers_qwenimage.py

Lines changed: 0 additions & 1 deletion
@@ -37,7 +37,6 @@
 class QwenImageLoRATests(unittest.TestCase, PeftLoraLoaderMixinTests):
     pipeline_class = QwenImagePipeline
     scheduler_cls = FlowMatchEulerDiscreteScheduler
-    scheduler_classes = [FlowMatchEulerDiscreteScheduler]
     scheduler_kwargs = {}

     transformer_kwargs = {

tests/lora/test_lora_layers_sana.py

Lines changed: 2 additions & 3 deletions
@@ -31,9 +31,8 @@
 @require_peft_backend
 class SanaLoRATests(unittest.TestCase, PeftLoraLoaderMixinTests):
     pipeline_class = SanaPipeline
-    scheduler_cls = FlowMatchEulerDiscreteScheduler(shift=7.0)
-    scheduler_kwargs = {}
-    scheduler_classes = [FlowMatchEulerDiscreteScheduler]
+    scheduler_cls = FlowMatchEulerDiscreteScheduler
+    scheduler_kwargs = {"shift": 7.0}
     transformer_kwargs = {
         "patch_size": 1,
         "in_channels": 4,
