Skip to content

Commit 36acdd7

Browse files
authored
[Tests] skip tests properly with unittest.skip() (huggingface#10527)
* skip tests properly.
* more
* more
1 parent e7db062 commit 36acdd7

15 files changed: +36 lines added, −3 lines removed

tests/models/autoencoders/test_models_vq.py

Lines changed: 2 additions & 0 deletions
Original file line number · Diff line number · Diff line change
@@ -65,9 +65,11 @@ def prepare_init_args_and_inputs_for_common(self):
6565
inputs_dict = self.dummy_input
6666
return init_dict, inputs_dict
6767

68+
@unittest.skip("Test not supported.")
6869
def test_forward_signature(self):
6970
pass
7071

72+
@unittest.skip("Test not supported.")
7173
def test_training(self):
7274
pass
7375

tests/models/unets/test_models_unet_1d.py

Lines changed: 6 additions & 0 deletions
Original file line number · Diff line number · Diff line change
@@ -51,9 +51,11 @@ def input_shape(self):
5151
def output_shape(self):
5252
return (4, 14, 16)
5353

54+
@unittest.skip("Test not supported.")
5455
def test_ema_training(self):
5556
pass
5657

58+
@unittest.skip("Test not supported.")
5759
def test_training(self):
5860
pass
5961

@@ -126,6 +128,7 @@ def test_output_pretrained(self):
126128
# fmt: on
127129
self.assertTrue(torch.allclose(output_slice, expected_output_slice, rtol=1e-3))
128130

131+
@unittest.skip("Test not supported.")
129132
def test_forward_with_norm_groups(self):
130133
# Not implemented yet for this UNet
131134
pass
@@ -205,9 +208,11 @@ def test_output(self):
205208
expected_shape = torch.Size((inputs_dict["sample"].shape[0], 1))
206209
self.assertEqual(output.shape, expected_shape, "Input and output shapes do not match")
207210

211+
@unittest.skip("Test not supported.")
208212
def test_ema_training(self):
209213
pass
210214

215+
@unittest.skip("Test not supported.")
211216
def test_training(self):
212217
pass
213218

@@ -265,6 +270,7 @@ def test_output_pretrained(self):
265270
# fmt: on
266271
self.assertTrue(torch.allclose(output, expected_output_slice, rtol=1e-3))
267272

273+
@unittest.skip("Test not supported.")
268274
def test_forward_with_norm_groups(self):
269275
# Not implemented yet for this UNet
270276
pass

tests/models/unets/test_models_unet_2d.py

Lines changed: 1 addition & 0 deletions
Original file line number · Diff line number · Diff line change
@@ -383,6 +383,7 @@ def test_output_pretrained_ve_large(self):
383383

384384
self.assertTrue(torch_all_close(output_slice, expected_output_slice, rtol=1e-2))
385385

386+
@unittest.skip("Test not supported.")
386387
def test_forward_with_norm_groups(self):
387388
# not required for this model
388389
pass

tests/models/unets/test_models_unet_controlnetxs.py

Lines changed: 1 addition & 0 deletions
Original file line number · Diff line number · Diff line change
@@ -320,6 +320,7 @@ def test_time_embedding_mixing(self):
320320

321321
assert output.shape == output_mix_time.shape
322322

323+
@unittest.skip("Test not supported.")
323324
def test_forward_with_norm_groups(self):
324325
# UNetControlNetXSModel currently only supports StableDiffusion and StableDiffusion-XL, both of which have norm_num_groups fixed at 32. So we don't need to test different values for norm_num_groups.
325326
pass

tests/pipelines/wuerstchen/test_wuerstchen_combined.py

Lines changed: 2 additions & 0 deletions
Original file line number · Diff line number · Diff line change
@@ -232,8 +232,10 @@ def test_inference_batch_single_identical(self):
232232
def test_float16_inference(self):
233233
super().test_float16_inference()
234234

235+
@unittest.skip(reason="Test not supported.")
235236
def test_callback_inputs(self):
236237
pass
237238

239+
@unittest.skip(reason="Test not supported.")
238240
def test_callback_cfg(self):
239241
pass

tests/schedulers/test_scheduler_ddim_inverse.py

Lines changed: 3 additions & 0 deletions
Original file line number · Diff line number · Diff line change
@@ -1,3 +1,5 @@
1+
import unittest
2+
13
import torch
24

35
from diffusers import DDIMInverseScheduler
@@ -95,6 +97,7 @@ def test_inference_steps(self):
9597
for t, num_inference_steps in zip([1, 10, 50], [10, 50, 500]):
9698
self.check_over_forward(time_step=t, num_inference_steps=num_inference_steps)
9799

100+
@unittest.skip("Test not supported.")
98101
def test_add_noise_device(self):
99102
pass
100103

tests/schedulers/test_scheduler_deis.py

Lines changed: 2 additions & 0 deletions
Original file line number · Diff line number · Diff line change
@@ -1,4 +1,5 @@
11
import tempfile
2+
import unittest
23

34
import torch
45

@@ -57,6 +58,7 @@ def check_over_configs(self, time_step=0, **config):
5758

5859
assert torch.sum(torch.abs(output - new_output)) < 1e-5, "Scheduler outputs are not identical"
5960

61+
@unittest.skip("Test not supported.")
6062
def test_from_save_pretrained(self):
6163
pass
6264

tests/schedulers/test_scheduler_dpm_multi.py

Lines changed: 2 additions & 0 deletions
Original file line number · Diff line number · Diff line change
@@ -1,4 +1,5 @@
11
import tempfile
2+
import unittest
23

34
import torch
45

@@ -67,6 +68,7 @@ def check_over_configs(self, time_step=0, **config):
6768

6869
assert torch.sum(torch.abs(output - new_output)) < 1e-5, "Scheduler outputs are not identical"
6970

71+
@unittest.skip("Test not supported.")
7072
def test_from_save_pretrained(self):
7173
pass
7274

tests/schedulers/test_scheduler_dpm_single.py

Lines changed: 2 additions & 0 deletions
Original file line number · Diff line number · Diff line change
@@ -1,4 +1,5 @@
11
import tempfile
2+
import unittest
23

34
import torch
45

@@ -65,6 +66,7 @@ def check_over_configs(self, time_step=0, **config):
6566

6667
assert torch.sum(torch.abs(output - new_output)) < 1e-5, "Scheduler outputs are not identical"
6768

69+
@unittest.skip("Test not supported.")
6870
def test_from_save_pretrained(self):
6971
pass
7072

tests/schedulers/test_scheduler_edm_dpmsolver_multistep.py

Lines changed: 3 additions & 3 deletions
Original file line number · Diff line number · Diff line change
@@ -3,9 +3,7 @@
33

44
import torch
55

6-
from diffusers import (
7-
EDMDPMSolverMultistepScheduler,
8-
)
6+
from diffusers import EDMDPMSolverMultistepScheduler
97

108
from .test_schedulers import SchedulerCommonTest
119

@@ -63,6 +61,7 @@ def check_over_configs(self, time_step=0, **config):
6361

6462
assert torch.sum(torch.abs(output - new_output)) < 1e-5, "Scheduler outputs are not identical"
6563

64+
@unittest.skip("Test not supported.")
6665
def test_from_save_pretrained(self):
6766
pass
6867

@@ -258,5 +257,6 @@ def test_duplicated_timesteps(self, **config):
258257
scheduler.set_timesteps(scheduler.config.num_train_timesteps)
259258
assert len(scheduler.timesteps) == scheduler.num_inference_steps
260259

260+
@unittest.skip("Test not supported.")
261261
def test_trained_betas(self):
262262
pass

0 commit comments

Comments (0)