2 changes: 2 additions & 0 deletions tests/models/autoencoders/test_models_vq.py
@@ -65,9 +65,11 @@ def prepare_init_args_and_inputs_for_common(self):
inputs_dict = self.dummy_input
return init_dict, inputs_dict

@unittest.skip("Test not supported.")
def test_forward_signature(self):
pass

@unittest.skip("Test not supported.")
def test_training(self):
pass

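Note on the pattern above, which repeats in every hunk of this PR: test overrides that previously had an empty body (and therefore showed up as passing) now carry an explicit @unittest.skip(...) decorator, so the runner reports them as skipped with a reason. A minimal sketch of the before/after, using a hypothetical test class name for illustration only:

import unittest


class ExampleModelTests(unittest.TestCase):  # hypothetical class, not from the PR
    # Before: an empty override silently counts as a passing test.
    # def test_training(self):
    #     pass

    # After: the skip is explicit and the reason appears in the test report.
    @unittest.skip("Test not supported.")
    def test_training(self):
        pass


if __name__ == "__main__":
    unittest.main()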
6 changes: 6 additions & 0 deletions tests/models/unets/test_models_unet_1d.py
@@ -51,9 +51,11 @@ def input_shape(self):
def output_shape(self):
return (4, 14, 16)

@unittest.skip("Test not supported.")
def test_ema_training(self):
pass

@unittest.skip("Test not supported.")
def test_training(self):
pass

@@ -126,6 +128,7 @@ def test_output_pretrained(self):
# fmt: on
self.assertTrue(torch.allclose(output_slice, expected_output_slice, rtol=1e-3))

@unittest.skip("Test not supported.")
def test_forward_with_norm_groups(self):
# Not implemented yet for this UNet
pass
@@ -205,9 +208,11 @@ def test_output(self):
expected_shape = torch.Size((inputs_dict["sample"].shape[0], 1))
self.assertEqual(output.shape, expected_shape, "Input and output shapes do not match")

@unittest.skip("Test not supported.")
def test_ema_training(self):
pass

@unittest.skip("Test not supported.")
def test_training(self):
pass

@@ -265,6 +270,7 @@ def test_output_pretrained(self):
# fmt: on
self.assertTrue(torch.allclose(output, expected_output_slice, rtol=1e-3))

@unittest.skip("Test not supported.")
def test_forward_with_norm_groups(self):
# Not implemented yet for this UNet
pass
1 change: 1 addition & 0 deletions tests/models/unets/test_models_unet_2d.py
@@ -383,6 +383,7 @@ def test_output_pretrained_ve_large(self):

self.assertTrue(torch_all_close(output_slice, expected_output_slice, rtol=1e-2))

@unittest.skip("Test not supported.")
def test_forward_with_norm_groups(self):
# not required for this model
pass
1 change: 1 addition & 0 deletions tests/models/unets/test_models_unet_controlnetxs.py
@@ -320,6 +320,7 @@ def test_time_embedding_mixing(self):

assert output.shape == output_mix_time.shape

@unittest.skip("Test not supported.")
def test_forward_with_norm_groups(self):
# UNetControlNetXSModel currently only supports StableDiffusion and StableDiffusion-XL, both of which have norm_num_groups fixed at 32. So we don't need to test different values for norm_num_groups.
pass
2 changes: 2 additions & 0 deletions tests/pipelines/wuerstchen/test_wuerstchen_combined.py
@@ -232,8 +232,10 @@ def test_inference_batch_single_identical(self):
def test_float16_inference(self):
super().test_float16_inference()

@unittest.skip(reason="Test not supported.")
def test_callback_inputs(self):
pass

@unittest.skip(reason="Test not supported.")
def test_callback_cfg(self):
pass
3 changes: 3 additions & 0 deletions tests/schedulers/test_scheduler_ddim_inverse.py
@@ -1,3 +1,5 @@
import unittest

import torch

from diffusers import DDIMInverseScheduler
@@ -95,6 +97,7 @@ def test_inference_steps(self):
for t, num_inference_steps in zip([1, 10, 50], [10, 50, 500]):
self.check_over_forward(time_step=t, num_inference_steps=num_inference_steps)

@unittest.skip("Test not supported.")
def test_add_noise_device(self):
pass

2 changes: 2 additions & 0 deletions tests/schedulers/test_scheduler_deis.py
@@ -1,4 +1,5 @@
import tempfile
import unittest

import torch

@@ -57,6 +58,7 @@ def check_over_configs(self, time_step=0, **config):

assert torch.sum(torch.abs(output - new_output)) < 1e-5, "Scheduler outputs are not identical"

@unittest.skip("Test not supported.")
def test_from_save_pretrained(self):
pass

2 changes: 2 additions & 0 deletions tests/schedulers/test_scheduler_dpm_multi.py
@@ -1,4 +1,5 @@
import tempfile
import unittest

import torch

@@ -67,6 +68,7 @@ def check_over_configs(self, time_step=0, **config):

assert torch.sum(torch.abs(output - new_output)) < 1e-5, "Scheduler outputs are not identical"

@unittest.skip("Test not supported.")
def test_from_save_pretrained(self):
pass

2 changes: 2 additions & 0 deletions tests/schedulers/test_scheduler_dpm_single.py
@@ -1,4 +1,5 @@
import tempfile
import unittest

import torch

@@ -65,6 +66,7 @@ def check_over_configs(self, time_step=0, **config):

assert torch.sum(torch.abs(output - new_output)) < 1e-5, "Scheduler outputs are not identical"

@unittest.skip("Test not supported.")
def test_from_save_pretrained(self):
pass

6 changes: 3 additions & 3 deletions tests/schedulers/test_scheduler_edm_dpmsolver_multistep.py
@@ -3,9 +3,7 @@

import torch

from diffusers import (
EDMDPMSolverMultistepScheduler,
)
from diffusers import EDMDPMSolverMultistepScheduler

from .test_schedulers import SchedulerCommonTest

@@ -63,6 +61,7 @@ def check_over_configs(self, time_step=0, **config):

assert torch.sum(torch.abs(output - new_output)) < 1e-5, "Scheduler outputs are not identical"

@unittest.skip("Test not supported.")
def test_from_save_pretrained(self):
pass

@@ -258,5 +257,6 @@ def test_duplicated_timesteps(self, **config):
scheduler.set_timesteps(scheduler.config.num_train_timesteps)
assert len(scheduler.timesteps) == scheduler.num_inference_steps

@unittest.skip("Test not supported.")
def test_trained_betas(self):
pass
1 change: 1 addition & 0 deletions tests/schedulers/test_scheduler_flax.py
@@ -675,6 +675,7 @@ def check_over_configs(self, time_step=0, **config):

assert jnp.sum(jnp.abs(output - new_output)) < 1e-5, "Scheduler outputs are not identical"

@unittest.skip("Test not supported.")
def test_from_save_pretrained(self):
pass

2 changes: 2 additions & 0 deletions tests/schedulers/test_scheduler_ipndm.py
@@ -1,4 +1,5 @@
import tempfile
import unittest

import torch

@@ -50,6 +51,7 @@ def check_over_configs(self, time_step=0, **config):

assert torch.sum(torch.abs(output - new_output)) < 1e-5, "Scheduler outputs are not identical"

@unittest.skip("Test not supported.")
def test_from_save_pretrained(self):
pass

2 changes: 2 additions & 0 deletions tests/schedulers/test_scheduler_pndm.py
@@ -1,4 +1,5 @@
import tempfile
import unittest

import torch

@@ -53,6 +54,7 @@ def check_over_configs(self, time_step=0, **config):

assert torch.sum(torch.abs(output - new_output)) < 1e-5, "Scheduler outputs are not identical"

@unittest.skip("Test not supported.")
def test_from_save_pretrained(self):
pass

4 changes: 4 additions & 0 deletions tests/schedulers/test_scheduler_unclip.py
@@ -1,3 +1,5 @@
import unittest

import torch

from diffusers import UnCLIPScheduler
@@ -130,8 +132,10 @@ def test_full_loop_skip_timesteps(self):
assert abs(result_sum.item() - 258.2044983) < 1e-2
assert abs(result_mean.item() - 0.3362038) < 1e-3

@unittest.skip("Test not supported.")
def test_trained_betas(self):
pass

@unittest.skip("Test not supported.")
def test_add_noise_device(self):
pass
3 changes: 3 additions & 0 deletions tests/schedulers/test_scheduler_vq_diffusion.py
@@ -1,3 +1,5 @@
import unittest

import torch
import torch.nn.functional as F

@@ -52,5 +54,6 @@ def test_time_indices(self):
for t in [0, 50, 99]:
self.check_over_forward(time_step=t)

@unittest.skip("Test not supported.")
def test_add_noise_device(self):
pass