Skip to content

Commit c045524

Browse files
committed
apply style check
1 parent 227ea9f commit c045524

File tree

7 files changed

+15
-13
lines changed

7 files changed

+15
-13
lines changed

tests/models/autoencoders/test_models_vae.py

Lines changed: 2 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -376,12 +376,10 @@ def prepare_init_args_and_inputs_for_common(self):
376376
return self.init_dict, self.inputs_dict()
377377

378378
@unittest.skip
379-
def test_training(self):
380-
...
379+
def test_training(self): ...
381380

382381
@unittest.skip
383-
def test_ema_training(self):
384-
...
382+
def test_ema_training(self): ...
385383

386384

387385
class AutoencoderKLTemporalDecoderFastTests(ModelTesterMixin, unittest.TestCase):

tests/pipelines/amused/test_amused.py

Lines changed: 1 addition & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -124,8 +124,7 @@ def test_inference_batch_consistent(self, batch_sizes=[2]):
124124
self._test_inference_batch_consistent(batch_sizes=batch_sizes, batch_generator=False)
125125

126126
@unittest.skip("aMUSEd does not support lists of generators")
127-
def test_inference_batch_single_identical(self):
128-
...
127+
def test_inference_batch_single_identical(self): ...
129128

130129

131130
@slow

tests/pipelines/amused/test_amused_img2img.py

Lines changed: 1 addition & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -126,8 +126,7 @@ def test_inference_batch_consistent(self, batch_sizes=[2]):
126126
self._test_inference_batch_consistent(batch_sizes=batch_sizes, batch_generator=False)
127127

128128
@unittest.skip("aMUSEd does not support lists of generators")
129-
def test_inference_batch_single_identical(self):
130-
...
129+
def test_inference_batch_single_identical(self): ...
131130

132131

133132
@slow

tests/pipelines/amused/test_amused_inpaint.py

Lines changed: 1 addition & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -130,8 +130,7 @@ def test_inference_batch_consistent(self, batch_sizes=[2]):
130130
self._test_inference_batch_consistent(batch_sizes=batch_sizes, batch_generator=False)
131131

132132
@unittest.skip("aMUSEd does not support lists of generators")
133-
def test_inference_batch_single_identical(self):
134-
...
133+
def test_inference_batch_single_identical(self): ...
135134

136135

137136
@slow

tests/pipelines/controlnet/test_controlnet_sdxl.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1019,7 +1019,7 @@ def test_conditioning_channels(self):
10191019
)
10201020

10211021
controlnet = ControlNetModel.from_unet(unet, conditioning_channels=4)
1022-
assert type(controlnet.mid_block) == UNetMidBlock2D
1022+
assert type(controlnet.mid_block) is UNetMidBlock2D
10231023
assert controlnet.conditioning_channels == 4
10241024

10251025
def get_dummy_components(self, time_cond_proj_dim=None):

tests/pipelines/deepfloyd_if/test_if.py

Lines changed: 8 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -23,7 +23,14 @@
2323
)
2424
from diffusers.models.attention_processor import AttnAddedKVProcessor
2525
from diffusers.utils.import_utils import is_xformers_available
26-
from diffusers.utils.testing_utils import load_numpy, require_accelerator, require_torch_gpu, skip_mps, slow, torch_device
26+
from diffusers.utils.testing_utils import (
27+
load_numpy,
28+
require_accelerator,
29+
require_torch_gpu,
30+
skip_mps,
31+
slow,
32+
torch_device,
33+
)
2734

2835
from ..pipeline_params import TEXT_TO_IMAGE_BATCH_PARAMS, TEXT_TO_IMAGE_PARAMS
2936
from ..test_pipelines_common import PipelineTesterMixin, assert_mean_pixel_difference

tests/pipelines/text_to_video_synthesis/test_text_to_video_zero_sdxl.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -283,7 +283,7 @@ def test_model_cpu_offload_forward_pass(self, expected_max_diff=2e-4):
283283
@unittest.skip(reason="`num_images_per_prompt` argument is not supported for this pipeline.")
284284
def test_pipeline_call_signature(self):
285285
pass
286-
286+
287287
@unittest.skipIf(torch_device not in ["cuda", "xpu"], reason="float16 requires CUDA or XPU")
288288
@require_accelerator
289289
def test_save_load_float16(self, expected_max_diff=1e-2):

0 commit comments

Comments (0)