Commit 6e613f9

remove _save_load
1 parent 11b5fbd commit 6e613f9

13 files changed: +0 -42 lines
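
Every removed method below is the same one-line override: it only forwards to the shared helper _test_save_load_optional_components on the pipeline test mixin. A minimal, self-contained sketch of that pattern and of the presumed reason it is now redundant (illustrative only: ExamplePipelineFastTests is a hypothetical class, and whether diffusers' PipelineTesterMixin now runs this check by default is an assumption, not something this diff shows):

import unittest


class PipelineTesterMixin:
    """Toy stand-in for the shared pipeline test mixin (simplified sketch)."""

    def _test_save_load_optional_components(self, expected_max_difference=1e-4):
        # Shared helper: in the real mixin this saves the pipeline with optional
        # components set to None, reloads it, and compares outputs within
        # expected_max_difference. Elided here to keep the sketch self-contained.
        pass

    def test_save_load_optional_components(self):
        # If the mixin exposes the check as a test itself (the assumption behind
        # this commit), the one-line per-pipeline overrides become redundant.
        self._test_save_load_optional_components()


class ExamplePipelineFastTests(PipelineTesterMixin, unittest.TestCase):
    # No per-pipeline override needed: the inherited test runs as-is.
    pass


if __name__ == "__main__":
    unittest.main()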

tests/pipelines/controlnet/test_controlnet_sdxl.py

Lines changed: 0 additions & 6 deletions
@@ -210,9 +210,6 @@ def test_xformers_attention_forwardGenerator_pass(self):
     def test_inference_batch_single_identical(self):
         self._test_inference_batch_single_identical(expected_max_diff=2e-3)
 
-    def test_save_load_optional_components(self):
-        self._test_save_load_optional_components()
-
     @require_torch_accelerator
     def test_stable_diffusion_xl_offloads(self):
         pipes = []
@@ -831,9 +828,6 @@ def test_xformers_attention_forwardGenerator_pass(self):
     def test_inference_batch_single_identical(self):
         self._test_inference_batch_single_identical(expected_max_diff=2e-3)
 
-    def test_save_load_optional_components(self):
-        self._test_save_load_optional_components()
-
     def test_negative_conditions(self):
         components = self.get_dummy_components()
         pipe = self.pipeline_class(**components)

tests/pipelines/deepfloyd_if/test_if.py

Lines changed: 0 additions & 3 deletions
@@ -67,9 +67,6 @@ def get_dummy_inputs(self, device, seed=0):
 
         return inputs
 
-    def test_save_load_optional_components(self):
-        self._test_save_load_optional_components()
-
     @unittest.skipIf(torch_device not in ["cuda", "xpu"], reason="float16 requires CUDA or XPU")
     @require_accelerator
     def test_save_load_float16(self):

tests/pipelines/deepfloyd_if/test_if_img2img.py

Lines changed: 0 additions & 3 deletions
@@ -73,9 +73,6 @@ def get_dummy_inputs(self, device, seed=0):
 
         return inputs
 
-    def test_save_load_optional_components(self):
-        self._test_save_load_optional_components()
-
     @unittest.skipIf(
         torch_device != "cuda" or not is_xformers_available(),
         reason="XFormers attention is only available with CUDA and `xformers` installed",

tests/pipelines/deepfloyd_if/test_if_img2img_superresolution.py

Lines changed: 0 additions & 3 deletions
@@ -83,9 +83,6 @@ def get_dummy_inputs(self, device, seed=0):
     def test_xformers_attention_forwardGenerator_pass(self):
         self._test_xformers_attention_forwardGenerator_pass(expected_max_diff=1e-3)
 
-    def test_save_load_optional_components(self):
-        self._test_save_load_optional_components()
-
     @unittest.skipIf(torch_device not in ["cuda", "xpu"], reason="float16 requires CUDA or XPU")
     @require_accelerator
     def test_save_load_float16(self):

tests/pipelines/deepfloyd_if/test_if_inpainting.py

Lines changed: 0 additions & 3 deletions
@@ -83,9 +83,6 @@ def get_dummy_inputs(self, device, seed=0):
     def test_xformers_attention_forwardGenerator_pass(self):
         self._test_xformers_attention_forwardGenerator_pass(expected_max_diff=1e-3)
 
-    def test_save_load_optional_components(self):
-        self._test_save_load_optional_components()
-
     @unittest.skipIf(torch_device not in ["cuda", "xpu"], reason="float16 requires CUDA or XPU")
     @require_accelerator
     def test_save_load_float16(self):

tests/pipelines/deepfloyd_if/test_if_inpainting_superresolution.py

Lines changed: 0 additions & 3 deletions
@@ -85,9 +85,6 @@ def get_dummy_inputs(self, device, seed=0):
     def test_xformers_attention_forwardGenerator_pass(self):
         self._test_xformers_attention_forwardGenerator_pass(expected_max_diff=1e-3)
 
-    def test_save_load_optional_components(self):
-        self._test_save_load_optional_components()
-
     @unittest.skipIf(torch_device not in ["cuda", "xpu"], reason="float16 requires CUDA or XPU")
     @require_accelerator
     def test_save_load_float16(self):

tests/pipelines/deepfloyd_if/test_if_superresolution.py

Lines changed: 0 additions & 3 deletions
@@ -78,9 +78,6 @@ def get_dummy_inputs(self, device, seed=0):
     def test_xformers_attention_forwardGenerator_pass(self):
         self._test_xformers_attention_forwardGenerator_pass(expected_max_diff=1e-3)
 
-    def test_save_load_optional_components(self):
-        self._test_save_load_optional_components()
-
     @unittest.skipIf(torch_device not in ["cuda", "xpu"], reason="float16 requires CUDA or XPU")
     @require_accelerator
     def test_save_load_float16(self):

tests/pipelines/pag/test_pag_controlnet_sdxl.py

Lines changed: 0 additions & 3 deletions
@@ -212,9 +212,6 @@ def test_pag_disable_enable(self):
         assert np.abs(out.flatten() - out_pag_disabled.flatten()).max() < 1e-3
         assert np.abs(out.flatten() - out_pag_enabled.flatten()).max() > 1e-3
 
-    def test_save_load_optional_components(self):
-        self._test_save_load_optional_components()
-
     def test_pag_cfg(self):
         device = "cpu"  # ensure determinism for the device-dependent torch.Generator
         components = self.get_dummy_components()

tests/pipelines/pag/test_pag_sdxl.py

Lines changed: 0 additions & 3 deletions
@@ -191,9 +191,6 @@ def test_pag_disable_enable(self):
         assert np.abs(out.flatten() - out_pag_disabled.flatten()).max() < 1e-3
         assert np.abs(out.flatten() - out_pag_enabled.flatten()).max() > 1e-3
 
-    def test_save_load_optional_components(self):
-        self._test_save_load_optional_components()
-
     def test_pag_applied_layers(self):
         device = "cpu"  # ensure determinism for the device-dependent torch.Generator
         components = self.get_dummy_components()

tests/pipelines/pag/test_pag_sdxl_img2img.py

Lines changed: 0 additions & 3 deletions
@@ -239,9 +239,6 @@ def test_pag_disable_enable(self):
         assert np.abs(out.flatten() - out_pag_disabled.flatten()).max() < 1e-3
         assert np.abs(out.flatten() - out_pag_enabled.flatten()).max() > 1e-3
 
-    def test_save_load_optional_components(self):
-        self._test_save_load_optional_components()
-
     def test_pag_inference(self):
         device = "cpu"  # ensure determinism for the device-dependent torch.Generator
         components = self.get_dummy_components(requires_aesthetics_score=True)
