Skip to content

Commit 35d7a7a

Browse files
committed
make style
1 parent f08a849 commit 35d7a7a

File tree

4 files changed

+8
-7
lines changed

4 files changed

+8
-7
lines changed

src/diffusers/utils/testing_utils.py

Lines changed: 3 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -1093,9 +1093,9 @@ def _is_torch_fp64_available(device):
1093 1093          "default": None,
1094 1094      }
1095 1095      BACKEND_MAX_MEMORY_ALLOCATED = {
1096      -        "cuda": torch.cuda.max_memory_allocated,
1097      -        "xpu": torch.xpu.max_memory_allocated,
1098      -        "default": 0,
     1096 +    "cuda": torch.cuda.max_memory_allocated,
     1097 +    "xpu": torch.xpu.max_memory_allocated,
     1098 +    "default": 0,
1099 1099      }
1100 1100
1101 1101
@@ -1198,5 +1198,3 @@ def update_mapping_from_spec(device_fn_dict: Dict[str, Callable], attribute_name
1198 1198  update_mapping_from_spec(BACKEND_RESET_PEAK_MEMORY_STATS, "RESET_PEAK_MEMORY_STATS_FN")
1199 1199  update_mapping_from_spec(BACKEND_RESET_MAX_MEMORY_ALLOCATED, "RESET_MAX_MEMORY_ALLOCATED_FN")
1200 1200  update_mapping_from_spec(BACKEND_MAX_MEMORY_ALLOCATED, "MAX_MEMORY_ALLOCATED_FN")
1201      -  (trailing blank line removed)
1202      -  (trailing blank line removed)

tests/pipelines/controlnet/test_controlnet.py

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -35,6 +35,9 @@
35 35  from diffusers.utils.import_utils import is_xformers_available
36 36  from diffusers.utils.testing_utils import (
37 37      backend_empty_cache,
   38 +    backend_max_memory_allocated,
   39 +    backend_reset_max_memory_allocated,
   40 +    backend_reset_peak_memory_stats,
38 41      enable_full_determinism,
39 42      get_python_version,
40 43      is_torch_compile,

tests/pipelines/deepfloyd_if/test_if_inpainting.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -124,8 +124,8 @@ def test_if_inpainting(self):
124 124      pipe.unet.set_attn_processor(AttnAddedKVProcessor())
125 125      pipe.enable_model_cpu_offload(device=torch_device)
126 126
127      -   backend_reset_max_memory_allocated(torch_device)
128 127      backend_empty_cache(torch_device)
    128 +    backend_reset_max_memory_allocated(torch_device)
129 129      backend_reset_peak_memory_stats(torch_device)
130 130
131 131      image = floats_tensor((1, 3, 64, 64), rng=random.Random(0)).to(torch_device)

tests/pipelines/deepfloyd_if/test_if_inpainting_superresolution.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -127,8 +127,8 @@ def test_if_inpainting_superresolution(self):
127 127      pipe.enable_model_cpu_offload(device=torch_device)
128 128
129 129      # Super resolution test
130      -   backend_reset_max_memory_allocated(torch_device)
131 130      backend_empty_cache(torch_device)
    131 +    backend_reset_max_memory_allocated(torch_device)
132 132      backend_reset_peak_memory_stats(torch_device)
133 133
134 134      generator = torch.Generator(device="cpu").manual_seed(0)

0 commit comments

Comments (0)