Skip to content

Commit 29ad6ae

Browse files
committed
update test pipeline
1 parent 565e51c commit 29ad6ae

File tree

1 file changed

+5
-5
lines changed

1 file changed

+5
-5
lines changed

src/diffusers/utils/testing_utils.py

Lines changed: 5 additions & 5 deletions
Original file line number | Diff line number | Diff line change
@@ -1077,28 +1077,28 @@ def _is_torch_fp64_available(device):
10771077
# Function definitions
10781078
BACKEND_EMPTY_CACHE = {
10791079
"cuda": torch.cuda.empty_cache,
1080-
# "xpu": torch.xpu.empty_cache,
1080+
"xpu": torch.xpu.empty_cache,
10811081
"cpu": None,
10821082
"mps": torch.mps.empty_cache,
10831083
"default": None,
10841084
}
10851085
BACKEND_DEVICE_COUNT = {
10861086
"cuda": torch.cuda.device_count,
1087-
# "xpu": torch.xpu.device_count,
1087+
"xpu": torch.xpu.device_count,
10881088
"cpu": lambda: 0,
10891089
"mps": lambda: 0,
10901090
"default": 0,
10911091
}
10921092
BACKEND_MANUAL_SEED = {
10931093
"cuda": torch.cuda.manual_seed,
1094-
# "xpu": torch.xpu.manual_seed,
1094+
"xpu": torch.xpu.manual_seed,
10951095
"cpu": torch.manual_seed,
10961096
"mps": torch.mps.manual_seed,
10971097
"default": torch.manual_seed,
10981098
}
10991099
BACKEND_RESET_PEAK_MEMORY_STATS = {
11001100
"cuda": torch.cuda.reset_peak_memory_stats,
1101-
# "xpu": getattr(torch.xpu, "reset_peak_memory_stats", None),
1101+
"xpu": getattr(torch.xpu, "reset_peak_memory_stats", None),
11021102
"cpu": None,
11031103
"mps": None,
11041104
"default": None,
@@ -1112,7 +1112,7 @@ def _is_torch_fp64_available(device):
11121112
}
11131113
BACKEND_MAX_MEMORY_ALLOCATED = {
11141114
"cuda": torch.cuda.max_memory_allocated,
1115-
# "xpu": getattr(torch.xpu, "max_memory_allocated", None),
1115+
"xpu": getattr(torch.xpu, "max_memory_allocated", None),
11161116
"cpu": 0,
11171117
"mps": 0,
11181118
"default": 0,

0 commit comments

Comments (0)