@@ -320,21 +320,6 @@ def require_torch_multi_gpu(test_case):
320320    return unittest.skipUnless(torch.cuda.device_count() > 1, "test requires multiple GPUs")(test_case)
321321
322322
323- def require_torch_multi_accelerator(test_case):
324-     """
325-     Decorator marking a test that requires a multi-accelerator setup (in PyTorch). These tests are skipped on a machine
326-     without multiple hardware accelerators.
327-     """
328-     if not is_torch_available():
329-         return unittest.skip("test requires PyTorch")(test_case)
330-
331-     import torch
332-
333-     return unittest.skipUnless(
334-         torch.cuda.device_count() > 1 or torch.xpu.device_count() > 1, "test requires multiple hardware accelerators"
335-     )(test_case)
336-
337-
338323def require_torch_accelerator_with_fp16(test_case):
339324    """Decorator marking a test that requires an accelerator with support for the FP16 data type."""
340325    return unittest.skipUnless(_is_torch_fp16_available(torch_device), "test requires accelerator with fp16 support")(
@@ -369,31 +354,6 @@ def require_big_gpu_with_torch_cuda(test_case):
369354    )(test_case)
370355
371356
372- def require_big_accelerator(test_case):
373-     """
374-     Decorator marking a test that requires a bigger hardware accelerator (24GB) for execution. Some example pipelines:
375-     Flux, SD3, Cog, etc.
376-     """
377-     if not is_torch_available():
378-         return unittest.skip("test requires PyTorch")(test_case)
379-
380-     import torch
381-
382-     if not (torch.cuda.is_available() or torch.xpu.is_available()):
383-         return unittest.skip("test requires PyTorch CUDA")(test_case)
384-
385-     if torch.xpu.is_available():
386-         device_properties = torch.xpu.get_device_properties(0)
387-     else:
388-         device_properties = torch.cuda.get_device_properties(0)
389-
390-     total_memory = device_properties.total_memory / (1024**3)
391-     return unittest.skipUnless(
392-         total_memory >= BIG_GPU_MEMORY,
393-         f"test requires a hardware accelerator with at least {BIG_GPU_MEMORY} GB memory",
394-     )(test_case)
395-
396-
397357def require_torch_accelerator_with_training(test_case):
398358    """Decorator marking a test that requires an accelerator with support for training."""
399359    return unittest.skipUnless(
0 commit comments