@@ -293,8 +293,10 @@ def test_framework_raises_if_used_with_missing_package():
 
 
 @pytest.mark.skipif(
-    not is_fms_accelerate_available(plugins="peft"),
-    reason="Only runs if fms-accelerate is installed along with accelerated-peft plugin",
+    (not is_fms_accelerate_available(plugins="peft"))
+    or (not torch.cuda.is_available()),
+    reason="Only runs if fms-accelerate is installed along with accelerated-peft plugin\
+    and GPU is available",
 )
 @pytest.mark.parametrize(
     "bad_kwargs,peft_config,exception,exception_msg",
@@ -359,7 +361,9 @@ def test_framework_raises_due_to_invalid_arguments(
 
 @pytest.mark.skipif(
-    not is_fms_accelerate_available(plugins="peft"),
-    reason="Only runs if fms-accelerate is installed along with accelerated-peft plugin",
+    (not is_fms_accelerate_available(plugins="peft"))
+    or (not torch.cuda.is_available()),
+    reason="Only runs if fms-accelerate is installed along with accelerated-peft plugin\
+    and if GPU is available",
 )
 @pytest.mark.parametrize(
     "quantized_lora_config,model_name_or_path,mock_and_spy",
@@ -412,10 +416,11 @@ def test_framework_initialized_properly_peft(
 
 
 @pytest.mark.skipif(
-    not is_fms_accelerate_available(plugins=["peft", "foak"]),
+    (not is_fms_accelerate_available(plugins=["peft", "foak"]))
+    or (not torch.cuda.is_available()),
     reason=(
-        "Only runs if fms-accelerate is installed along with accelerated-peft "
-        " and foak plugins"
+        "Only runs if fms-accelerate is installed along with accelerated-peft\
+        and foak plugins, and GPU is available"
     ),
 )
 def test_framework_initialized_properly_foak():
@@ -484,8 +489,9 @@ def test_framework_initialized_properly_foak():
 
 
 @pytest.mark.skipif(
-    not is_fms_accelerate_available(plugins="moe"),
-    reason="Only runs if fms-accelerate is installed along with accelerated-moe plugin",
+    (not is_fms_accelerate_available(plugins="moe")) or (not torch.cuda.is_available()),
+    reason="Only runs if fms-accelerate is installed along with accelerated-moe plugin \
+    and GPU is available",
 )
 def test_framework_initialized_properly_moe():
     """Ensure that specifying a properly configured acceleration dataclass
@@ -538,9 +544,10 @@ def test_framework_initialized_properly_moe():
 
 
 @pytest.mark.skipif(
-    not is_fms_accelerate_available(plugins="aadp"),
+    (not is_fms_accelerate_available(plugins="aadp"))
+    or (not torch.cuda.is_available()),
     reason="Only runs if fms-accelerate is installed along with \
-    attention_and_distributed_packing plugin",
+    attention_and_distributed_packing plugin and GPU is available",
 )
 def test_framework_initialize_and_trains_with_aadp():
     """
@@ -813,9 +820,10 @@ def test_error_raised_fast_moe_with_non_moe_model():
 
 
 @pytest.mark.skipif(
-    not is_fms_accelerate_available(plugins="foak"),
+    (not is_fms_accelerate_available(plugins="foak"))
+    or (not torch.cuda.is_available()),
     reason="Only runs if fms-accelerate is installed along with \
-    fused_ops_and_kernels plugin",
+    fused_ops_and_kernels plugin and GPU is available",
 )
 def test_fastkernels_with_full_finetuning_runs_successfully():
     """
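
Every hunk applies the same guard: skip the test unless the required fms-acceleration plugin is installed and a CUDA device is present. A minimal sketch of how that repeated condition could be expressed as one reusable marker; the requires_plugins_and_gpu helper name and the import path for is_fms_accelerate_available are assumptions for illustration, not part of this commit:

# Hypothetical refactor (not part of this commit): one reusable skipif marker for
# the "plugin installed AND GPU present" condition repeated in the hunks above.
import pytest
import torch

# Assumption: import is_fms_accelerate_available from wherever this test module
# already gets it; the path below is illustrative only.
from fms_acceleration.utils import is_fms_accelerate_available


def requires_plugins_and_gpu(plugins):
    """Build a skipif marker requiring the given plugins and a CUDA device."""
    return pytest.mark.skipif(
        (not is_fms_accelerate_available(plugins=plugins))
        or (not torch.cuda.is_available()),
        reason=f"Only runs if fms-accelerate is installed along with {plugins} "
        "and GPU is available",
    )


# Usage, equivalent to the edited decorators above:
@requires_plugins_and_gpu("peft")
def test_example_requiring_peft_plugin_and_gpu():
    ...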