@@ -661,22 +661,25 @@ def transpiler_test_impl(self):

 class TestNCCL2Transpile(TranspilerTest):
     def test_nccl2_transpile(self):
-        main = fluid.Program()
-        startup = fluid.Program()
-        with fluid.program_guard(main, startup):
-            self.net_conf()
-
-        config = fluid.DistributeTranspilerConfig()
-        config.mode = "nccl2"
-        t = fluid.DistributeTranspiler(config=config)
-        t.transpile(
-            0,
-            trainers="127.0.0.1:6174,127.0.0.1:6175",
-            current_endpoint="127.0.0.1:6174",
-            startup_program=startup)
-        print([op.type for op in startup.global_block().ops])
-        self.assertEqual(startup.global_block().ops[-1].type, "gen_nccl_id")
-        self.assertIsNotNone(startup.global_block().vars.get("NCCLID"))
+        if fluid.core.is_compiled_with_cuda():  # test nccl2 only with cuda
+            main = fluid.Program()
+            startup = fluid.Program()
+            with fluid.program_guard(main, startup):
+                self.net_conf()
+
+            config = fluid.DistributeTranspilerConfig()
+            config.mode = "nccl2"
+            t = fluid.DistributeTranspiler(config=config)
+            t.transpile(
+                0,
+                trainers="127.0.0.1:6174,127.0.0.1:6175",
+                current_endpoint="127.0.0.1:6174",
+                startup_program=startup)
+            print([op.type for op in startup.global_block().ops])
+            self.assertEqual(startup.global_block().ops[-1].type, "gen_nccl_id")
+            self.assertIsNotNone(startup.global_block().vars.get("NCCLID"))
+        else:
+            pass


 if __name__ == "__main__":
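
The guarded branch above simply falls through to pass when Paddle is not built with CUDA, so on CPU-only builds the test reports success without exercising anything. As a design note, the same CUDA-only guard could instead be expressed with unittest.skipUnless, which records an explicit skip in the test output. The sketch below is only an illustration of that alternative, not part of this change; it assumes the TranspilerTest base class and self.net_conf() defined earlier in the same test file:

import unittest

import paddle.fluid as fluid


class TestNCCL2TranspileSkipVariant(TranspilerTest):
    # Hypothetical variant: skipUnless marks the test as skipped on
    # CPU-only builds instead of letting it pass silently.
    @unittest.skipUnless(fluid.core.is_compiled_with_cuda(),
                         "NCCL2 transpile requires a CUDA build of Paddle")
    def test_nccl2_transpile(self):
        main = fluid.Program()
        startup = fluid.Program()
        with fluid.program_guard(main, startup):
            self.net_conf()

        config = fluid.DistributeTranspilerConfig()
        config.mode = "nccl2"
        t = fluid.DistributeTranspiler(config=config)
        t.transpile(
            0,
            trainers="127.0.0.1:6174,127.0.0.1:6175",
            current_endpoint="127.0.0.1:6174",
            startup_program=startup)
        # The nccl2 transpile should append a gen_nccl_id op and an NCCLID var
        # to the startup program, same assertions as in the change above.
        self.assertEqual(startup.global_block().ops[-1].type, "gen_nccl_id")
        self.assertIsNotNone(startup.global_block().vars.get("NCCLID"))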