@@ -9673,6 +9673,11 @@ def _test_vecnorm_subproc_auto(
     def rename_t(self):
         return RenameTransform(in_keys=["observation"], out_keys=[("some", "obs")])
 
+    @pytest.mark.skipif(
+        TORCH_VERSION < version.parse("2.8.0"),
+        reason="VecNorm shared memory synchronization requires PyTorch >= 2.8 "
+        "when using spawn multiprocessing start method.",
+    )
     @retry(AssertionError, tries=10, delay=0)
     @pytest.mark.parametrize("nprc", [2, 5])
     def test_vecnorm_parallel_auto(self, nprc):
@@ -9785,6 +9790,11 @@ def _run_parallelenv(parallel_env, queue_in, queue_out):
         reason="Nested spawned multiprocessed is currently failing in python 3.11. "
         "See https://github.com/python/cpython/pull/108568 for info and fix.",
     )
+    @pytest.mark.skipif(
+        TORCH_VERSION < version.parse("2.8.0"),
+        reason="VecNorm shared memory synchronization requires PyTorch >= 2.8 "
+        "when using spawn multiprocessing start method.",
+    )
     def test_parallelenv_vecnorm(self):
         if _has_gym:
             make_env = EnvCreator(
@@ -10051,6 +10061,11 @@ def _test_vecnorm_subproc_auto(
     def rename_t(self):
         return RenameTransform(in_keys=["observation"], out_keys=[("some", "obs")])
 
+    @pytest.mark.skipif(
+        TORCH_VERSION < version.parse("2.8.0"),
+        reason="VecNorm shared memory synchronization requires PyTorch >= 2.8 "
+        "when using spawn multiprocessing start method.",
+    )
     @retry(AssertionError, tries=10, delay=0)
     @pytest.mark.parametrize("nprc", [2, 5])
     def test_vecnorm_parallel_auto(self, nprc):
@@ -10170,6 +10185,11 @@ def _run_parallelenv(parallel_env, queue_in, queue_out):
         reason="Nested spawned multiprocessed is currently failing in python 3.11. "
         "See https://github.com/python/cpython/pull/108568 for info and fix.",
     )
+    @pytest.mark.skipif(
+        TORCH_VERSION < version.parse("2.8.0"),
+        reason="VecNorm shared memory synchronization requires PyTorch >= 2.8 "
+        "when using spawn multiprocessing start method.",
+    )
     def test_parallelenv_vecnorm(self):
         if _has_gym:
             make_env = EnvCreator(
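
For reference, the TORCH_VERSION constant and version helper used by the new skipif guards are assumed to be module-level definitions in the test file; a minimal sketch of how they would typically be set up (the exact form in the file may differ):

import torch
from packaging import version

# Assumed module-level constant: parsed torch version that the skipif guards
# above compare against version.parse("2.8.0").
TORCH_VERSION = version.parse(torch.__version__)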