From 2daf48fa2d7d66e0493753930d5209623b1495c4 Mon Sep 17 00:00:00 2001
From: Tom Close
Date: Fri, 20 Sep 2024 17:47:56 +1000
Subject: [PATCH 1/3] fixes to unittests after typing refactor of fileformats
 into BinaryFile and UnicodeFile

---
 pydra/engine/tests/test_node_task.py | 16 +++++++---------
 pydra/utils/tests/utils.py           |  8 ++++----
 2 files changed, 11 insertions(+), 13 deletions(-)

diff --git a/pydra/engine/tests/test_node_task.py b/pydra/engine/tests/test_node_task.py
index bceaf97402..e6b8f0c9c5 100644
--- a/pydra/engine/tests/test_node_task.py
+++ b/pydra/engine/tests/test_node_task.py
@@ -4,11 +4,9 @@
 import typing as ty
 import numpy as np
 import time
-from unittest import mock
 from pathlib import Path
 import pytest
-import time
-from fileformats.generic import File
+from fileformats.generic import BinaryFile
 import pydra.mark

 from .utils import (
@@ -1606,7 +1604,7 @@ def test_task_files_cachelocations(plugin_dask_opt, tmp_path):
     assert not nn2.output_dir.exists()


-class OverriddenContentsFile(File):
+class OverriddenContentsFile(BinaryFile):
    """A class for testing purposes that enables you to override the contents
    of the file, to allow you to check whether the persistent cache is used."""

@@ -1614,22 +1612,22 @@ def __init__(
         self,
         fspaths: ty.Iterator[Path],
         contents: ty.Optional[bytes] = None,
-        metadata: ty.Dict[str, ty.Any] = None,
+        metadata: ty.Optional[ty.Dict[str, ty.Any]] = None,
     ):
         super().__init__(fspaths, metadata=metadata)
         self._contents = contents

-    def byte_chunks(self, **kwargs) -> ty.Generator[ty.Tuple[str, bytes], None, None]:
+    def byte_chunks(self, **kwargs) -> ty.Generator[ty.Tuple[str, ty.Iterator[bytes]], None, None]:  # type: ignore[override]
         if self._contents is not None:
             yield (str(self.fspath), iter([self._contents]))
         else:
             yield from super().byte_chunks(**kwargs)

     @property
-    def contents(self):
+    def raw_contents(self) -> bytes:  # type: ignore[override]
         if self._contents is not None:
             return self._contents
-        return super().contents
+        return super().raw_contents


 def test_task_files_persistentcache(tmp_path):
@@ -1645,7 +1643,7 @@ def test_task_files_persistentcache(tmp_path):

     @pydra.mark.task
     def read_contents(x: OverriddenContentsFile) -> bytes:
-        return x.contents
+        return x.raw_contents

     assert (
         read_contents(x=test_file, cache_dir=cache_dir)(plugin="serial").output.out
diff --git a/pydra/utils/tests/utils.py b/pydra/utils/tests/utils.py
index 3582fa9eda..8a110af3cc 100644
--- a/pydra/utils/tests/utils.py
+++ b/pydra/utils/tests/utils.py
@@ -1,16 +1,16 @@
-from fileformats.generic import File
+from fileformats.generic import BinaryFile, File
 from fileformats.core.mixin import WithSeparateHeader, WithMagicNumber
 from pydra import mark
 from pydra.engine.task import ShellCommandTask
 from pydra.engine import specs


-class MyFormat(WithMagicNumber, File):
+class MyFormat(WithMagicNumber, BinaryFile):
     ext = ".my"
     magic_number = b"MYFORMAT"


-class MyHeader(File):
+class MyHeader(BinaryFile):
     ext = ".hdr"


@@ -18,7 +18,7 @@ class MyFormatX(WithSeparateHeader, MyFormat):
     header_type = MyHeader


-class MyOtherFormatX(WithMagicNumber, WithSeparateHeader, File):
+class MyOtherFormatX(WithMagicNumber, WithSeparateHeader, BinaryFile):
     magic_number = b"MYFORMAT"
     ext = ".my"
     header_type = MyHeader

From f54640b97eb039cd33ed824c07b7948780b7bb4a Mon Sep 17 00:00:00 2001
From: Tom Close
Date: Tue, 1 Oct 2024 10:23:07 +1000
Subject: [PATCH 2/3] updated flaky kwargs to latest version (reruns ->
 max_runs)

---
 pydra/engine/tests/test_boutiques.py | 10 +++---
 pydra/engine/tests/test_node_task.py | 18 +++++-----
 pydra/engine/tests/test_shelltask.py |  6 ++--
 pydra/engine/tests/test_submitter.py | 10 +++---
 pydra/engine/tests/test_workflow.py  | 54 ++++++++++++++--------------
 pydra/utils/tests/test_messenger.py  |  2 +-
 6 files changed, 50 insertions(+), 50 deletions(-)

diff --git a/pydra/engine/tests/test_boutiques.py b/pydra/engine/tests/test_boutiques.py
index 48f484b687..679da975b7 100644
--- a/pydra/engine/tests/test_boutiques.py
+++ b/pydra/engine/tests/test_boutiques.py
@@ -21,7 +21,7 @@

 @no_win
 @need_bosh_docker
-@pytest.mark.flaky(reruns=3)  # need for travis
+@pytest.mark.flaky(max_runs=3)  # need for travis
 @pytest.mark.parametrize(
     "maskfile", ["test_brain.nii.gz", "test_brain", "test_brain.nii"]
 )
@@ -45,7 +45,7 @@ def test_boutiques_1(maskfile, plugin, results_function, tmpdir, data_tests_dir)

 @no_win
 @need_bosh_docker
-@pytest.mark.flaky(reruns=3)
+@pytest.mark.flaky(max_runs=3)
 def test_boutiques_spec_1(data_tests_dir):
     """testing spec: providing input/output fields names"""
     btask = BoshTask(
@@ -70,7 +70,7 @@

 @no_win
 @need_bosh_docker
-@pytest.mark.flaky(reruns=3)
+@pytest.mark.flaky(max_runs=3)
 def test_boutiques_spec_2(data_tests_dir):
     """testing spec: providing partial input/output fields names"""
     btask = BoshTask(
@@ -93,7 +93,7 @@

 @no_win
 @need_bosh_docker
-@pytest.mark.flaky(reruns=3)
+@pytest.mark.flaky(max_runs=3)
 @pytest.mark.parametrize(
     "maskfile", ["test_brain.nii.gz", "test_brain", "test_brain.nii"]
 )
@@ -125,7 +125,7 @@ def test_boutiques_wf_1(maskfile, plugin, tmpdir, infile):

 @no_win
 @need_bosh_docker
-@pytest.mark.flaky(reruns=3)
+@pytest.mark.flaky(max_runs=3)
 @pytest.mark.xfail(reason="issues with bosh for 4472771")
 @pytest.mark.parametrize(
     "maskfile", ["test_brain.nii.gz", "test_brain", "test_brain.nii"]
diff --git a/pydra/engine/tests/test_node_task.py b/pydra/engine/tests/test_node_task.py
index e6b8f0c9c5..e99d7257f4 100644
--- a/pydra/engine/tests/test_node_task.py
+++ b/pydra/engine/tests/test_node_task.py
@@ -358,7 +358,7 @@ def test_odir_init():
 # Tests for tasks without state (i.e. no splitter)


-@pytest.mark.flaky(reruns=2)  # when dask
+@pytest.mark.flaky(max_runs=2)  # when dask
 def test_task_nostate_1(plugin_dask_opt, tmp_path):
     """task without splitter"""
     nn = fun_addtwo(name="NA", a=3)
@@ -399,7 +399,7 @@ def test_task_nostate_1_call():
     assert nn.output_dir.exists()


-@pytest.mark.flaky(reruns=2)  # when dask
+@pytest.mark.flaky(max_runs=2)  # when dask
 def test_task_nostate_1_call_subm(plugin_dask_opt, tmp_path):
     """task without splitter"""
     nn = fun_addtwo(name="NA", a=3)
@@ -417,7 +417,7 @@
     assert nn.output_dir.exists()


-@pytest.mark.flaky(reruns=2)  # when dask
+@pytest.mark.flaky(max_runs=2)  # when dask
 def test_task_nostate_1_call_plug(plugin_dask_opt, tmp_path):
     """task without splitter"""
     nn = fun_addtwo(name="NA", a=3)
@@ -549,7 +549,7 @@ def test_task_nostate_7():
 # Testing caching for tasks without states


-@pytest.mark.flaky(reruns=2)  # when dask
+@pytest.mark.flaky(max_runs=2)  # when dask
 def test_task_nostate_cachedir(plugin_dask_opt, tmp_path):
     """task with provided cache_dir using pytest tmp_path"""
     cache_dir = tmp_path / "test_task_nostate"
@@ -566,7 +566,7 @@
     assert results.output.out == 5


-@pytest.mark.flaky(reruns=2)  # when dask
+@pytest.mark.flaky(max_runs=2)  # when dask
 def test_task_nostate_cachedir_relativepath(tmp_path, plugin_dask_opt):
     """task with provided cache_dir as relative path"""
     os.chdir(tmp_path)
@@ -587,7 +587,7 @@
     shutil.rmtree(cache_dir)


-@pytest.mark.flaky(reruns=2)  # when dask
+@pytest.mark.flaky(max_runs=2)  # when dask
 def test_task_nostate_cachelocations(plugin_dask_opt, tmp_path):
     """
     Two identical tasks with provided cache_dir;
@@ -729,7 +729,7 @@ def test_task_nostate_cachelocations_updated(plugin, tmp_path):
 # Tests for tasks with states (i.e. with splitter)


-@pytest.mark.flaky(reruns=2)  # when dask
+@pytest.mark.flaky(max_runs=2)  # when dask
 @pytest.mark.parametrize("input_type", ["list", "array"])
 def test_task_state_1(plugin_dask_opt, input_type, tmp_path):
     """task with the simplest splitter"""
@@ -1074,7 +1074,7 @@ def test_task_state_6a(plugin, tmp_path):
     assert odir.exists()


-@pytest.mark.flaky(reruns=2)  # when dask
+@pytest.mark.flaky(max_runs=2)  # when dask
 def test_task_state_comb_1(plugin_dask_opt, tmp_path):
     """task with the simplest splitter and combiner"""
     nn = fun_addtwo(name="NA").split(a=[3, 5], splitter="a").combine(combiner="a")
@@ -1451,7 +1451,7 @@ def test_task_state_comb_contdim_2(tmp_path):
 # Testing caching for tasks with states


-@pytest.mark.flaky(reruns=2)  # when dask
+@pytest.mark.flaky(max_runs=2)  # when dask
 def test_task_state_cachedir(plugin_dask_opt, tmp_path):
     """task with a state and provided cache_dir using pytest tmp_path"""
     cache_dir = tmp_path / "test_task_nostate"
diff --git a/pydra/engine/tests/test_shelltask.py b/pydra/engine/tests/test_shelltask.py
index 4857db094f..638eeb1f78 100644
--- a/pydra/engine/tests/test_shelltask.py
+++ b/pydra/engine/tests/test_shelltask.py
@@ -26,7 +26,7 @@
     pytest.skip("SLURM not available in windows", allow_module_level=True)


-@pytest.mark.flaky(reruns=2)  # when dask
+@pytest.mark.flaky(max_runs=2)  # when dask
 @pytest.mark.parametrize("results_function", [result_no_submitter, result_submitter])
 def test_shell_cmd_1(plugin_dask_opt, results_function, tmp_path):
     """simple command, no arguments"""
@@ -107,7 +107,7 @@ def test_shell_cmd_2b(plugin, results_function, tmp_path):
 # tests with State


-@pytest.mark.flaky(reruns=2)
+@pytest.mark.flaky(max_runs=2)
 def test_shell_cmd_3(plugin_dask_opt, tmp_path):
     """commands without arguments
     splitter = executable
@@ -2174,7 +2174,7 @@ def test_shell_cmd_inputspec_copyfile_state_1(plugin, results_function, tmp_path
 # customised input_spec in Workflow


-@pytest.mark.flaky(reruns=2)  # when dask
+@pytest.mark.flaky(max_runs=2)  # when dask
 def test_wf_shell_cmd_2(plugin_dask_opt, tmp_path):
     """a workflow with input with defined output_file_template (str)
     that requires wf.lzin
diff --git a/pydra/engine/tests/test_submitter.py b/pydra/engine/tests/test_submitter.py
index 298e7e74b4..f8a54db4a4 100644
--- a/pydra/engine/tests/test_submitter.py
+++ b/pydra/engine/tests/test_submitter.py
@@ -134,7 +134,7 @@ def test_wf_in_wf(plugin, tmpdir):
     assert res.output.out == 7


-@pytest.mark.flaky(reruns=2)  # when dask
+@pytest.mark.flaky(max_runs=2)  # when dask
 def test_wf2(plugin_dask_opt, tmpdir):
     """workflow as a node
     workflow-node with one task and no splitter
@@ -156,7 +156,7 @@
     assert res.output.out == 3


-@pytest.mark.flaky(reruns=2)  # when dask
+@pytest.mark.flaky(max_runs=2)  # when dask
 def test_wf_with_state(plugin_dask_opt, tmpdir):
     wf = Workflow(name="wf_with_state", input_spec=["x"])
     wf.add(sleep_add_one(name="taska", x=wf.lzin.x))
@@ -235,7 +235,7 @@ def test_slurm_wf_state(tmpdir):

 @need_slurm
-@pytest.mark.flaky(reruns=3)
+@pytest.mark.flaky(max_runs=3)
 def test_slurm_max_jobs(tmpdir):
     wf = Workflow("new_wf", input_spec=["x", "y"], cache_dir=tmpdir)
     wf.inputs.x = 5
@@ -338,7 +338,7 @@ def cancel(job_name_part):
     return proc.stderr.decode("utf-8").strip()


-@pytest.mark.flaky(reruns=1)
+@pytest.mark.flaky(max_runs=2)
 @need_slurm
 def test_slurm_cancel_rerun_1(tmpdir):
     """testing that tasks run with slurm is re-queue
@@ -371,7 +371,7 @@ def test_slurm_cancel_rerun_1(tmpdir):
     assert script_dir.exists()


-@pytest.mark.flaky(reruns=1)
+@pytest.mark.flaky(max_runs=2)
 @need_slurm
 def test_slurm_cancel_rerun_2(tmpdir):
     """testing that tasks run with slurm that has --no-requeue
diff --git a/pydra/engine/tests/test_workflow.py b/pydra/engine/tests/test_workflow.py
index c6aab6544f..de7195419a 100644
--- a/pydra/engine/tests/test_workflow.py
+++ b/pydra/engine/tests/test_workflow.py
@@ -381,7 +381,7 @@ def test_wf_2d_outpasdict(plugin, tmpdir):
     assert wf.output_dir.exists()


-@pytest.mark.flaky(reruns=3)  # when dask
+@pytest.mark.flaky(max_runs=3)  # when dask
 def test_wf_3(plugin_dask_opt, tmpdir):
     """testing None value for an input"""
     wf = Workflow(name="wf_3", input_spec=["x", "y"])
@@ -1203,7 +1203,7 @@ def test_wf_3sernd_ndst_1a(plugin, tmpdir):
 # workflows with structures A -> C, B -> C


-@pytest.mark.flaky(reruns=3)  # when dask
+@pytest.mark.flaky(max_runs=3)  # when dask
 def test_wf_3nd_st_1(plugin_dask_opt, tmpdir):
     """workflow with three tasks, third one connected to two previous tasks,
     splitter on the workflow level
@@ -1231,7 +1231,7 @@ def test_wf_3nd_st_1(plugin_dask_opt, tmpdir):
     assert odir.exists()


-@pytest.mark.flaky(reruns=3)  # when dask
+@pytest.mark.flaky(max_runs=3)  # when dask
 def test_wf_3nd_ndst_1(plugin_dask_opt, tmpdir):
     """workflow with three tasks, third one connected to two previous tasks,
     splitter on the tasks levels
@@ -2495,7 +2495,7 @@ def test_wfasnd_wfst_4(plugin, tmpdir):
 # Testing caching


-@pytest.mark.flaky(reruns=3)
+@pytest.mark.flaky(max_runs=3)
 def test_wf_nostate_cachedir(plugin, tmpdir):
     """wf with provided cache_dir using pytest tmpdir"""
     cache_dir = tmpdir.mkdir("test_wf_cache_1")
@@ -2517,7 +2517,7 @@
     shutil.rmtree(cache_dir)


-@pytest.mark.flaky(reruns=3)
+@pytest.mark.flaky(max_runs=3)
 def test_wf_nostate_cachedir_relativepath(tmpdir, plugin):
     """wf with provided cache_dir as relative path"""
     tmpdir.chdir()
@@ -2541,7 +2541,7 @@
     shutil.rmtree(cache_dir)


-@pytest.mark.flaky(reruns=3)
+@pytest.mark.flaky(max_runs=3)
 def test_wf_nostate_cachelocations(plugin, tmpdir):
     """
     Two identical wfs with provided cache_dir;
@@ -2596,7 +2596,7 @@
     assert not wf2.output_dir.exists()


-@pytest.mark.flaky(reruns=3)
+@pytest.mark.flaky(max_runs=3)
 def test_wf_nostate_cachelocations_a(plugin, tmpdir):
     """
     the same as previous test, but workflows names differ;
@@ -2655,7 +2655,7 @@
     assert wf2.output_dir.exists()


-@pytest.mark.flaky(reruns=3)
+@pytest.mark.flaky(max_runs=3)
 def test_wf_nostate_cachelocations_b(plugin, tmpdir):
     """
     the same as previous test, but the 2nd workflows has two outputs
@@ -2716,7 +2716,7 @@
     assert wf2.output_dir.exists()


-@pytest.mark.flaky(reruns=3)
+@pytest.mark.flaky(max_runs=3)
 def test_wf_nostate_cachelocations_setoutputchange(plugin, tmpdir):
     """
     the same as previous test, but wf output names differ,
@@ -2776,7 +2776,7 @@
     assert wf2.output_dir.exists()


-@pytest.mark.flaky(reruns=3)
+@pytest.mark.flaky(max_runs=3)
 def test_wf_nostate_cachelocations_setoutputchange_a(plugin, tmpdir):
     """
     the same as previous test, but wf names and output names differ,
@@ -2832,7 +2832,7 @@ def test_wf_nostate_cachelocations_setoutputchange_a(plugin, tmpdir):
     assert wf2.output_dir.exists()


-@pytest.mark.flaky(reruns=3)
+@pytest.mark.flaky(max_runs=3)
 def test_wf_nostate_cachelocations_forcererun(plugin, tmpdir):
     """
     Two identical wfs with provided cache_dir;
@@ -2890,7 +2890,7 @@
     assert wf2.output_dir.exists()


-@pytest.mark.flaky(reruns=3)
+@pytest.mark.flaky(max_runs=3)
 def test_wf_nostate_cachelocations_wftaskrerun_propagateTrue(plugin, tmpdir):
     """
     Two identical wfs with provided cache_dir and cache_locations for the second one;
@@ -2952,7 +2952,7 @@
     assert abs(t1 - t2) < t1 / 2


-@pytest.mark.flaky(reruns=3)
+@pytest.mark.flaky(max_runs=3)
 def test_wf_nostate_cachelocations_wftaskrerun_propagateFalse(plugin, tmpdir):
     """
     Two identical wfs with provided cache_dir and cache_locations for the second one;
@@ -3017,7 +3017,7 @@
     assert len(list(Path(cache_dir2).glob("F*"))) == 0


-@pytest.mark.flaky(reruns=3)
+@pytest.mark.flaky(max_runs=3)
 def test_wf_nostate_cachelocations_taskrerun_wfrerun_propagateFalse(plugin, tmpdir):
     """
     Two identical wfs with provided cache_dir, and cache_locations for the second wf;
@@ -3080,7 +3080,7 @@
     assert t2 > 2


-@pytest.mark.flaky(reruns=3)
+@pytest.mark.flaky(max_runs=3)
 def test_wf_nostate_nodecachelocations(plugin, tmpdir):
     """
     Two wfs with different input, but the second node has the same input;
@@ -3129,7 +3129,7 @@
     assert len(list(Path(cache_dir2).glob("F*"))) == 1


-@pytest.mark.flaky(reruns=3)
+@pytest.mark.flaky(max_runs=3)
 def test_wf_nostate_nodecachelocations_upd(plugin, tmpdir):
     """
     Two wfs with different input, but the second node has the same input;
@@ -3175,7 +3175,7 @@
     assert len(list(Path(cache_dir2).glob("F*"))) == 1


-@pytest.mark.flaky(reruns=3)
+@pytest.mark.flaky(max_runs=3)
 def test_wf_state_cachelocations(plugin, tmpdir):
     """
     Two identical wfs (with states) with provided cache_dir;
@@ -3238,7 +3238,7 @@
     assert not odir.exists()


-@pytest.mark.flaky(reruns=3)
+@pytest.mark.flaky(max_runs=3)
 def test_wf_state_cachelocations_forcererun(plugin, tmpdir):
     """
     Two identical wfs (with states) with provided cache_dir;
@@ -3302,7 +3302,7 @@
     assert odir.exists()


-@pytest.mark.flaky(reruns=3)
+@pytest.mark.flaky(max_runs=3)
 def test_wf_state_cachelocations_updateinp(plugin, tmpdir):
     """
     Two identical wfs (with states) with provided cache_dir;
@@ -3368,7 +3368,7 @@
     assert not odir.exists()


-@pytest.mark.flaky(reruns=3)
+@pytest.mark.flaky(max_runs=3)
 def test_wf_state_n_nostate_cachelocations(plugin, tmpdir):
     """
     Two wfs with provided cache_dir, the first one has no state, the second has;
@@ -3477,7 +3477,7 @@ def test_wf_nostate_cachelocations_updated(plugin, tmpdir):
     assert wf2.output_dir.exists()


-@pytest.mark.flaky(reruns=3)
+@pytest.mark.flaky(max_runs=3)
 def test_wf_nostate_cachelocations_recompute(plugin, tmpdir):
     """
     Two wfs with the same inputs but slightly different graph;
@@ -3530,7 +3530,7 @@
     assert len(list(Path(cache_dir2).glob("F*"))) == 1


-@pytest.mark.flaky(reruns=3)
+@pytest.mark.flaky(max_runs=3)
 def test_wf_ndstate_cachelocations(plugin, tmpdir):
     """
     Two wfs with identical inputs and node states;
@@ -3594,7 +3594,7 @@
     assert not wf2.output_dir.exists()


-@pytest.mark.flaky(reruns=3)
+@pytest.mark.flaky(max_runs=3)
 def test_wf_ndstate_cachelocations_forcererun(plugin, tmpdir):
     """
     Two wfs with identical inputs and node states;
@@ -3658,7 +3658,7 @@
     assert wf2.output_dir.exists()


-@pytest.mark.flaky(reruns=3)
+@pytest.mark.flaky(max_runs=3)
 def test_wf_ndstate_cachelocations_updatespl(plugin, tmpdir):
     """
     Two wfs with identical inputs and node state (that is set after adding the node!);
@@ -3721,7 +3721,7 @@
     assert not wf2.output_dir.exists()


-@pytest.mark.flaky(reruns=3)
+@pytest.mark.flaky(max_runs=3)
 def test_wf_ndstate_cachelocations_recompute(plugin, tmpdir):
     """
     Two wfs (with nodes with states) with provided cache_dir;
@@ -3785,7 +3785,7 @@
     assert wf2.output_dir.exists()


-@pytest.mark.flaky(reruns=3)
+@pytest.mark.flaky(max_runs=3)
 def test_wf_nostate_runtwice_usecache(plugin, tmpdir):
     """
     running workflow (without state) twice,
@@ -4207,7 +4207,7 @@ def test_wf_upstream_error2(plugin, tmpdir):
     assert "raised an error" in str(excinfo.value)


-@pytest.mark.flaky(reruns=2)  # when slurm
+@pytest.mark.flaky(max_runs=2)  # when slurm
 def test_wf_upstream_error3(plugin, tmpdir):
     """task2 dependent on task1, task1 errors, task-level split on task 1
     goal - workflow finish running, one output errors but the other doesn't
diff --git a/pydra/utils/tests/test_messenger.py b/pydra/utils/tests/test_messenger.py
index 5abbf85924..a0b554719d 100644
--- a/pydra/utils/tests/test_messenger.py
+++ b/pydra/utils/tests/test_messenger.py
@@ -35,7 +35,7 @@ def test_file_messenger(tmpdir):
     assert len(glob(str(tmpdir / "messages" / "*.jsonld"))) == 2


-@pytest.mark.flaky(reruns=3)
+@pytest.mark.flaky(max_runs=3)
 def test_collect_messages(tmpdir):
     tmpdir.chdir()

From 0deadc431512202f609074751ca6d3438c66458f Mon Sep 17 00:00:00 2001
From: Tom Close
Date: Tue, 1 Oct 2024 10:24:13 +1000
Subject: [PATCH 3/3] added flaky, pyld and pympler to dev deps

---
 pyproject.toml | 39 ++++++++++++---------------------------
 1 file changed, 12 insertions(+), 27 deletions(-)

diff --git a/pyproject.toml b/pyproject.toml
index ba862339cd..6148734e5d 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -18,19 +18,12 @@ dependencies = [
     "typing_extensions >=4.6.3; python_version < '3.10'",
     "typing_utils >=0.1.0; python_version < '3.10'",
 ]
-license = {file = "LICENSE"}
-authors = [
-    {name = "Nipype developers", email = "neuroimaging@python.org"},
-]
+license = { file = "LICENSE" }
+authors = [{ name = "Nipype developers", email = "neuroimaging@python.org" }]
 maintainers = [
-    {name = "Nipype developers", email = "neuroimaging@python.org"},
-]
-keywords = [
-    "brainweb",
-    "dataflow",
-    "neuroimaging",
-    "pydra",
+    { name = "Nipype developers", email = "neuroimaging@python.org" },
 ]
+keywords = ["brainweb", "dataflow", "neuroimaging", "pydra"]
 classifiers = [
     "Development Status :: 3 - Alpha",
     "Environment :: Console",
@@ -49,18 +42,9 @@ classifiers = [
 dynamic = ["version"]

 [project.optional-dependencies]
-psij = [
-    "psij-python",
-]
-dask = [
-    "dask",
-    "distributed",
-]
-dev = [
-    "black",
-    "pre-commit",
-    "pydra[test]",
-]
+psij = ["psij-python"]
+dask = ["dask", "distributed"]
["dask", "distributed"] +dev = ["black", "pre-commit", "pydra[test]"] doc = [ "packaging", "sphinx ==6.2.1", @@ -69,10 +53,13 @@ doc = [ "sphinxcontrib-versioning", ] test = [ + "flaky", + "pympler", + "pyld", "pytest >=6.2.5", "pytest-cov", "pytest-env", - "pytest-xdist <2.0", + "pytest-xdist", "pytest-rerunfailures", "pytest-timeout", "codecov", @@ -84,9 +71,7 @@ test = [ "boutiques", "pympler", ] -jupyter = [ - "nest_asyncio" -] +jupyter = ["nest_asyncio"] # Aliases tests = ["pydra[test]"] docs = ["pydra[doc]"]