12 changes: 6 additions & 6 deletions .github/workflows/build-test-publish.yml
@@ -65,24 +65,24 @@ jobs:
strategy:
fail-fast: false
matrix:
python-version: ["3.9", "3.10", "3.11", "3.12", "3.13"]
python-version: ["3.10", "3.11", "3.12", "3.13"]
dependencies: ["latest", "pre"]
marks: ["fast"]
include:
- python-version: "3.9"
- python-version: "3.10"
dependencies: "min"
marks: "fast"
- python-version: "3.9"
- python-version: "3.10"
dependencies: "latest"
marks: "slow"
- python-version: "3.12"
- python-version: "3.13"
dependencies: "latest"
marks: "veryslow"
exclude:
- python-version: "3.9"
dependencies: "pre"
- python-version: "3.10"
dependencies: "pre"
- python-version: "3.11"
dependencies: "pre"

steps:
- uses: actions/checkout@v5
20 changes: 10 additions & 10 deletions pyproject.toml
@@ -13,27 +13,26 @@ classifiers = [
"Topic :: Scientific/Engineering :: Image Recognition",
"Topic :: Scientific/Engineering :: Bio-Informatics",
"License :: OSI Approved :: Apache Software License",
"Programming Language :: Python :: 3.9",
"Programming Language :: Python :: 3.10",
"Programming Language :: Python :: 3.11",
"Programming Language :: Python :: 3.12",
"Programming Language :: Python :: 3.13",
]
license = "Apache-2.0"
requires-python = ">=3.9"
requires-python = ">=3.10"
dependencies = [
"acres >= 0.2.0",
"attrs >= 20.1.0",
"nibabel >= 3.0",
"nipype >= 1.8.5",
"nibabel >= 5.1.1",
"nipype >= 1.9.0",
"migas >= 0.4.0",
"nireports >= 25.0.1",
"niworkflows >= 1.11.0",
"nitransforms >= 24.1.0",
"numpy >= 1.23",
"nitransforms >= 25.0.1",
"numpy >= 2.0",
"pybids >= 0.16.4",
"scikit-image >= 0.18",
"scipy >= 1.8.1",
"scikit-image >= 0.23",
"scipy >= 1.10",
"templateflow >= 23.1",
"toml >= 0.10",
]
@@ -70,8 +69,8 @@ dev = [
]

test = [
"coverage[toml] >=5.2.1",
"pytest >= 6",
"coverage[toml] >=7",
"pytest >= 8.1",
"pytest-cov >= 2.11",
"pytest-env",
"pytest-xdist >= 2.5",
@@ -194,6 +193,7 @@ ignore = [
"B019",
"SIM108",
"C901",
"UP038",
]

[tool.ruff.lint.flake8-quotes]
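For context on the new `UP038` entry in the Ruff ignore list: UP038 flags `isinstance`/`issubclass` calls that pass a tuple of types and suggests the PEP 604 union syntax instead. A minimal sketch of the pattern the rule targets — plain illustrative Python, not code from this PR:

```python
# What UP038 would flag: a tuple of types passed to isinstance()
def describe(value: object) -> str:
    if isinstance(value, (int, float)):  # flagged by UP038
        return 'number'
    return 'other'


# The PEP 604 union form the rule suggests instead (valid on Python 3.10+)
def describe_union(value: object) -> str:
    if isinstance(value, int | float):
        return 'number'
    return 'other'
```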
3 changes: 2 additions & 1 deletion sdcflows/interfaces/epi.py
@@ -120,14 +120,15 @@ def _run_interface(self, runtime):
blips,
self.inputs.readout_times,
self.inputs.in_data,
strict=False,
)
)

(
self._results['pe_dirs_fsl'],
self._results['readout_times'],
self._results['out_data'],
) = zip(*sorted_inputs)
) = zip(*sorted_inputs, strict=False)

# Put sign back last
self._results['pe_dirs_fsl'] = [
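The repeated `strict=False` additions throughout this diff address Ruff's B905 check, which asks every `zip()` call to state its length-mismatch policy now that the `strict` keyword exists (Python 3.10+). A minimal sketch of the difference, using toy data rather than sdcflows inputs:

```python
pe_dirs = ['i-', 'j']
readout_times = [0.05, 0.05, 0.05]  # deliberately one entry too many

# strict=False keeps the pre-3.10 behaviour: silently stop at the shorter input
print(list(zip(pe_dirs, readout_times, strict=False)))
# [('i-', 0.05), ('j', 0.05)]

# strict=True raises ValueError on a length mismatch instead of truncating
try:
    list(zip(pe_dirs, readout_times, strict=True))
except ValueError as err:
    print(err)  # zip() argument 2 is longer than argument 1
```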
2 changes: 1 addition & 1 deletion sdcflows/interfaces/fmap.py
@@ -289,7 +289,7 @@ def _run_interface(self, runtime):
fmap_imgs = [nb.load(fname) for fname in fmap_files]

# Baseline check: paired magnitude/phase maps are basically the same
for mag, fmap in zip(mag_imgs, fmap_imgs):
for mag, fmap in zip(mag_imgs, fmap_imgs, strict=False):
msg = _check_gross_geometry(mag, fmap)
if msg is not None:
LOGGER.critical(msg)
12 changes: 9 additions & 3 deletions sdcflows/interfaces/utils.py
@@ -81,13 +81,15 @@ class Flatten(SimpleInterface):

def _run_interface(self, runtime):
self._results['out_list'] = _flatten(
zip(self.inputs.in_data, self.inputs.in_meta),
zip(self.inputs.in_data, self.inputs.in_meta, strict=False),
max_trs=self.inputs.max_trs,
out_dir=runtime.cwd,
)

# Unzip out_data, out_meta outputs.
self._results['out_data'], self._results['out_meta'] = zip(*self._results['out_list'])
self._results['out_data'], self._results['out_meta'] = zip(
*self._results['out_list'], strict=False
)
return runtime


@@ -449,7 +451,11 @@ def _deoblique(in_file, in_affine=None, newpath=None):
if in_affine is None:
orientation = nb.aff2axcodes(nii.affine)
directions = (
np.array([int(l1 == l2) for l1, l2 in zip(orientation, 'RAS')], dtype='float32') * 2
np.array(
[int(l1 == l2) for l1, l2 in zip(orientation, 'RAS', strict=False)],
dtype='float32',
)
* 2
- 1
)
newaff = np.eye(4)
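The reflowed `directions` expression in `_deoblique` maps each axis code to +1 when it matches `'RAS'` and to -1 when it does not. A small standalone sketch of just that arithmetic, with a made-up orientation tuple standing in for what `nibabel.aff2axcodes()` would return:

```python
import numpy as np

orientation = ('L', 'A', 'S')  # hypothetical axis codes for an oblique image

# int(match) is 1 or 0; "* 2 - 1" maps {1, 0} -> {+1.0, -1.0}
directions = (
    np.array(
        [int(l1 == l2) for l1, l2 in zip(orientation, 'RAS', strict=False)],
        dtype='float32',
    )
    * 2
    - 1
)
print(directions)  # [-1.  1.  1.] -> flip the first (L) axis, keep A and S
```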
6 changes: 4 additions & 2 deletions sdcflows/tests/test_transform.py
@@ -48,7 +48,7 @@ def generate_oracle(
data[19:22, ...] = 0
data = np.pad(data + nd.binary_erosion(data, ball(3)), 8)

zooms = [z if not f else -z for z, f in zip(zooms, flip)]
zooms = [z if not f else -z for z, f in zip(zooms, flip, strict=False)]
affine = np.diag(zooms + [1])
affine[:3, 3] = -affine[:3, :3] @ ((np.array(data.shape) - 1) * 0.5)

@@ -123,7 +123,9 @@ def test_displacements_field(tmpdir, testdata_dir, outdir, pe_dir, rotation, fli
assert np.all((np.sqrt(((ours - theirs) ** 2).sum()) / ours.size) < 1e-1)

if outdir:
orientation = ''.join([ax[bool(f)] for ax, f in zip(('RL', 'AP', 'SI'), flip)])
orientation = ''.join(
[ax[bool(f)] for ax, f in zip(('RL', 'AP', 'SI'), flip, strict=False)]
)

SimpleBeforeAfter(
after_label='Theirs (ANTs)',
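In the reflowed test helper above, `ax[bool(f)]` selects the first letter of each axis pair when that axis is not flipped and the second when it is. A quick standalone illustration with an assumed `flip` tuple:

```python
flip = (False, True, False)  # hypothetical: flip only the anterior-posterior axis

orientation = ''.join(
    [ax[bool(f)] for ax, f in zip(('RL', 'AP', 'SI'), flip, strict=False)]
)
print(orientation)  # 'RPS'
```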
7 changes: 3 additions & 4 deletions sdcflows/transform.py
@@ -49,10 +49,9 @@

import asyncio
import os
from collections.abc import Sequence
from collections.abc import Callable, Sequence
from functools import partial
from pathlib import Path
from typing import Callable
from warnings import warn

import attr
@@ -492,7 +491,7 @@ def apply(
ro_time *= n_volumes

pe_info = []
for vol_pe_dir, vol_ro_time in zip(pe_dir, ro_time):
for vol_pe_dir, vol_ro_time in zip(pe_dir, ro_time, strict=False):
pe_axis = 'ijk'.index(vol_pe_dir[0])
# Displacements are reversed if either is true (after ensuring positive cosines)
flip = (axcodes[pe_axis] in 'LPI') ^ vol_pe_dir.endswith('-')
@@ -502,7 +501,7 @@ def apply(
# Reference image's voxel coordinates (in voxel units)
voxcoords = (
nt.linear.Affine(reference=moving)
.reference.ndindex.reshape((ndim, *data.shape[:ndim]))
.reference.ndindex.T.reshape((ndim, *data.shape[:ndim]))
.astype('float32')
)

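The import change in `transform.py` reflects that `typing.Callable` has been a deprecated alias since Python 3.9, while `collections.abc.Callable` is subscriptable in annotations under PEP 585. A minimal sketch of how the imported name is typically used in a signature — the function below is illustrative, not sdcflows' actual code:

```python
from collections.abc import Callable, Sequence


def apply_to_volumes(
    volumes: Sequence[str],
    resampler: Callable[[str], str],
) -> list[str]:
    """Run a user-supplied resampling callable over a list of file names."""
    return [resampler(vol) for vol in volumes]


# Usage with a trivial callable
print(apply_to_volumes(['a.nii.gz', 'b.nii.gz'], lambda name: name.upper()))
```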
2 changes: 1 addition & 1 deletion sdcflows/utils/wrangler.py
@@ -488,7 +488,7 @@ def find_estimators(
targets = all_targets
intent_map = [[target] for target in all_targets]

for target, intent in zip(targets, intent_map):
for target, intent in zip(targets, intent_map, strict=False):
logger.debug('Found single PE target %s', target.relpath)
# The new estimator is IntendedFor the individual targets,
# even if the EPI file is IntendedFor multiple
5 changes: 4 additions & 1 deletion sdcflows/workflows/fit/tests/test_syn.py
@@ -304,7 +304,10 @@ def test_mm2vox(tmp_path, fixed_ornt, moving_ornt, ijk, index):

vox_params = _mm2vox(str(moving_path), str(fixed_path), ijk, config)
vox_values = [level[2] for level in vox_params]
assert [mm_level[:2] == vox_level[:2] for mm_level, vox_level in zip(params, vox_params)]
assert all(
mm_level[:2] == vox_level[:2]
for mm_level, vox_level in zip(params, vox_params, strict=False)
)
assert np.array_equal(vox_values, mm_values / [2, 3, 4][index])


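The `test_mm2vox` change above is a genuine fix rather than a style tweak: the old `assert [expr for ... in zip(...)]` evaluated a list comprehension, and a non-empty list is always truthy, so the assertion could never fail. Wrapping a generator in `all(...)` makes every element-wise comparison count. A minimal sketch of the difference, with toy data:

```python
params = [(1, 2, 'a'), (3, 4, 'b')]
vox_params = [(1, 99, 'a'), (3, 4, 'b')]  # second element deliberately wrong

# Old pattern: the list itself is truthy, so this passes despite the mismatch
assert [m[:2] == v[:2] for m, v in zip(params, vox_params, strict=False)]

# New pattern: all() actually checks every pair, and fails on the bad one
assert not all(m[:2] == v[:2] for m, v in zip(params, vox_params, strict=False))
```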
4 changes: 2 additions & 2 deletions tox.ini
@@ -3,8 +3,8 @@ requires =
tox>=4
tox-uv
envlist =
py3{9,10,11,12,13}-latest-{fast,slow,veryslow}
py39-min-fast
py3{10,11,12,13}-latest-{fast,slow,veryslow}
py310-min-fast
py3{11,12,13}-pre-{fast,slow,veryslow}
style
spellcheck