diff --git a/.git-blame-ignore-revs b/.git-blame-ignore-revs
index 6697c6af..b6d8fe78 100644
--- a/.git-blame-ignore-revs
+++ b/.git-blame-ignore-revs
@@ -1,2 +1,4 @@
 # isort addition
 6f809fa9950654cbb7f755922c45dffbc88dc45d
+# Ruff 0.9.2
+03e848c6f7c102cc9e0907a38c311afe5c357e77
diff --git a/.readthedocs.yml b/.readthedocs.yml
index a85becf3..5de4a207 100644
--- a/.readthedocs.yml
+++ b/.readthedocs.yml
@@ -1,19 +1,21 @@
 version: 2
+sphinx:
+  configuration: docs/conf.py
 
 build:
   os: ubuntu-22.04
   tools:
     python: "3.11"
   jobs:
     post_checkout:
-    - git fetch --unshallow
+      - git fetch --unshallow
 
 python:
   install:
-  - requirements: docs/requirements.txt
-  - method: pip
-    path: .
-    extra_requirements:
-    - doc
-  - method: pip
-    path: wrapper/
+    - requirements: docs/requirements.txt
+    - method: pip
+      path: .
+      extra_requirements:
+        - doc
+    - method: pip
+      path: wrapper/
diff --git a/nibabies/__about__.py b/nibabies/__about__.py
deleted file mode 100644
index 54176d01..00000000
--- a/nibabies/__about__.py
+++ /dev/null
@@ -1,19 +0,0 @@
-# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
-# vi: set ft=python sts=4 ts=4 sw=4 et:
-"""Base module variables."""
-
-try:
-    from ._version import __version__
-except ImportError:
-    __version__ = '0+unknown'
-
-__org__ = 'nipreps'
-__packagename__ = 'nibabies'
-__copyright__ = 'Copyright 2023, Center for Reproducible Neuroscience, Stanford University'
-__credits__ = (
-    'Contributors: please check the ``.zenodo.json`` file at the top-level folder'
-    'of the repository'
-)
-__url__ = f'https://github.com/{__org__}/{__packagename__}'
-
-DOWNLOAD_URL = f'https://github.com/{__org__}/{__packagename__}/archive/{__version__}.tar.gz'
diff --git a/nibabies/__init__.py b/nibabies/__init__.py
index aa4b8275..2afc7fea 100644
--- a/nibabies/__init__.py
+++ b/nibabies/__init__.py
@@ -1 +1,28 @@
-from .__about__ import __version__  # noqa: F401
+# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
+# vi: set ft=python sts=4 ts=4 sw=4 et:
+#
+# Copyright The NiPreps Developers
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# We support and encourage derived works from this project, please read
+# about our expectations at
+#
+#     https://www.nipreps.org/community/licensing/
+#
+"""Top-module metadata."""
+
+try:
+    from ._version import __version__
+except ImportError:
+    __version__ = '0+unknown'
diff --git a/nibabies/types.py b/nibabies/_types.py
similarity index 100%
rename from nibabies/types.py
rename to nibabies/_types.py
diff --git a/nibabies/cli/parser.py b/nibabies/cli/parser.py
index 4b73662a..36647bb5 100644
--- a/nibabies/cli/parser.py
+++ b/nibabies/cli/parser.py
@@ -32,8 +32,8 @@ class DeprecatedAction(Action):
     def __call__(self, parser, namespace, values, option_string=None):
         new_opt, rem_vers = deprecations.get(self.dest, (None, None))
         msg = (
-            f"{self.option_strings} has been deprecated and will be removed in "
-            f"{rem_vers or 'a later version'}."
+            f'{self.option_strings} has been deprecated and will be removed in '
+            f'{rem_vers or "a later version"}.'
         )
         if new_opt:
             msg += f' Please use `{new_opt}` instead.'
@@ -140,7 +140,7 @@ def _slice_time_ref(value, parser):
         value = float(value)
     except ValueError as e:
         raise parser.error(
-            "Slice time reference must be number, 'start', or 'middle'. " f'Received {value}.'
+            f"Slice time reference must be number, 'start', or 'middle'. Received {value}."
         ) from e
     if not 0 <= value <= 1:
         raise parser.error(f'Slice time reference must be in range 0-1. Received {value}.')
@@ -182,7 +182,7 @@ def _str_none(val):
         'output_dir',
         action='store',
         type=Path,
-        help='the output path for the outcomes of preprocessing and visual ' 'reports',
+        help='the output path for the outcomes of preprocessing and visual reports',
     )
     parser.add_argument(
         'analysis_level',
@@ -297,7 +297,7 @@ def _str_none(val):
     g_perfm.add_argument(
         '--low-mem',
         action='store_true',
-        help='attempt to reduce memory usage (will increase disk usage ' 'in working directory)',
+        help='attempt to reduce memory usage (will increase disk usage in working directory)',
     )
     g_perfm.add_argument(
         '--use-plugin',
@@ -497,8 +497,7 @@ def _str_none(val):
         action='store',
         default=0.5,
         type=float,
-        help='Threshold for flagging a frame as an outlier on the basis of framewise '
-        'displacement',
+        help='Threshold for flagging a frame as an outlier on the basis of framewise displacement',
     )
     g_confounds.add_argument(
         '--dvars-spike-threshold',
@@ -507,7 +506,7 @@ def _str_none(val):
         action='store',
         default=1.5,
         type=float,
-        help='Threshold for flagging a frame as an outlier on the basis of standardised ' 'DVARS',
+        help='Threshold for flagging a frame as an outlier on the basis of standardised DVARS',
     )
 
     # ANTs options
@@ -657,7 +656,7 @@ def _str_none(val):
         '--stop-on-first-crash',
         action='store_true',
         default=False,
-        help='Force stopping on first crash, even if a work directory' ' was specified.',
+        help='Force stopping on first crash, even if a work directory was specified.',
     )
     g_other.add_argument(
         '--notrack',
@@ -875,8 +874,7 @@ def parse_args(args=None, namespace=None):
         from ..utils.bids import validate_input_dir
 
         build_log.info(
-            'Making sure the input data is BIDS compliant (warnings can be ignored in most '
-            'cases).'
+            'Making sure the input data is BIDS compliant (warnings can be ignored in most cases).'
         )
         validate_input_dir(config.environment.exec_env, opts.bids_dir, opts.participant_label)
 
@@ -896,8 +894,8 @@ def parse_args(args=None, namespace=None):
     missing_subjects = participant_label - set(all_subjects)
     if missing_subjects:
         parser.error(
-            "One or more participant labels were not found in the BIDS directory: "
-            f"{', '.join(missing_subjects)}."
+            'One or more participant labels were not found in the BIDS directory: '
+            f'{", ".join(missing_subjects)}.'
         )
 
     config.execution.participant_label = sorted(participant_label)
diff --git a/nibabies/config.py b/nibabies/config.py
index f8c4cc83..08154068 100644
--- a/nibabies/config.py
+++ b/nibabies/config.py
@@ -413,7 +413,7 @@ class execution(_Config):
     """Force usage of this anatomical scan as the structural reference."""
     reports_only = False
     """Only build the reports, based on the reportlets found in a cached working directory."""
-    run_uuid = f"{strftime('%Y%m%d-%H%M%S')}_{uuid4()}"
+    run_uuid = f'{strftime("%Y%m%d-%H%M%S")}_{uuid4()}'
     """Unique identifier of this particular run."""
     segmentation_atlases_dir = None
     """Directory with atlases to use for JLF segmentations"""
@@ -622,7 +622,7 @@ class workflow(_Config):
 class loggers:
     """Keep loggers easily accessible (see :py:func:`init`)."""
 
-    _fmt = '%(asctime)s,%(msecs)d %(name)-2s ' '%(levelname)-2s:\n\t %(message)s'
+    _fmt = '%(asctime)s,%(msecs)d %(name)-2s %(levelname)-2s:\n\t %(message)s'
     _datefmt = '%y%m%d-%H:%M:%S'
 
     default = logging.getLogger()
diff --git a/nibabies/interfaces/metric.py b/nibabies/interfaces/metric.py
index a67efb7e..f9f56c90 100644
--- a/nibabies/interfaces/metric.py
+++ b/nibabies/interfaces/metric.py
@@ -78,8 +78,7 @@ class MetricDilateInputSpec(CommandLineInputSpec):
         argstr='-exponent %f ',
         position=9,
         default=6.0,
-        desc='exponent n to use in (area / (distance ^ n)) as the '
-        'weighting function (default 6)',
+        desc='exponent n to use in (area / (distance ^ n)) as the weighting function (default 6)',
     )
 
     corrected_areas = File(
@@ -246,16 +245,14 @@ class MetricResample(WBCommand):
     def _format_arg(self, opt, spec, val):
         if opt in ['current_area', 'new_area']:
             if not self.inputs.area_surfs and not self.inputs.area_metrics:
-                raise ValueError(
-                    f'{opt} was set but neither area_surfs or' ' area_metrics were set'
-                )
+                raise ValueError(f'{opt} was set but neither area_surfs or area_metrics were set')
         if opt == 'method':
             if (
                 val == 'ADAP_BARY_AREA'
                 and not self.inputs.area_surfs
                 and not self.inputs.area_metrics
             ):
-                raise ValueError('Exactly one of area_surfs or area_metrics' ' must be specified')
+                raise ValueError('Exactly one of area_surfs or area_metrics must be specified')
         if opt == 'valid_roi_out' and val:
             # generate a filename and add it to argstr
             roi_out = self._gen_filename(self.inputs.in_file, suffix='_roi')
diff --git a/nibabies/utils/bids.py b/nibabies/utils/bids.py
index ecdba754..ff330330 100644
--- a/nibabies/utils/bids.py
+++ b/nibabies/utils/bids.py
@@ -41,7 +41,9 @@ def write_bidsignore(deriv_dir):
 
 
 def write_derivative_description(bids_dir, deriv_dir, dataset_links=None):
-    from ..__about__ import DOWNLOAD_URL, __packagename__, __version__
+    from nibabies import __version__
+
+    DOWNLOAD_URL = f'https://github.com/nipreps/nibabies/archive/{__version__}.tar.gz'
 
     bids_dir = Path(bids_dir)
     deriv_dir = Path(deriv_dir)
@@ -51,7 +53,7 @@ def write_derivative_description(bids_dir, deriv_dir, dataset_links=None):
         'DatasetType': 'derivative',
         'GeneratedBy': [
             {
-                'Name': __packagename__,
+                'Name': 'NiBabies',
                 'Version': __version__,
                 'CodeURL': DOWNLOAD_URL,
             }
@@ -63,7 +65,7 @@ def write_derivative_description(bids_dir, deriv_dir, dataset_links=None):
     if 'NIBABIES_DOCKER_TAG' in os.environ:
         desc['GeneratedBy'][0]['Container'] = {
             'Type': 'docker',
-            'Tag': f"nipreps/nibabies:{os.environ['NIBABIES_DOCKER_TAG']}",
+            'Tag': f'nipreps/nibabies:{os.environ["NIBABIES_DOCKER_TAG"]}',
         }
     if 'NIBABIES_SINGULARITY_URL' in os.environ:
         desc['GeneratedBy'][0]['Container'] = {
diff --git a/nibabies/workflows/anatomical/fit.py b/nibabies/workflows/anatomical/fit.py
index c692863c..1f59dc3b 100644
--- a/nibabies/workflows/anatomical/fit.py
+++ b/nibabies/workflows/anatomical/fit.py
@@ -170,11 +170,11 @@ def init_infant_anat_fit_wf(
 
     # Stage 2 - Anatomicals
     t1w_buffer = pe.Node(
-        niu.IdentityInterface(fields=['t1w_preproc', 't1w_mask' 't1w_brain']),
+        niu.IdentityInterface(fields=['t1w_preproc', 't1w_maskt1w_brain']),
         name='t1w_buffer',
     )
     t2w_buffer = pe.Node(
-        niu.IdentityInterface(fields=['t2w_preproc', 't2w_mask' 't2w_brain', 't2w_probmap']),
+        niu.IdentityInterface(fields=['t2w_preproc', 't2w_maskt2w_brain', 't2w_probmap']),
         name='t2w_buffer',
     )
     anat_buffer = pe.Node(
@@ -453,7 +453,7 @@ def init_infant_anat_fit_wf(
     t2w_mask = precomputed.get('t2w_mask')
     anat_mask = precomputed.get(f'{anat}_mask')
     refine_mask = False
-    # T1w masking - define pre-emptively
+    # T1w masking - define preemptively
     apply_t1w_mask = pe.Node(ApplyMask(), name='apply_t1w_mask')
     apply_t2w_mask = apply_t1w_mask.clone(name='apply_t2w_mask')
 
diff --git a/nibabies/workflows/anatomical/registration.py b/nibabies/workflows/anatomical/registration.py
index 1a0aa8e4..e404d673 100644
--- a/nibabies/workflows/anatomical/registration.py
+++ b/nibabies/workflows/anatomical/registration.py
@@ -537,9 +537,11 @@ def _load_intermediate_xfms(intermediate, std):
 
 
 def _create_inverse_composite(in_file, out_file='inverse_composite.h5'):
-    """Build a composite transform with SimpleITK.
+    """
+    Build a composite transform with SimpleITK.
 
     This serves as a workaround for a bug in ANTs's CompositeTransformUtil
+    https://github.com/ANTsX/ANTs/issues/1827
     where composite transforms cannot be created with a displacement field placed first.
 
     Parameters
@@ -553,6 +555,8 @@ def _create_inverse_composite(in_file, out_file='inverse_composite.h5'):
     -------
     out_file : str
         Absolute path to the composite transform.
+
+    """
     from pathlib import Path
 
     import SimpleITK as sitk
diff --git a/nibabies/workflows/anatomical/segmentation.py b/nibabies/workflows/anatomical/segmentation.py
index 3dc09a9b..b538c2fd 100644
--- a/nibabies/workflows/anatomical/segmentation.py
+++ b/nibabies/workflows/anatomical/segmentation.py
@@ -288,7 +288,7 @@ def _to_dtype(in_file, dtype='uint8'):
     import numpy as np
 
     img = nb.load(in_file)
-    out_file = Path(f"labels{''.join(Path(in_file).suffixes)}").absolute()
+    out_file = Path(f'labels{"".join(Path(in_file).suffixes)}').absolute()
 
     new_data = np.asanyarray(img.get_fdata(), dtype=dtype)
     img.set_data_dtype(dtype)
diff --git a/nibabies/workflows/base.py b/nibabies/workflows/base.py
index 488162ba..383186c7 100644
--- a/nibabies/workflows/base.py
+++ b/nibabies/workflows/base.py
@@ -118,7 +118,7 @@ def init_nibabies_wf(subworkflows_list: list[SubjectSession]):
             freesurfer_home=os.getenv('FREESURFER_HOME'),
             spaces=execution_spaces.get_fs_spaces(),
         ),
-        name=f"fsdir_run_{config.execution.run_uuid.replace('-', '_')}",
+        name=f'fsdir_run_{config.execution.run_uuid.replace("-", "_")}',
         run_without_submitting=True,
     )
     if config.execution.fs_subjects_dir is not None:
@@ -291,9 +291,9 @@ def init_single_subject_wf(
 
     if subject_data['roi']:
         warnings.warn(
-            f"Lesion mask {subject_data['roi']} found. "
-            "Future versions of NiBabies will use alternative conventions. "
-            "Please refer to the documentation before upgrading.",
+            f'Lesion mask {subject_data["roi"]} found. '
+            'Future versions of NiBabies will use alternative conventions. '
+            'Please refer to the documentation before upgrading.',
             FutureWarning,
             stacklevel=1,
         )
@@ -939,7 +939,7 @@ def map_fieldmap_estimation(
     for bold_file, estimator_key in all_estimators.items():
         if len(estimator_key) > 1:
             config.loggers.workflow.warning(
-                f"Several fieldmaps <{', '.join(estimator_key)}> are "
+                f'Several fieldmaps <{", ".join(estimator_key)}> are '
                 f"'IntendedFor' <{bold_file}>, using {estimator_key[0]}"
             )
             estimator_key[1:] = []
diff --git a/nibabies/workflows/bold/base.py b/nibabies/workflows/bold/base.py
index fc75c6e0..745cbb16 100644
--- a/nibabies/workflows/bold/base.py
+++ b/nibabies/workflows/bold/base.py
@@ -37,8 +37,8 @@ from niworkflows.utils.connections import listify
 
 from nibabies import config
+from nibabies._types import Anatomical
 from nibabies.interfaces import DerivativesDataSink
-from nibabies.types import Anatomical
 from nibabies.utils.misc import estimate_bold_mem_usage
 
 # BOLD workflows
diff --git a/nibabies/workflows/bold/fit.py b/nibabies/workflows/bold/fit.py
index 4ee2a1be..4b9e8da6 100644
--- a/nibabies/workflows/bold/fit.py
+++ b/nibabies/workflows/bold/fit.py
@@ -34,13 +34,13 @@ from sdcflows.workflows.apply.registration import init_coeff2epi_wf
 
 from nibabies import config
+from nibabies._types import Anatomical
 from nibabies.interfaces.reports import FunctionalSummary
 from nibabies.interfaces.resampling import (
     DistortionParameters,
     ReconstructFieldmap,
     ResampleSeries,
 )
-from nibabies.types import Anatomical
 from nibabies.utils.bids import extract_entities
 from nibabies.utils.misc import estimate_bold_mem_usage
 
diff --git a/nibabies/workflows/bold/outputs.py b/nibabies/workflows/bold/outputs.py
index 36884c4a..24cdd4a5 100644
--- a/nibabies/workflows/bold/outputs.py
+++ b/nibabies/workflows/bold/outputs.py
@@ -31,10 +31,10 @@ from niworkflows.utils.images import dseg_label
 
 from nibabies import config
+from nibabies._types import Anatomical
 from nibabies.config import DEFAULT_DISMISS_ENTITIES, DEFAULT_MEMORY_MIN_GB, dismiss_echo
 from nibabies.interfaces import DerivativesDataSink
 from nibabies.interfaces.bids import BIDSURI
-from nibabies.types import Anatomical
 
 
 def prepare_timing_parameters(metadata: dict):
diff --git a/nibabies/workflows/bold/registration.py b/nibabies/workflows/bold/registration.py
index 14b9c6ba..e3e788a5 100644
--- a/nibabies/workflows/bold/registration.py
+++ b/nibabies/workflows/bold/registration.py
@@ -20,7 +20,7 @@ from nipype.pipeline import engine as pe
 
 from nibabies import config, data
-from nibabies.types import AffineDOF, RegistrationInit
+from nibabies._types import AffineDOF, RegistrationInit
 
 DEFAULT_MEMORY_MIN_GB = config.DEFAULT_MEMORY_MIN_GB
 LOGGER = logging.getLogger('nipype.workflow')
diff --git a/nibabies/workflows/bold/stc.py b/nibabies/workflows/bold/stc.py
index 92c0fe53..949ec87b 100644
--- a/nibabies/workflows/bold/stc.py
+++ b/nibabies/workflows/bold/stc.py
@@ -115,7 +115,7 @@ def init_bold_stc_wf(metadata, name='bold_stc_wf'):
     slice_timing_correction = pe.Node(
         TShift(
             outputtype='NIFTI_GZ',
-            tr=f"{metadata['RepetitionTime']}s",
+            tr=f'{metadata["RepetitionTime"]}s',
             slice_timing=metadata['SliceTiming'],
             slice_encoding_direction=metadata.get('SliceEncodingDirection', 'k'),
             tzero=tzero,
diff --git a/scripts/fetch_templates.py b/scripts/fetch_templates.py
index 2ccfd1f7..445e0fa3 100755
--- a/scripts/fetch_templates.py
+++ b/scripts/fetch_templates.py
@@ -1,6 +1,6 @@
 #!/usr/bin/env python
 
-'Pre-emptive caching of commonly used TemplateFlow templates'
+'Preemptive caching of commonly used TemplateFlow templates'
 
 import templateflow.api as tf
 