`_.
"""
-from os import environ, path as op
+from importlib.resources import files
+from os import environ
from re import findall, search
from typing import Optional
@@ -32,7 +33,7 @@
str(row[2]) if row[2] else None,
)
for row in loadtxt(
- op.join(op.dirname(__file__), "BIDS_identifiers.tsv"),
+ str(files("CPAC").joinpath("resources/templates/BIDS_identifiers.tsv")),
dtype="str",
delimiter="\t",
)
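This patch repeatedly swaps the deprecated `pkg_resources` API for the standard-library `importlib.resources`, as in the hunk above. A minimal sketch of the equivalence — assuming a regular on-disk install, where `files()` resolves to a real filesystem path:

```python
from importlib.resources import files

import pkg_resources  # deprecated upstream; being removed by this patch

# Old spelling, as removed throughout this patch:
old = pkg_resources.resource_filename("CPAC", "resources/cpac_outputs.tsv")
# New spelling, as added throughout this patch:
new = str(files("CPAC").joinpath("resources/cpac_outputs.tsv"))
assert old == new
```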
diff --git a/CPAC/resources/tests/test_templates.py b/CPAC/resources/tests/test_templates.py
index 13a4f72745..048cbe9b1c 100644
--- a/CPAC/resources/tests/test_templates.py
+++ b/CPAC/resources/tests/test_templates.py
@@ -19,6 +19,7 @@
import os
import pytest
+import nipype.pipeline.engine as pe
from CPAC.pipeline import ALL_PIPELINE_CONFIGS
from CPAC.pipeline.engine import ingress_pipeconfig_paths, ResourcePool
@@ -29,11 +30,11 @@
@pytest.mark.parametrize("pipeline", ALL_PIPELINE_CONFIGS)
def test_packaged_path_exists(pipeline):
"""
- Check that all local templates are included in image at at
- least one resolution.
+    Check that all local templates are included in the image at at least one resolution.
"""
- rpool = ingress_pipeconfig_paths(
- Preconfiguration(pipeline), ResourcePool(), "pytest"
+ wf = pe.Workflow(name="test")
+ wf, rpool = ingress_pipeconfig_paths(
+ wf, Preconfiguration(pipeline), ResourcePool(), "pytest"
)
for resource in rpool.rpool.values():
node = next(iter(resource.values())).get("data")[0]
diff --git a/CPAC/seg_preproc/seg_preproc.py b/CPAC/seg_preproc/seg_preproc.py
index f769cf14b3..69db6d60cb 100644
--- a/CPAC/seg_preproc/seg_preproc.py
+++ b/CPAC/seg_preproc/seg_preproc.py
@@ -1,4 +1,4 @@
-# Copyright (C) 2012-2023 C-PAC Developers
+# Copyright (C) 2012-2025 C-PAC Developers
# This file is part of C-PAC.
@@ -35,7 +35,6 @@
from CPAC.utils.interfaces.function.seg_preproc import (
pick_tissue_from_labels_file_interface,
)
-from CPAC.utils.utils import check_prov_for_regtool
def process_segment_map(wf_name, use_priors, use_custom_threshold, reg_tool):
@@ -536,7 +535,6 @@ def tissue_seg_fsl_fast(wf, cfg, strat_pool, pipe_num, opt=None):
# triggered by 'segments' boolean input (-g or --segments)
# 'probability_maps' output is a list of individual probability maps
# triggered by 'probability_maps' boolean input (-p)
-
segment = pe.Node(
interface=fsl.FAST(),
name=f"segment_{pipe_num}",
@@ -596,10 +594,8 @@ def tissue_seg_fsl_fast(wf, cfg, strat_pool, pipe_num, opt=None):
xfm = "from-template_to-T1w_mode-image_desc-linear_xfm"
if "space-longitudinal" in resource:
xfm = "from-template_to-longitudinal_mode-image_desc-linear_xfm"
- xfm_prov = strat_pool.get_cpac_provenance(xfm)
- reg_tool = check_prov_for_regtool(xfm_prov)
+ reg_tool = strat_pool.reg_tool(xfm)
else:
- xfm_prov = None
reg_tool = None
xfm = None
@@ -752,10 +748,7 @@ def tissue_seg_fsl_fast(wf, cfg, strat_pool, pipe_num, opt=None):
outputs=["label-CSF_mask", "label-GM_mask", "label-WM_mask"],
)
def tissue_seg_T1_template_based(wf, cfg, strat_pool, pipe_num, opt=None):
- xfm_prov = strat_pool.get_cpac_provenance(
- "from-template_to-T1w_mode-image_desc-linear_xfm"
- )
- reg_tool = check_prov_for_regtool(xfm_prov)
+ reg_tool = strat_pool.reg_tool("from-template_to-T1w_mode-image_desc-linear_xfm")
use_ants = reg_tool == "ants"
csf_template2t1 = tissue_mask_template_to_t1(f"CSF_{pipe_num}", use_ants)
@@ -806,10 +799,9 @@ def tissue_seg_T1_template_based(wf, cfg, strat_pool, pipe_num, opt=None):
],
)
def tissue_seg_EPI_template_based(wf, cfg, strat_pool, pipe_num, opt=None):
- xfm_prov = strat_pool.get_cpac_provenance(
+ reg_tool = strat_pool.reg_tool(
"from-EPItemplate_to-bold_mode-image_desc-linear_xfm"
)
- reg_tool = check_prov_for_regtool(xfm_prov)
use_ants = reg_tool == "ants"
csf_template2t1 = tissue_mask_template_to_t1("CSF", use_ants)
diff --git a/CPAC/seg_preproc/tests/__init__.py b/CPAC/seg_preproc/tests/__init__.py
new file mode 100644
index 0000000000..788d202e81
--- /dev/null
+++ b/CPAC/seg_preproc/tests/__init__.py
@@ -0,0 +1,17 @@
+# Copyright (C) 2025 C-PAC Developers
+
+# This file is part of C-PAC.
+
+# C-PAC is free software: you can redistribute it and/or modify it under
+# the terms of the GNU Lesser General Public License as published by the
+# Free Software Foundation, either version 3 of the License, or (at your
+# option) any later version.
+
+# C-PAC is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
+# License for more details.
+
+# You should have received a copy of the GNU Lesser General Public
+# License along with C-PAC. If not, see <https://www.gnu.org/licenses/>.
+"""Tests for segmentation utilities."""
diff --git a/CPAC/seg_preproc/tests/test_utils.py b/CPAC/seg_preproc/tests/test_utils.py
new file mode 100644
index 0000000000..de5521aa4c
--- /dev/null
+++ b/CPAC/seg_preproc/tests/test_utils.py
@@ -0,0 +1,34 @@
+# Copyright (C) 2025 C-PAC Developers
+
+# This file is part of C-PAC.
+
+# C-PAC is free software: you can redistribute it and/or modify it under
+# the terms of the GNU Lesser General Public License as published by the
+# Free Software Foundation, either version 3 of the License, or (at your
+# option) any later version.
+
+# C-PAC is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
+# License for more details.
+
+# You should have received a copy of the GNU Lesser General Public
+# License along with C-PAC. If not, see <https://www.gnu.org/licenses/>.
+"""Tests for segmentation utilities."""
+
+import subprocess
+
+
+def test_ants_joint_label_fusion_script() -> None:
+ """Test antsJointLabelFusion.sh script can run in this environment."""
+ try:
+ subprocess.run(
+ ["antsJointLabelFusion.sh"],
+ check=True,
+ capture_output=True,
+ )
+ except subprocess.CalledProcessError as e:
+ # There's no explicit 'help' option, but if the script can run,
+ # the error message does not contain the string "Error".
+ if "Error" in e.stderr.decode():
+ raise e
diff --git a/CPAC/surface/surf_preproc.py b/CPAC/surface/surf_preproc.py
index 1defe4e2d1..017ce4d604 100644
--- a/CPAC/surface/surf_preproc.py
+++ b/CPAC/surface/surf_preproc.py
@@ -928,7 +928,7 @@ def run_surface(
[
"space-template_desc-head_T1w",
"space-template_desc-brain_T1w",
- "space-template_desc-T1w_mask",
+ "space-template_desc-brain_mask",
],
[
"from-T1w_to-template_mode-image_xfm",
@@ -1202,7 +1202,7 @@ def surface_postproc(wf, cfg, strat_pool, pipe_num, opt=None):
space_temp = [
"space-template_desc-head_T1w",
"space-template_desc-brain_T1w",
- "space-template_desc-T1w_mask",
+ "space-template_desc-brain_mask",
]
atlas_xfm = [
"from-T1w_to-template_mode-image_xfm",
diff --git a/CPAC/surface/tests/test_config.py b/CPAC/surface/tests/test_config.py
index 046ea8fb55..97d248ed3e 100644
--- a/CPAC/surface/tests/test_config.py
+++ b/CPAC/surface/tests/test_config.py
@@ -1,31 +1,41 @@
+# Copyright (C) 2022-2025 C-PAC Developers
+
+# This file is part of C-PAC.
+
+# C-PAC is free software: you can redistribute it and/or modify it under
+# the terms of the GNU Lesser General Public License as published by the
+# Free Software Foundation, either version 3 of the License, or (at your
+# option) any later version.
+
+# C-PAC is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
+# License for more details.
+
+# You should have received a copy of the GNU Lesser General Public
+# License along with C-PAC. If not, see <https://www.gnu.org/licenses/>.
"""Tests for surface configuration."""
-import os
+from pathlib import Path
+from typing import cast
-import pkg_resources as p
import pytest
import yaml
from CPAC.pipeline.cpac_pipeline import run_workflow
+from CPAC.resources.configs import CONFIGS_PATH
from CPAC.utils.configuration import Configuration
@pytest.mark.skip(reason="timing out for unrelated reasons")
@pytest.mark.timeout(60)
-def test_duplicate_freesurfer(tmp_path):
+def test_duplicate_freesurfer(tmp_path: Path) -> None:
"""The pipeline should build fast if freesurfer is not self-duplicating."""
config = Configuration(yaml.safe_load("FROM: abcd-options"))
- with open(
- p.resource_filename(
- "CPAC",
- os.path.join("resources", "configs", "data_config_S3-BIDS-ABIDE.yml"),
- ),
- "r",
- ) as data_config:
+ with (CONFIGS_PATH / "data_config_S3-BIDS-ABIDE.yml").open("r") as data_config:
sub_dict = yaml.safe_load(data_config)[0]
for directory in ["output", "working", "log", "crash_log"]:
directory_key = ["pipeline_setup", f"{directory}_directory", "path"]
- config[directory_key] = os.path.join(
- tmp_path, config[directory_key].lstrip("/")
- )
+ item = cast(str, config[directory_key])
+ config[directory_key] = str(tmp_path / item.lstrip("/"))
run_workflow(sub_dict, config, False, test_config=True)
diff --git a/CPAC/surface/tests/test_installation.py b/CPAC/surface/tests/test_installation.py
index 0af0a9621a..3f53330435 100644
--- a/CPAC/surface/tests/test_installation.py
+++ b/CPAC/surface/tests/test_installation.py
@@ -1,4 +1,4 @@
-# Copyright (C) 2023 C-PAC Developers
+# Copyright (C) 2023-2025 C-PAC Developers
# This file is part of C-PAC.
@@ -17,18 +17,27 @@
"""Tests for requisite surface prerequisites."""
import os
+from typing import Literal
import pytest
from CPAC.utils.tests.test_utils import _installation_check
-@pytest.mark.parametrize("executable", ["bc", "csh"])
-@pytest.mark.skipif(
- "FREESURFER_HOME" not in os.environ
- or not os.path.exists(os.environ["FREESURFER_HOME"]),
- reason="We don't need these dependencies if we don't have FreeSurfer.",
+@pytest.mark.parametrize(
+ "executable",
+ [
+ "bc",
+ pytest.param(
+ "csh",
+ marks=pytest.mark.skipif(
+ "FREESURFER_HOME" not in os.environ
+ or not os.path.exists(os.environ["FREESURFER_HOME"]),
+ reason="We don't need this dependency if we don't have FreeSurfer.",
+ ),
+ ),
+ ],
)
-def test_executable(executable):
+def test_executable(executable: Literal["bc"] | Literal["csh"]) -> None:
"""Make sure executable is installed."""
_installation_check(executable, "--version")
diff --git a/CPAC/utils/bids_utils.py b/CPAC/utils/bids_utils.py
index 34e72d430e..4ed9e45e7c 100755
--- a/CPAC/utils/bids_utils.py
+++ b/CPAC/utils/bids_utils.py
@@ -14,10 +14,13 @@
# You should have received a copy of the GNU Lesser General Public
# License along with C-PAC. If not, see <https://www.gnu.org/licenses/>.
+from base64 import b64decode
+from collections.abc import Iterable
import json
import os
import re
import sys
+from typing import Any, Callable, Optional
from warnings import warn
from botocore.exceptions import BotoCoreError
@@ -26,6 +29,16 @@
from CPAC.utils.monitoring import UTLOGGER
+class SpecifiedBotoCoreError(BotoCoreError):
+ """Specified :py:class:`~botocore.exceptions.BotoCoreError`."""
+
+ def __init__(self, msg: str, *args, **kwargs) -> None:
+ """Initialize BotoCoreError with message."""
+ msg = msg.format(**kwargs)
+ Exception.__init__(self, msg)
+ self.kwargs = kwargs
+
+
def bids_decode_fname(file_path, dbg=False, raise_error=True):
f_dict = {}
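`SpecifiedBotoCoreError` exists because `botocore.exceptions.BotoCoreError` builds its message from a class-level `fmt` template plus keyword arguments rather than accepting a free-form positional message, which is what the old `raise BotoCoreError(msg)` calls assumed. A quick sketch of the subclass in use (the S3 key is a hypothetical example):

```python
from botocore.exceptions import BotoCoreError

from CPAC.utils.bids_utils import SpecifiedBotoCoreError

try:
    raise SpecifiedBotoCoreError(
        msg="Error retrieving {key}", key="sub-01/anat/sub-01_T1w.nii.gz"
    )
except BotoCoreError as error:  # still catchable as a BotoCoreError
    print(error)  # Error retrieving sub-01/anat/sub-01_T1w.nii.gz
```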
@@ -842,7 +855,7 @@ def collect_bids_files_configs(bids_dir, aws_input_creds=""):
f"Error retrieving {s3_obj.key.replace(prefix, '')}"
f" ({e.message})"
)
- raise BotoCoreError(msg) from e
+ raise SpecifiedBotoCoreError(msg) from e
elif "nii" in str(s3_obj.key):
file_paths.append(
str(s3_obj.key).replace(prefix, "").lstrip("/")
@@ -868,9 +881,15 @@ def collect_bids_files_configs(bids_dir, aws_input_creds=""):
): json.load(open(os.path.join(root, f), "r"))
}
)
- except UnicodeDecodeError:
+ except UnicodeDecodeError as unicode_decode_error:
msg = f"Could not decode {os.path.join(root, f)}"
- raise UnicodeDecodeError(msg)
+ raise UnicodeDecodeError(
+ unicode_decode_error.encoding,
+ unicode_decode_error.object,
+ unicode_decode_error.start,
+ unicode_decode_error.end,
+ msg,
+ )
if not file_paths and not config_dict:
msg = (
@@ -983,15 +1002,35 @@ def insert_entity(resource, key, value):
return "_".join([*new_entities[0], f"{key}-{value}", *new_entities[1], suff])
-def load_yaml_config(config_filename, aws_input_creds):
+def apply_modifications(
+ yaml_contents: str, modifications: Optional[list[Callable[[str], str]]]
+) -> str:
+ """Apply modification functions to YAML contents"""
+ if modifications:
+ for modification in modifications:
+ yaml_contents = modification(yaml_contents)
+ return yaml_contents
+
+
+def load_yaml_config(
+ config_filename: str,
+ aws_input_creds,
+ modifications: Optional[list[Callable[[str], str]]] = None,
+) -> dict | list | str:
+ """Load a YAML config file, possibly from AWS, with modifications applied.
+
+    `modifications` should be a list of functions that each take a single string argument (the loaded YAML contents) and return a single string (the modified YAML contents).
+ """
if config_filename.lower().startswith("data:"):
try:
- header, encoded = config_filename.split(",", 1)
- config_content = b64decode(encoded)
+ _header, encoded = config_filename.split(",", 1)
+ config_content = apply_modifications(
+ b64decode(encoded).decode("utf-8"), modifications
+ )
return yaml.safe_load(config_content)
- except:
+ except Exception:
msg = f"Error! Could not find load config from data URI {config_filename}"
- raise BotoCoreError(msg)
+ raise SpecifiedBotoCoreError(msg=msg)
if config_filename.lower().startswith("s3://"):
# s3 paths begin with s3://bucket/
@@ -1013,7 +1052,8 @@ def load_yaml_config(config_filename, aws_input_creds):
config_filename = os.path.realpath(config_filename)
try:
- return yaml.safe_load(open(config_filename, "r"))
+ with open(config_filename, "r") as _f:
+ return yaml.safe_load(apply_modifications(_f.read(), modifications))
except IOError:
msg = f"Error! Could not find config file {config_filename}"
raise FileNotFoundError(msg)
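`load_yaml_config` now accepts local paths, `s3://` URIs, and base64-encoded `data:` URIs, applying each function in `modifications` to the raw text before parsing. A small usage sketch with a data URI:

```python
from base64 import b64encode

from CPAC.utils.bids_utils import coerce_data_config_strings, load_yaml_config

yaml_text = "subject_id: 0050642\n"
uri = "data:text/yaml;base64," + b64encode(yaml_text.encode("utf-8")).decode()
config = load_yaml_config(
    uri, aws_input_creds=None, modifications=[coerce_data_config_strings]
)
assert config == {"subject_id": "0050642"}  # a str, leading zeros intact
```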
@@ -1110,6 +1150,25 @@ def create_cpac_data_config(
return sub_list
+def _check_value_type(
+ sub_list: list[dict[str, Any]],
+ keys: list[str] = ["subject_id", "unique_id"],
+ value_type: type = int,
+ any_or_all: Callable[[Iterable], bool] = any,
+) -> bool:
+ """Check if any or all of a key in a sub_list is of a given type."""
+ return any_or_all(
+ isinstance(sub.get(key), value_type) for key in keys for sub in sub_list
+ )
+
+
+def coerce_data_config_strings(contents: str) -> str:
+ """Coerge `subject_id` and `unique_id` to be strings."""
+ for key in ["subject_id: ", "unique_id: "]:
+ contents = re.sub(f"{key}(?!!!)", f"{key}!!str ", contents)
+ return contents.replace(": !!str !!", ": !!")
+
+
def load_cpac_data_config(data_config_file, participant_labels, aws_input_creds):
"""
Loads the file as a check to make sure it is available and readable.
@@ -1127,7 +1186,9 @@ def load_cpac_data_config(data_config_file, participant_labels, aws_input_creds)
-------
list
"""
- sub_list = load_yaml_config(data_config_file, aws_input_creds)
+ sub_list: list[dict[str, str]] = load_yaml_config(
+ data_config_file, aws_input_creds, modifications=[coerce_data_config_strings]
+ )
if participant_labels:
sub_list = [
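The motivation for this modification hook: YAML parses bare numerals as integers, so purely numeric IDs would come back as `int`s (losing any leading zeros), which is what `_check_value_type` is designed to detect. `coerce_data_config_strings` pins the type with an explicit `!!str` tag, e.g.:

```python
import yaml

from CPAC.utils.bids_utils import coerce_data_config_strings

contents = "subject_id: 25659\nunique_id: 1"
print(yaml.safe_load(contents))  # {'subject_id': 25659, 'unique_id': 1}
coerced = coerce_data_config_strings(contents)
print(yaml.safe_load(coerced))  # {'subject_id': '25659', 'unique_id': '1'}
```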
diff --git a/CPAC/utils/build_data_config.py b/CPAC/utils/build_data_config.py
index 8be6c6b234..6d1e2d9f0f 100644
--- a/CPAC/utils/build_data_config.py
+++ b/CPAC/utils/build_data_config.py
@@ -16,14 +16,12 @@
# License along with C-PAC. If not, see <https://www.gnu.org/licenses/>.
"""Build a C-PAC data configuration."""
-from logging import basicConfig, INFO
from pathlib import Path
from typing import Any
from CPAC.utils.monitoring.custom_logging import getLogger
logger = getLogger("CPAC.utils.data-config")
-basicConfig(format="%(message)s", level=INFO)
def _cannot_write(file_name: Path | str) -> None:
@@ -1828,8 +1826,7 @@ def util_copy_template(template_type=None):
import os
import shutil
- import pkg_resources as p
-
+ from CPAC.resources.configs import CONFIGS_PATH
from CPAC.utils.configuration import preconfig_yaml
template_type = "data_settings" if not template_type else template_type
@@ -1837,10 +1834,7 @@ def util_copy_template(template_type=None):
settings_template = (
preconfig_yaml("default")
if (template_type == "pipeline_config")
- else p.resource_filename(
- "CPAC",
- os.path.join("resources", "configs", f"{template_type}_template.yml"),
- )
+ else str(CONFIGS_PATH / f"{template_type}_template.yml")
)
settings_file = os.path.join(os.getcwd(), f"{template_type}.yml")
diff --git a/CPAC/utils/configuration/configuration.py b/CPAC/utils/configuration/configuration.py
index 8444cce105..0d94752487 100644
--- a/CPAC/utils/configuration/configuration.py
+++ b/CPAC/utils/configuration/configuration.py
@@ -1,4 +1,4 @@
-# Copyright (C) 2022-2024 C-PAC Developers
+# Copyright (C) 2022-2025 C-PAC Developers
# This file is part of C-PAC.
@@ -22,9 +22,9 @@
from warnings import warn
from click import BadParameter
-import pkg_resources as p
import yaml
+from CPAC.resources.configs import CONFIGS_PATH
from .diff import dct_diff
CONFIG_KEY_TYPE = str | list[str]
@@ -50,7 +50,10 @@ class Configuration:
will form the base of the Configuration object with the values in
the given dictionary overriding matching keys in the base at any
depth. If no ``FROM`` key is included, the base Configuration is
- the default Configuration.
+ the blank preconfiguration.
+
+ .. versionchanged:: 1.8.5
+ From version 1.8.0 to version 1.8.5, unspecified keys were based on the default configuration rather than the blank preconfiguration.
``FROM`` accepts either the name of a preconfigured pipeline or a
path to a YAML file.
@@ -734,10 +737,7 @@ def preconfig_yaml(preconfig_name="default", load=False):
if load:
with open(preconfig_yaml(preconfig_name), "r", encoding="utf-8") as _f:
return yaml.safe_load(_f)
- return p.resource_filename(
- "CPAC",
- os.path.join("resources", "configs", f"pipeline_config_{preconfig_name}.yml"),
- )
+ return str(CONFIGS_PATH / f"pipeline_config_{preconfig_name}.yml")
class Preconfiguration(Configuration):
diff --git a/CPAC/utils/create_fsl_flame_preset.py b/CPAC/utils/create_fsl_flame_preset.py
index 856c10a3b4..848fe5e9fe 100644
--- a/CPAC/utils/create_fsl_flame_preset.py
+++ b/CPAC/utils/create_fsl_flame_preset.py
@@ -1,4 +1,4 @@
-# Copyright (C) 2018-2024 C-PAC Developers
+# Copyright (C) 2018-2025 C-PAC Developers
# This file is part of C-PAC.
@@ -1092,20 +1092,6 @@ def run(
import os
- import pandas as pd
- import pkg_resources as p
-
- # make life easy
- keys_csv = p.resource_filename("CPAC", "resources/cpac_outputs.csv")
- try:
- pd.read_csv(keys_csv)
- except Exception as e:
- err = (
- "\n[!] Could not access or read the cpac_outputs.csv "
- f"resource file:\n{keys_csv}\n\nError details {e}\n"
- )
- raise Exception(err)
-
if derivative_list == "all":
derivative_list = [
"alff",
diff --git a/CPAC/utils/datasource.py b/CPAC/utils/datasource.py
index 008e674c2d..89416edd15 100644
--- a/CPAC/utils/datasource.py
+++ b/CPAC/utils/datasource.py
@@ -1,4 +1,4 @@
-# Copyright (C) 2012-2024 C-PAC Developers
+# Copyright (C) 2012-2025 C-PAC Developers
# This file is part of C-PAC.
@@ -20,6 +20,7 @@
import json
from pathlib import Path
import re
+from typing import Any, Optional
from voluptuous import RequiredFieldInvalid
from nipype.interfaces import utility as util
@@ -376,6 +377,8 @@ def create_fmap_datasource(fmap_dct, wf_name="fmap_datasource"):
def get_fmap_phasediff_metadata(data_config_scan_params):
"""Return the scan parameters for a field map phasediff scan."""
+ from CPAC.utils.utils import get_fmap_type
+
if (
not isinstance(data_config_scan_params, dict)
and ".json" in data_config_scan_params
@@ -397,7 +400,12 @@ def get_fmap_phasediff_metadata(data_config_scan_params):
dwell_time = data_config_scan_params.get("DwellTime")
pe_direction = data_config_scan_params.get("PhaseEncodingDirection")
total_readout = data_config_scan_params.get("TotalReadoutTime")
+ if "EffectiveEchoSpacing" in data_config_scan_params:
+ effective_echo_spacing = data_config_scan_params.get("EffectiveEchoSpacing")
+ else:
+ effective_echo_spacing = None
+ fmap_type = get_fmap_type(data_config_scan_params)
return (
dwell_time,
pe_direction,
@@ -405,6 +413,8 @@ def get_fmap_phasediff_metadata(data_config_scan_params):
echo_time,
echo_time_one,
echo_time_two,
+ effective_echo_spacing,
+ fmap_type,
)
@@ -463,12 +473,12 @@ def gather_echo_times(echotime_1, echotime_2, echotime_3=None, echotime_4=None):
def match_epi_fmaps(
- bold_pedir,
- epi_fmap_one,
- epi_fmap_params_one,
- epi_fmap_two=None,
- epi_fmap_params_two=None,
-):
+ bold_pedir: str,
+ epi_fmap_one: str,
+ epi_fmap_params_one: dict[str, Any],
+ epi_fmap_two: Optional[str] = None,
+ epi_fmap_params_two: Optional[dict[str, Any]] = None,
+) -> tuple[str, str]:
"""Match EPI field maps to the BOLD scan.
Parse the field map files in the data configuration and determine which
@@ -504,13 +514,41 @@ def match_epi_fmaps(
with open(scan_params, "r") as f:
scan_params = json.load(f)
if "PhaseEncodingDirection" in scan_params:
- epi_pedir = scan_params["PhaseEncodingDirection"]
+ epi_pedir: str | bytes = scan_params["PhaseEncodingDirection"]
+ if isinstance(epi_pedir, bytes):
+ epi_pedir = epi_pedir.decode("utf-8")
if epi_pedir == bold_pedir:
same_pe_epi = epi_scan
elif epi_pedir[0] == bold_pedir[0]:
opposite_pe_epi = epi_scan
- return (opposite_pe_epi, same_pe_epi)
+    if same_pe_epi is None:
+        msg = f"No same-phase-encoding EPI field map found for {bold_pedir}"
+        raise FileNotFoundError(msg)
+    if opposite_pe_epi is None:
+        msg = f"No opposite-phase-encoding EPI field map found for {bold_pedir}"
+        raise FileNotFoundError(msg)
+
+ return opposite_pe_epi, same_pe_epi
+
+
+def match_epi_fmaps_function_node(name: str = "match_epi_fmaps"):
+ """Return a Function node for `~CPAC.utils.datasource.match_epi_fmaps`."""
+ return pe.Node(
+ Function(
+ input_names=[
+ "bold_pedir",
+ "epi_fmap_one",
+ "epi_fmap_params_one",
+ "epi_fmap_two",
+ "epi_fmap_params_two",
+ ],
+ output_names=["opposite_pe_epi", "same_pe_epi"],
+ function=match_epi_fmaps,
+ as_module=True,
+ ),
+ name=name,
+ )
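The matching rule in `match_epi_fmaps`: an identical `PhaseEncodingDirection` code marks the same-PE field map, while a code that shares the axis letter but differs in polarity marks the opposite-PE map. A hypothetical illustration (filenames invented; dict parameters passed directly, per the new type hints):

```python
from CPAC.utils.datasource import match_epi_fmaps

opposite, same = match_epi_fmaps(
    bold_pedir="j-",
    epi_fmap_one="sub-01_dir-AP_epi.nii.gz",
    epi_fmap_params_one={"PhaseEncodingDirection": "j-"},
    epi_fmap_two="sub-01_dir-PA_epi.nii.gz",
    epi_fmap_params_two={"PhaseEncodingDirection": "j"},
)
assert same == "sub-01_dir-AP_epi.nii.gz"  # "j-" == "j-"
assert opposite == "sub-01_dir-PA_epi.nii.gz"  # "j" shares the axis, flips polarity
```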
def ingress_func_metadata(
@@ -524,18 +562,104 @@ def ingress_func_metadata(
num_strat=None,
):
"""Ingress metadata for functional scans."""
+ from CPAC.utils.utils import get_fmap_build_info, get_fmap_metadata_at_build_time
+
name_suffix = ""
for suffix_part in (unique_id, num_strat):
if suffix_part is not None:
name_suffix += f"_{suffix_part}"
- # Grab field maps
+
+ scan_params = pe.Node(
+ Function(
+ input_names=[
+ "data_config_scan_params",
+ "subject_id",
+ "scan",
+ "pipeconfig_tr",
+ "pipeconfig_tpattern",
+ "pipeconfig_start_indx",
+ "pipeconfig_stop_indx",
+ ],
+ output_names=[
+ "tr",
+ "tpattern",
+ "template",
+ "ref_slice",
+ "start_indx",
+ "stop_indx",
+ "pe_direction",
+ "effective_echo_spacing",
+ ],
+ function=get_scan_params,
+ ),
+ name=f"bold_scan_params_{subject_id}{name_suffix}",
+ )
+ scan_params.inputs.subject_id = subject_id
+ scan_params.inputs.set(
+ pipeconfig_start_indx=cfg.functional_preproc["truncation"]["start_tr"],
+ pipeconfig_stop_indx=cfg.functional_preproc["truncation"]["stop_tr"],
+ )
+
+ node, out = rpool.get("scan")["['scan:func_ingress']"]["data"]
+ wf.connect(node, out, scan_params, "scan")
+
+ # Workaround for extracting metadata with ingress
+ if rpool.check_rpool("derivatives-dir"):
+ selectrest_json = pe.Node(
+ function.Function(
+ input_names=["scan", "rest_dict", "resource"],
+ output_names=["file_path"],
+ function=get_rest,
+ as_module=True,
+ ),
+ name="selectrest_json",
+ )
+ selectrest_json.inputs.rest_dict = sub_dict
+ selectrest_json.inputs.resource = "scan_parameters"
+ wf.connect(node, out, selectrest_json, "scan")
+ wf.connect(selectrest_json, "file_path", scan_params, "data_config_scan_params")
+ else:
+ # wire in the scan parameter workflow
+ node, out = rpool.get("scan-params")["['scan-params:scan_params_ingress']"][
+ "data"
+ ]
+ wf.connect(node, out, scan_params, "data_config_scan_params")
+
+ rpool.set_data("TR", scan_params, "tr", {}, "", "func_metadata_ingress")
+ rpool.set_data("tpattern", scan_params, "tpattern", {}, "", "func_metadata_ingress")
+ rpool.set_data("template", scan_params, "template", {}, "", "func_metadata_ingress")
+ rpool.set_data(
+ "start-tr", scan_params, "start_indx", {}, "", "func_metadata_ingress"
+ )
+ rpool.set_data("stop-tr", scan_params, "stop_indx", {}, "", "func_metadata_ingress")
+ rpool.set_data(
+ "pe-direction", scan_params, "pe_direction", {}, "", "func_metadata_ingress"
+ )
+ rpool.set_data(
+ "effectiveEchoSpacing",
+ scan_params,
+ "effective_echo_spacing",
+ {},
+ "",
+ "func_metadata_ingress",
+ )
+
diff = False
blip = False
fmap_rp_list = []
fmap_TE_list = []
+
if "fmap" in sub_dict:
second = False
for orig_key in sub_dict["fmap"]:
+ fmap_metadata = get_fmap_metadata_at_build_time(
+ sub_dict,
+ orig_key,
+ input_creds_path,
+ cfg.pipeline_setup["working_directory"]["path"],
+ )
+ build_info = get_fmap_build_info(fmap_metadata)
+
gather_fmap = create_fmap_datasource(
sub_dict["fmap"], f"fmap_gather_{orig_key}_{subject_id}"
)
@@ -565,7 +689,10 @@ def ingress_func_metadata(
fmap_rp_list.append(key)
- get_fmap_metadata_imports = ["import json"]
+ get_fmap_metadata_imports = [
+ "import json",
+ "from CPAC.utils.utils import get_fmap_type",
+ ]
get_fmap_metadata = pe.Node(
Function(
input_names=["data_config_scan_params"],
@@ -576,6 +703,8 @@ def ingress_func_metadata(
"echo_time",
"echo_time_one",
"echo_time_two",
+ "effective_echo_spacing",
+ "fmap_type",
],
function=get_fmap_phasediff_metadata,
imports=get_fmap_metadata_imports,
@@ -590,14 +719,16 @@ def ingress_func_metadata(
"data_config_scan_params",
)
- if "phase" in key:
- # leave it open to all three options, in case there is a
- # phasediff image with either a single EchoTime field (which
- # usually matches one of the magnitude EchoTimes), OR
- # a phasediff with an EchoTime1 and EchoTime2
+ rpool.set_data(
+ f"{key}-fmap-type",
+ get_fmap_metadata,
+ "fmap_type",
+ {},
+ "",
+ "fmap_type_ingress",
+ )
- # at least one of these rpool keys will have a None value,
- # which will be sorted out in gather_echo_times below
+ if build_info["needs_echo_times"]:
rpool.set_data(
f"{key}-TE",
get_fmap_metadata,
@@ -628,16 +759,11 @@ def ingress_func_metadata(
)
fmap_TE_list.append(f"{key}-TE2")
- elif "magnitude" in key:
- rpool.set_data(
- f"{key}-TE",
- get_fmap_metadata,
- "echo_time",
- {},
- "",
- "fmap_TE_ingress",
- )
- fmap_TE_list.append(f"{key}-TE")
+ if build_info["needs_phasediff_processing"]:
+ diff = True
+
+ if build_info["is_epi"] or re.match("epi_[AP]{2}", orig_key):
+ blip = True
rpool.set_data(
f"{key}-dwell",
@@ -664,23 +790,8 @@ def ingress_func_metadata(
"fmap_readout_ingress",
)
- if "phase" in key or "mag" in key:
- diff = True
-
- if re.match("epi_[AP]{2}", orig_key):
- blip = True
-
- if diff:
- calc_delta_ratio = pe.Node(
- Function(
- input_names=["effective_echo_spacing", "echo_times"],
- output_names=["deltaTE", "ees_asym_ratio"],
- function=calc_delta_te_and_asym_ratio,
- imports=["from typing import Optional"],
- ),
- name=f"diff_distcor_calc_delta{name_suffix}",
- )
-
+ # Set up phasediff processing workflow if needed
+ if diff and fmap_TE_list:
gather_echoes = pe.Node(
Function(
input_names=[
@@ -695,7 +806,8 @@ def ingress_func_metadata(
name="fugue_gather_echo_times",
)
- for idx, fmap_file in enumerate(fmap_TE_list, start=1):
+ # Connect available echo times
+ for idx, fmap_file in enumerate(fmap_TE_list[:4], start=1):
try:
node, out_file = rpool.get(fmap_file)[
f"['{fmap_file}:fmap_TE_ingress']"
@@ -704,103 +816,34 @@ def ingress_func_metadata(
except KeyError:
pass
- wf.connect(gather_echoes, "echotime_list", calc_delta_ratio, "echo_times")
-
- # Add in nodes to get parameters from configuration file
- # a node which checks if scan_parameters are present for each scan
- scan_params = pe.Node(
- Function(
- input_names=[
- "data_config_scan_params",
- "subject_id",
- "scan",
- "pipeconfig_tr",
- "pipeconfig_tpattern",
- "pipeconfig_start_indx",
- "pipeconfig_stop_indx",
- ],
- output_names=[
- "tr",
- "tpattern",
- "template",
- "ref_slice",
- "start_indx",
- "stop_indx",
- "pe_direction",
- "effective_echo_spacing",
- ],
- function=get_scan_params,
- ),
- name=f"bold_scan_params_{subject_id}{name_suffix}",
- )
- scan_params.inputs.subject_id = subject_id
- scan_params.inputs.set(
- pipeconfig_start_indx=cfg.functional_preproc["truncation"]["start_tr"],
- pipeconfig_stop_indx=cfg.functional_preproc["truncation"]["stop_tr"],
- )
-
- node, out = rpool.get("scan")["['scan:func_ingress']"]["data"]
- wf.connect(node, out, scan_params, "scan")
-
- # Workaround for extracting metadata with ingress
- if rpool.check_rpool("derivatives-dir"):
- selectrest_json = pe.Node(
- function.Function(
- input_names=["scan", "rest_dict", "resource"],
- output_names=["file_path"],
- function=get_rest,
- as_module=True,
- ),
- name="selectrest_json",
- )
- selectrest_json.inputs.rest_dict = sub_dict
- selectrest_json.inputs.resource = "scan_parameters"
- wf.connect(node, out, selectrest_json, "scan")
- wf.connect(selectrest_json, "file_path", scan_params, "data_config_scan_params")
+ calc_delta_ratio = pe.Node(
+ Function(
+ input_names=["effective_echo_spacing", "echo_times"],
+ output_names=["deltaTE", "ees_asym_ratio"],
+ function=calc_delta_te_and_asym_ratio,
+ imports=["from typing import Optional"],
+ ),
+ name=f"diff_distcor_calc_delta{name_suffix}",
+ )
- else:
- # wire in the scan parameter workflow
- node, out = rpool.get("scan-params")["['scan-params:scan_params_ingress']"][
- "data"
- ]
- wf.connect(node, out, scan_params, "data_config_scan_params")
+ wf.connect(gather_echoes, "echotime_list", calc_delta_ratio, "echo_times")
- rpool.set_data("TR", scan_params, "tr", {}, "", "func_metadata_ingress")
- rpool.set_data("tpattern", scan_params, "tpattern", {}, "", "func_metadata_ingress")
- rpool.set_data("template", scan_params, "template", {}, "", "func_metadata_ingress")
- rpool.set_data(
- "start-tr", scan_params, "start_indx", {}, "", "func_metadata_ingress"
- )
- rpool.set_data("stop-tr", scan_params, "stop_indx", {}, "", "func_metadata_ingress")
- rpool.set_data(
- "pe-direction", scan_params, "pe_direction", {}, "", "func_metadata_ingress"
- )
+ node, out_file = rpool.get("effectiveEchoSpacing")[
+ "['effectiveEchoSpacing:func_metadata_ingress']"
+ ]["data"]
+ wf.connect(node, out_file, calc_delta_ratio, "effective_echo_spacing")
- if diff:
- # Connect EffectiveEchoSpacing from functional metadata
- rpool.set_data(
- "effectiveEchoSpacing",
- scan_params,
- "effective_echo_spacing",
- {},
- "",
- "func_metadata_ingress",
- )
- node, out_file = rpool.get("effectiveEchoSpacing")[
- "['effectiveEchoSpacing:func_metadata_ingress']"
- ]["data"]
- wf.connect(node, out_file, calc_delta_ratio, "effective_echo_spacing")
- rpool.set_data(
- "deltaTE", calc_delta_ratio, "deltaTE", {}, "", "deltaTE_ingress"
- )
- rpool.set_data(
- "ees-asym-ratio",
- calc_delta_ratio,
- "ees_asym_ratio",
- {},
- "",
- "ees_asym_ratio_ingress",
- )
+ rpool.set_data(
+ "deltaTE", calc_delta_ratio, "deltaTE", {}, "", "deltaTE_ingress"
+ )
+ rpool.set_data(
+ "ees-asym-ratio",
+ calc_delta_ratio,
+ "ees_asym_ratio",
+ {},
+ "",
+ "ees_asym_ratio_ingress",
+ )
return wf, rpool, diff, blip, fmap_rp_list
@@ -885,12 +928,15 @@ def check_for_s3(
):
"""Check if passed-in file is on S3."""
# Import packages
+ from importlib.resources import files
import os
import botocore.exceptions
import nibabel as nib
from indi_aws import fetch_creds
+ from CPAC.resources import templates
+
# Init variables
s3_str = "s3://"
if creds_path:
@@ -974,12 +1020,9 @@ def check_for_s3(
if not os.path.exists(local_path):
# alert users to 2020-07-20 Neuroparc atlas update (v0 to v1)
ndmg_atlases = {}
- with open(
- os.path.join(
- os.path.dirname(os.path.dirname(__file__)),
- "resources/templates/ndmg_atlases.csv",
- )
- ) as ndmg_atlases_file:
+ with (
+ files(templates).joinpath("ndmg_atlases.csv").open("r") as ndmg_atlases_file
+ ):
ndmg_atlases["v0"], ndmg_atlases["v1"] = zip(
*[
(
@@ -1156,7 +1199,7 @@ def res_string_to_tuple(resolution):
return (float(resolution.replace("mm", "")),) * 3
-def resolve_resolution(resolution, template, template_name, tag=None):
+def resolve_resolution(orientation, resolution, template, template_name, tag=None):
"""Resample a template to a given resolution."""
from nipype.interfaces import afni
@@ -1203,6 +1246,7 @@ def resolve_resolution(resolution, template, template_name, tag=None):
resample.inputs.resample_mode = "Cu"
resample.inputs.in_file = local_path
resample.base_dir = "."
+ resample.inputs.orientation = orientation
resampled_template = resample.run()
local_path = resampled_template.outputs.out_file
diff --git a/CPAC/utils/interfaces/afni.py b/CPAC/utils/interfaces/afni.py
index af7f4e56b2..20f2c8ae57 100644
--- a/CPAC/utils/interfaces/afni.py
+++ b/CPAC/utils/interfaces/afni.py
@@ -1,4 +1,4 @@
-# Copyright (C) 2023 C-PAC Developers
+# Copyright (C) 2023-2025 C-PAC Developers
# This file is part of C-PAC.
@@ -31,8 +31,6 @@
_major, _minor, _patch = [int(part) for part in AFNI_SEMVER.split(".")]
AFNI_SEMVER = str(semver.Version.parse(f"{_major}.{_minor}.{_patch}"))
del _major, _minor, _patch
-AFNI_GTE_21_1_1 = semver.compare(AFNI_SEMVER, "21.1.1") >= 0
-"""AFNI version >= 21.1.1?"""
class ECMInputSpec(_ECMInputSpec):
@@ -51,4 +49,4 @@ class ECM(_ECM):
input_spec = ECMInputSpec
-__all__ = ["AFNI_GTE_21_1_1", "ECM"]
+__all__ = ["ECM"]
diff --git a/CPAC/utils/interfaces/conftest.py b/CPAC/utils/interfaces/conftest.py
index bcf92c7dfc..bff7d64abe 100644
--- a/CPAC/utils/interfaces/conftest.py
+++ b/CPAC/utils/interfaces/conftest.py
@@ -35,18 +35,19 @@
"""
from contextlib import contextmanager
+from importlib.resources import as_file, files
from os import chdir, getcwd
from pathlib import Path
from shutil import copytree, rmtree
from tempfile import mkdtemp
from pytest import fixture
-import nipype
-NIPYPE_DATADIR = Path(nipype.__file__).parent / "testing/data"
-TEMP_FOLDER = Path(mkdtemp())
-DATA_DIR = TEMP_FOLDER / "data"
-copytree(NIPYPE_DATADIR, DATA_DIR, symlinks=True)
+with as_file(files("nipype").joinpath("testing/data")) as data_path:
+ NIPYPE_DATADIR = data_path
+ TEMP_FOLDER = Path(mkdtemp())
+ DATA_DIR = TEMP_FOLDER / "data"
+ copytree(NIPYPE_DATADIR, DATA_DIR, symlinks=True)
@contextmanager
diff --git a/CPAC/utils/interfaces/function/function.py b/CPAC/utils/interfaces/function/function.py
index 34d01373d5..2e8c764242 100644
--- a/CPAC/utils/interfaces/function/function.py
+++ b/CPAC/utils/interfaces/function/function.py
@@ -5,6 +5,7 @@
# * Adds `as_module` argument and property
# * Adds `sig_imports` decorator
# * Automatically imports global Nipype loggers in function nodes
+# * Specify type of `output_names` as `str | list[str]` instead of `str`
# ORIGINAL WORK'S ATTRIBUTION NOTICE:
# Copyright (c) 2009-2016, Nipype developers
@@ -23,7 +24,7 @@
# Prior to release 0.12, Nipype was licensed under a BSD license.
-# Modifications Copyright (C) 2018-2024 C-PAC Developers
+# Modifications Copyright (C) 2018-2025 C-PAC Developers
# This file is part of C-PAC.
@@ -157,7 +158,7 @@ class Function(NipypeFunction):
def __init__(
self,
input_names=None,
- output_names="out",
+ output_names: str | list[str] = "out",
function=None,
imports=None,
as_module=False,
diff --git a/CPAC/utils/interfaces/netcorr.py b/CPAC/utils/interfaces/netcorr.py
index aee9a4d13d..6af44a15ab 100644
--- a/CPAC/utils/interfaces/netcorr.py
+++ b/CPAC/utils/interfaces/netcorr.py
@@ -19,6 +19,61 @@
class NetCorr(NipypeNetCorr): # noqa: D101
input_spec = NetCorrInputSpec
+ def _list_outputs(self):
+ """``nipype.interfaces.afni.preprocess.NetCorr._list_outputs`` with a bugfix.
+
+ Notes
+ -----
+ This method can be removed once nipy/nipype#3697 is merged and a release
+ including that PR is included in the C-PAC image.
+ """
+ # STATEMENT OF CHANGES:
+ # This function is derived from sources licensed under the Apache-2.0 terms,
+ # and this function has been changed.
+
+ # CHANGES:
+ # * Includes changes from https://github.com/nipy/nipype/pull/3697 prior to all commits between https://github.com/nipy/nipype/tree/1.8.6 and that PR being merged and released.
+
+ # ORIGINAL WORK'S ATTRIBUTION NOTICE:
+ # Copyright (c) 2009-2016, Nipype developers
+
+ # Licensed under the Apache License, Version 2.0 (the "License");
+ # you may not use this file except in compliance with the License.
+ # You may obtain a copy of the License at
+
+ # http://www.apache.org/licenses/LICENSE-2.0
+
+ # Unless required by applicable law or agreed to in writing, software
+ # distributed under the License is distributed on an "AS IS" BASIS,
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ # See the License for the specific language governing permissions and
+ # limitations under the License.
+
+ # Prior to release 0.12, Nipype was licensed under a BSD license.
+
+ # Modifications copyright (C) 2024 C-PAC Developers
+ import glob
+ import os
+
+ from nipype.interfaces.base.traits_extension import isdefined
+
+ outputs = self.output_spec().get()
+
+ if not isdefined(self.inputs.out_file):
+ prefix = self._gen_fname(self.inputs.in_file, suffix="_netcorr")
+ else:
+ prefix = self.inputs.out_file
+
+ # All outputs should be in the same directory as the prefix
+ odir = os.path.dirname(os.path.abspath(prefix))
+ outputs["out_corr_matrix"] = glob.glob(os.path.join(odir, "*.netcc"))[0]
+
+ if self.inputs.ts_wb_corr or self.inputs.ts_wb_Z:
+ corrdir = os.path.join(odir, prefix + "_000_INDIV")
+ outputs["out_corr_maps"] = glob.glob(os.path.join(corrdir, "*.nii.gz"))
+
+ return outputs
+
NetCorr.__doc__ = f"""{NipypeNetCorr.__doc__}
`CPAC.utils.interfaces.netcorr.NetCorr` adds an additional optional input, `automask_off`
diff --git a/CPAC/utils/io.py b/CPAC/utils/io.py
new file mode 100644
index 0000000000..12d7d7f5d1
--- /dev/null
+++ b/CPAC/utils/io.py
@@ -0,0 +1,37 @@
+# Copyright (C) 2012-2024 C-PAC Developers
+
+# This file is part of C-PAC.
+
+# C-PAC is free software: you can redistribute it and/or modify it under
+# the terms of the GNU Lesser General Public License as published by the
+# Free Software Foundation, either version 3 of the License, or (at your
+# option) any later version.
+
+# C-PAC is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
+# License for more details.
+
+# You should have received a copy of the GNU Lesser General Public
+# License along with C-PAC. If not, see <https://www.gnu.org/licenses/>.
+"""Utilities for inputs and outputs."""
+
+from pathlib import Path
+
+from yaml import safe_load, YAMLError
+
+
+def load_yaml(
+ path: Path | str, desc: str = "YAML file", encoding="utf8"
+) -> dict | list | str:
+ """Try to load a YAML file to a Python object."""
+ path = Path(path).absolute()
+ try:
+ with path.open("r", encoding=encoding) as _yaml:
+ result = safe_load(_yaml)
+ except FileNotFoundError as error:
+ raise error
+ except Exception as error:
+ msg = f"{desc} is not in proper YAML format. Please check {path}"
+ raise YAMLError(msg) from error
+ return result
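`load_yaml` narrows the failure modes: a missing file still surfaces as `FileNotFoundError`, while any parse problem is re-raised as a `YAMLError` that names the offending file. A brief usage sketch:

```python
from tempfile import NamedTemporaryFile

from CPAC.utils.io import load_yaml

with NamedTemporaryFile("w", suffix=".yml", delete=False) as _f:
    _f.write("pipeline_setup: {pipeline_name: demo}")
config = load_yaml(_f.name, desc="Pipeline config")
assert config["pipeline_setup"]["pipeline_name"] == "demo"
# Malformed contents would instead raise:
#   YAMLError: Pipeline config is not in proper YAML format. Please check <path>
```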
diff --git a/CPAC/utils/monitoring/custom_logging.py b/CPAC/utils/monitoring/custom_logging.py
index abd6b63438..3d8d1b842a 100644
--- a/CPAC/utils/monitoring/custom_logging.py
+++ b/CPAC/utils/monitoring/custom_logging.py
@@ -21,6 +21,7 @@
import subprocess
from sys import exc_info as sys_exc_info
from traceback import print_exception
+from typing import Optional, Sequence
from nipype import logging as nipype_logging
@@ -59,7 +60,14 @@ def getLogger(name): # pylint: disable=invalid-name
if name in MOCK_LOGGERS:
return MOCK_LOGGERS[name]
logger = nipype_logging.getLogger(name)
- return logging.getLogger(name) if logger is None else logger
+ if logger is None:
+ logger = logging.getLogger(name)
+ if not logger.handlers:
+ handler = logging.StreamHandler()
+ handler.setFormatter(logging.Formatter("%(message)s"))
+ logger.setLevel(logging.INFO)
+ logger.addHandler(handler)
+ return logger
# Nipype built-in loggers
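This fallback replaces the module-level `basicConfig(format="%(message)s", level=INFO)` calls removed from `build_data_config.py` and `ndmg_utils.py` elsewhere in this patch: `logging.basicConfig` configures the process-wide root logger, which a library shouldn't do, whereas here a message-only handler is scoped to the named logger. The pattern in isolation:

```python
import logging

logger = logging.getLogger("CPAC.utils.data-config")
if not logger.handlers:  # attach once; leave the root logger untouched
    handler = logging.StreamHandler()
    handler.setFormatter(logging.Formatter("%(message)s"))
    logger.setLevel(logging.INFO)
    logger.addHandler(handler)
logger.info("plain message, no level prefix")
```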
@@ -171,7 +179,9 @@ def _log(message, *items, exc_info=False):
logging, level.upper(), logging.NOTSET
):
with open(
- self.handlers[0].baseFilename, "a", encoding="utf-8"
+ MockLogger._get_first_file_handler(self.handlers).baseFilename,
+ "a",
+ encoding="utf-8",
) as log_file:
if exc_info and isinstance(message, Exception):
value, traceback = sys_exc_info()[1:]
@@ -190,6 +200,16 @@ def delete(self):
"""Delete the mock logger from memory."""
del MOCK_LOGGERS[self.name]
+ @staticmethod
+ def _get_first_file_handler(
+ handlers: Sequence[logging.Handler | MockHandler],
+ ) -> Optional[logging.FileHandler | MockHandler]:
+ """Given a list of Handlers, return the first FileHandler found or return None."""
+ for handler in handlers:
+ if isinstance(handler, (logging.FileHandler, MockHandler)):
+ return handler
+ return None
+
def _lazy_sub(message, *items):
"""Given lazy-logging syntax, return string with substitutions.
@@ -252,12 +272,12 @@ def set_up_logger(
Examples
--------
>>> lg = set_up_logger('test')
- >>> lg.handlers[0].baseFilename.split('/')[-1]
+ >>> MockLogger._get_first_file_handler(lg.handlers).baseFilename.split('/')[-1]
'test.log'
>>> lg.level
0
>>> lg = set_up_logger('second_test', 'specific_filename.custom', 'debug')
- >>> lg.handlers[0].baseFilename.split('/')[-1]
+ >>> MockLogger._get_first_file_handler(lg.handlers).baseFilename.split('/')[-1]
'specific_filename.custom'
>>> lg.level
10
diff --git a/CPAC/utils/monitoring/draw_gantt_chart.py b/CPAC/utils/monitoring/draw_gantt_chart.py
index 089e9fdd39..2fc6dce651 100644
--- a/CPAC/utils/monitoring/draw_gantt_chart.py
+++ b/CPAC/utils/monitoring/draw_gantt_chart.py
@@ -23,7 +23,7 @@
# Prior to release 0.12, Nipype was licensed under a BSD license.
-# Modifications Copyright (C) 2021-2023 C-PAC Developers
+# Modifications Copyright (C) 2021-2025 C-PAC Developers
# This file is part of C-PAC.
@@ -39,19 +39,20 @@
# You should have received a copy of the GNU Lesser General Public
# License along with C-PAC. If not, see <https://www.gnu.org/licenses/>.
-"""Module to draw an html gantt chart from logfile produced by
-``CPAC.utils.monitoring.log_nodes_cb()``.
+"""Module to draw an html gantt chart from logfile produced by `~CPAC.utils.monitoring.log_nodes_cb`.
See https://nipype.readthedocs.io/en/latest/api/generated/nipype.utils.draw_gantt_chart.html
"""
from collections import OrderedDict
-from datetime import datetime
+from datetime import datetime, timedelta
import random
from warnings import warn
from nipype.utils.draw_gantt_chart import draw_lines, draw_resource_bar, log_to_dict
+from CPAC.utils.monitoring.monitoring import _NoTime, DatetimeWithSafeNone
+
def create_event_dict(start_time, nodes_list):
"""
@@ -401,34 +402,39 @@ def generate_gantt_chart(
return
for node in nodes_list:
- if "duration" not in node:
- node["duration"] = (node["finish"] - node["start"]).total_seconds()
+ if "duration" not in node and (node["start"] and node["finish"]):
+ _duration = node["finish"] - node["start"]
+ assert isinstance(_duration, timedelta)
+ node["duration"] = _duration.total_seconds()
# Create the header of the report with useful information
start_node = nodes_list[0]
last_node = nodes_list[-1]
- duration = (last_node["finish"] - start_node["start"]).total_seconds()
+ start = DatetimeWithSafeNone(start_node["start"])
+ finish = DatetimeWithSafeNone(last_node["finish"])
+ if isinstance(start, _NoTime) or isinstance(finish, _NoTime):
+ return
+ start, finish = DatetimeWithSafeNone.sync_tz(start, finish)
+ try:
+ duration = (finish - start).total_seconds()
+ except TypeError:
+ # no duration
+ return
# Get events based dictionary of node run stats
- events = create_event_dict(start_node["start"], nodes_list)
+ events = create_event_dict(start, nodes_list)
# Summary strings of workflow at top
-    html_string += (
-        "<p>Start: " + start_node["start"].strftime("%Y-%m-%d %H:%M:%S") + "</p>"
-    )
-    html_string += (
-        "<p>Finish: " + last_node["finish"].strftime("%Y-%m-%d %H:%M:%S") + "</p>"
-    )
+    html_string += "<p>Start: " + start.strftime("%Y-%m-%d %H:%M:%S") + "</p>"
+    html_string += "<p>Finish: " + finish.strftime("%Y-%m-%d %H:%M:%S") + "</p>"
html_string += "<p>Duration: " + f"{duration / 60:.2f}" + " minutes</p>"
html_string += "<p>Nodes: " + str(len(nodes_list)) + "</p>"
html_string += "<p>Cores: " + str(cores) + "</p>"
html_string += close_header
# Draw nipype nodes Gantt chart and runtimes
- html_string += draw_lines(
- start_node["start"], duration, minute_scale, space_between_minutes
- )
+ html_string += draw_lines(start, duration, minute_scale, space_between_minutes)
html_string += draw_nodes(
- start_node["start"],
+ start,
nodes_list,
cores,
minute_scale,
@@ -442,8 +448,8 @@ def generate_gantt_chart(
# Plot gantt chart
resource_offset = 120 + 30 * cores
html_string += draw_resource_bar(
- start_node["start"],
- last_node["finish"],
+ start,
+ finish,
estimated_mem_ts,
space_between_minutes,
minute_scale,
@@ -452,8 +458,8 @@ def generate_gantt_chart(
"Memory",
)
html_string += draw_resource_bar(
- start_node["start"],
- last_node["finish"],
+ start,
+ finish,
runtime_mem_ts,
space_between_minutes,
minute_scale,
@@ -467,8 +473,8 @@ def generate_gantt_chart(
runtime_threads_ts = calculate_resource_timeseries(events, "runtime_threads")
# Plot gantt chart
html_string += draw_resource_bar(
- start_node["start"],
- last_node["finish"],
+ start,
+ finish,
estimated_threads_ts,
space_between_minutes,
minute_scale,
@@ -477,8 +483,8 @@ def generate_gantt_chart(
"Threads",
)
html_string += draw_resource_bar(
- start_node["start"],
- last_node["finish"],
+ start,
+ finish,
runtime_threads_ts,
space_between_minutes,
minute_scale,
@@ -629,7 +635,7 @@ def _timing(nodes_list):
for node in nodes_list
if "start" in node and "finish" in node
]
- except ValueError:
+ except (TypeError, ValueError):
# Drop any problematic nodes
new_node_list = []
for node in nodes_list:
@@ -656,12 +662,14 @@ def _timing_timestamp(node):
msg = "No logged nodes have timing information."
raise ProcessLookupError(msg)
return {
- k: (
+ k: DatetimeWithSafeNone(
datetime.strptime(v, "%Y-%m-%dT%H:%M:%S.%f")
if "." in v
else datetime.fromisoformat(v)
)
if (k in {"start", "finish"} and isinstance(v, str))
+ else DatetimeWithSafeNone(v)
+ if k in {"start", "finish"}
else v
for k, v in node.items()
}
diff --git a/CPAC/utils/monitoring/monitoring.py b/CPAC/utils/monitoring/monitoring.py
index 950f419b95..8d715b82b8 100644
--- a/CPAC/utils/monitoring/monitoring.py
+++ b/CPAC/utils/monitoring/monitoring.py
@@ -1,9 +1,31 @@
+# Copyright (C) 2018-2025 C-PAC Developers
+
+# This file is part of C-PAC.
+
+# C-PAC is free software: you can redistribute it and/or modify it under
+# the terms of the GNU Lesser General Public License as published by the
+# Free Software Foundation, either version 3 of the License, or (at your
+# option) any later version.
+
+# C-PAC is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
+# License for more details.
+
+# You should have received a copy of the GNU Lesser General Public
+# License along with C-PAC. If not, see <https://www.gnu.org/licenses/>.
+"""Monitoring utilities for C-PAC."""
+
+from datetime import datetime, timedelta, timezone
import glob
import json
import math
import os
import socketserver
+import struct
import threading
+from typing import Any, Optional, overload, TypeAlias
+from zoneinfo import available_timezones, ZoneInfo
import networkx as nx
from traits.trait_base import Undefined
@@ -13,8 +35,260 @@
from .custom_logging import getLogger
-# Log initial information from all the nodes
+def _safe_none_diff(
+ self: "DatetimeWithSafeNone | _NoTime", other: "DatetimeWithSafeNone | _NoTime"
+) -> datetime | timedelta:
+ """Subtract between a datetime or timedelta or None."""
+ if isinstance(self, _NoTime):
+ return timedelta(0)
+ if isinstance(other, DatetimeWithSafeNone):
+ if isinstance(other, _NoTime):
+ return timedelta(0)
+ return self - other
+ if isinstance(other, (datetime, timedelta)):
+ return self._dt - other
+ msg = f"Cannot subtract {type(other)} from {type(self)}"
+ raise NotImplementedError(msg)
+
+
+class _NoTime:
+ """A wrapper for None values that can be used in place of a datetime object."""
+
+ def __bool__(self) -> bool:
+ """Return False for _NoTime."""
+ return False
+
+ def __int__(self) -> int:
+ """Return 0 for _NoTime."""
+ return 0
+
+ def __repr__(self) -> str:
+ """Return 'NoTime' for _NoTime."""
+ return "NoTime"
+
+ def __str__(self) -> str:
+ """Return 'NoTime' for _NoTime."""
+ return "NoTime"
+
+ def __sub__(self, other: "DatetimeWithSafeNone | _NoTime") -> datetime | timedelta:
+ """Subtract between None and a datetime or timedelta or None."""
+ return _safe_none_diff(self, other)
+
+ def isoformat(self) -> str:
+ """Return an ISO 8601-like string of 0s for display."""
+ return "0000-00-00"
+
+
+NoTime = _NoTime()
+"""A singleton None that can be used in place of a datetime object."""
+
+
+class DatetimeWithSafeNone(datetime, _NoTime):
+ """Time class that can be None or a time value.
+
+ Examples
+ --------
+ >>> from datetime import datetime
+ >>> DatetimeWithSafeNone(datetime(2025, 6, 18, 21, 6, 43, 730004)).isoformat()
+ '2025-06-18T21:06:43.730004'
+ >>> DatetimeWithSafeNone("2025-06-18T21:06:43.730004").isoformat()
+ '2025-06-18T21:06:43.730004'
+ >>> DatetimeWithSafeNone(b"\\x07\\xe9\\x06\\x12\\x10\\x18\\x1c\\x88\\x6d\\x01").isoformat()
+ '2025-06-18T16:24:28.028040+00:00'
+ >>> DatetimeWithSafeNone(b'\\x07\\xe9\\x06\\x12\\x10\\x18\\x1c\\x88m\\x00').isoformat()
+ '2025-06-18T16:24:28.028040'
+ >>> DatetimeWithSafeNone(DatetimeWithSafeNone("2025-06-18")).isoformat()
+ '2025-06-18T00:00:00'
+ >>> DatetimeWithSafeNone(None)
+ NoTime
+ >>> DatetimeWithSafeNone(None).isoformat()
+ '0000-00-00'
+ """
+
+ @overload
+ def __new__(
+ cls,
+ year: "OptionalDatetime",
+ month: None = None,
+ day: None = None,
+ hour: None = None,
+ minute: None = None,
+ second: None = None,
+ microsecond: None = None,
+ tzinfo: None = None,
+ *,
+ fold: None = None,
+ ) -> "DatetimeWithSafeNone | _NoTime": ...
+ @overload
+ def __new__(
+ cls,
+ year: int,
+ month: Optional[int] = None,
+ day: Optional[int] = None,
+ hour: int = 0,
+ minute: int = 0,
+ second: int = 0,
+ microsecond: int = 0,
+ tzinfo: Optional[timezone | ZoneInfo] = None,
+ *,
+ fold: int = 0,
+ ) -> "DatetimeWithSafeNone": ...
+
+ def __new__(
+ cls,
+ year: "int | OptionalDatetime",
+ month: Optional[int] = None,
+ day: Optional[int] = None,
+ hour: Optional[int] = 0,
+ minute: Optional[int] = 0,
+ second: Optional[int] = 0,
+ microsecond: Optional[int] = 0,
+ tzinfo: Optional[timezone | ZoneInfo] = None,
+ *,
+ fold: Optional[int] = 0,
+ ) -> "DatetimeWithSafeNone | _NoTime":
+ """Create a new instance of the class."""
+ if (
+ isinstance(year, int)
+ and isinstance(month, int)
+ and isinstance(day, int)
+ and isinstance(hour, int)
+ and isinstance(minute, int)
+ and isinstance(second, int)
+ and isinstance(microsecond, int)
+ and isinstance(fold, int)
+ ):
+ return datetime.__new__(
+ cls,
+ year,
+ month,
+ day,
+ hour,
+ minute,
+ second,
+ microsecond,
+ tzinfo,
+ fold=fold,
+ )
+ else:
+ dt = year
+ if dt is None:
+ return NoTime
+ if isinstance(dt, datetime):
+ return datetime.__new__(
+ cls,
+ dt.year,
+ dt.month,
+ dt.day,
+ dt.hour,
+ dt.minute,
+ dt.second,
+ dt.microsecond,
+ dt.tzinfo,
+ )
+ if isinstance(dt, bytes):
+ try:
+ tzflag: Optional[int]
+ year, month, day, hour, minute, second = struct.unpack(">H5B", dt[:7])
+                microsecond, tzflag = struct.unpack("<HB", dt[7:])
+                return datetime.__new__(
+                    cls,
+                    year,
+                    month,
+                    day,
+                    hour,
+                    minute,
+                    second,
+                    microsecond,
+                    timezone.utc if tzflag else None,
+                )
+            except struct.error:
+                return NoTime
+        if isinstance(dt, str):
+            return DatetimeWithSafeNone(datetime.fromisoformat(dt))
+        return NoTime
+
+    def __bool__(self) -> bool:
+ """Return True if not NoTime."""
+ return self is not NoTime
+
+ def __sub__(self, other: "DatetimeWithSafeNone | _NoTime") -> datetime | timedelta: # type: ignore[reportIncompatibleMethodOverride]
+ """Subtract between a datetime or timedelta or None."""
+ return _safe_none_diff(self, other)
+
+ def __repr__(self) -> str:
+ """Return the string representation of the datetime or NoTime."""
+ if self:
+ return datetime.__repr__(self)
+ return "NoTime"
+
+ def __str__(self) -> str:
+ """Return the string representation of the datetime or NoTime."""
+ return super().__str__()
+
+ @staticmethod
+ def sync_tz(
+ one: "DatetimeWithSafeNone", two: "DatetimeWithSafeNone"
+ ) -> tuple[datetime, datetime]:
+ """Add timezone to other if one datetime is aware and other isn't ."""
+ if one.tzinfo is None and two.tzinfo is not None:
+ return one.replace(tzinfo=two.tzinfo), two
+ if one.tzinfo is not None and two.tzinfo is None:
+ return one, two.replace(tzinfo=one.tzinfo)
+ return one, two
+
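Per the doctests above, the 10-byte branch decodes a compact timestamp: a big-endian two-byte year, then month, day, hour, minute and second as single bytes, then a little-endian two-byte microsecond and a timezone flag (the exact `struct` formats here are inferred from the doctest values). Decoding the doctest payload by hand:

```python
import struct
from datetime import datetime, timezone

payload = b"\x07\xe9\x06\x12\x10\x18\x1c\x88\x6d\x01"
year, month, day, hour, minute, second = struct.unpack(">H5B", payload[:7])
microsecond, tzflag = struct.unpack("<HB", payload[7:])
dt = datetime(
    year, month, day, hour, minute, second, microsecond,
    timezone.utc if tzflag else None,
)
print(dt.isoformat())  # 2025-06-18T16:24:28.028040+00:00
```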
+
+class DatetimeJSONEncoder(json.JSONEncoder):
+ """JSON encoder that handles DatetimeWithSafeNone instances."""
+
+ def default(self, o: Any) -> str:
+ """Convert datetime objects to ISO format."""
+ if isinstance(o, datetime):
+ return o.isoformat()
+ if o is None or o is NoTime:
+ return ""
+ return super().default(o)
+
+
+def json_dumps(obj: Any, **kwargs) -> str:
+ """Convert an object to a JSON string."""
+ return json.dumps(obj, cls=DatetimeJSONEncoder, **kwargs)
+
+
+OptionalDatetime: TypeAlias = Optional[
+ datetime | str | bytes | DatetimeWithSafeNone | _NoTime
+]
+"""Type alias for a datetime, ISO-format string or None."""
+
+
def recurse_nodes(workflow, prefix=""):
+ """Log initial information from all the nodes."""
for node in nx.topological_sort(workflow._graph):
if isinstance(node, pe.Workflow):
for subnode in recurse_nodes(node, prefix + workflow.name + "."):
@@ -29,7 +303,7 @@ def recurse_nodes(workflow, prefix=""):
def log_nodes_initial(workflow):
logger = getLogger("callback")
for node in recurse_nodes(workflow):
- logger.debug(json.dumps(node))
+ logger.debug(json_dumps(node))
def log_nodes_cb(node, status):
@@ -111,8 +385,8 @@ def log_nodes_cb(node, status):
status_dict = {
"id": str(node),
"hash": node.inputs.get_hashval()[1],
- "start": getattr(runtime, "startTime", None),
- "finish": getattr(runtime, "endTime", None),
+ "start": DatetimeWithSafeNone(getattr(runtime, "startTime", None)),
+ "finish": DatetimeWithSafeNone(getattr(runtime, "endTime", None)),
"runtime_threads": runtime_threads,
"runtime_memory_gb": getattr(runtime, "mem_peak_gb", "N/A"),
"estimated_memory_gb": node.mem_gb,
@@ -122,10 +396,12 @@ def log_nodes_cb(node, status):
if hasattr(node, "input_data_shape") and node.input_data_shape is not Undefined:
status_dict["input_data_shape"] = node.input_data_shape
- if status_dict["start"] is None or status_dict["finish"] is None:
+ if any(
+ not isinstance(status_dict[label], datetime) for label in ["start", "finish"]
+ ):
status_dict["error"] = True
- logger.debug(json.dumps(status_dict))
+ logger.debug(json_dumps(status_dict))
log_nodes_cb.__doc__ = f"""{_nipype_log_nodes_cb.__doc__}
@@ -155,7 +431,7 @@ def handle(self):
with open(callback_file, "rb") as lf:
for l in lf.readlines(): # noqa: E741
- l = l.strip() # noqa: E741
+ l = l.strip() # noqa: E741,PLW2901
try:
node = json.loads(l)
if node["id"] not in tree[subject]:
@@ -182,7 +458,7 @@ def handle(self):
tree = {s: t for s, t in tree.items() if t}
headers = "HTTP/1.1 200 OK\nConnection: close\n\n"
- self.request.sendall(headers + json.dumps(tree) + "\n")
+ self.request.sendall(headers + json_dumps(tree) + "\n")
class LoggingHTTPServer(socketserver.ThreadingTCPServer, object):
diff --git a/CPAC/utils/ndmg_utils.py b/CPAC/utils/ndmg_utils.py
index 0623118e75..1680e8edf6 100644
--- a/CPAC/utils/ndmg_utils.py
+++ b/CPAC/utils/ndmg_utils.py
@@ -32,7 +32,6 @@
# Modifications Copyright (C) 2022-2024 C-PAC Developers
# This file is part of C-PAC.
-from logging import basicConfig, INFO
import os
import numpy as np
@@ -41,7 +40,6 @@
from CPAC.utils.monitoring.custom_logging import getLogger
logger = getLogger("nuerodata.m2g.ndmg")
-basicConfig(format="%(message)s", level=INFO)
def ndmg_roi_timeseries(func_file, label_file):
diff --git a/CPAC/utils/outputs.py b/CPAC/utils/outputs.py
index 11b81eb60f..f148bba87d 100644
--- a/CPAC/utils/outputs.py
+++ b/CPAC/utils/outputs.py
@@ -1,13 +1,36 @@
+# Copyright (C) 2018-2025 C-PAC Developers
+
+# This file is part of C-PAC.
+
+# C-PAC is free software: you can redistribute it and/or modify it under
+# the terms of the GNU Lesser General Public License as published by the
+# Free Software Foundation, either version 3 of the License, or (at your
+# option) any later version.
+
+# C-PAC is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
+# License for more details.
+
+# You should have received a copy of the GNU Lesser General Public
+# License along with C-PAC. If not, see <https://www.gnu.org/licenses/>.
+"""Specify the resources that C-PAC writes to the output direcotry."""
+
+from importlib.resources import files
+from typing import ClassVar
+
import pandas as pd
-import pkg_resources as p
class Outputs:
- # Settle some things about the resource pool reference and the output directory
- reference_csv = p.resource_filename("CPAC", "resources/cpac_outputs.tsv")
+ """Settle some things about the resource pool reference and the output directory."""
+
+ reference_csv = str(files("CPAC").joinpath("resources/cpac_outputs.tsv"))
try:
- reference = pd.read_csv(reference_csv, delimiter="\t", keep_default_na=False)
+ reference: ClassVar[pd.DataFrame] = pd.read_csv(
+ reference_csv, delimiter="\t", keep_default_na=False
+ )
except Exception as e:
err = (
"\n[!] Could not access or read the cpac_outputs.tsv "
@@ -27,8 +50,12 @@ class Outputs:
reference[reference["4D Time Series"] == "Yes"]["Resource"]
)
- anat = list(reference[reference["Sub-Directory"] == "anat"]["Resource"])
- func = list(reference[reference["Sub-Directory"] == "func"]["Resource"])
+ anat: ClassVar[list[str]] = list(
+ reference[reference["Sub-Directory"] == "anat"]["Resource"]
+ )
+ func: ClassVar[list[str]] = list(
+ reference[reference["Sub-Directory"] == "func"]["Resource"]
+ )
# outputs to send into smoothing, if smoothing is enabled, and
# outputs to write out if the user selects to write non-smoothed outputs
@@ -45,6 +72,8 @@ class Outputs:
all_template_filter = _template_filter | _epitemplate_filter | _symtemplate_filter
all_native_filter = _T1w_native_filter | _bold_native_filter | _long_native_filter
+ bold_native: ClassVar[list[str]] = list(reference[_bold_native_filter]["Resource"])
+
native_nonsmooth = list(
reference[all_native_filter & _nonsmoothed_filter]["Resource"]
)
@@ -101,3 +130,11 @@ def _is_gifti(_file_key):
for gifti in giftis.itertuples()
if " " in gifti.File
}
+
+
+def group_derivatives(pull_func: bool = False) -> list[str]:
+ """Gather keys for anatomical and functional derivatives for group analysis."""
+ derivatives: list[str] = Outputs.func + Outputs.anat
+ if pull_func:
+ derivatives = derivatives + Outputs.bold_native
+ return derivatives
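
A quick usage sketch for the new `group_derivatives` helper; `pull_func` simply widens the anat + func key list with the native-space BOLD resources:

    from CPAC.utils.outputs import group_derivatives

    # anatomical + functional derivative keys only
    keys = group_derivatives()
    # additionally include native-space BOLD resources for group analysis
    keys_with_func = group_derivatives(pull_func=True)
    assert set(keys).issubset(keys_with_func)
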
diff --git a/CPAC/utils/symlinks.py b/CPAC/utils/symlinks.py
index c9283394de..3494243e4a 100644
--- a/CPAC/utils/symlinks.py
+++ b/CPAC/utils/symlinks.py
@@ -1,3 +1,21 @@
+# Copyright (C) 2019-2025 C-PAC Developers
+
+# This file is part of C-PAC.
+
+# C-PAC is free software: you can redistribute it and/or modify it under
+# the terms of the GNU Lesser General Public License as published by the
+# Free Software Foundation, either version 3 of the License, or (at your
+# option) any later version.
+
+# C-PAC is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
+# License for more details.
+
+# You should have received a copy of the GNU Lesser General Public
+# License along with C-PAC. If not, see <https://www.gnu.org/licenses/>.
+"""Create symbolic links."""
+
from collections import defaultdict
import errno
import os
@@ -5,6 +23,7 @@
output_renamings = {
"anatomical_brain": "anat",
"anatomical_brain_mask": "anat",
+ "anatomical_reorient": "anat",
"qc": "qc",
"anatomical_skull_leaf": "anat",
"anatomical_to_mni_linear_xfm": "anat",
diff --git a/CPAC/utils/test_mocks.py b/CPAC/utils/test_mocks.py
index 336488f318..85967c8aeb 100644
--- a/CPAC/utils/test_mocks.py
+++ b/CPAC/utils/test_mocks.py
@@ -1,4 +1,24 @@
+# Copyright (C) 2019-2025 C-PAC Developers
+
+# This file is part of C-PAC.
+
+# C-PAC is free software: you can redistribute it and/or modify it under
+# the terms of the GNU Lesser General Public License as published by the
+# Free Software Foundation, either version 3 of the License, or (at your
+# option) any later version.
+
+# C-PAC is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
+# License for more details.
+
+# You should have received a copy of the GNU Lesser General Public
+# License along with C-PAC. If not, see <https://www.gnu.org/licenses/>.
+"""Mock configuration and strategy for testing purposes."""
+
import os
+from pathlib import Path
+from typing import Literal
from nipype.interfaces import utility as util
@@ -9,16 +29,20 @@
from CPAC.utils.strategy import Strategy
-def file_node(path, file_node_num=0):
+def file_node(
+ path: Path | str, file_node_num: int = 0, name: str = "file_node"
+) -> tuple[pe.Node, Literal["file"]]:
+ """Create a file node with the given path and name."""
input_node = pe.Node(
util.IdentityInterface(fields=["file"]),
- name=f"file_node_{file_node_num}",
+ name=f"{name}_{file_node_num}",
)
- input_node.inputs.file = path
+ input_node.inputs.file = str(path)
return input_node, "file"
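
For illustration, the refactored `file_node` might be exercised like this (the path is hypothetical):

    from pathlib import Path

    from CPAC.utils.test_mocks import file_node

    # returns the node plus the name of its output field, ready for wf.connect
    anat_node, out_field = file_node(Path("/data/sub-01_T1w.nii.gz"), 1, name="anat")
    assert out_field == "file"
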
def configuration_strategy_mock(method="FSL"):
+ """Mock configuration and strategy for testing."""
fsldir = os.environ.get("FSLDIR")
# mock the config dictionary
c = Configuration(
@@ -235,6 +259,7 @@ def configuration_strategy_mock(method="FSL"):
resampled_template.inputs.template = template
resampled_template.inputs.template_name = template_name
resampled_template.inputs.tag = tag
+ resampled_template.inputs.orientation = "RPI"
strat.update_resource_pool(
{template_name: (resampled_template, "resampled_template")}
diff --git a/CPAC/utils/test_resources.py b/CPAC/utils/test_resources.py
index da58e4e0f9..5d447292f6 100644
--- a/CPAC/utils/test_resources.py
+++ b/CPAC/utils/test_resources.py
@@ -1,4 +1,4 @@
-# Copyright (C) 2019-2024 C-PAC Developers
+# Copyright (C) 2019-2025 C-PAC Developers
# This file is part of C-PAC.
@@ -14,29 +14,32 @@
# You should have received a copy of the GNU Lesser General Public
# License along with C-PAC. If not, see <https://www.gnu.org/licenses/>.
-from CPAC.utils.monitoring import WFLOGGER
+"""Resources for testing utilities."""
+import os
+import shutil
+from typing import Optional
-def setup_test_wf(s3_prefix, paths_list, test_name, workdirs_to_keep=None):
- """Set up a basic template Nipype workflow for testing single nodes or
- small sub-workflows.
- """
- import os
- import shutil
+from CPAC.pipeline import nipype_pipeline_engine as pe
+from CPAC.utils.datasource import check_for_s3
+from CPAC.utils.interfaces.datasink import DataSink
+from CPAC.utils.monitoring import WFLOGGER
- from CPAC.pipeline import nipype_pipeline_engine as pe
- from CPAC.utils.datasource import check_for_s3
- from CPAC.utils.interfaces.datasink import DataSink
- test_dir = os.path.join(os.getcwd(), test_name)
+def setup_test_wf(
+ s3_prefix,
+ paths_list,
+ test_name,
+ workdirs_to_keep=None,
+ test_dir: Optional[str] = None,
+) -> tuple[pe.Workflow, pe.Node, dict[str, str]]:
+ """Set up a basic template Nipype workflow for testing small workflows."""
+ test_dir = os.path.join(test_dir if test_dir else os.getcwd(), test_name)
work_dir = os.path.join(test_dir, "workdir")
out_dir = os.path.join(test_dir, "output")
if os.path.exists(out_dir):
- try:
- shutil.rmtree(out_dir)
- except:
- pass
+ shutil.rmtree(out_dir, ignore_errors=True)
if os.path.exists(work_dir):
for dirname in os.listdir(work_dir):
@@ -45,10 +48,7 @@ def setup_test_wf(s3_prefix, paths_list, test_name, workdirs_to_keep=None):
WFLOGGER.info("%s --- %s\n", dirname, keepdir)
if keepdir in dirname:
continue
- try:
- shutil.rmtree(os.path.join(work_dir, dirname))
- except:
- pass
+ shutil.rmtree(os.path.join(work_dir, dirname), ignore_errors=True)
local_paths = {}
for subpath in paths_list:
@@ -67,4 +67,4 @@ def setup_test_wf(s3_prefix, paths_list, test_name, workdirs_to_keep=None):
ds.inputs.base_directory = out_dir
ds.inputs.parameterization = True
- return (wf, ds, local_paths)
+ return wf, ds, local_paths
diff --git a/CPAC/utils/tests/configs/__init__.py b/CPAC/utils/tests/configs/__init__.py
index f8a23bd4e6..896c79bf69 100644
--- a/CPAC/utils/tests/configs/__init__.py
+++ b/CPAC/utils/tests/configs/__init__.py
@@ -1,15 +1,21 @@
"""Configs for testing."""
-from pathlib import Path
+from importlib import resources
+
+try:
+ from importlib.resources.abc import Traversable
+except ModuleNotFoundError: # TODO: Remove this block once minimum Python version includes `importlib.resources.abc`
+ from importlib.abc import Traversable
-from pkg_resources import resource_filename
import yaml
-_TEST_CONFIGS_PATH = Path(resource_filename("CPAC", "utils/tests/configs"))
-with open(_TEST_CONFIGS_PATH / "neurostars_23786.yml", "r", encoding="utf-8") as _f:
+_TEST_CONFIGS_PATH: Traversable = resources.files("CPAC").joinpath(
+ "utils/tests/configs"
+)
+with (_TEST_CONFIGS_PATH / "neurostars_23786.yml").open("r", encoding="utf-8") as _f:
# A loaded YAML file to test https://tinyurl.com/neurostars23786
NEUROSTARS_23786 = _f.read()
-with open(_TEST_CONFIGS_PATH / "neurostars_24035.yml", "r", encoding="utf-8") as _f:
+with (_TEST_CONFIGS_PATH / "neurostars_24035.yml").open("r", encoding="utf-8") as _f:
# A loaded YAML file to test https://tinyurl.com/neurostars24035
NEUROSTARS_24035 = _f.read()
# A loaded YAML file to test https://tinyurl.com/cmicnlslack420349
diff --git a/CPAC/utils/tests/configs/github_2144.yml b/CPAC/utils/tests/configs/github_2144.yml
new file mode 100644
index 0000000000..a7d405c8ea
--- /dev/null
+++ b/CPAC/utils/tests/configs/github_2144.yml
@@ -0,0 +1,8 @@
+- site: site-1
+ subject_id: 01
+ unique_id: 02
+ derivatives_dir: /fprep/sub-0151
+- site: site-1
+ subject_id: !!str 02
+ unique_id: 02
+ derivatives_dir: /fprep/sub-0151
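
This fixture leans on YAML scalar typing: an unquoted `02` loads as the integer 2 (PyYAML resolves the leading-zero form as an octal int), while the explicit `!!str` tag preserves the string, which is what the sub-/ses- coercion test below relies on. A standalone illustration:

    import yaml

    loaded = yaml.safe_load("subject_id: 02")
    assert loaded["subject_id"] == 2  # unquoted scalar becomes an int

    tagged = yaml.safe_load("subject_id: !!str 02")
    assert tagged["subject_id"] == "02"  # explicit tag keeps the leading zero
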
diff --git a/CPAC/utils/tests/osf.py b/CPAC/utils/tests/osf.py
new file mode 100644
index 0000000000..5ddbcc0c25
--- /dev/null
+++ b/CPAC/utils/tests/osf.py
@@ -0,0 +1,44 @@
+# Copyright (C) 2025 C-PAC Developers
+
+# This file is part of C-PAC.
+
+# C-PAC is free software: you can redistribute it and/or modify it under
+# the terms of the GNU Lesser General Public License as published by the
+# Free Software Foundation, either version 3 of the License, or (at your
+# option) any later version.
+
+# C-PAC is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
+# License for more details.
+
+# You should have received a copy of the GNU Lesser General Public
+# License along with C-PAC. If not, see <https://www.gnu.org/licenses/>.
+"""Open Science Framework testing utilities."""
+
+import os
+from pathlib import Path
+
+import requests
+
+FILES = {"residuals.nii.gz": "kyqad", "regressors.1D": "xzuyf"}
+
+
+def download_file(file: str, destination: Path | str) -> Path:
+ """Download a file from the Open Science Framework."""
+ url = f"https://osf.io/download/{FILES[file]}"
+ response = requests.get(
+ url,
+ headers={"Authorization": f"Bearer {os.getenv('OSF_DATA')}"},
+ allow_redirects=True,
+ )
+ if not isinstance(destination, Path):
+ destination = Path(destination)
+ destination = destination / file if destination.is_dir() else destination
+ if destination.exists():
+ msg = f"File {destination} already exists. Please remove it before downloading."
+ raise FileExistsError(msg)
+ response.raise_for_status()
+ with open(destination, "wb") as f:
+ f.write(response.content)
+ return destination
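
Usage sketch: `download_file` reads an `OSF_DATA` token from the environment for authorization and refuses to overwrite an existing file:

    from pathlib import Path

    from CPAC.utils.tests.osf import download_file

    # downloads to <dir>/residuals.nii.gz; a second call raises FileExistsError
    local = download_file("residuals.nii.gz", Path("/tmp"))
    assert local.exists()
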
diff --git a/CPAC/utils/tests/test_bids_utils.py b/CPAC/utils/tests/test_bids_utils.py
index 57c0abef56..2b7267af94 100644
--- a/CPAC/utils/tests/test_bids_utils.py
+++ b/CPAC/utils/tests/test_bids_utils.py
@@ -16,7 +16,7 @@
# License along with C-PAC. If not, see <https://www.gnu.org/licenses/>.
"""Tests for bids_utils."""
-from logging import basicConfig, INFO
+from importlib import resources
import os
from subprocess import run
@@ -24,17 +24,18 @@
import yaml
from CPAC.utils.bids_utils import (
+ _check_value_type,
bids_gen_cpac_sublist,
cl_strip_brackets,
collect_bids_files_configs,
create_cpac_data_config,
load_cpac_data_config,
+ load_yaml_config,
sub_list_filter_by_labels,
)
from CPAC.utils.monitoring.custom_logging import getLogger
logger = getLogger("CPAC.utils.tests")
-basicConfig(format="%(message)s", level=INFO)
def create_sample_bids_structure(root_dir):
@@ -109,6 +110,19 @@ def test_gen_bids_sublist(bids_dir, test_yml, creds_path, dbg=False):
assert sublist
+def test_load_data_config_with_ints() -> None:
+ """Check that C-PAC coerces sub- and ses- ints to strings."""
+ data_config_file = resources.files("CPAC").joinpath(
+ "utils/tests/configs/github_2144.yml"
+ )
+ # make sure there are ints in the test data
+ assert _check_value_type(load_yaml_config(str(data_config_file), None))
+ # make sure there aren't ints when it's loaded through the loader
+ assert not _check_value_type(
+ load_cpac_data_config(str(data_config_file), None, None)
+ )
+
+
@pytest.mark.parametrize("t1w_label", ["acq-HCP", "acq-VNavNorm", "T1w", None])
@pytest.mark.parametrize(
"bold_label", ["task-peer_run-1", "[task-peer_run-1 task-peer_run-2]", "bold", None]
diff --git a/CPAC/utils/tests/test_datasource.py b/CPAC/utils/tests/test_datasource.py
index be7c2255c2..61ec5b655d 100644
--- a/CPAC/utils/tests/test_datasource.py
+++ b/CPAC/utils/tests/test_datasource.py
@@ -1,4 +1,4 @@
-# Copyright (C) 2019-2024 C-PAC Developers
+# Copyright (C) 2019-2025 C-PAC Developers
# This file is part of C-PAC.
@@ -14,72 +14,689 @@
# You should have received a copy of the GNU Lesser General Public
# License along with C-PAC. If not, see <https://www.gnu.org/licenses/>.
+"""Test datasource utilities."""
+
+from dataclasses import dataclass
import json
+from pathlib import Path
+from typing import Any, Literal, TypeAlias
+from unittest.mock import mock_open, patch
+from networkx.classes.digraph import DiGraph
import pytest
from CPAC.pipeline import nipype_pipeline_engine as pe
-from CPAC.utils.datasource import match_epi_fmaps
-from CPAC.utils.interfaces import Function
+from CPAC.utils.datasource import (
+ match_epi_fmaps,
+ match_epi_fmaps_function_node,
+)
from CPAC.utils.test_resources import setup_test_wf
+from CPAC.utils.utils import (
+ get_fmap_build_info,
+ get_fmap_metadata_at_build_time,
+ get_fmap_type,
+ PE_DIRECTION,
+)
+
+
+@dataclass
+class MatchEpiFmapsInputs:
+ """Store test data for `match_epi_fmaps`."""
+
+ bold_pedir: PE_DIRECTION
+ epi_fmaps: list[tuple[str, dict[str, Any]]]
+
+
+def match_epi_fmaps_inputs(
+ generate: bool, path: Path
+) -> tuple[pe.Workflow, MatchEpiFmapsInputs]:
+ """Return inputs for `~CPAC.utils.datasource.match_epi_fmaps`."""
+ if generate:
+ # good data to use
+ s3_prefix = "s3://fcp-indi/data/Projects/HBN/MRI/Site-CBIC/sub-NDARAB708LM5"
+ s3_paths = [
+ "func/sub-NDARAB708LM5_task-rest_run-1_bold.json",
+ "fmap/sub-NDARAB708LM5_dir-PA_acq-fMRI_epi.nii.gz",
+ "fmap/sub-NDARAB708LM5_dir-PA_acq-fMRI_epi.json",
+ "fmap/sub-NDARAB708LM5_dir-AP_acq-fMRI_epi.nii.gz",
+ "fmap/sub-NDARAB708LM5_dir-AP_acq-fMRI_epi.json",
+ ]
+ wf, ds, local_paths = setup_test_wf(
+ s3_prefix, s3_paths, "test_match_epi_fmaps", test_dir=str(path)
+ )
-@pytest.mark.skip(reason="needs refactoring")
-def test_match_epi_fmaps():
- # good data to use
- s3_prefix = "s3://fcp-indi/data/Projects/HBN/MRI/Site-CBIC/sub-NDARAB708LM5"
- s3_paths = [
- "func/sub-NDARAB708LM5_task-rest_run-1_bold.json",
- "fmap/sub-NDARAB708LM5_dir-PA_acq-fMRI_epi.nii.gz",
- "fmap/sub-NDARAB708LM5_dir-PA_acq-fMRI_epi.json",
- "fmap/sub-NDARAB708LM5_dir-AP_acq-fMRI_epi.nii.gz",
- "fmap/sub-NDARAB708LM5_dir-AP_acq-fMRI_epi.json",
+ opposite_pe_json = local_paths["fmap/sub-NDARAB708LM5_dir-PA_acq-fMRI_epi.json"]
+ same_pe_json = local_paths["fmap/sub-NDARAB708LM5_dir-AP_acq-fMRI_epi.json"]
+ func_json = local_paths["func/sub-NDARAB708LM5_task-rest_run-1_bold.json"]
+
+ with open(opposite_pe_json, "r") as f:
+ opposite_pe_params = json.load(f)
+
+ with open(same_pe_json, "r") as f:
+ same_pe_params = json.load(f)
+
+ with open(func_json, "r") as f:
+ func_params = json.load(f)
+ bold_pedir = func_params["PhaseEncodingDirection"]
+
+ fmap_paths_dct = {
+ "epi_PA": {
+ "scan": local_paths["fmap/sub-NDARAB708LM5_dir-PA_acq-fMRI_epi.nii.gz"],
+ "scan_parameters": opposite_pe_params,
+ },
+ "epi_AP": {
+ "scan": local_paths["fmap/sub-NDARAB708LM5_dir-AP_acq-fMRI_epi.nii.gz"],
+ "scan_parameters": same_pe_params,
+ },
+ }
+ ds.inputs.func_json = func_json
+ ds.inputs.opposite_pe_json = opposite_pe_json
+ ds.inputs.same_pe_json = same_pe_json
+ return wf, MatchEpiFmapsInputs(
+ bold_pedir,
+ [
+ (scan["scan"], scan["scan_parameters"])
+ for scan in fmap_paths_dct.values()
+ ],
+ )
+ _paths = [
+ f"{path}/sub-NDARAB514MAJ_dir-AP_acq-fMRI_epi.nii.gz",
+ f"{path}/sub-NDARAB514MAJ_dir-PA_acq-fMRI_epi.nii.gz",
]
+ for _ in _paths:
+ Path(_).touch(exist_ok=True)
+ return pe.Workflow("test_match_epi_fmaps", path), MatchEpiFmapsInputs(
+ "j-",
+ [
+ (
+ _paths[0],
+ {
+ "AcquisitionMatrixPE": 84,
+ "BandwidthPerPixelPhaseEncode": 23.81,
+ "BaseResolution": 84,
+ "BodyPartExamined": b"BRAIN",
+ "ConsistencyInfo": b"N4_VE11B_LATEST_20150530",
+ "ConversionSoftware": b"dcm2niix",
+ "ConversionSoftwareVersion": b"v1.0.20171215 GCC4.8.4",
+ "DerivedVendorReportedEchoSpacing": 0.00049999,
+ "DeviceSerialNumber": b"67080",
+ "DwellTime": 2.6e-06,
+ "EchoTime": 0.0512,
+ "EchoTrainLength": 84,
+ "EffectiveEchoSpacing": 0.00049999,
+ "FlipAngle": 90,
+ "ImageOrientationPatientDICOM": [1, 0, 0, 0, 1, 0],
+ "ImageType": ["ORIGINAL", "PRIMARY", "M", "ND", "MOSAIC"],
+ "InPlanePhaseEncodingDirectionDICOM": b"COL",
+ "MRAcquisitionType": b"2D",
+ "MagneticFieldStrength": 3,
+ "Manufacturer": b"Siemens",
+ "ManufacturersModelName": b"Prisma_fit",
+ "Modality": b"MR",
+ "PartialFourier": 1,
+ "PatientPosition": b"HFS",
+ "PercentPhaseFOV": 100,
+ "PhaseEncodingDirection": b"j-",
+ "PhaseEncodingSteps": 84,
+ "PhaseResolution": 1,
+ "PixelBandwidth": 2290,
+ "ProcedureStepDescription": b"CMI_HBN-CBIC",
+ "ProtocolName": b"cmrr_fMRI_DistortionMap_AP",
+ "PulseSequenceDetails": b"%CustomerSeq%_cmrr_mbep2d_se",
+ "ReceiveCoilActiveElements": b"HEA;HEP",
+ "ReceiveCoilName": b"Head_32",
+ "ReconMatrixPE": 84,
+ "RepetitionTime": 5.301,
+ "SAR": 0.364379,
+ "ScanOptions": b"FS",
+ "ScanningSequence": b"EP",
+ "SequenceName": b"epse2d1_84",
+ "SequenceVariant": b"SK",
+ "SeriesDescription": b"cmrr_fMRI_DistortionMap_AP",
+ "ShimSetting": [208, -10464, -5533, 615, -83, -88, 55, 30],
+ "SliceThickness": 2.4,
+ "SliceTiming": [
+ 2.64,
+ 0,
+ 2.7275,
+ 0.0875,
+ 2.815,
+ 0.175,
+ 2.9025,
+ 0.2625,
+ 2.9925,
+ 0.3525,
+ 3.08,
+ 0.44,
+ 3.1675,
+ 0.5275,
+ 3.255,
+ 0.615,
+ 3.3425,
+ 0.7025,
+ 3.4325,
+ 0.7925,
+ 3.52,
+ 0.88,
+ 3.6075,
+ 0.9675,
+ 3.695,
+ 1.055,
+ 3.785,
+ 1.1425,
+ 3.8725,
+ 1.2325,
+ 3.96,
+ 1.32,
+ 4.0475,
+ 1.4075,
+ 4.135,
+ 1.495,
+ 4.225,
+ 1.5825,
+ 4.3125,
+ 1.6725,
+ 4.4,
+ 1.76,
+ 4.4875,
+ 1.8475,
+ 4.575,
+ 1.935,
+ 4.665,
+ 2.0225,
+ 4.7525,
+ 2.1125,
+ 4.84,
+ 2.2,
+ 4.9275,
+ 2.2875,
+ 5.015,
+ 2.375,
+ 5.105,
+ 2.4625,
+ 5.1925,
+ 2.5525,
+ ],
+ "SoftwareVersions": b"syngo_MR_E11",
+ "SpacingBetweenSlices": 2.4,
+ "StationName": b"MRTRIO3TX72",
+ "TotalReadoutTime": 0.0414992,
+ "TxRefAmp": 209.923,
+ },
+ ),
+ (
+ _paths[1],
+ {
+ "AcquisitionMatrixPE": 84,
+ "BandwidthPerPixelPhaseEncode": 23.81,
+ "BaseResolution": 84,
+ "BodyPartExamined": b"BRAIN",
+ "ConsistencyInfo": b"N4_VE11B_LATEST_20150530",
+ "ConversionSoftware": b"dcm2niix",
+ "ConversionSoftwareVersion": b"v1.0.20171215 GCC4.8.4",
+ "DerivedVendorReportedEchoSpacing": 0.00049999,
+ "DeviceSerialNumber": b"67080",
+ "DwellTime": 2.6e-06,
+ "EchoTime": 0.0512,
+ "EchoTrainLength": 84,
+ "EffectiveEchoSpacing": 0.00049999,
+ "FlipAngle": 90,
+ "ImageOrientationPatientDICOM": [1, 0, 0, 0, 1, 0],
+ "ImageType": ["ORIGINAL", "PRIMARY", "M", "ND", "MOSAIC"],
+ "InPlanePhaseEncodingDirectionDICOM": b"COL",
+ "MRAcquisitionType": b"2D",
+ "MagneticFieldStrength": 3,
+ "Manufacturer": b"Siemens",
+ "ManufacturersModelName": b"Prisma_fit",
+ "Modality": b"MR",
+ "PartialFourier": 1,
+ "PatientPosition": b"HFS",
+ "PercentPhaseFOV": 100,
+ "PhaseEncodingDirection": b"j",
+ "PhaseEncodingSteps": 84,
+ "PhaseResolution": 1,
+ "PixelBandwidth": 2290,
+ "ProcedureStepDescription": b"CMI_HBN-CBIC",
+ "ProtocolName": b"cmrr_fMRI_DistortionMap_PA",
+ "PulseSequenceDetails": b"%CustomerSeq%_cmrr_mbep2d_se",
+ "ReceiveCoilActiveElements": b"HEA;HEP",
+ "ReceiveCoilName": b"Head_32",
+ "ReconMatrixPE": 84,
+ "RepetitionTime": 5.301,
+ "SAR": 0.364379,
+ "ScanOptions": b"FS",
+ "ScanningSequence": b"EP",
+ "SequenceName": b"epse2d1_84",
+ "SequenceVariant": b"SK",
+ "SeriesDescription": b"cmrr_fMRI_DistortionMap_PA",
+ "ShimSetting": [208, -10464, -5533, 615, -83, -88, 55, 30],
+ "SliceThickness": 2.4,
+ "SliceTiming": [
+ 2.64,
+ 0,
+ 2.73,
+ 0.09,
+ 2.8175,
+ 0.1775,
+ 2.905,
+ 0.265,
+ 2.9925,
+ 0.3525,
+ 3.08,
+ 0.44,
+ 3.17,
+ 0.53,
+ 3.2575,
+ 0.6175,
+ 3.345,
+ 0.705,
+ 3.4325,
+ 0.7925,
+ 3.52,
+ 0.88,
+ 3.61,
+ 0.97,
+ 3.6975,
+ 1.0575,
+ 3.785,
+ 1.145,
+ 3.8725,
+ 1.2325,
+ 3.9625,
+ 1.32,
+ 4.05,
+ 1.41,
+ 4.1375,
+ 1.4975,
+ 4.225,
+ 1.585,
+ 4.3125,
+ 1.6725,
+ 4.4025,
+ 1.76,
+ 4.49,
+ 1.85,
+ 4.5775,
+ 1.9375,
+ 4.665,
+ 2.025,
+ 4.7525,
+ 2.1125,
+ 4.8425,
+ 2.2,
+ 4.93,
+ 2.29,
+ 5.0175,
+ 2.3775,
+ 5.105,
+ 2.465,
+ 5.1925,
+ 2.5525,
+ ],
+ "SoftwareVersions": b"syngo_MR_E11",
+ "SpacingBetweenSlices": 2.4,
+ "StationName": b"MRTRIO3TX72",
+ "TotalReadoutTime": 0.0414992,
+ "TxRefAmp": 209.923,
+ },
+ ),
+ ],
+ )
- wf, ds, local_paths = setup_test_wf(s3_prefix, s3_paths, "test_match_epi_fmaps")
- opposite_pe_json = local_paths["fmap/sub-NDARAB708LM5_dir-PA_acq-fMRI_epi.json"]
- same_pe_json = local_paths["fmap/sub-NDARAB708LM5_dir-AP_acq-fMRI_epi.json"]
- func_json = local_paths["func/sub-NDARAB708LM5_task-rest_run-1_bold.json"]
+RunType: TypeAlias = Literal["nipype"] | Literal["direct"]
+Direction: TypeAlias = Literal["opposite"] | Literal["same"]
- with open(opposite_pe_json, "r") as f:
- opposite_pe_params = json.load(f)
- with open(same_pe_json, "r") as f:
- same_pe_params = json.load(f)
+@pytest.mark.parametrize("generate", [True, False])
+def test_match_epi_fmaps(generate: bool, tmp_path: Path) -> None:
+ """Test `~CPAC.utils.datasource.match_epi_fmaps`."""
+ wf, data = match_epi_fmaps_inputs(generate, tmp_path)
- with open(func_json, "r") as f:
- func_params = json.load(f)
- bold_pedir = func_params["PhaseEncodingDirection"]
+ match_fmaps = match_epi_fmaps_function_node()
+ match_fmaps.inputs.bold_pedir = data.bold_pedir
+ match_fmaps.inputs.epi_fmap_one = data.epi_fmaps[0][0]
+ match_fmaps.inputs.epi_fmap_params_one = data.epi_fmaps[0][1]
+ match_fmaps.inputs.epi_fmap_two = data.epi_fmaps[1][0]
+ match_fmaps.inputs.epi_fmap_params_two = data.epi_fmaps[1][1]
- fmap_paths_dct = {
- "epi_PA": {
- "scan": local_paths["fmap/sub-NDARAB708LM5_dir-PA_acq-fMRI_epi.nii.gz"],
- "scan_parameters": opposite_pe_params,
- },
- "epi_AP": {
- "scan": local_paths["fmap/sub-NDARAB708LM5_dir-AP_acq-fMRI_epi.nii.gz"],
- "scan_parameters": same_pe_params,
+ wf.add_nodes([match_fmaps])
+
+ graph: DiGraph = wf.run()
+ result = list(graph.nodes)[-1].run()
+ str_outputs: dict[RunType, dict[Direction, str]] = {
+ "nipype": {
+ "opposite": result.outputs.opposite_pe_epi,
+ "same": result.outputs.same_pe_epi,
},
+ "direct": {},
}
+ path_outputs: dict[RunType, dict[Direction, Path]] = {"nipype": {}, "direct": {}}
+ str_outputs["direct"]["opposite"], str_outputs["direct"]["same"] = match_epi_fmaps(
+ data.bold_pedir,
+ data.epi_fmaps[0][0],
+ data.epi_fmaps[0][1],
+ data.epi_fmaps[1][0],
+ data.epi_fmaps[1][1],
+ )
+ directions: list[Direction] = ["opposite", "same"]
+ runtypes: list[RunType] = ["nipype", "direct"]
+ for direction in directions:
+ for runtype in runtypes:
+ path_outputs[runtype][direction] = Path(str_outputs[runtype][direction])
+ assert path_outputs[runtype][direction].exists()
+ assert (
+ path_outputs["nipype"][direction].name
+ == path_outputs["direct"][direction].name
+ )
+
- match_fmaps = pe.Node(
- Function(
- input_names=["fmap_dct", "bold_pedir"],
- output_names=["opposite_pe_epi", "same_pe_epi"],
- function=match_epi_fmaps,
- as_module=True,
+@pytest.mark.parametrize(
+ "metadata, expected_type",
+ [
+ # Case 1: Phase-difference map (phasediff) - REQUIRED: EchoTime1 and EchoTime2
+ ({"EchoTime1": 0.00600, "EchoTime2": 0.00746}, "phasediff"),
+ ({"EchoTime1": 0.004, "EchoTime2": 0.006}, "phasediff"),
+ # Case 2: Single phase map (phase) - REQUIRED: EchoTime, but NOT PhaseEncodingDirection
+ ({"EchoTime": 0.00746}, "phase"),
+ ({"EchoTime": 0.004}, "phase"),
+ # Case 3: EPI field maps (epi) - REQUIRED: PhaseEncodingDirection
+ ({"PhaseEncodingDirection": "j-"}, "epi"),
+ ({"PhaseEncodingDirection": "j"}, "epi"),
+ ({"PhaseEncodingDirection": "i"}, "epi"),
+ ({"PhaseEncodingDirection": "i-"}, "epi"),
+ ({"PhaseEncodingDirection": "k"}, "epi"),
+ ({"PhaseEncodingDirection": "k-"}, "epi"),
+ # Edge cases and invalid inputs
+ ({}, None), # Empty metadata
+ # Priority testing - phasediff should take precedence over everything
+ ({"EchoTime1": 0.006, "EchoTime2": 0.007, "EchoTime": 0.006}, "phasediff"),
+ (
+ {"EchoTime1": 0.006, "EchoTime2": 0.007, "PhaseEncodingDirection": "j-"},
+ "phasediff",
),
- name="match_epi_fmaps",
+ # EPI should take precedence when PhaseEncodingDirection is present (even with EchoTime)
+ ({"EchoTime": 0.006, "PhaseEncodingDirection": "j-"}, "epi"),
+ # Test with optional fields that might be present (but shouldn't affect detection)
+ (
+ {
+ "EchoTime1": 0.006,
+ "EchoTime2": 0.007,
+ "IntendedFor": "bids::sub-01/func/sub-01_task-motor_bold.nii.gz",
+ },
+ "phasediff",
+ ),
+ ({"PhaseEncodingDirection": "j-", "TotalReadoutTime": 0.095}, "epi"),
+ ({"EchoTime": 0.006, "TotalReadoutTime": 0.095}, "phase"),
+ # Test invalid PhaseEncodingDirection values (should return epi for valid values)
+ (
+ {"PhaseEncodingDirection": "invalid"},
+ "epi",
+ ), # Current implementation returns epi for any PE direction
+ (
+ {"PhaseEncodingDirection": "AP"},
+ "epi",
+ ), # Current implementation returns epi for any PE direction
+ (
+ {"PhaseEncodingDirection": "PA"},
+ "epi",
+ ), # Current implementation returns epi for any PE direction
+ (
+ {"PhaseEncodingDirection": ""},
+ "epi",
+ ), # Current implementation returns epi for any PE direction
+ # Test fieldmap type (currently implemented and working)
+ ({"Units": "rad/s"}, "fieldmap"),
+ ({"Units": "Hz"}, "fieldmap"),
+ ({"Units": "hz"}, "fieldmap"),
+ ({"Units": "T"}, "fieldmap"),
+ ({"Units": "Tesla"}, "fieldmap"),
+ ({"Units": "hertz"}, "fieldmap"),
+ # Mixed cases with Units - fieldmap takes precedence in current implementation
+ (
+ {"Units": "Hz", "PhaseEncodingDirection": "j-"},
+ "fieldmap",
+ ), # fieldmap takes precedence
+ (
+ {"EchoTime": 0.006, "Units": "Hz"},
+ "phase",
+ ), # Phase takes precedence over fieldmap
+ # Test with bytes values (common in real data) - current implementation handles these
+ (
+ {"PhaseEncodingDirection": b"j-"},
+ "epi",
+ ), # Current implementation returns epi for bytes
+ # Test case sensitivity - current implementation handles these
+ (
+ {"PhaseEncodingDirection": "J-"},
+ "epi",
+ ), # Current implementation returns epi regardless of case
+ ],
+)
+def test_get_fmap_type_dict_input(metadata: dict, expected_type: str | None) -> None:
+ """Test `get_fmap_type` with dictionary input using only required BIDS fields."""
+ result = get_fmap_type(metadata)
+ assert result == expected_type
+
+
+def test_get_fmap_type_real_world_examples() -> None:
+ """Test `get_fmap_type` with realistic BIDS metadata examples (required fields only)."""
+ # Real-world phasediff example (only required fields)
+ phasediff_metadata = {
+ "EchoTime1": 0.00600,
+ "EchoTime2": 0.00746,
+ # Optional fields that might be present:
+ "IntendedFor": ["bids::sub-01/func/sub-01_task-motor_bold.nii.gz"],
+ }
+ assert get_fmap_type(phasediff_metadata) == "phasediff"
+
+ # Real-world fieldmap example (only required fields)
+ fieldmap_metadata = {
+ "Units": "rad/s",
+ # Optional fields that might be present:
+ "IntendedFor": "bids::sub-01/func/sub-01_task-motor_bold.nii.gz",
+ }
+ assert get_fmap_type(fieldmap_metadata) == "fieldmap"
+
+ # Real-world EPI example (only required fields)
+ epi_metadata = {
+ "PhaseEncodingDirection": "j-",
+ # Optional fields that might be present:
+ "TotalReadoutTime": 0.095,
+ "IntendedFor": "bids::sub-01/func/sub-01_task-motor_bold.nii.gz",
+ }
+ assert get_fmap_type(epi_metadata) == "epi"
+
+ # Real-world phase example (only required fields)
+ phase_metadata = {"EchoTime": 0.00746}
+ assert get_fmap_type(phase_metadata) == "phase"
+
+
+class TestGetFmapMetadataAtBuildTime:
+ """Test get_fmap_metadata_at_build_time function."""
+
+ def test_missing_fmap_key(self):
+ """Test when fieldmap key doesn't exist in sub_dict."""
+ sub_dict = {"fmap": {"other_key": {}}}
+ result = get_fmap_metadata_at_build_time(sub_dict, "missing_key", "", "")
+ assert result is None
+
+ def test_missing_scan_parameters(self):
+ """Test when scan_parameters field is missing."""
+ sub_dict = {"fmap": {"test_key": {"scan": "path/to/scan.nii.gz"}}}
+ result = get_fmap_metadata_at_build_time(sub_dict, "test_key", "", "")
+ assert result is None
+
+ def test_direct_dict_metadata(self):
+ """Test when metadata is provided as a direct dictionary."""
+ metadata = {"EchoTime1": 0.006, "EchoTime2": 0.007}
+ sub_dict = {"fmap": {"test_key": {"scan_parameters": metadata}}}
+ result = get_fmap_metadata_at_build_time(sub_dict, "test_key", "", "")
+ assert result == metadata
+
+ @patch("builtins.open", new_callable=mock_open, read_data='{"EchoTime": 0.006}')
+ @patch("os.path.exists", return_value=True)
+ def test_json_file_metadata(self, mock_exists, mock_file):
+ """Test loading metadata from JSON file."""
+ sub_dict = {"fmap": {"test_key": {"scan_parameters": "/path/to/metadata.json"}}}
+ result = get_fmap_metadata_at_build_time(sub_dict, "test_key", "", "")
+ assert result == {"EchoTime": 0.006}
+ mock_file.assert_called_once_with(
+ "/path/to/metadata.json", "r", encoding="utf-8"
+ )
+
+ @patch("os.path.exists", return_value=False)
+ def test_nonexistent_file(self, mock_exists):
+ """Test when JSON file doesn't exist."""
+ sub_dict = {"fmap": {"test_key": {"scan_parameters": "/nonexistent/file.json"}}}
+ result = get_fmap_metadata_at_build_time(sub_dict, "test_key", "", "")
+ assert result is None
+
+ @patch("builtins.open", side_effect=json.JSONDecodeError("Invalid JSON", "", 0))
+ @patch("os.path.exists", return_value=True)
+ def test_invalid_json(self, mock_exists, mock_file):
+ """Test when JSON file contains invalid JSON."""
+ sub_dict = {"fmap": {"test_key": {"scan_parameters": "/path/to/invalid.json"}}}
+ result = get_fmap_metadata_at_build_time(sub_dict, "test_key", "", "")
+ assert result is None
+
+ def test_non_json_file(self):
+ """Test when file path doesn't end with .json."""
+ sub_dict = {"fmap": {"test_key": {"scan_parameters": "/path/to/file.txt"}}}
+ result = get_fmap_metadata_at_build_time(sub_dict, "test_key", "", "")
+ assert result is None
+
+ def test_exception_handling(self):
+ """Test general exception handling."""
+ sub_dict = {"fmap": {"test_key": {"scan_parameters": 123}}} # Invalid type
+ result = get_fmap_metadata_at_build_time(sub_dict, "test_key", "", "")
+ assert result is None
+
+
+class TestGetFmapBuildInfo:
+ """Test get_fmap_build_info function."""
+
+ def test_none_metadata_raises_error(self):
+ """Test that None metadata raises ValueError."""
+ with pytest.raises(
+ ValueError, match="Fieldmap metadata dictionary is required"
+ ):
+ get_fmap_build_info(None)
+
+ def test_empty_metadata_raises_error(self):
+ """Test that empty metadata raises ValueError."""
+ with pytest.raises(
+ ValueError, match="Fieldmap metadata dictionary is required"
+ ):
+ get_fmap_build_info({})
+
+ def test_unknown_fmap_type_raises_error(self):
+ """Test that unknown fieldmap type raises ValueError."""
+ metadata = {"SomeUnknownField": "value"}
+ with pytest.raises(ValueError, match="Could not determine fieldmap type"):
+ get_fmap_build_info(metadata)
+
+ def test_phase_fieldmap_info(self):
+ """Test phase fieldmap build info."""
+ metadata = {"EchoTime": 0.006}
+ result = get_fmap_build_info(metadata)
+ expected = {
+ "fmap_type": "phase",
+ "needs_echo_times": True,
+ "needs_phasediff_processing": True,
+ "is_epi": False,
+ }
+ assert result == expected
+
+ def test_phasediff_fieldmap_info(self):
+ """Test phasediff fieldmap build info."""
+ metadata = {"EchoTime1": 0.006, "EchoTime2": 0.007}
+ result = get_fmap_build_info(metadata)
+ expected = {
+ "fmap_type": "phasediff",
+ "needs_echo_times": True,
+ "needs_phasediff_processing": True,
+ "is_epi": False,
+ }
+ assert result == expected
+
+ def test_epi_fieldmap_info(self):
+ """Test EPI fieldmap build info."""
+ metadata = {"PhaseEncodingDirection": "j-"}
+ result = get_fmap_build_info(metadata)
+ expected = {
+ "fmap_type": "epi",
+ "needs_echo_times": True,
+ "needs_phasediff_processing": False,
+ "is_epi": True,
+ }
+ assert result == expected
+
+ @pytest.mark.parametrize(
+ "metadata,expected_fmap_type",
+ [
+ ({"EchoTime": 0.006}, "phase"),
+ ({"EchoTime1": 0.006, "EchoTime2": 0.007}, "phasediff"),
+ ({"PhaseEncodingDirection": "j-"}, "epi"),
+ ],
)
- match_fmaps.inputs.fmap_dct = fmap_paths_dct
- match_fmaps.inputs.bold_pedir = bold_pedir
+ def test_various_fieldmap_types(self, metadata, expected_fmap_type):
+ """Test that various fieldmap types are correctly identified."""
+ result = get_fmap_build_info(metadata)
+ assert result["fmap_type"] == expected_fmap_type
+
+ def test_real_world_metadata_examples(self):
+ """Test with realistic metadata examples from the existing tests."""
+ # Use some of the test data from the existing test_get_fmap_type tests
+
+ # Phasediff example
+ phasediff_metadata = {
+ "EchoTime1": 0.00600,
+ "EchoTime2": 0.00746,
+ "IntendedFor": ["bids::sub-01/func/sub-01_task-motor_bold.nii.gz"],
+ }
+ result = get_fmap_build_info(phasediff_metadata)
+ assert result["fmap_type"] == "phasediff"
+ assert result["needs_echo_times"] is True
+ assert result["needs_phasediff_processing"] is True
+ assert result["is_epi"] is False
- ds.inputs.func_json = func_json
- ds.inputs.opposite_pe_json = opposite_pe_json
- ds.inputs.same_pe_json = same_pe_json
+ # EPI example
+ epi_metadata = {
+ "PhaseEncodingDirection": "j-",
+ "TotalReadoutTime": 0.095,
+ "IntendedFor": "bids::sub-01/func/sub-01_task-motor_bold.nii.gz",
+ }
+ result = get_fmap_build_info(epi_metadata)
+ assert result["fmap_type"] == "epi"
+ assert result["needs_echo_times"] is True
+ assert result["needs_phasediff_processing"] is False
+ assert result["is_epi"] is True
- wf.connect(match_fmaps, "opposite_pe_epi", ds, "should_be_dir-PA")
- wf.connect(match_fmaps, "same_pe_epi", ds, "should_be_dir-AP")
+ def test_phase_fieldmap_with_extra_fields(self):
+ """Test phase fieldmap with additional optional fields."""
+ metadata = {
+ "EchoTime": 0.006,
+ "IntendedFor": "bids::sub-01/func/sub-01_task-motor_bold.nii.gz",
+ "B0FieldIdentifier": "my_fieldmap",
+ }
+ result = get_fmap_build_info(metadata)
+ assert result["fmap_type"] == "phase"
+ assert result["needs_echo_times"] is True
+ assert result["needs_phasediff_processing"] is True
+ assert result["is_epi"] is False
- wf.run()
+ def test_phasediff_fieldmap_with_extra_fields(self):
+ """Test phasediff fieldmap with additional optional fields."""
+ metadata = {
+ "EchoTime1": 0.006,
+ "EchoTime2": 0.007,
+ "IntendedFor": ["bids::sub-01/func/sub-01_task-motor_bold.nii.gz"],
+ "B0FieldIdentifier": "my_phasediff",
+ }
+ result = get_fmap_build_info(metadata)
+ assert result["fmap_type"] == "phasediff"
+ assert result["needs_echo_times"] is True
+ assert result["needs_phasediff_processing"] is True
+ assert result["is_epi"] is False
diff --git a/CPAC/utils/tests/test_symlinks.py b/CPAC/utils/tests/test_symlinks.py
index 570d2e9b74..d271ea752d 100644
--- a/CPAC/utils/tests/test_symlinks.py
+++ b/CPAC/utils/tests/test_symlinks.py
@@ -1,4 +1,4 @@
-# Copyright (C) 2019-2024 C-PAC Developers
+# Copyright (C) 2019-2025 C-PAC Developers
# This file is part of C-PAC.
@@ -14,37 +14,30 @@
# You should have received a copy of the GNU Lesser General Public
# License along with C-PAC. If not, see <https://www.gnu.org/licenses/>.
-from logging import basicConfig, INFO
-import os
-import tempfile
+"""Test symbolic links."""
-import pkg_resources as p
+from importlib.resources import as_file, files
+import tempfile
from CPAC.utils.monitoring.custom_logging import getLogger
from CPAC.utils.symlinks import create_symlinks
logger = getLogger("CPAC.utils.tests")
-basicConfig(format="%(message)s", level=INFO)
-
-mocked_outputs = p.resource_filename(
- "CPAC", os.path.join("utils", "tests", "test_symlinks-outputs.txt")
-)
def test_symlinks():
temp_dir = tempfile.mkdtemp(suffix="test_symlinks")
- paths = []
- with open(mocked_outputs, "r") as f:
- for _path in f.readlines():
- path = _path
- path = path.strip()
- if path:
- paths += [path]
-
- create_symlinks(
- temp_dir, "sym_links", "pipeline_benchmark-FNIRT", "1019436_1", paths
- )
+ paths: list[str] = []
+ with as_file(files("CPAC").joinpath("utils/tests/test_symlinks-outputs.txt")) as _f:
+ with _f.open("r") as f:
+ for _path in f.readlines():
+ path = _path
+ path = path.strip()
+ if path:
+ paths += [path]
+
+ create_symlinks(temp_dir, "pipeline_benchmark-FNIRT", "1019436_1", paths)
logger.info("Links created at %s", temp_dir)
diff --git a/CPAC/utils/tests/test_trimmer.py b/CPAC/utils/tests/test_trimmer.py
index 1d1f7361f7..60e2ceff2f 100644
--- a/CPAC/utils/tests/test_trimmer.py
+++ b/CPAC/utils/tests/test_trimmer.py
@@ -1,3 +1,21 @@
+# Copyright (C) 2020-2025 C-PAC Developers
+
+# This file is part of C-PAC.
+
+# C-PAC is free software: you can redistribute it and/or modify it under
+# the terms of the GNU Lesser General Public License as published by the
+# Free Software Foundation, either version 3 of the License, or (at your
+# option) any later version.
+
+# C-PAC is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
+# License for more details.
+
+# You should have received a copy of the GNU Lesser General Public
+# License along with C-PAC. If not, see <https://www.gnu.org/licenses/>.
+"""Test The Trimmer."""
+
from copy import copy
import tempfile
@@ -11,11 +29,11 @@ def accept_all(object, name, value):
@pytest.mark.skip(reason="needs refactored")
def test_trimmer():
+ """Test The Trimmer."""
import os
- import pkg_resources as p
-
from CPAC.pipeline.cpac_pipeline import build_workflow
+ from CPAC.resources.configs import CONFIGS_PATH
from CPAC.utils.configuration import Configuration
from CPAC.utils.trimmer import (
compute_datasink_dirs,
@@ -24,18 +42,13 @@ def test_trimmer():
the_trimmer,
)
- pipe_config = p.resource_filename(
- "CPAC", os.path.join("resources", "configs", "pipeline_config_template.yml")
- )
-
- data_config = p.resource_filename(
- "CPAC", os.path.join("resources", "configs", "data_config_S3-BIDS-ABIDE.yml")
- )
+ pipe_config = CONFIGS_PATH / "pipeline_config_template.yml"
+ data_config = CONFIGS_PATH / "data_config_S3-BIDS-ABIDE.yml"
- data_config = yaml.safe_load(open(data_config, "r"))
+ data_config = yaml.safe_load(data_config.open("r"))
sub_dict = data_config[0]
- c = Configuration(yaml.safe_load(open(pipe_config, "r")))
+ c = Configuration(yaml.safe_load(pipe_config.open("r")))
temp_dir = tempfile.mkdtemp()
c.logDirectory = temp_dir
c.workingDirectory = temp_dir
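
`CONFIGS_PATH` is imported from `CPAC.resources.configs` but not defined in this changeset; given the `files(...)` pattern adopted in `CPAC/utils/utils.py` below, it is presumably something like:

    from importlib.resources import files

    # hypothetical definition, mirroring CONFIGS_DIR in CPAC/utils/utils.py
    CONFIGS_PATH = files("CPAC.resources.configs")
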
diff --git a/CPAC/utils/tests/test_utils.py b/CPAC/utils/tests/test_utils.py
index ab896c6029..6c9d111048 100644
--- a/CPAC/utils/tests/test_utils.py
+++ b/CPAC/utils/tests/test_utils.py
@@ -1,15 +1,33 @@
+# Copyright (C) 2018-2025 C-PAC Developers
+
+# This file is part of C-PAC.
+
+# C-PAC is free software: you can redistribute it and/or modify it under
+# the terms of the GNU Lesser General Public License as published by the
+# Free Software Foundation, either version 3 of the License, or (at your
+# option) any later version.
+
+# C-PAC is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
+# License for more details.
+
+# You should have received a copy of the GNU Lesser General Public
+# License along with C-PAC. If not, see <https://www.gnu.org/licenses/>.
"""Tests of CPAC utility functions."""
+from datetime import datetime, timedelta
import multiprocessing
from unittest import mock
from _pytest.logging import LogCaptureFixture
import pytest
-from CPAC.func_preproc import get_motion_ref
-from CPAC.pipeline.nodeblock import NodeBlockFunction
+from CPAC.func_preproc.func_motion import get_motion_ref
+from CPAC.pipeline.nodeblock import NodeBlockFunction, POOL_RESOURCE_MAPPING
from CPAC.utils.configuration import Configuration
from CPAC.utils.monitoring.custom_logging import log_subprocess
+from CPAC.utils.monitoring.monitoring import DatetimeWithSafeNone, OptionalDatetime
from CPAC.utils.tests import old_functions
from CPAC.utils.utils import (
check_config_resources,
@@ -157,6 +175,7 @@ def test_NodeBlock_option_SSOT(): # pylint: disable=invalid-name
with pytest.raises(ValueError) as value_error:
get_motion_ref(None, None, None, None, opt="chaos")
error_message = str(value_error.value).rstrip()
+ assert get_motion_ref.option_val
for opt in get_motion_ref.option_val:
assert f"'{opt}'" in error_message
assert error_message.endswith("Tool input: 'chaos'")
@@ -168,3 +187,56 @@ def test_system_deps():
Raises an exception if dependencies are not met.
"""
check_system_deps(*([True] * 4))
+
+
+def check_expected_keys(
+ sink_native_transforms: bool, outputs: POOL_RESOURCE_MAPPING, expected_keys: set
+) -> None:
+ """Check if expected keys are present in outputs based on sink_native_transforms."""
+ if sink_native_transforms:
+ assert expected_keys.issubset(
+ outputs.keys()
+ ), f"Expected outputs {expected_keys} not found in {outputs.keys()}"
+ else:
+ assert not expected_keys.intersection(
+ outputs.keys()
+ ), f"Outputs {expected_keys} should not be present when sink_native_transforms is Off"
+
+
+@pytest.mark.parametrize(
+ "t1",
+ [
+ datetime.now(),
+ datetime.now().astimezone(),
+ datetime.isoformat(datetime.now()),
+ None,
+ ],
+)
+@pytest.mark.parametrize(
+ "t2",
+ [
+ datetime.now(),
+ datetime.now().astimezone(),
+ datetime.isoformat(datetime.now()),
+ None,
+ ],
+)
+def test_datetime_with_safe_none(t1: OptionalDatetime, t2: OptionalDatetime):
+ """Test DatetimeWithSafeNone class works with datetime and None."""
+ originals = t1, t2
+ t1 = DatetimeWithSafeNone(t1)
+ t2 = DatetimeWithSafeNone(t2)
+ if t1 and t2:
+ _tzinfos = [getattr(_, "tzinfo", None) for _ in originals]
+ if (
+ all(isinstance(_, datetime) for _ in originals)
+ and any(_tzinfos)
+ and not all(_tzinfos)
+ ):
+ with pytest.raises(TypeError):
+ originals[1] - originals[0] # type: ignore[reportOperatorIssue]
+ _t1, _t2 = DatetimeWithSafeNone.sync_tz(*originals) # type: ignore[reportArgumentType]
+ assert isinstance(_t2 - _t1, timedelta)
+ assert isinstance(t2 - t1, timedelta)
+ else:
+ assert t2 - t1 == timedelta(0)
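
The mixed-timezone branch above relies on a stdlib rule worth spelling out: subtracting a naive `datetime` from an aware one raises `TypeError`, so a `sync_tz`-style helper has to normalize both operands before differencing. A plain-stdlib illustration:

    from datetime import datetime, timezone

    naive = datetime(2025, 1, 1, 12, 0)
    aware = naive.replace(tzinfo=timezone.utc)

    try:
        _ = aware - naive  # mixing aware and naive operands
    except TypeError:
        # one possible normalization: assume the naive value is UTC
        delta = aware - naive.replace(tzinfo=timezone.utc)
        assert delta.total_seconds() == 0
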
diff --git a/CPAC/utils/utils.py b/CPAC/utils/utils.py
index b459262993..a2b60f8390 100644
--- a/CPAC/utils/utils.py
+++ b/CPAC/utils/utils.py
@@ -1,4 +1,4 @@
-# Copyright (C) 2012-2024 C-PAC Developers
+# Copyright (C) 2012-2025 C-PAC Developers
# This file is part of C-PAC.
@@ -20,32 +20,27 @@
from copy import deepcopy
import fnmatch
import gzip
-from itertools import repeat
+from importlib.resources import files
import json
import numbers
import os
import pickle
-from typing import Any, Literal, Optional, overload
+from typing import Any, Literal, Optional, overload, TypedDict
import numpy as np
from voluptuous.error import Invalid
import yaml
+from CPAC.resources import configs
from CPAC.utils.configuration import Configuration
from CPAC.utils.docs import deprecated
from CPAC.utils.interfaces.function import Function
from CPAC.utils.monitoring import FMLOGGER, WFLOGGER
-CONFIGS_DIR = os.path.abspath(
- os.path.join(__file__, *repeat(os.path.pardir, 2), "resources/configs/")
-)
-with open(
- os.path.join(CONFIGS_DIR, "1.7-1.8-nesting-mappings.yml"), "r", encoding="utf-8"
-) as _f:
+CONFIGS_DIR = files(configs)
+with (CONFIGS_DIR / "1.7-1.8-nesting-mappings.yml").open("r", encoding="utf-8") as _f:
NESTED_CONFIG_MAPPING = yaml.safe_load(_f)
-with open(
- os.path.join(CONFIGS_DIR, "1.7-1.8-deprecations.yml"), "r", encoding="utf-8"
-) as _f:
+with (CONFIGS_DIR / "1.7-1.8-deprecations.yml").open("r", encoding="utf-8") as _f:
NESTED_CONFIG_DEPRECATIONS = yaml.safe_load(_f)
PE_DIRECTION = Literal["i", "i-", "j", "j-", "k", "k-", ""]
VALID_PATTERNS = [
@@ -73,7 +68,7 @@ def get_last_prov_entry(prov):
return prov[-1]
-def check_prov_for_regtool(prov):
+def check_prov_for_regtool(prov) -> Optional[Literal["ants", "fsl"]]:
"""Check provenance for registration tool."""
last_entry = get_last_prov_entry(prov)
last_node = last_entry.split(":")[1]
@@ -101,22 +96,6 @@ def check_prov_for_regtool(prov):
return None
-def check_prov_for_motion_tool(prov):
- """Check provenance for motion correction tool."""
- last_entry = get_last_prov_entry(prov)
- last_node = last_entry.split(":")[1]
- if "3dvolreg" in last_node.lower():
- return "3dvolreg"
- if "mcflirt" in last_node.lower():
- return "mcflirt"
- # check entire prov
- if "3dvolreg" in str(prov):
- return "3dvolreg"
- if "mcflirt" in str(prov):
- return "mcflirt"
- return None
-
-
def _get_flag(in_flag):
return in_flag
@@ -525,6 +504,9 @@ def check(self, val_to_check: str, throw_exception: bool):
msg = f"Missing value for {val_to_check} for participant {self.subject}."
raise ValueError(msg)
+ if isinstance(ret_val, bytes):
+ ret_val = ret_val.decode("utf-8")
+
return ret_val
@overload
@@ -631,6 +613,8 @@ def fetch_and_convert(
f" ≅ '{matched_keys[1]}'."
)
if convert_to:
+ if isinstance(raw_value, bytes):
+ raw_value = raw_value.decode("utf-8")
try:
value = convert_to(raw_value)
except (TypeError, ValueError):
@@ -957,6 +941,47 @@ def add_afni_prefix(tpattern):
return tpattern
+def afni_3dwarp(in_file, out_file=None, deoblique=False):
+ """
+ Run AFNI's 3dWarp command with optional deobliquing.
+
+ Parameters
+ ----------
+ in_file : str
+ Path to the input NIfTI file.
+ out_file : str or None
+ Path for the output file. If None, a name will be generated in the current directory.
+ deoblique : bool
+ If True, adds the '-deoblique' flag to the 3dWarp command.
+
+ Returns
+ -------
+ out_file : str
+ Path to the output file.
+ """
+ import os
+ import subprocess
+
+ if not out_file:
+ base = os.path.basename(in_file)
+ base = base.replace(".nii.gz", "").replace(".nii", "")
+ suffix = "_deoblique" if deoblique else "_warped"
+ out_file = os.path.abspath(f"{base}{suffix}.nii.gz")
+
+ cmd = ["3dWarp"]
+ if deoblique:
+ cmd.append("-deoblique")
+ cmd += ["-prefix", out_file, in_file]
+
+ try:
+ subprocess.check_output(cmd, stderr=subprocess.STDOUT)
+ except subprocess.CalledProcessError as e:
+ msg = f"3dWarp failed with error:\n{e.output.decode()}"
+        raise RuntimeError(msg) from e
+
+ return out_file
+
+
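Assuming AFNI's `3dWarp` is on `PATH`, the helper can be called directly (the input filename is illustrative):

    from CPAC.utils.utils import afni_3dwarp

    # writes ./sub-01_T1w_deoblique.nii.gz and returns its absolute path
    out = afni_3dwarp("sub-01_T1w.nii.gz", deoblique=True)
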
def write_to_log(workflow, log_dir, index, inputs, scan_id):
"""Write into log file the status of the workflow run."""
import datetime
@@ -1117,10 +1142,10 @@ def create_log(wf_name="log", scan_id=None):
def find_files(directory, pattern):
"""Find files in directory."""
- for root, dirs, files in os.walk(directory):
- for basename in files:
+ for _root, _dirs, _files in os.walk(directory):
+ for basename in _files:
if fnmatch.fnmatch(basename, pattern):
- filename = os.path.join(root, basename)
+ filename = os.path.join(_root, basename)
yield filename
@@ -1414,8 +1439,8 @@ def repickle(directory): # noqa: T20
-------
None
"""
- for root, _, files in os.walk(directory, followlinks=True):
- for fn in files:
+ for root, _, _files in os.walk(directory, followlinks=True):
+ for fn in _files:
p = os.path.join(root, fn)
if fn.endswith(".pkl"):
if _pickle2(p):
@@ -1605,16 +1630,6 @@ def _changes_1_8_0_to_1_8_1(config_dict: dict) -> dict:
del config_dict["functional_preproc"]["motion_estimates_and_correction"][
"calculate_motion_first"
]
- config_dict = set_nested_value(
- config_dict,
- [
- "functional_preproc",
- "motion_estimates_and_correction",
- "motion_estimates",
- "calculate_motion_first",
- ],
- calculate_motion_first,
- )
return config_dict
@@ -2631,3 +2646,203 @@ def _replace_in_value_list(current_value, replacement_tuple):
for v in current_value
if bool(v) and v not in {"None", "Off", ""}
]
+
+
+def flip_orientation_code(code):
+ """Reverts an orientation code by flipping R↔L, A↔P, and I↔S."""
+ flip_dict = {"R": "L", "L": "R", "A": "P", "P": "A", "I": "S", "S": "I"}
+ return "".join(flip_dict[c] for c in code)
+
+
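For example, with standard orientation strings:

    from CPAC.utils.utils import flip_orientation_code

    assert flip_orientation_code("RPI") == "LAS"
    # flipping twice is the identity
    assert flip_orientation_code(flip_orientation_code("LPS")) == "LPS"
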
+def get_fmap_type(metadata):
+ """Determine the type of field map from metadata.
+
+ reference: https://bids-specification.readthedocs.io/en/v1.10.0/modality-specific-files/magnetic-resonance-imaging-data.html#case-1-phase-difference-map-and-at-least-one-magnitude-image
+
+ Parameters
+ ----------
+ metadata : dict or str
+ Metadata dictionary or path to a JSON file containing metadata.
+
+ Returns
+ -------
+ str or None
+ Returns the type of field map as a string:
+ - "phasediff" for phase difference maps with two echo times
+ - "phase" for single echo phase maps
+ - "fieldmap" for field maps with units like Hz, rad/s, T, or Tesla
+ - "epi" for EPI field maps with phase encoding direction
+ """
+ if not isinstance(metadata, dict):
+ if isinstance(metadata, str) and ".json" in metadata:
+ import json
+
+ try:
+ with open(metadata, "r", encoding="utf-8") as f:
+ metadata = json.load(f)
+ except (FileNotFoundError, json.JSONDecodeError):
+ return None
+ else:
+ return None
+
+ # Check for required BIDS fields only
+ match (
+ "EchoTime1" in metadata,
+ "EchoTime2" in metadata,
+ "EchoTime" in metadata,
+ "Units" in metadata,
+ "PhaseEncodingDirection" in metadata,
+ ):
+ case (True, True, _, _, _):
+ # Case 1: Phase-difference map (REQUIRED: EchoTime1 AND EchoTime2)
+ return "phasediff"
+ case (False, False, True, _, False):
+ # Case 2: Single phase map (REQUIRED: EchoTime, but NOT EchoTime1/2)
+ return "phase"
+ case (_, _, _, True, _):
+ # Case 3: Direct field mapping (REQUIRED: Units)
+ units = metadata["Units"].lower()
+ if units in ["hz", "rad/s", "t", "tesla", "hertz"]:
+ return "fieldmap"
+ return None
+ case (_, _, _, _, True):
+ # Case 4: EPI field maps (REQUIRED: PhaseEncodingDirection)
+ return "epi"
+ case _:
+ return None
+
+
+def get_fmap_metadata_at_build_time(sub_dict, orig_key, input_creds_path, dl_dir):
+ """Extract fieldmap metadata during workflow build time.
+
+ Parameters
+ ----------
+ sub_dict : dict
+ Subject dictionary containing fieldmap information
+ orig_key : str
+ Original fieldmap key name
+ input_creds_path : str
+ Path to AWS credentials
+ dl_dir : str
+ Download directory path
+
+ Returns
+ -------
+ dict
+ Dictionary containing fieldmap metadata, or None if unavailable
+ """
+ import json
+ import os
+
+ try:
+ # Check if scan_parameters exists for this fieldmap
+ if orig_key not in sub_dict["fmap"]:
+ return None
+
+ if "scan_parameters" not in sub_dict["fmap"][orig_key]:
+ return None
+
+ scan_params_path = sub_dict["fmap"][orig_key]["scan_parameters"]
+
+ # Handle dictionary metadata (direct dict)
+ if isinstance(scan_params_path, dict):
+ return scan_params_path
+
+ # Handle file path metadata
+ if isinstance(scan_params_path, str):
+ local_path = scan_params_path
+
+ # Handle S3 paths
+ if scan_params_path.startswith("s3://"):
+ try:
+ local_path = check_for_s3(
+ scan_params_path, input_creds_path, dl_dir
+ )
+ except Exception:
+ return None
+
+ # Load JSON file
+ if local_path.endswith(".json") and os.path.exists(local_path):
+ with open(local_path, "r", encoding="utf-8") as f:
+ return json.load(f)
+
+    except Exception:  # broad by design: any load failure means metadata is unavailable
+ pass
+
+ return None
+
+
+class FmapBuildInfo(TypedDict):
+ """Fieldmap metadata."""
+
+ fmap_type: Optional[str]
+ needs_echo_times: bool
+ needs_phasediff_processing: bool
+ is_epi: bool
+
+
+@Function.sig_imports(
+ ["from typing import Optional", "from CPAC.utils.utils import FmapBuildInfo"]
+)
+def get_fmap_build_info(metadata_dict: Optional[dict]) -> FmapBuildInfo:
+ """Determine fieldmap processing requirements at build time.
+
+ Parameters
+ ----------
+ metadata_dict
+ Fieldmap metadata dictionary
+
+ Raises
+ ------
+ ValueError
+ If metadata_dict is None or if fieldmap type cannot be determined
+ """
+ from CPAC.utils.utils import get_fmap_type
+
+ if not metadata_dict:
+ raise ValueError(
+ "Fieldmap metadata dictionary is required but was None. "
+ "Cannot determine fieldmap processing requirements without metadata."
+ )
+
+ fmap_type = get_fmap_type(metadata_dict)
+
+ if fmap_type is None:
+ msg = (
+ f"Could not determine fieldmap type from metadata: {metadata_dict}. "
+ "Metadata must contain required BIDS fields for fieldmap type detection."
+ )
+ raise ValueError(msg)
+
+ build_info = {
+ "fmap_type": fmap_type,
+ "needs_echo_times": False,
+ "needs_phasediff_processing": False,
+ "is_epi": False,
+ }
+
+ match fmap_type:
+ case "phase":
+ build_info["needs_echo_times"] = True
+ build_info["needs_phasediff_processing"] = True
+
+ case "phasediff":
+ build_info["needs_echo_times"] = True
+ build_info["needs_phasediff_processing"] = True
+
+ case "epi":
+ build_info["needs_echo_times"] = True
+ build_info["is_epi"] = True
+
+ case "fieldmap":
+ build_info["needs_phasediff_processing"] = True
+
+ case _:
+ raise ValueError(
+ f"Unsupported fieldmap type '{fmap_type}'. "
+ "Supported types are: 'phase', 'phasediff', 'epi', 'fieldmap'."
+ )
+
+ return build_info
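
End to end, the two fieldmap helpers compose as the tests above exercise; a minimal phasediff example:

    from CPAC.utils.utils import get_fmap_build_info, get_fmap_type

    metadata = {"EchoTime1": 0.006, "EchoTime2": 0.007}
    assert get_fmap_type(metadata) == "phasediff"

    info = get_fmap_build_info(metadata)
    assert info["needs_phasediff_processing"] and not info["is_epi"]
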
diff --git a/CPAC/vmhc/tests/test_vmhc.py b/CPAC/vmhc/tests/test_vmhc.py
index 2471a9b02c..e66d3cd782 100644
--- a/CPAC/vmhc/tests/test_vmhc.py
+++ b/CPAC/vmhc/tests/test_vmhc.py
@@ -14,7 +14,6 @@
# You should have received a copy of the GNU Lesser General Public
# License along with C-PAC. If not, see <https://www.gnu.org/licenses/>.
-from logging import basicConfig, INFO
import os
import pytest
@@ -25,7 +24,6 @@
from CPAC.vmhc.vmhc import vmhc as create_vmhc
logger = getLogger("CPAC.utils.tests")
-basicConfig(format="%(message)s", level=INFO)
@pytest.mark.skip(reason="test needs refactoring")
diff --git a/CPAC/vmhc/vmhc.py b/CPAC/vmhc/vmhc.py
index 3c547a8e2f..ddb2f57c60 100644
--- a/CPAC/vmhc/vmhc.py
+++ b/CPAC/vmhc/vmhc.py
@@ -1,3 +1,21 @@
+# Copyright (C) 2012-2025 C-PAC Developers
+
+# This file is part of C-PAC.
+
+# C-PAC is free software: you can redistribute it and/or modify it under
+# the terms of the GNU Lesser General Public License as published by the
+# Free Software Foundation, either version 3 of the License, or (at your
+# option) any later version.
+
+# C-PAC is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
+# License for more details.
+
+# You should have received a copy of the GNU Lesser General Public
+# License along with C-PAC. If not, see <https://www.gnu.org/licenses/>.
+"""Voxel-Mirrored Homotopic Connectivity."""
+
from nipype.interfaces import fsl
from nipype.interfaces.afni import preprocess
@@ -5,7 +23,6 @@
from CPAC.pipeline import nipype_pipeline_engine as pe
from CPAC.pipeline.nodeblock import nodeblock
from CPAC.registration.registration import apply_transform
-from CPAC.utils.utils import check_prov_for_regtool
from CPAC.vmhc import *
from .utils import *
@@ -60,8 +77,7 @@ def smooth_func_vmhc(wf, cfg, strat_pool, pipe_num, opt=None):
outputs=["space-symtemplate_desc-sm_bold"],
)
def warp_timeseries_to_sym_template(wf, cfg, strat_pool, pipe_num, opt=None):
- xfm_prov = strat_pool.get_cpac_provenance("from-bold_to-symtemplate_mode-image_xfm")
- reg_tool = check_prov_for_regtool(xfm_prov)
+ reg_tool = strat_pool.reg_tool("from-bold_to-symtemplate_mode-image_xfm")
num_cpus = cfg.pipeline_setup["system_config"]["max_cores_per_participant"]
diff --git a/Dockerfile b/Dockerfile
index 838d8dcc4b..e41bd6fc73 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -15,14 +15,14 @@
# You should have received a copy of the GNU Lesser General Public
# License along with C-PAC. If not, see <https://www.gnu.org/licenses/>.
FROM ghcr.io/fcp-indi/c-pac/stage-base:standard-v1.8.8.dev1
-LABEL org.opencontainers.image.description "Full C-PAC image"
-LABEL org.opencontainers.image.source https://github.com/FCP-INDI/C-PAC
+LABEL org.opencontainers.image.description="Full C-PAC image"
+LABEL org.opencontainers.image.source=https://github.com/FCP-INDI/C-PAC
USER root
# install C-PAC
COPY dev/circleci_data/pipe-test_ci.yml /cpac_resources/pipe-test_ci.yml
COPY . /code
-RUN pip cache purge && pip install -e "/code[graphviz]"
+RUN pip cache purge && pip install backports.tarfile && pip install -e "/code[graphviz]"
# set up runscript
COPY dev/docker_data /code/docker_data
RUN rm -Rf /code/docker_data/checksum && \
@@ -45,7 +45,8 @@ RUN rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/* /root/.cache/* \
&& chmod 777 $(ls / | grep -v sys | grep -v proc)
ENV PYTHONUSERBASE=/home/c-pac_user/.local
ENV PATH=$PATH:/home/c-pac_user/.local/bin \
- PYTHONPATH=$PYTHONPATH:$PYTHONUSERBASE/lib/python3.10/site-packages
+ PYTHONPATH=$PYTHONPATH:$PYTHONUSERBASE/lib/python3.10/site-packages \
+ _SHELL=/bin/bash
# set user
WORKDIR /home/c-pac_user
diff --git a/README.md b/README.md
index 137bc57972..6bc400be3e 100644
--- a/README.md
+++ b/README.md
@@ -9,7 +9,8 @@ C-PAC is distributed in the hope that it will be useful, but WITHOUT ANY WARRANT
You should have received a copy of the GNU Lesser General Public License along with C-PAC. If not, see <https://www.gnu.org/licenses/>. -->
C-PAC: Configurable Pipeline for the Analysis of Connectomes
============================================================
-[](https://doi.org/10.1101/2021.12.01.470790) [](https://doi.org/10.5281/zenodo.164638)
+
+[](https://doi.org/10.1101/2021.12.01.470790) [](https://doi.org/10.5281/zenodo.164638) [](https://results.pre-commit.ci/latest/github/FCP-INDI/C-PAC/main) [](https://codecov.io/github/FCP-INDI/C-PAC)
[](./COPYING.LESSER)
@@ -17,6 +18,9 @@ A configurable, open-source, Nipype-based, automated processing pipeline for res
Designed for use by both novice users and experts, C-PAC brings the power, flexibility and elegance
of Nipype to users in a plug-and-play fashion; no programming required.
+> [!WARNING]
+> C-PAC entered maintenance mode in version 1.8.8. See [SUPPORT.md](./SUPPORT.md).
+
Website
-------
diff --git a/SUPPORT.md b/SUPPORT.md
new file mode 100644
index 0000000000..16eb8b642f
--- /dev/null
+++ b/SUPPORT.md
@@ -0,0 +1,13 @@
+Support Policy
+==============
+
+As of v1.8.8, C-PAC is in maintenance mode. With the 2.0.0 release, we will begin strict adherence to Semantic Versioning.
+
+While in maintenance mode, we will continue to publish new releases, but FCP-INDI will no longer develop new features.
+Community contributions will still be reviewed, and those that pass review will be released. Responsibility for these reviews is defined in [.github/CODEOWNERS](./.github/CODEOWNERS).
+
+User support will continue at [Neurostars](https://neurostars.org/tag/cpac), though responses may be slower.
+
+Major bug fixes will continue to be addressed by [**@FCP-INDI/maintenance**](https://github.com/orgs/FCP-INDI/teams/maintenance). Minor bugs will be documented and left to the community to contribute fixes and workarounds.
+
+Security releases will continue to be published by [**@FCP-INDI/DevOps**](https://github.com/orgs/FCP-INDI/teams/DevOps).
diff --git a/dev/circleci_data/conftest.py b/dev/circleci_data/conftest.py
new file mode 100644
index 0000000000..4966b986c5
--- /dev/null
+++ b/dev/circleci_data/conftest.py
@@ -0,0 +1,19 @@
+# Copyright (C) 2025 C-PAC Developers
+
+# This file is part of C-PAC.
+
+# C-PAC is free software: you can redistribute it and/or modify it under
+# the terms of the GNU Lesser General Public License as published by the
+# Free Software Foundation, either version 3 of the License, or (at your
+# option) any later version.
+
+# C-PAC is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
+# License for more details.
+
+# You should have received a copy of the GNU Lesser General Public
+# License along with C-PAC. If not, see <https://www.gnu.org/licenses/>.
+"""Global fixtures for C-PAC tests."""
+
+from CPAC._global_fixtures import * # noqa: F403
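
pytest only collects fixtures from installed plugins and from ``conftest.py`` files on the collection path, which is why this one-line module exists: the star import re-exports C-PAC's shared fixtures so tests under dev/circleci_data can request them by name. A minimal single-module illustration of the request-by-name mechanism (the names here are illustrative, not from C-PAC):

import pytest

@pytest.fixture(scope="session")
def answer() -> int:
    # session scope: created once and shared by every test that requests it
    return 42

def test_answer(answer: int) -> None:
    assert answer == 42
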
diff --git a/dev/circleci_data/data_settings_bids_examples_ds051_default_BIDS.yml b/dev/circleci_data/data_settings_bids_examples_ds051_default_BIDS.yml
index 5449692350..c196250ac8 100644
--- a/dev/circleci_data/data_settings_bids_examples_ds051_default_BIDS.yml
+++ b/dev/circleci_data/data_settings_bids_examples_ds051_default_BIDS.yml
@@ -15,7 +15,7 @@ dataFormat: BIDS
# BIDS Data Format only.
#
# This should be the path to the overarching directory containing the entire dataset.
-bidsBaseDir: ./bids-examples/ds051
+bidsBaseDir: ./ds051
# File Path Template for Anatomical Files
@@ -49,7 +49,7 @@ awsCredentialsFile: None
# Directory where CPAC should place data configuration files.
-outputSubjectListLocation: ./dev/circleci_data
+outputSubjectListLocation: /code/dev/circleci_data
# A label to be appended to the generated participant list files.
diff --git a/dev/circleci_data/requirements.txt b/dev/circleci_data/requirements.txt
index b59c3413be..fdd988a669 100644
--- a/dev/circleci_data/requirements.txt
+++ b/dev/circleci_data/requirements.txt
@@ -1,4 +1,4 @@
-coverage
+coverage >= 7.10.1
GitPython
pytest
pytest_bdd
diff --git a/dev/circleci_data/test_external_utils.py b/dev/circleci_data/test_external_utils.py
index f516b0c903..c55e264c8b 100644
--- a/dev/circleci_data/test_external_utils.py
+++ b/dev/circleci_data/test_external_utils.py
@@ -31,8 +31,6 @@
from CPAC.__main__ import utils as CPAC_main_utils # noqa: E402
-# pylint: disable=wrong-import-position
-
def _click_backport(command, key):
"""Switch back to underscores for older versions of click."""
@@ -93,18 +91,11 @@ def test_build_data_config(caplog, cli_runner, multiword_connector):
_delete_test_yaml(test_yaml)
-def test_new_settings_template(caplog, cli_runner):
+def test_new_settings_template(bids_examples: Path, caplog, cli_runner):
"""Test CLI ``utils new-settings-template``."""
caplog.set_level(INFO)
- os.chdir(CPAC_DIR)
-
- example_dir = os.path.join(CPAC_DIR, "bids-examples")
- if not os.path.exists(example_dir):
- from git import Repo
-
- Repo.clone_from(
- "https://github.com/bids-standard/bids-examples.git", example_dir
- )
+ assert bids_examples.exists()
+ os.chdir(bids_examples)
result = cli_runner.invoke(
CPAC_main_utils.commands[
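
The inline clone-if-missing logic removed above moves behind a ``bids_examples`` fixture. A hedged sketch of what such a session-scoped fixture could look like (the real one lives in ``CPAC._global_fixtures``, which this diff does not show; GitPython is already in the test requirements):

from pathlib import Path

import pytest

@pytest.fixture(scope="session")
def bids_examples(tmp_path_factory: pytest.TempPathFactory) -> Path:
    """Clone bids-examples once per test session and reuse it."""
    from git import Repo

    example_dir = tmp_path_factory.getbasetemp() / "bids-examples"
    if not example_dir.exists():
        Repo.clone_from(
            "https://github.com/bids-standard/bids-examples.git",
            str(example_dir),
        )
    return example_dir
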
diff --git a/dev/circleci_data/test_in_image.sh b/dev/circleci_data/test_in_image.sh
index b62de84994..d03b6e8015 100755
--- a/dev/circleci_data/test_in_image.sh
+++ b/dev/circleci_data/test_in_image.sh
@@ -1,5 +1,8 @@
export PATH=$PATH:/home/$(whoami)/.local/bin
+# don't force SSH for git clones in the testing image
+git config --global --unset url.ssh://git@github.com.insteadof
+
# install testing requirements
pip install -r /code/dev/circleci_data/requirements.txt
diff --git a/dev/docker_data/required_afni_pkgs.txt b/dev/docker_data/required_afni_pkgs.txt
index 4aa745c906..acd32981e9 100644
--- a/dev/docker_data/required_afni_pkgs.txt
+++ b/dev/docker_data/required_afni_pkgs.txt
@@ -30,6 +30,7 @@ linux_openmp_64/3dTproject
linux_openmp_64/3dTshift
linux_openmp_64/3dTstat
linux_openmp_64/3dvolreg
+linux_openmp_64/3dWarp
linux_openmp_64/afni
linux_openmp_64/libcoxplot.a
linux_openmp_64/libcoxplot.so
diff --git a/dev/docker_data/unpinned_requirements.txt b/dev/docker_data/unpinned_requirements.txt
index 186fee9168..d4db5ce5bb 100644
--- a/dev/docker_data/unpinned_requirements.txt
+++ b/dev/docker_data/unpinned_requirements.txt
@@ -1,3 +1,8 @@
+# Copyright (C) 2023-2025 C-PAC Developers
+# This file is part of C-PAC.
+# C-PAC is free software: you can redistribute it and/or modify it under the terms of the GNU Lesser General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version.
+# C-PAC is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more details.
+# You should have received a copy of the GNU Lesser General Public License along with C-PAC. If not, see <https://www.gnu.org/licenses/>.
boto3
ciftify
click
@@ -23,7 +28,6 @@ pybids
PyPEER @ https://github.com/shnizzedy/PyPEER/archive/6965d2b2bea0fef824e885fec33a8e0e6bd50a97.zip
python-dateutil
PyYAML
-requests
scikit-learn
scipy
sdcflows
diff --git a/pyproject.toml b/pyproject.toml
index 13181c224b..84ffa2ad8f 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -16,5 +16,24 @@
# License along with C-PAC. If not, see <https://www.gnu.org/licenses/>.
[build-system]
-requires = ["nipype==1.8.6", "numpy==1.25.1", "pyyaml==6.0", "setuptools<60.0", "voluptuous==0.13.1"]
+requires = ["nipype==1.8.6", "numpy==1.25.1", "pyyaml==6.0", "setuptools<60.0", "voluptuous==0.15.2"]
build-backend = "setuptools.build_meta"
+
+[tool.coverage.paths]
+source = [
+ "/code",
+ "/home/circleci/project"
+]
+
+[tool.coverage.report]
+ignore_errors = true
+include_namespace_packages = true
+skip_empty = true
+
+[tool.coverage.run]
+branch = true
+relative_files = true
+source = [
+ "CPAC",
+ "dev/circleci_data"
+]
diff --git a/requirements.txt b/requirements.txt
index 58afacfa6d..94f124b98b 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,4 +1,4 @@
-# Copyright (C) 2018-2023 C-PAC Developers
+# Copyright (C) 2018-2025 C-PAC Developers
# This file is part of C-PAC.
# C-PAC is free software: you can redistribute it and/or modify it under the terms of the GNU Lesser General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version.
# C-PAC is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more details.
@@ -30,22 +30,22 @@ pybids==0.15.6
PyPEER @ git+https://git@github.com/ChildMindInstitute/PyPEER.git@6965d2b2bea0fef824e885fec33a8e0e6bd50a97
python-dateutil==2.8.2
PyYAML==6.0
-requests==2.32.0
+requests==2.32.3
scikit-learn==1.5.0
scipy==1.11.1
sdcflows==2.4.0
semver==3.0.1
traits==6.3.2
-voluptuous==0.13.1
+voluptuous==0.15.2
# the below are pinned specifically to match what the FSL installer installs
botocore==1.31.4
charset-normalizer==3.1.0
-cryptography==42.0.3
+cryptography==44.0.1
h5py==3.8.0
importlib-metadata==6.8.0
lxml==4.9.2
pip==23.3
-setuptools==70.0.0
-urllib3==1.26.18
+setuptools==78.1.1
+urllib3==1.26.19
wheel==0.40.0
zipp==3.19.1
diff --git a/setup.py b/setup.py
index 17919395d2..f22a744e2d 100755
--- a/setup.py
+++ b/setup.py
@@ -1,4 +1,4 @@
-# Copyright (C) 2022-2024 C-PAC Developers
+# Copyright (C) 2022-2025 C-PAC Developers
# This file is part of C-PAC.
@@ -84,7 +84,12 @@ def main(**extra_args):
extras_require={"graphviz": ["pygraphviz"]},
configuration=configuration,
scripts=glob("scripts/*"),
- entry_points={"console_scripts": ["cpac = CPAC.__main__:main"]},
+ entry_points={
+ "console_scripts": [
+ "cpac = CPAC.__main__:main",
+ "resource_inventory = CPAC.pipeline.resource_inventory:main",
+ ]
+ },
package_data={
"CPAC": [
"test_data/*",
diff --git a/variant-lite.Dockerfile b/variant-lite.Dockerfile
index b58801b519..6f350c4f18 100644
--- a/variant-lite.Dockerfile
+++ b/variant-lite.Dockerfile
@@ -15,15 +15,15 @@
# You should have received a copy of the GNU Lesser General Public
# License along with C-PAC. If not, see <https://www.gnu.org/licenses/>.
FROM ghcr.io/fcp-indi/c-pac/stage-base:lite-v1.8.8.dev1
-LABEL org.opencontainers.image.description "Full C-PAC image without FreeSurfer"
-LABEL org.opencontainers.image.source https://github.com/FCP-INDI/C-PAC
+LABEL org.opencontainers.image.description="Full C-PAC image without FreeSurfer"
+LABEL org.opencontainers.image.source=https://github.com/FCP-INDI/C-PAC
USER root
# install C-PAC
COPY dev/circleci_data/pipe-test_ci.yml /cpac_resources/pipe-test_ci.yml
COPY . /code
COPY --from=ghcr.io/fcp-indi/c-pac_templates:latest /cpac_templates /cpac_templates
-RUN pip cache purge && pip install -e "/code[graphviz]"
+RUN pip cache purge && pip install backports.tarfile && pip install -e "/code[graphviz]"
# set up runscript
COPY dev/docker_data /code/docker_data
RUN rm -Rf /code/docker_data/checksum && \
@@ -46,7 +46,8 @@ RUN rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/* /root/.cache/* \
&& chmod 777 $(ls / | grep -v sys | grep -v proc)
ENV PYTHONUSERBASE=/home/c-pac_user/.local
ENV PATH=$PATH:/home/c-pac_user/.local/bin \
- PYTHONPATH=$PYTHONPATH:$PYTHONUSERBASE/lib/python3.10/site-packages
+ PYTHONPATH=$PYTHONPATH:$PYTHONUSERBASE/lib/python3.10/site-packages \
+ _SHELL=/bin/bash
# set user
WORKDIR /home/c-pac_user