From 99a0ea87f3e5e098fd5f8018521ec0f091978a55 Mon Sep 17 00:00:00 2001 From: Jon Clucas Date: Mon, 26 Sep 2022 16:13:42 -0400 Subject: [PATCH 01/72] :construction: WIP :goal_net: Add registration guardrail --- CPAC/registration/guardrails.py | 1 + 1 file changed, 1 insertion(+) create mode 100644 CPAC/registration/guardrails.py diff --git a/CPAC/registration/guardrails.py b/CPAC/registration/guardrails.py new file mode 100644 index 0000000000..01b4a70532 --- /dev/null +++ b/CPAC/registration/guardrails.py @@ -0,0 +1 @@ +"""Guardrails to protect against bad registrations""" From 8cc3f5aa638e9d2ece84def46a60f7fe76ad432f Mon Sep 17 00:00:00 2001 From: Jon Clucas Date: Thu, 29 Sep 2022 11:35:33 -0400 Subject: [PATCH 02/72] :recycle: Modularize mask parameter checking --- CPAC/qc/qcmetrics.py | 106 ++++++++++++++++++++++++++++++------------- 1 file changed, 75 insertions(+), 31 deletions(-) diff --git a/CPAC/qc/qcmetrics.py b/CPAC/qc/qcmetrics.py index 6db977c495..d25adf6c9f 100644 --- a/CPAC/qc/qcmetrics.py +++ b/CPAC/qc/qcmetrics.py @@ -7,16 +7,29 @@ import numpy as np -def regisQ(bold2t1w_mask, t1w_mask, bold2template_mask, template_mask): - reg_qc = {'coregDice': [dc(bold2t1w_mask, t1w_mask)], - 'coregJaccard': [jc(bold2t1w_mask, t1w_mask)], - 'coregCrossCorr': [crosscorr(bold2t1w_mask, t1w_mask)], - 'coregCoverage': [coverage(bold2t1w_mask, t1w_mask)], - 'normDice': [dc(bold2template_mask, template_mask)], - 'normJaccard': [jc(bold2template_mask, template_mask)], - 'normCrossCorr': [crosscorr(bold2template_mask, template_mask)], - 'normCoverage': [coverage(bold2template_mask, template_mask)]} - return reg_qc +def coverage(input1, input2): + """Estimate the coverage between two masks.""" + input1 = nb.load(input1).get_fdata() + input2 = nb.load(input2).get_fdata() + input1 = np.atleast_1d(input1.astype(np.bool)) + input2 = np.atleast_1d(input2.astype(np.bool)) + intsec = np.count_nonzero(input1 & input2) + if np.sum(input1) > np.sum(input2): + smallv = np.sum(input2) + else: + smallv = np.sum(input1) + cov = float(intsec)/float(smallv) + return cov + + +def crosscorr(input1, input2): + r"""cross correlation: compute cross correction bewteen input masks""" + input1 = nb.load(input1).get_fdata() + input2 = nb.load(input2).get_fdata() + input1 = np.atleast_1d(input1.astype(np.bool)).flatten() + input2 = np.atleast_1d(input2.astype(np.bool)).flatten() + cc = np.corrcoef(input1, input2)[0][1] + return cc def dc(input1, input2): @@ -106,26 +119,57 @@ def jc(input1, input2): return jc -def crosscorr(input1, input2): - r"""cross correlation: compute cross correction bewteen input masks""" - input1 = nb.load(input1).get_fdata() - input2 = nb.load(input2).get_fdata() - input1 = np.atleast_1d(input1.astype(np.bool)).flatten() - input2 = np.atleast_1d(input2.astype(np.bool)).flatten() - cc = np.corrcoef(input1, input2)[0][1] - return cc +def _prefix_regqc_keys(qc_dict: dict, prefix: str) -> str: + """Prepend string to each key in a qc dict + Parameters + ---------- + qc_dict : dict + output of ``qc_masks`` -def coverage(input1, input2): - """Estimate the coverage between two masks.""" - input1 = nb.load(input1).get_fdata() - input2 = nb.load(input2).get_fdata() - input1 = np.atleast_1d(input1.astype(np.bool)) - input2 = np.atleast_1d(input2.astype(np.bool)) - intsec = np.count_nonzero(input1 & input2) - if np.sum(input1) > np.sum(input2): - smallv = np.sum(input2) - else: - smallv = np.sum(input1) - cov = float(intsec)/float(smallv) - return cov + prefix : str + string to prepend + + Returns + ------- 
+ dict + """ + return {f'{prefix}{_key}': _value for _key, _value in qc_dict.items()} + + +def qc_masks(registered_mask: str, native_mask: str) -> dict: + """Return QC measures for coregistration + + Parameters + ---------- + registered_mask : str + path to registered mask + + native_mask : str + path to native-space mask + + Returns + ------- + dict + """ + return {'Dice': [dc(registered_mask, native_mask)], + 'Jaccard': [jc(registered_mask, native_mask)], + 'CrossCorr': [crosscorr(registered_mask, native_mask)], + 'Coverage': [coverage(registered_mask, native_mask)]} + + +def regisQ(bold2t1w_mask: str, t1w_mask: str, bold2template_mask: str, + template_mask: str) -> dict: + """Collect coregistration QC measures + + Parameters + ---------- + bold2t1w_mask, t1w_mask, bold2template_mask, template_mask : str + + Returns + ------- + dict + """ + return {**_prefix_regqc_keys(qc_masks(bold2t1w_mask, t1w_mask), 'coreg'), + **_prefix_regqc_keys(qc_masks(bold2template_mask, template_mask), + 'norm')} From fe7413bbf64f9fd148d61416a355718e08ba40fc Mon Sep 17 00:00:00 2001 From: Jon Clucas Date: Thu, 29 Sep 2022 12:34:08 -0400 Subject: [PATCH 03/72] :construction: WIP :goal_net: Create registration guardrail --- CPAC/qc/qcmetrics.py | 57 ++++++++++++++++++++++++++ CPAC/qc/xcp.py | 16 ++++++++ CPAC/registration/guardrails.py | 67 +++++++++++++++++++++++++++++++ CPAC/registration/registration.py | 25 ++++++------ 4 files changed, 153 insertions(+), 12 deletions(-) diff --git a/CPAC/qc/qcmetrics.py b/CPAC/qc/qcmetrics.py index d25adf6c9f..b45430020c 100644 --- a/CPAC/qc/qcmetrics.py +++ b/CPAC/qc/qcmetrics.py @@ -1,12 +1,61 @@ +# Modifications: Copyright (C) 2022 C-PAC Developers + +# This file is part of C-PAC. + +# C-PAC is free software: you can redistribute it and/or modify it under +# the terms of the GNU Lesser General Public License as published by the +# Free Software Foundation, either version 3 of the License, or (at your +# option) any later version. + +# C-PAC is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public +# License for more details. + +# You should have received a copy of the GNU Lesser General Public +# License along with C-PAC. If not, see . + +# Original code: BSD 3-Clause License + +# Copyright (c) 2020, Lifespan Informatics and Neuroimaging Center + +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: + +# 1. Redistributions of source code must retain the above copyright notice, +# this list of conditions and the following disclaimer. + +# 2. Redistributions in binary form must reproduce the above copyright notice, +# this list of conditions and the following disclaimer in the documentation +# and/or other materials provided with the distribution. + +# 3. Neither the name of the copyright holder nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. + +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +# ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE +# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR +# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF +# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS +# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN +# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) +# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. """QC metrics from XCP-D v0.0.9 Ref: https://github.com/PennLINC/xcp_d/tree/0.0.9 """ +# LGPL-3.0-or-later: Module docstring and lint exclusions # pylint: disable=invalid-name, redefined-outer-name +# BSD-3-Clause: imports and unspecified sections import nibabel as nb import numpy as np +# BSD-3-Clause def coverage(input1, input2): """Estimate the coverage between two masks.""" input1 = nb.load(input1).get_fdata() @@ -22,6 +71,7 @@ def coverage(input1, input2): return cov +# BSD-3-Clause def crosscorr(input1, input2): r"""cross correlation: compute cross correction bewteen input masks""" input1 = nb.load(input1).get_fdata() @@ -32,6 +82,7 @@ def crosscorr(input1, input2): return cc +# BSD-3-Clause def dc(input1, input2): r""" Dice coefficient @@ -84,6 +135,7 @@ def dc(input1, input2): return dc +# BSD-3-Clause def jc(input1, input2): r""" Jaccard coefficient @@ -119,6 +171,7 @@ def jc(input1, input2): return jc +# LGPL-3.0-or-later def _prefix_regqc_keys(qc_dict: dict, prefix: str) -> str: """Prepend string to each key in a qc dict @@ -137,6 +190,8 @@ def _prefix_regqc_keys(qc_dict: dict, prefix: str) -> str: return {f'{prefix}{_key}': _value for _key, _value in qc_dict.items()} +# BSD-3-Clause: logic +# LGPL-3.0-or-later: docstring and refactored function def qc_masks(registered_mask: str, native_mask: str) -> dict: """Return QC measures for coregistration @@ -158,6 +213,8 @@ def qc_masks(registered_mask: str, native_mask: str) -> dict: 'Coverage': [coverage(registered_mask, native_mask)]} +# BSD-3-Clause: name and signature +# LGPL-3.0-or-later: docstring and refactored function def regisQ(bold2t1w_mask: str, t1w_mask: str, bold2template_mask: str, template_mask: str) -> dict: """Collect coregistration QC measures diff --git a/CPAC/qc/xcp.py b/CPAC/qc/xcp.py index fbb672c9f5..05429fcce8 100644 --- a/CPAC/qc/xcp.py +++ b/CPAC/qc/xcp.py @@ -1,3 +1,19 @@ +# Copyright (C) 2021-2022 C-PAC Developers + +# This file is part of C-PAC. + +# C-PAC is free software: you can redistribute it and/or modify it under +# the terms of the GNU Lesser General Public License as published by the +# Free Software Foundation, either version 3 of the License, or (at your +# option) any later version. + +# C-PAC is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public +# License for more details. + +# You should have received a copy of the GNU Lesser General Public +# License along with C-PAC. If not, see . """ .. seealso:: diff --git a/CPAC/registration/guardrails.py b/CPAC/registration/guardrails.py index 01b4a70532..b0a017a1c0 100644 --- a/CPAC/registration/guardrails.py +++ b/CPAC/registration/guardrails.py @@ -1 +1,68 @@ +# Copyright (C) 2022 C-PAC Developers + +# This file is part of C-PAC. 
+ +# C-PAC is free software: you can redistribute it and/or modify it under +# the terms of the GNU Lesser General Public License as published by the +# Free Software Foundation, either version 3 of the License, or (at your +# option) any later version. + +# C-PAC is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public +# License for more details. + +# You should have received a copy of the GNU Lesser General Public +# License along with C-PAC. If not, see . """Guardrails to protect against bad registrations""" +from CPAC.qc.qcmetrics import qc_masks + + +class BadRegistrationError(ValueError): + """Exception for when a QC measure for a registration falls below a + specified threshold""" + def __init__(self, *args, metric=None, value=None, threshold=None, + **kwargs): + """ + Parameters + ---------- + metric : str + QC metric + + value : float + calculated QC value + + threshold : float + specified threshold + """ + msg = "Registration failed quality control" + if all(arg is not None for arg in (metric, value, threshold)): + msg += f" ({metric}: {value} < {threshold})" + msg += "." + super().__init__(msg, *args, **kwargs) + + +def registration_guardrail(registered_mask: str, target_mask: str, + thresholds: dict) -> None: + """Check QC metrics post-registration and throw an exception if + metrics are below given thresholds + + Parameters + ---------- + registerd_mask, target_mask : str + path to mask + + thresholds : RegistrationThreshold + thresholds to check against + + Returns + ------- + None + """ + qc_metrics = qc_masks(registered_mask, target_mask) + for metric, threshold in thresholds.items(): + if threshold is not None: + value = qc_metrics.get(metric) + if value < threshold: + raise BadRegistrationError(metric=metric, value=value, + threshold=threshold) diff --git a/CPAC/registration/registration.py b/CPAC/registration/registration.py index 7902673a6d..a08039ecbe 100644 --- a/CPAC/registration/registration.py +++ b/CPAC/registration/registration.py @@ -1,19 +1,20 @@ -"""Copyright (C) 2012-2022 C-PAC Developers +# Copyright (C) 2012-2022 C-PAC Developers -This file is part of C-PAC. +# This file is part of C-PAC. -C-PAC is free software: you can redistribute it and/or modify it under -the terms of the GNU Lesser General Public License as published by the -Free Software Foundation, either version 3 of the License, or (at your -option) any later version. +# C-PAC is free software: you can redistribute it and/or modify it under +# the terms of the GNU Lesser General Public License as published by the +# Free Software Foundation, either version 3 of the License, or (at your +# option) any later version. -C-PAC is distributed in the hope that it will be useful, but WITHOUT -ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or -FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public -License for more details. +# C-PAC is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public +# License for more details. -You should have received a copy of the GNU Lesser General Public -License along with C-PAC. If not, see .""" +# You should have received a copy of the GNU Lesser General Public +# License along with C-PAC. If not, see . 
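
The four measures that qc_masks collects reduce to a few lines of NumPy once both masks are loaded and binarized. The standalone sketch below (an illustration, not part of the patch) mirrors the formulas in CPAC/qc/qcmetrics.py on toy arrays, omits the nibabel loading step, and shows the per-step key prefixing that regisQ applies:

    import numpy as np

    # toy stand-ins for two binarized masks on the same grid
    registered = np.array([1, 1, 1, 0, 0, 1, 0, 0], dtype=bool)
    native = np.array([1, 1, 0, 0, 0, 1, 1, 0], dtype=bool)

    intersection = np.count_nonzero(registered & native)
    dice = 2.0 * intersection / (registered.sum() + native.sum())      # dc()
    jaccard = intersection / np.count_nonzero(registered | native)     # jc()
    coverage = intersection / min(registered.sum(), native.sum())      # coverage()
    crosscorr = np.corrcoef(registered.flatten(),
                            native.flatten())[0][1]                    # crosscorr()

    # regisQ runs qc_masks twice and prefixes the keys per registration
    # step, e.g. coregDice, coregJaccard, ..., normCoverage
    coreg_qc = {f'coreg{key}': [value] for key, value in
                {'Dice': dice, 'Jaccard': jaccard,
                 'CrossCorr': crosscorr, 'Coverage': coverage}.items()}
    print(coreg_qc)
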
+"""Registration functions""" # pylint: disable=too-many-lines,ungrouped-imports,wrong-import-order # TODO: replace Tuple with tuple, Union with |, once Python >= 3.9, 3.10 from typing import Optional, Tuple, Union From 3aea3714b504e21b4f718794cec849300c209ea3 Mon Sep 17 00:00:00 2001 From: Jon Clucas Date: Thu, 29 Sep 2022 12:46:18 -0400 Subject: [PATCH 04/72] :wrench: Add 'quality_thresholds' to 'registration_workflows' in pipeline config --- CPAC/pipeline/schema.py | 30 ++++++++++--------- .../configs/pipeline_config_default.yml | 10 +++++++ 2 files changed, 26 insertions(+), 14 deletions(-) diff --git a/CPAC/pipeline/schema.py b/CPAC/pipeline/schema.py index 6aa1898f96..2732201e8a 100644 --- a/CPAC/pipeline/schema.py +++ b/CPAC/pipeline/schema.py @@ -1,21 +1,20 @@ -'''Validation schema for C-PAC pipeline configurations +# Copyright (C) 2022 C-PAC Developers -Copyright (C) 2022 C-PAC Developers +# This file is part of C-PAC. -This file is part of C-PAC. +# C-PAC is free software: you can redistribute it and/or modify it under +# the terms of the GNU Lesser General Public License as published by the +# Free Software Foundation, either version 3 of the License, or (at your +# option) any later version. -C-PAC is free software: you can redistribute it and/or modify it under -the terms of the GNU Lesser General Public License as published by the -Free Software Foundation, either version 3 of the License, or (at your -option) any later version. +# C-PAC is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public +# License for more details. -C-PAC is distributed in the hope that it will be useful, but WITHOUT -ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or -FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public -License for more details. - -You should have received a copy of the GNU Lesser General Public -License along with C-PAC. If not, see .''' +# You should have received a copy of the GNU Lesser General Public +# License along with C-PAC. If not, see . 
+"""Validation schema for C-PAC pipeline configurations""" # pylint: disable=too-many-lines from itertools import chain, permutations import numpy as np @@ -592,6 +591,9 @@ def _changes_1_8_0_to_1_8_1(config_dict): }, }, 'registration_workflows': { + 'quality_thresholds': { + metric: Maybe(float) for + metric in ('Dice', 'Jaccard', 'CrossCorr', 'Coverage')}, 'anatomical_registration': { 'run': bool, 'resolution_for_anat': All(str, Match(resolution_regex)), diff --git a/CPAC/resources/configs/pipeline_config_default.yml b/CPAC/resources/configs/pipeline_config_default.yml index d149e40919..4dfcba571c 100644 --- a/CPAC/resources/configs/pipeline_config_default.yml +++ b/CPAC/resources/configs/pipeline_config_default.yml @@ -586,6 +586,16 @@ segmentation: registration_workflows: + # Minimum QC values to allow a run to complete post-registration + # Set any metric empty (like "Dice:") or to None to disable that guardrail + # Default thresholds adopted from XCP-Engine + # (https://github.com/PennLINC/xcpEngine/blob/397ab6cf/designs/cbf_all.dsn#L66) + quality_thresholds: + Dice: 0.8 + Jaccard: 0.9 + CrossCorr: 0.7 + Coverage: 0.8 + anatomical_registration: run: On From 8fa5c239be2a9facd01c072266a20e44060c4e28 Mon Sep 17 00:00:00 2001 From: Jon Clucas Date: Thu, 29 Sep 2022 13:03:26 -0400 Subject: [PATCH 05/72] :wrench: Set global registration QC thresholds on config load --- CHANGELOG.md | 1 + CPAC/qc/__init__.py | 22 ++++++++++++++++++++-- CPAC/qc/globals.py | 18 ++++++++++++++++++ CPAC/registration/guardrail_thresholds.py | 18 ++++++++++++++++++ CPAC/registration/guardrails.py | 2 +- CPAC/utils/configuration/configuration.py | 5 +++++ 6 files changed, 63 insertions(+), 3 deletions(-) create mode 100644 CPAC/qc/globals.py create mode 100644 CPAC/registration/guardrail_thresholds.py diff --git a/CHANGELOG.md b/CHANGELOG.md index 2bd30ce3f7..2fb0bf1da2 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -18,6 +18,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ### Added - Added the ability to downsample to 10K or 2K resolution for freesurfer runs +- Added ability to set minimum quality measure thresholds to all registration steps - Difference method (``-``) for ``CPAC.utils.configuration.Configuration`` instances ### Changed diff --git a/CPAC/qc/__init__.py b/CPAC/qc/__init__.py index 75ee654fec..1602803627 100644 --- a/CPAC/qc/__init__.py +++ b/CPAC/qc/__init__.py @@ -1,2 +1,20 @@ -from .utils import * -from .qc import * +# Copyright (C) 2013-2022 C-PAC Developers + +# This file is part of C-PAC. + +# C-PAC is free software: you can redistribute it and/or modify it under +# the terms of the GNU Lesser General Public License as published by the +# Free Software Foundation, either version 3 of the License, or (at your +# option) any later version. + +# C-PAC is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public +# License for more details. + +# You should have received a copy of the GNU Lesser General Public +# License along with C-PAC. If not, see . 
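
The new schema entry is small enough to exercise on its own. A stripped-down sketch (the real entry is nested under registration_workflows in the full pipeline schema), assuming only that voluptuous — which schema.py already imports from — is installed:

    from voluptuous import Maybe, Schema

    quality_thresholds = Schema(
        {metric: Maybe(float)
         for metric in ('Dice', 'Jaccard', 'CrossCorr', 'Coverage')})

    # the defaults added to pipeline_config_default.yml validate cleanly
    print(quality_thresholds({'Dice': 0.8, 'Jaccard': 0.9,
                              'CrossCorr': 0.7, 'Coverage': 0.8}))

    # a metric left empty in YAML arrives as None, which Maybe(float)
    # accepts and which the guardrail later treats as "disabled"
    print(quality_thresholds({'Dice': None, 'Jaccard': 0.9,
                              'CrossCorr': 0.7, 'Coverage': 0.8}))
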
+"""Quality control utilities for C-PAC""" +from CPAC.qc.globals import REGISTRATION_GUARDRAIL_THRESHOLDS +from CPAC.qc.qcmetrics import qc_masks +__all__ = ['qc_masks', 'REGISTRATION_GUARDRAIL_THRESHOLDS'] diff --git a/CPAC/qc/globals.py b/CPAC/qc/globals.py new file mode 100644 index 0000000000..5fa3606a85 --- /dev/null +++ b/CPAC/qc/globals.py @@ -0,0 +1,18 @@ +# Copyright (C) 2022 C-PAC Developers + +# This file is part of C-PAC. + +# C-PAC is free software: you can redistribute it and/or modify it under +# the terms of the GNU Lesser General Public License as published by the +# Free Software Foundation, either version 3 of the License, or (at your +# option) any later version. + +# C-PAC is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public +# License for more details. + +# You should have received a copy of the GNU Lesser General Public +# License along with C-PAC. If not, see . +"""Global QC values""" +REGISTRATION_GUARDRAIL_THRESHOLDS = {} diff --git a/CPAC/registration/guardrail_thresholds.py b/CPAC/registration/guardrail_thresholds.py new file mode 100644 index 0000000000..704be8259b --- /dev/null +++ b/CPAC/registration/guardrail_thresholds.py @@ -0,0 +1,18 @@ +# Copyright (C) 2022 C-PAC Developers + +# This file is part of C-PAC. + +# C-PAC is free software: you can redistribute it and/or modify it under +# the terms of the GNU Lesser General Public License as published by the +# Free Software Foundation, either version 3 of the License, or (at your +# option) any later version. + +# C-PAC is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public +# License for more details. + +# You should have received a copy of the GNU Lesser General Public +# License along with C-PAC. If not, see . +"""Global thresholds for guardrails""" + diff --git a/CPAC/registration/guardrails.py b/CPAC/registration/guardrails.py index b0a017a1c0..c8c5af8bdd 100644 --- a/CPAC/registration/guardrails.py +++ b/CPAC/registration/guardrails.py @@ -15,7 +15,7 @@ # You should have received a copy of the GNU Lesser General Public # License along with C-PAC. If not, see . 
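
Rather than threading threshold values through every workflow builder, the thresholds travel through the module-level dict REGISTRATION_GUARDRAIL_THRESHOLDS: qc/globals.py defines it empty, Configuration.__init__ fills it from registration_workflows/quality_thresholds (see the configuration.py hunk below), and the guardrail reads it at run time. A minimal sketch of that pattern, using hypothetical names rather than the C-PAC modules themselves:

    # stand-in for CPAC/qc/globals.py
    THRESHOLDS = {}

    # stand-in for the hook added to Configuration.__init__
    def load_thresholds(config_map):
        THRESHOLDS.update(
            config_map['registration_workflows']['quality_thresholds'])

    # stand-in for the guardrail reading the dict at run time
    def check(qc_metrics):
        for metric, threshold in THRESHOLDS.items():
            if threshold is not None and qc_metrics[metric] < threshold:
                raise ValueError(f'{metric}: {qc_metrics[metric]} < {threshold}')

    load_thresholds({'registration_workflows':
                     {'quality_thresholds': {'Dice': 0.8, 'Coverage': None}}})
    check({'Dice': 0.92, 'Coverage': 0.4})  # passes; Coverage check is disabled

The trade-off of this design is that code reading the dict before a Configuration has been loaded sees an empty mapping, which effectively disables the guardrail.
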
"""Guardrails to protect against bad registrations""" -from CPAC.qc.qcmetrics import qc_masks +from CPAC.qc import qc_masks class BadRegistrationError(ValueError): diff --git a/CPAC/utils/configuration/configuration.py b/CPAC/utils/configuration/configuration.py index 108638cb8d..80c46e89e1 100644 --- a/CPAC/utils/configuration/configuration.py +++ b/CPAC/utils/configuration/configuration.py @@ -25,6 +25,7 @@ import yaml +from CPAC.qc import REGISTRATION_GUARDRAIL_THRESHOLDS from CPAC.utils.utils import load_preconfig from .diff import dct_diff @@ -152,6 +153,10 @@ def __init__(self, config_map=None): # set attribute setattr(self, key, set_from_ENV(config_map[key])) + # set global QC thresholds + REGISTRATION_GUARDRAIL_THRESHOLDS.update(self[ + 'registration_workflows', 'quality_thresholds']) + self.__update_attr() def __str__(self): From b8919285fb13a9410d31f46dc6a8c9ed40103d71 Mon Sep 17 00:00:00 2001 From: Jon Clucas Date: Thu, 29 Sep 2022 13:22:40 -0400 Subject: [PATCH 06/72] :goal_net: Pass registrations through guardrail before continuing --- CPAC/registration/guardrails.py | 28 +++++++++++++++++----------- CPAC/registration/registration.py | 17 ++++++++++++++--- 2 files changed, 31 insertions(+), 14 deletions(-) diff --git a/CPAC/registration/guardrails.py b/CPAC/registration/guardrails.py index c8c5af8bdd..237a7c0c25 100644 --- a/CPAC/registration/guardrails.py +++ b/CPAC/registration/guardrails.py @@ -15,7 +15,7 @@ # You should have received a copy of the GNU Lesser General Public # License along with C-PAC. If not, see . """Guardrails to protect against bad registrations""" -from CPAC.qc import qc_masks +from CPAC.qc import qc_masks, REGISTRATION_GUARDRAIL_THRESHOLDS class BadRegistrationError(ValueError): @@ -42,27 +42,33 @@ def __init__(self, *args, metric=None, value=None, threshold=None, super().__init__(msg, *args, **kwargs) -def registration_guardrail(registered_mask: str, target_mask: str, - thresholds: dict) -> None: +def registration_guardrail(registered: str, target: str) -> str: """Check QC metrics post-registration and throw an exception if - metrics are below given thresholds + metrics are below given thresholds. + + If inputs point to images that are not masks, images will be + binarized before being compared. + + .. seealso:: + + :py:mod:`CPAC.qc.qcmetrics` + Documentation of the :py:mod:`CPAC.qc.qcmetrics` module. 
Parameters ---------- - registerd_mask, target_mask : str + registered, target : str path to mask - thresholds : RegistrationThreshold - thresholds to check against - Returns ------- - None + registered_mask : str + path to mask """ - qc_metrics = qc_masks(registered_mask, target_mask) - for metric, threshold in thresholds.items(): + qc_metrics = qc_masks(registered, target) + for metric, threshold in REGISTRATION_GUARDRAIL_THRESHOLDS.items(): if threshold is not None: value = qc_metrics.get(metric) if value < threshold: raise BadRegistrationError(metric=metric, value=value, threshold=threshold) + return registered diff --git a/CPAC/registration/registration.py b/CPAC/registration/registration.py index a08039ecbe..ca1fe6fe0e 100644 --- a/CPAC/registration/registration.py +++ b/CPAC/registration/registration.py @@ -24,6 +24,7 @@ from CPAC.anat_preproc.lesion_preproc import create_lesion_preproc from CPAC.func_preproc.utils import chunk_ts, split_ts_chunks +from CPAC.registration.guardrails import registration_guardrail from CPAC.registration.utils import seperate_warps_list, \ check_transforms, \ generate_inverse_transform_flags, \ @@ -192,6 +193,14 @@ def apply_transform(wf_name, reg_tool, time_series=False, multi_input=False, # go in as a warpfield file wf.connect(inputNode, 'transform', apply_warp, 'field_file') + # Guardrail: check QC metrics + guardrail = pe.Node(util.Function(input_names=['registered_mask', + 'target_mask'], + output_names=['registered_mask'], + function=registration_guardrail), + name='registration_guardrail') + wf.connect(inputNode, 'reference', guardrail, 'target') + # parallelize the apply warp, if multiple CPUs, and it's a time # series! if int(num_cpus) > 1 and time_series: @@ -232,12 +241,14 @@ def apply_transform(wf_name, reg_tool, time_series=False, multi_input=False, func_concat.inputs.outputtype = 'NIFTI_GZ' wf.connect(apply_warp, 'out_file', func_concat, 'in_files') - - wf.connect(func_concat, 'out_file', outputNode, 'output_image') + wf.connect(func_concat, 'out_file', guardrail, 'registered') else: wf.connect(inputNode, 'input_image', apply_warp, 'in_file') - wf.connect(apply_warp, 'out_file', outputNode, 'output_image') + wf.connect(apply_warp, 'out_file', guardrail, 'registered') + + # Pass output through guardrail before continuing + wf.connect(guardrail, 'registered', outputNode, 'output_image') return wf From 3fb78f03f899757b5e7cb30e48efed5179e24e3b Mon Sep 17 00:00:00 2001 From: Jon Clucas Date: Thu, 29 Sep 2022 16:12:14 -0400 Subject: [PATCH 07/72] :goal_net: Plug in registration guardrails --- CPAC/registration/guardrails.py | 18 +- CPAC/registration/registration.py | 627 ++++++++++++++---------------- 2 files changed, 301 insertions(+), 344 deletions(-) diff --git a/CPAC/registration/guardrails.py b/CPAC/registration/guardrails.py index 237a7c0c25..6d436a1f73 100644 --- a/CPAC/registration/guardrails.py +++ b/CPAC/registration/guardrails.py @@ -15,6 +15,8 @@ # You should have received a copy of the GNU Lesser General Public # License along with C-PAC. If not, see . 
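
From this patch onward, each registration output is threaded through a guardrail node before it reaches the workflow's outputspec, so a failed QC check stops the pipeline instead of silently propagating a bad registration. A minimal nipype sketch of the wiring pattern — guardrail_func here is a stand-in for CPAC.registration.guardrails.registration_guardrail, and the node names are illustrative:

    from nipype.interfaces import fsl
    from nipype.interfaces.utility import Function, IdentityInterface
    from nipype.pipeline import engine as pe

    def guardrail_func(registered, reference):
        """Stand-in: the real function checks qc_masks() against thresholds."""
        return registered

    wf = pe.Workflow(name='guardrailed_flirt')
    inputspec = pe.Node(IdentityInterface(fields=['in_file', 'reference']),
                        name='inputspec')
    flirt = pe.Node(fsl.FLIRT(cost='corratio'), name='linear_reg')
    guardrail = pe.Node(Function(input_names=['registered', 'reference'],
                                 output_names=['registered'],
                                 function=guardrail_func),
                        name='registration_guardrail')
    outputspec = pe.Node(IdentityInterface(fields=['output_brain']),
                         name='outputspec')

    wf.connect(inputspec, 'in_file', flirt, 'in_file')
    wf.connect(inputspec, 'reference', flirt, 'reference')
    wf.connect(inputspec, 'reference', guardrail, 'reference')
    wf.connect(flirt, 'out_file', guardrail, 'registered')
    # downstream consumers connect to the guardrail, not to FLIRT directly
    wf.connect(guardrail, 'registered', outputspec, 'output_brain')
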
"""Guardrails to protect against bad registrations""" +from nipype.interfaces.utility import Function +from CPAC.pipeline.nipype_pipeline_engine import Node from CPAC.qc import qc_masks, REGISTRATION_GUARDRAIL_THRESHOLDS @@ -42,7 +44,17 @@ def __init__(self, *args, metric=None, value=None, threshold=None, super().__init__(msg, *args, **kwargs) -def registration_guardrail(registered: str, target: str) -> str: +def registration_guardrail_node(name=None, ): + """Convenience method to get a new registration_guardrail Node""" + if name is None: + name = 'registration_guardrail' + return Node(Function(input_names=['registered_mask', + 'reference_mask'], + output_names=['registered_mask'], + function=registration_guardrail), name=name) + + +def registration_guardrail(registered: str, reference: str) -> str: """Check QC metrics post-registration and throw an exception if metrics are below given thresholds. @@ -56,7 +68,7 @@ def registration_guardrail(registered: str, target: str) -> str: Parameters ---------- - registered, target : str + registered, reference : str path to mask Returns @@ -64,7 +76,7 @@ def registration_guardrail(registered: str, target: str) -> str: registered_mask : str path to mask """ - qc_metrics = qc_masks(registered, target) + qc_metrics = qc_masks(registered, reference) for metric, threshold in REGISTRATION_GUARDRAIL_THRESHOLDS.items(): if threshold is not None: value = qc_metrics.get(metric) diff --git a/CPAC/registration/registration.py b/CPAC/registration/registration.py index ca1fe6fe0e..c91d518f76 100644 --- a/CPAC/registration/registration.py +++ b/CPAC/registration/registration.py @@ -24,7 +24,7 @@ from CPAC.anat_preproc.lesion_preproc import create_lesion_preproc from CPAC.func_preproc.utils import chunk_ts, split_ts_chunks -from CPAC.registration.guardrails import registration_guardrail +from CPAC.registration.guardrails import registration_guardrail_node from CPAC.registration.utils import seperate_warps_list, \ check_transforms, \ generate_inverse_transform_flags, \ @@ -194,12 +194,8 @@ def apply_transform(wf_name, reg_tool, time_series=False, multi_input=False, wf.connect(inputNode, 'transform', apply_warp, 'field_file') # Guardrail: check QC metrics - guardrail = pe.Node(util.Function(input_names=['registered_mask', - 'target_mask'], - output_names=['registered_mask'], - function=registration_guardrail), - name='registration_guardrail') - wf.connect(inputNode, 'reference', guardrail, 'target') + guardrail = registration_guardrail_node() + wf.connect(inputNode, 'reference', guardrail, 'reference') # parallelize the apply warp, if multiple CPUs, and it's a time # series! 
@@ -335,28 +331,28 @@ def create_fsl_flirt_linear_reg(name='fsl_flirt_linear_reg'): linear_reg = pe.Node(interface=fsl.FLIRT(), name='linear_reg_0') linear_reg.inputs.cost = 'corratio' + guardrail = registration_guardrail_node() + inv_flirt_xfm = pe.Node(interface=fsl.utils.ConvertXFM(), name='inv_linear_reg0_xfm') inv_flirt_xfm.inputs.invert_xfm = True linear_register.connect(inputspec, 'input_brain', linear_reg, 'in_file') - linear_register.connect(inputspec, 'reference_brain', linear_reg, 'reference') - + linear_register.connect(inputspec, 'reference_brain', + guardrail, 'reference') linear_register.connect(inputspec, 'interp', linear_reg, 'interp') - linear_register.connect(linear_reg, 'out_file', + guardrail, 'registered') + linear_register.connect(guardrail, 'registered', outputspec, 'output_brain') - linear_register.connect(linear_reg, 'out_matrix_file', inv_flirt_xfm, 'in_file') - linear_register.connect(inv_flirt_xfm, 'out_file', outputspec, 'invlinear_xfm') - linear_register.connect(linear_reg, 'out_matrix_file', outputspec, 'linear_xfm') @@ -589,7 +585,7 @@ def create_fsl_fnirt_nonlinear_reg_nhp(name='fsl_fnirt_nonlinear_reg_nhp'): brain_warp, 'ref_file') head_warp = pe.Node(interface=fsl.ApplyWarp(), - name='head_warp') + name='head_warp') head_warp.inputs.interp = 'spline' head_warp.inputs.relwarp = True @@ -713,14 +709,19 @@ def create_register_func_to_anat(config, phase_diff_distcor=False, fields=['func_to_anat_linear_xfm_nobbreg', 'anat_func_nobbreg']), name='outputspec') - linear_reg = pe.Node(interface=fsl.FLIRT(), - name='linear_func_to_anat') + linear_reg = pe.Node(interface=fsl.FLIRT(), name='linear_func_to_anat') + guardrail = registration_guardrail_node() - linear_reg.inputs.interp = config.registration_workflows['functional_registration']['coregistration']['interpolation'] - linear_reg.inputs.cost = config.registration_workflows['functional_registration']['coregistration']['cost'] - linear_reg.inputs.dof = config.registration_workflows['functional_registration']['coregistration']['dof'] - if config.registration_workflows['functional_registration']['coregistration']['arguments'] is not None: - linear_reg.inputs.args = config.registration_workflows['functional_registration']['coregistration']['arguments'] + linear_reg.inputs.interp = config.registration_workflows[ + 'functional_registration']['coregistration']['interpolation'] + linear_reg.inputs.cost = config.registration_workflows[ + 'functional_registration']['coregistration']['cost'] + linear_reg.inputs.dof = config.registration_workflows[ + 'functional_registration']['coregistration']['dof'] + if config.registration_workflows['functional_registration'][ + 'coregistration']['arguments'] is not None: + linear_reg.inputs.args = config.registration_workflows[ + 'functional_registration']['coregistration']['arguments'] if phase_diff_distcor: register_func_to_anat.connect( @@ -734,24 +735,23 @@ def create_register_func_to_anat(config, phase_diff_distcor=False, linear_reg, 'echospacing') register_func_to_anat.connect(inputspec, 'func', linear_reg, 'in_file') - register_func_to_anat.connect(inputspec, 'anat', linear_reg, 'reference') - + register_func_to_anat.connect(inputspec, 'anat', guardrail, 'reference') register_func_to_anat.connect(inputspec, 'dof', linear_reg, 'dof') - register_func_to_anat.connect(inputspec, 'interp', linear_reg, 'interp') - register_func_to_anat.connect(linear_reg, 'out_matrix_file', - outputspec, - 'func_to_anat_linear_xfm_nobbreg') - + outputspec, 'func_to_anat_linear_xfm_' + 'nobbreg') 
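
When any enabled threshold is not met, the guardrail raises BadRegistrationError, and the message that ends up in the nipype crash file names the offending metric. A short illustration, assuming C-PAC itself is importable:

    from CPAC.registration.guardrails import BadRegistrationError

    try:
        raise BadRegistrationError(metric='Dice', value=0.62, threshold=0.8)
    except BadRegistrationError as error:
        print(error)   # Registration failed quality control (Dice: 0.62 < 0.8).

    # with any of the keywords omitted, the parenthetical detail is dropped
    print(BadRegistrationError())  # Registration failed quality control.
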
register_func_to_anat.connect(linear_reg, 'out_file', + guardrail, 'registered') + register_func_to_anat.connect(guardrail, 'registered', outputspec, 'anat_func_nobbreg') return register_func_to_anat -def create_register_func_to_anat_use_T2(config, name='register_func_to_anat_use_T2'): +def create_register_func_to_anat_use_T2(config, + name='register_func_to_anat_use_T2'): # for monkey data # ref: https://github.com/DCAN-Labs/dcan-macaque-pipeline/blob/master/fMRIVolume/GenericfMRIVolumeProcessingPipeline.sh#L287-L295 # https://github.com/HechengJin0/dcan-macaque-pipeline/blob/master/fMRIVolume/GenericfMRIVolumeProcessingPipeline.sh#L524-L535 @@ -788,8 +788,6 @@ def create_register_func_to_anat_use_T2(config, name='register_func_to_anat_use_ outputspec.anat_func_nobbreg : string (nifti file) Functional scan registered to anatomical space """ - - register_func_to_anat_use_T2 = pe.Workflow(name=name) inputspec = pe.Node(util.IdentityInterface(fields=['func', @@ -798,14 +796,16 @@ def create_register_func_to_anat_use_T2(config, name='register_func_to_anat_use_ 'T2_brain']), name='inputspec') - outputspec = pe.Node(util.IdentityInterface(fields=['func_to_anat_linear_xfm_nobbreg', - 'func_to_anat_linear_warp_nobbreg', + outputspec = pe.Node(util.IdentityInterface(fields=['func_to_anat_linear_' + 'xfm_nobbreg', + 'func_to_anat_linear_' + 'warp_nobbreg', 'anat_func_nobbreg']), - name='outputspec') + name='outputspec') # ${FSLDIR}/bin/flirt -interp spline -dof 6 -in ${fMRIFolder}/${ScoutName}_gdc -ref ${T1wFolder}/${T2wRestoreImage} -omat "$fMRIFolder"/Scout2T2w.mat -out ${fMRIFolder}/Scout2T2w.nii.gz -searchrx -30 30 -searchry -30 30 -searchrz -30 30 -cost mutualinfo linear_reg_func_to_t2 = pe.Node(interface=fsl.FLIRT(), - name='linear_reg_func_to_t2') + name='linear_reg_func_to_t2') linear_reg_func_to_t2.inputs.interp = 'spline' linear_reg_func_to_t2.inputs.cost = 'mutualinfo' linear_reg_func_to_t2.inputs.dof = 6 @@ -813,44 +813,63 @@ def create_register_func_to_anat_use_T2(config, name='register_func_to_anat_use_ linear_reg_func_to_t2.inputs.searchr_y = [30, 30] linear_reg_func_to_t2.inputs.searchr_z = [30, 30] - register_func_to_anat_use_T2.connect(inputspec, 'func', linear_reg_func_to_t2, 'in_file') + guardrail_t1 = registration_guardrail_node('guardrail_T1') + guardrail_t2 = registration_guardrail_node('guardrail_T2') - register_func_to_anat_use_T2.connect(inputspec, 'T2_head', linear_reg_func_to_t2, 'reference') + register_func_to_anat_use_T2.connect(inputspec, 'func', + linear_reg_func_to_t2, 'in_file') + register_func_to_anat_use_T2.connect(inputspec, 'T2_head', + linear_reg_func_to_t2, 'reference') + register_func_to_anat_use_T2.connect(inputspec, 'T2_head', + guardrail_t2, 'reference') # ${FSLDIR}/bin/convert_xfm -omat "$fMRIFolder"/T2w2Scout.mat -inverse "$fMRIFolder"/Scout2T2w.mat invt = pe.Node(interface=fsl.ConvertXFM(), name='convert_xfm') invt.inputs.invert_xfm = True - register_func_to_anat_use_T2.connect(linear_reg_func_to_t2, 'out_matrix_file', invt, 'in_file') + register_func_to_anat_use_T2.connect( + linear_reg_func_to_t2, 'out_matrix_file', invt, 'in_file') # ${FSLDIR}/bin/applywarp --interp=nn -i ${T1wFolder}/${T2wRestoreImageBrain} -r ${fMRIFolder}/${ScoutName}_gdc --premat="$fMRIFolder"/T2w2Scout.mat -o ${fMRIFolder}/Scout_brain_mask.nii.gz anat_to_func = pe.Node(interface=fsl.ApplyWarp(), name='anat_to_func') anat_to_func.inputs.interp = 'nn' - register_func_to_anat_use_T2.connect(inputspec, 'T2_brain', anat_to_func, 'in_file') - 
register_func_to_anat_use_T2.connect(inputspec, 'func', anat_to_func, 'ref_file') - register_func_to_anat_use_T2.connect(invt, 'out_file', anat_to_func, 'premat') + guardrail_anat_to_func = registration_guardrail_node('guardrail_' + 'anat-to-func') + register_func_to_anat_use_T2.connect(inputspec, 'T2_brain', + anat_to_func, 'in_file') + register_func_to_anat_use_T2.connect(inputspec, 'func', + anat_to_func, 'ref_file') + register_func_to_anat_use_T2.connect(inputspec, 'func', + guardrail_anat_to_func, 'reference') + register_func_to_anat_use_T2.connect(invt, 'out_file', + anat_to_func, 'premat') # ${FSLDIR}/bin/fslmaths ${fMRIFolder}/Scout_brain_mask.nii.gz -bin ${fMRIFolder}/Scout_brain_mask.nii.gz func_brain_mask = pe.Node(interface=fsl.maths.MathsCommand(), - name=f'func_brain_mask') + name='func_brain_mask') func_brain_mask.inputs.args = '-bin' - register_func_to_anat_use_T2.connect(anat_to_func, 'out_file', func_brain_mask, 'in_file') + register_func_to_anat_use_T2.connect(anat_to_func, 'out_file', + guardrail_anat_to_func, 'registered') + register_func_to_anat_use_T2.connect(guardrail_anat_to_func, 'registered', + func_brain_mask, 'in_file') # ${FSLDIR}/bin/fslmaths ${fMRIFolder}/${ScoutName}_gdc -mas ${fMRIFolder}/Scout_brain_mask.nii.gz ${fMRIFolder}/Scout_brain_dc.nii.gz func_brain = pe.Node(interface=fsl.MultiImageMaths(), - name='func_brain') + name='func_brain') func_brain.inputs.op_string = "-mas %s " - register_func_to_anat_use_T2.connect(inputspec, 'func', func_brain, 'in_file') - register_func_to_anat_use_T2.connect(func_brain_mask, 'out_file', func_brain, 'operand_files') + register_func_to_anat_use_T2.connect(inputspec, 'func', + func_brain, 'in_file') + register_func_to_anat_use_T2.connect(func_brain_mask, 'out_file', + func_brain, 'operand_files') # ## re-registering the maked brain to the T1 brain: # ${FSLDIR}/bin/flirt -interp spline -dof 6 -in ${fMRIFolder}/Scout_brain_dc.nii.gz -ref ${T1wFolder}/${T1wRestoreImageBrain} -omat "$fMRIFolder"/${ScoutName}_gdc2T1w_init.mat -out ${fMRIFolder}/${ScoutName}_gdc2T1w_brain_init -searchrx -30 30 -searchry -30 30 -searchrz -30 30 -cost mutualinfo linear_reg_func_to_t1 = pe.Node(interface=fsl.FLIRT(), - name='linear_reg_func_to_t1') + name='linear_reg_func_to_t1') linear_reg_func_to_t1.inputs.interp = 'spline' linear_reg_func_to_t1.inputs.cost = 'mutualinfo' linear_reg_func_to_t1.inputs.dof = 6 @@ -858,9 +877,12 @@ def create_register_func_to_anat_use_T2(config, name='register_func_to_anat_use_ linear_reg_func_to_t1.inputs.searchr_y = [30, 30] linear_reg_func_to_t1.inputs.searchr_z = [30, 30] - register_func_to_anat_use_T2.connect(func_brain, 'out_file', linear_reg_func_to_t1, 'in_file') - - register_func_to_anat_use_T2.connect(inputspec, 'T1_brain', linear_reg_func_to_t1, 'reference') + register_func_to_anat_use_T2.connect(func_brain, 'out_file', + linear_reg_func_to_t1, 'in_file') + register_func_to_anat_use_T2.connect(inputspec, 'T1_brain', + linear_reg_func_to_t1, 'reference') + register_func_to_anat_use_T2.connect(inputspec, 'T1_brain', + guardrail_t1, 'reference') # #taking out warpfield as it is not being made without a fieldmap. 
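
Because this workflow performs several registrations against different references (the T2 head, the functional scout, and the T1 brain), each step gets its own guardrail node, named so that a QC failure points at the step that tripped it. A small sketch, assuming the guardrails module is importable:

    from CPAC.registration.guardrails import registration_guardrail_node

    # distinct names keep the nodes (and any crash files) distinguishable
    guardrail_t1 = registration_guardrail_node('guardrail_T1')
    guardrail_t2 = registration_guardrail_node('guardrail_T2')

    # with no name given, the node falls back to 'registration_guardrail'
    default_guardrail = registration_guardrail_node()
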
# ${FSLDIR}/bin/convertwarp --relout --rel -r ${T1wFolder}/${T2wRestoreImage} --postmat=${fMRIFolder}/${ScoutName}_gdc2T1w_init.mat -o ${fMRIFolder}/${ScoutName}_gdc2T1w_init_warp @@ -869,20 +891,19 @@ def create_register_func_to_anat_use_T2(config, name='register_func_to_anat_use_ convert_warp.inputs.out_relwarp = True convert_warp.inputs.relwarp = True - register_func_to_anat_use_T2.connect(linear_reg_func_to_t1, 'out_matrix_file', convert_warp, 'postmat') - - register_func_to_anat_use_T2.connect(inputspec, 'T2_head', convert_warp, 'reference') - - - register_func_to_anat_use_T2.connect(linear_reg_func_to_t1, 'out_matrix_file', - outputspec, - 'func_to_anat_linear_xfm_nobbreg') - + register_func_to_anat_use_T2.connect( + linear_reg_func_to_t1, 'out_matrix_file', convert_warp, 'postmat') + register_func_to_anat_use_T2.connect(inputspec, 'T2_head', + convert_warp, 'reference') + register_func_to_anat_use_T2.connect( + linear_reg_func_to_t1, 'out_matrix_file', + outputspec, 'func_to_anat_linear_xfm_nobbreg') register_func_to_anat_use_T2.connect(convert_warp, 'out_file', - outputspec, - 'func_to_anat_linear_warp_nobbreg') - + outputspec, 'func_to_anat_linear_' + 'warp_nobbreg') register_func_to_anat_use_T2.connect(linear_reg_func_to_t1, 'out_file', + guardrail_t1, 'registered') + register_func_to_anat_use_T2.connect(guardrail_t1, 'registered', outputspec, 'anat_func_nobbreg') return register_func_to_anat_use_T2 @@ -975,19 +996,15 @@ def bbreg_args(bbreg_target): register_bbregister_func_to_anat.connect( inputspec, 'bbr_schedule', bbreg_func_to_anat, 'schedule') - register_bbregister_func_to_anat.connect( wm_bb_mask, ('out_file', bbreg_args), bbreg_func_to_anat, 'args') - register_bbregister_func_to_anat.connect( inputspec, 'func', bbreg_func_to_anat, 'in_file') - register_bbregister_func_to_anat.connect( inputspec, 'anat', bbreg_func_to_anat, 'reference') - register_bbregister_func_to_anat.connect( inputspec, 'linear_reg_matrix', bbreg_func_to_anat, 'in_matrix_file') @@ -1006,20 +1023,22 @@ def bbreg_args(bbreg_target): inputNode_echospacing, 'echospacing', bbreg_func_to_anat, 'echospacing') + guardrail = registration_guardrail_node() + register_bbregister_func_to_anat.connect(inputspec, 'anat', + guardrail, 'reference') register_bbregister_func_to_anat.connect( bbreg_func_to_anat, 'out_matrix_file', outputspec, 'func_to_anat_linear_xfm') - - register_bbregister_func_to_anat.connect( - bbreg_func_to_anat, 'out_file', - outputspec, 'anat_func') + register_bbregister_func_to_anat.connect(bbreg_func_to_anat, 'out_file', + guardrail, 'registered') + register_bbregister_func_to_anat.connect(guardrail, 'registered', + outputspec, 'anat_func') return register_bbregister_func_to_anat -def create_wf_calculate_ants_warp( - name='create_wf_calculate_ants_warp', num_threads=1, reg_ants_skull=1 -): +def create_wf_calculate_ants_warp(name='create_wf_calculate_ants_warp', + num_threads=1, reg_ants_skull=1): ''' Calculates the nonlinear ANTS registration transform. 
This workflow employs the antsRegistration tool: @@ -1236,99 +1255,63 @@ def create_wf_calculate_ants_warp( select_inverse_warp.inputs.selection = "Inverse" - calc_ants_warp_wf.connect( - inputspec, 'moving_brain', - calculate_ants_warp, 'moving_brain') - - calc_ants_warp_wf.connect( - inputspec, 'reference_brain', - calculate_ants_warp, 'reference_brain') + guardrail = registration_guardrail_node() + calc_ants_warp_wf.connect(inputspec, 'moving_brain', + calculate_ants_warp, 'moving_brain') + calc_ants_warp_wf.connect(inputspec, 'reference_brain', + calculate_ants_warp, 'reference_brain') + calc_ants_warp_wf.connect(inputspec, 'reference_brain', + guardrail, 'reference') if reg_ants_skull == 1: - calculate_ants_warp.inputs.reg_with_skull = 1 - calc_ants_warp_wf.connect( - inputspec, 'moving_skull', - calculate_ants_warp, 'moving_skull') - - calc_ants_warp_wf.connect( - inputspec, 'reference_skull', - calculate_ants_warp, 'reference_skull') - + calc_ants_warp_wf.connect(inputspec, 'moving_skull', + calculate_ants_warp, 'moving_skull') + calc_ants_warp_wf.connect(inputspec, 'reference_skull', + calculate_ants_warp, 'reference_skull') else: - calc_ants_warp_wf.connect( - inputspec, 'moving_brain', - calculate_ants_warp, 'moving_skull') - - calc_ants_warp_wf.connect( - inputspec, 'reference_brain', - calculate_ants_warp, 'reference_skull') - - calc_ants_warp_wf.connect( - inputspec, 'fixed_image_mask', - calculate_ants_warp, 'fixed_image_mask') + calc_ants_warp_wf.connect(inputspec, 'moving_brain', + calculate_ants_warp, 'moving_skull') + calc_ants_warp_wf.connect(inputspec, 'reference_brain', + calculate_ants_warp, 'reference_skull') + calc_ants_warp_wf.connect(inputspec, 'fixed_image_mask', + calculate_ants_warp, 'fixed_image_mask') calc_ants_warp_wf.connect(inputspec, 'reference_mask', - calculate_ants_warp, 'reference_mask') - + calculate_ants_warp, 'reference_mask') calc_ants_warp_wf.connect(inputspec, 'moving_mask', - calculate_ants_warp, 'moving_mask') - + calculate_ants_warp, 'moving_mask') calc_ants_warp_wf.connect(inputspec, 'ants_para', - calculate_ants_warp, 'ants_para') - - calc_ants_warp_wf.connect( - inputspec, 'interp', - calculate_ants_warp, 'interp') - + calculate_ants_warp, 'ants_para') + calc_ants_warp_wf.connect(inputspec, 'interp', + calculate_ants_warp, 'interp') # inter-workflow connections - - calc_ants_warp_wf.connect( - calculate_ants_warp, 'warp_list', - select_forward_initial, 'warp_list') - - calc_ants_warp_wf.connect( - calculate_ants_warp, 'warp_list', - select_forward_rigid, 'warp_list') - - calc_ants_warp_wf.connect( - calculate_ants_warp, 'warp_list', - select_forward_affine, 'warp_list') - - calc_ants_warp_wf.connect( - calculate_ants_warp, 'warp_list', - select_forward_warp, 'warp_list') - - calc_ants_warp_wf.connect( - calculate_ants_warp, 'warp_list', - select_inverse_warp, 'warp_list') - + calc_ants_warp_wf.connect(calculate_ants_warp, 'warp_list', + select_forward_initial, 'warp_list') + calc_ants_warp_wf.connect(calculate_ants_warp, 'warp_list', + select_forward_rigid, 'warp_list') + calc_ants_warp_wf.connect(calculate_ants_warp, 'warp_list', + select_forward_affine, 'warp_list') + calc_ants_warp_wf.connect(calculate_ants_warp, 'warp_list', + select_forward_warp, 'warp_list') + calc_ants_warp_wf.connect(calculate_ants_warp, 'warp_list', + select_inverse_warp, 'warp_list') # connections to outputspec - - calc_ants_warp_wf.connect( - select_forward_initial, 'selected_warp', - outputspec, 'ants_initial_xfm') - - calc_ants_warp_wf.connect( - 
select_forward_rigid, 'selected_warp', - outputspec, 'ants_rigid_xfm') - - calc_ants_warp_wf.connect( - select_forward_affine, 'selected_warp', - outputspec, 'ants_affine_xfm') - - calc_ants_warp_wf.connect( - select_forward_warp, 'selected_warp', - outputspec, 'warp_field') - - calc_ants_warp_wf.connect( - select_inverse_warp, 'selected_warp', - outputspec, 'inverse_warp_field') - - calc_ants_warp_wf.connect( - calculate_ants_warp, 'warped_image', - outputspec, 'normalized_output_brain') + calc_ants_warp_wf.connect(select_forward_initial, 'selected_warp', + outputspec, 'ants_initial_xfm') + calc_ants_warp_wf.connect(select_forward_rigid, 'selected_warp', + outputspec, 'ants_rigid_xfm') + calc_ants_warp_wf.connect(select_forward_affine, 'selected_warp', + outputspec, 'ants_affine_xfm') + calc_ants_warp_wf.connect(select_forward_warp, 'selected_warp', + outputspec, 'warp_field') + calc_ants_warp_wf.connect(select_inverse_warp, 'selected_warp', + outputspec, 'inverse_warp_field') + calc_ants_warp_wf.connect(calculate_ants_warp, 'warped_image', + guardrail, 'registered') + calc_ants_warp_wf.connect(guardrail, 'registered', + outputspec, 'normalized_output_brain') return calc_ants_warp_wf @@ -1369,10 +1352,8 @@ def FSL_registration_connector(wf_name, cfg, orig="T1w", opt=None, # Input registration parameters wf.connect(inputNode, 'interpolation', flirt_reg_anat_mni, 'inputspec.interp') - wf.connect(inputNode, 'input_brain', flirt_reg_anat_mni, 'inputspec.input_brain') - wf.connect(inputNode, 'reference_brain', flirt_reg_anat_mni, 'inputspec.reference_brain') @@ -1381,7 +1362,6 @@ def FSL_registration_connector(wf_name, cfg, orig="T1w", opt=None, wf.connect(inputNode, 'reference_brain', write_lin_composite_xfm, 'reference') - wf.connect(flirt_reg_anat_mni, 'outputspec.linear_xfm', write_lin_composite_xfm, 'premat') @@ -1391,7 +1371,6 @@ def FSL_registration_connector(wf_name, cfg, orig="T1w", opt=None, wf.connect(inputNode, 'reference_brain', write_invlin_composite_xfm, 'reference') - wf.connect(flirt_reg_anat_mni, 'outputspec.invlinear_xfm', write_invlin_composite_xfm, 'premat') @@ -1406,7 +1385,6 @@ def FSL_registration_connector(wf_name, cfg, orig="T1w", opt=None, write_lin_composite_xfm, 'out_file') } - if opt == 'FSL': if cfg.registration_workflows['anatomical_registration']['registration']['FSL-FNIRT']['ref_resolution'] == \ cfg.registration_workflows['anatomical_registration']['resolution_for_anat']: @@ -1420,23 +1398,17 @@ def FSL_registration_connector(wf_name, cfg, orig="T1w", opt=None, wf.connect(inputNode, 'input_brain', fnirt_reg_anat_mni, 'inputspec.input_brain') - wf.connect(inputNode, 'reference_brain', fnirt_reg_anat_mni, 'inputspec.reference_brain') - wf.connect(inputNode, 'input_head', fnirt_reg_anat_mni, 'inputspec.input_skull') - # NOTE: crossover from above opt block wf.connect(flirt_reg_anat_mni, 'outputspec.linear_xfm', fnirt_reg_anat_mni, 'inputspec.linear_aff') - wf.connect(inputNode, 'reference_head', fnirt_reg_anat_mni, 'inputspec.reference_skull') - wf.connect(inputNode, 'reference_mask', fnirt_reg_anat_mni, 'inputspec.ref_mask') - # assign the FSL FNIRT config file specified in pipeline config.yml wf.connect(inputNode, 'fnirt_config', fnirt_reg_anat_mni, 'inputspec.fnirt_config') @@ -1525,19 +1497,14 @@ def ANTs_registration_connector(wf_name, cfg, params, orig="T1w", wf.connect(inputNode, 'input_brain', ants_reg_anat_mni, 'inputspec.moving_brain') - wf.connect(inputNode, 'reference_brain', ants_reg_anat_mni, 'inputspec.reference_brain') - wf.connect(inputNode, 
'input_head', ants_reg_anat_mni, 'inputspec.moving_skull') - wf.connect(inputNode, 'reference_head', ants_reg_anat_mni, 'inputspec.reference_skull') - wf.connect(inputNode, 'input_mask', ants_reg_anat_mni, 'inputspec.moving_mask') - wf.connect(inputNode, 'reference_mask', ants_reg_anat_mni, 'inputspec.reference_mask') @@ -1586,10 +1553,8 @@ def ANTs_registration_connector(wf_name, cfg, params, orig="T1w", wf.connect(ants_reg_anat_mni, 'outputspec.ants_affine_xfm', collect_transforms, 'in1') - wf.connect(ants_reg_anat_mni, 'outputspec.ants_rigid_xfm', collect_transforms, 'in2') - wf.connect(ants_reg_anat_mni, 'outputspec.ants_initial_xfm', collect_transforms, 'in3') @@ -1603,7 +1568,6 @@ def ANTs_registration_connector(wf_name, cfg, params, orig="T1w", mem_gb=6) wf.connect(collect_transforms, 'out', check_transform, 'transform_list') - wf.connect(check_transform, 'checked_transform_list', write_composite_linear_xfm, 'transforms') @@ -1619,10 +1583,8 @@ def ANTs_registration_connector(wf_name, cfg, params, orig="T1w", wf.connect(inputNode, 'reference_brain', write_composite_invlinear_xfm, 'input_image') - wf.connect(inputNode, 'input_brain', write_composite_invlinear_xfm, 'reference_image') - wf.connect(inputNode, 'interpolation', write_composite_invlinear_xfm, 'interpolation') @@ -1635,10 +1597,8 @@ def ANTs_registration_connector(wf_name, cfg, params, orig="T1w", wf.connect(ants_reg_anat_mni, 'outputspec.ants_initial_xfm', collect_inv_transforms, 'in1') - wf.connect(ants_reg_anat_mni, 'outputspec.ants_rigid_xfm', collect_inv_transforms, 'in2') - wf.connect(ants_reg_anat_mni, 'outputspec.ants_affine_xfm', collect_inv_transforms, 'in3') @@ -1652,7 +1612,6 @@ def ANTs_registration_connector(wf_name, cfg, params, orig="T1w", wf.connect(collect_inv_transforms, 'out', check_invlinear_transform, 'transform_list') - wf.connect(check_invlinear_transform, 'checked_transform_list', write_composite_invlinear_xfm, 'transforms') @@ -1666,7 +1625,6 @@ def ANTs_registration_connector(wf_name, cfg, params, orig="T1w", wf.connect(check_invlinear_transform, 'checked_transform_list', inverse_transform_flags, 'transform_list') - wf.connect(inverse_transform_flags, 'inverse_transform_flags', write_composite_invlinear_xfm, 'invert_transform_flags') @@ -1680,10 +1638,8 @@ def ANTs_registration_connector(wf_name, cfg, params, orig="T1w", f"from-{orig}_to-{sym}{tmpl}template_mode-image_xfm.nii.gz" wf.connect(inputNode, 'input_brain', write_composite_xfm, 'input_image') - wf.connect(inputNode, 'reference_brain', write_composite_xfm, 'reference_image') - wf.connect(inputNode, 'interpolation', write_composite_xfm, 'interpolation') @@ -1696,13 +1652,10 @@ def ANTs_registration_connector(wf_name, cfg, params, orig="T1w", wf.connect(ants_reg_anat_mni, 'outputspec.warp_field', collect_all_transforms, 'in1') - wf.connect(ants_reg_anat_mni, 'outputspec.ants_affine_xfm', collect_all_transforms, 'in2') - wf.connect(ants_reg_anat_mni, 'outputspec.ants_rigid_xfm', collect_all_transforms, 'in3') - wf.connect(ants_reg_anat_mni, 'outputspec.ants_initial_xfm', collect_all_transforms, 'in4') @@ -1716,7 +1669,6 @@ def ANTs_registration_connector(wf_name, cfg, params, orig="T1w", wf.connect(collect_all_transforms, 'out', check_all_transform, 'transform_list') - wf.connect(check_all_transform, 'checked_transform_list', write_composite_xfm, 'transforms') @@ -1732,10 +1684,8 @@ def ANTs_registration_connector(wf_name, cfg, params, orig="T1w", wf.connect(inputNode, 'reference_brain', write_composite_inv_xfm, 'input_image') - 
wf.connect(inputNode, 'input_brain', write_composite_inv_xfm, 'reference_image') - wf.connect(inputNode, 'interpolation', write_composite_inv_xfm, 'interpolation') @@ -1748,13 +1698,10 @@ def ANTs_registration_connector(wf_name, cfg, params, orig="T1w", wf.connect(ants_reg_anat_mni, 'outputspec.ants_initial_xfm', collect_all_inv_transforms, 'in1') - wf.connect(ants_reg_anat_mni, 'outputspec.ants_rigid_xfm', collect_all_inv_transforms, 'in2') - wf.connect(ants_reg_anat_mni, 'outputspec.ants_affine_xfm', collect_all_inv_transforms, 'in3') - wf.connect(ants_reg_anat_mni, 'outputspec.inverse_warp_field', collect_all_inv_transforms, 'in4') @@ -1768,7 +1715,6 @@ def ANTs_registration_connector(wf_name, cfg, params, orig="T1w", wf.connect(collect_all_inv_transforms, 'out', check_all_inv_transform, 'transform_list') - wf.connect(check_all_inv_transform, 'checked_transform_list', write_composite_inv_xfm, 'transforms') @@ -1782,7 +1728,6 @@ def ANTs_registration_connector(wf_name, cfg, params, orig="T1w", wf.connect(check_all_inv_transform, 'checked_transform_list', inverse_all_transform_flags, 'transform_list') - wf.connect(inverse_all_transform_flags, 'inverse_transform_flags', write_composite_inv_xfm, 'invert_transform_flags') @@ -1831,9 +1776,7 @@ def bold_to_T1template_xfm_connector(wf_name, cfg, reg_tool, symmetric=False): # convert the .mat from linear Func->Anat to # ANTS format wf.connect(inputNode, 'coreg_xfm', fsl_reg_2_itk, 'transform_file') - wf.connect(inputNode, 'input_brain', fsl_reg_2_itk, 'reference_file') - wf.connect(inputNode, 'mean_bold', fsl_reg_2_itk, 'source_file') itk_imports = ['import os'] @@ -1858,7 +1801,6 @@ def bold_to_T1template_xfm_connector(wf_name, cfg, reg_tool, symmetric=False): wf.connect(inputNode, 'mean_bold', write_composite_xfm, 'input_image') - wf.connect(inputNode, 'T1w-brain-template_funcreg', write_composite_xfm, 'reference_image') @@ -1869,20 +1811,18 @@ def bold_to_T1template_xfm_connector(wf_name, cfg, reg_tool, symmetric=False): 'registration']['ANTs']['interpolation'] collect_all_transforms = pe.Node(util.Merge(2), - name=f'collect_all_transforms') + name='collect_all_transforms') wf.connect(inputNode, 'T1w_to_template_xfm', collect_all_transforms, 'in1') - wf.connect(change_transform, 'updated_affine_file', collect_all_transforms, 'in2') - wf.connect(collect_all_transforms, 'out', write_composite_xfm, 'transforms') write_composite_inv_xfm = pe.Node( interface=ants.ApplyTransforms(), - name=f'write_composite_inv_xfm', + name='write_composite_inv_xfm', mem_gb=1.5) write_composite_inv_xfm.inputs.print_out_composite_warp_file = True write_composite_inv_xfm.inputs.invert_transform_flags = [True, False] @@ -1891,7 +1831,6 @@ def bold_to_T1template_xfm_connector(wf_name, cfg, reg_tool, symmetric=False): wf.connect(inputNode, 'T1w-brain-template_funcreg', write_composite_inv_xfm, 'input_image') - wf.connect(inputNode, 'mean_bold', write_composite_inv_xfm, 'reference_image') @@ -1906,10 +1845,8 @@ def bold_to_T1template_xfm_connector(wf_name, cfg, reg_tool, symmetric=False): wf.connect(change_transform, 'updated_affine_file', collect_inv_transforms, 'in1') - wf.connect(inputNode, 'template_to_T1w_xfm', collect_inv_transforms, 'in2') - wf.connect(collect_inv_transforms, 'out', write_composite_inv_xfm, 'transforms') @@ -1927,9 +1864,7 @@ def bold_to_T1template_xfm_connector(wf_name, cfg, reg_tool, symmetric=False): wf.connect(inputNode, 'T1w-brain-template_funcreg', write_composite_xfm, 'reference') - wf.connect(inputNode, 'coreg_xfm', write_composite_xfm, 
'premat') - wf.connect(inputNode, 'T1w_to_template_xfm', write_composite_xfm, 'warp1') @@ -1968,7 +1903,6 @@ def register_FSL_anat_to_template(wf, cfg, strat_pool, pipe_num, opt=None): "from-template_to-longitudinal_mode-image_desc-linear_xfm", "from-longitudinal_to-template_mode-image_xfm"]} ''' - fsl, outputs = FSL_registration_connector(f'register_{opt}_anat_to_' f'template_{pipe_num}', cfg, orig='T1w', opt=opt) @@ -2243,7 +2177,6 @@ def register_ANTs_anat_to_template(wf, cfg, strat_pool, pipe_num, opt=None): "space-longitudinal_desc-reorient_T1w"]) wf.connect(node, out, ants_rc, 'inputspec.input_head') - t1w_template = strat_pool.node_data('T1w-template') wf.connect(t1w_template.node, t1w_template.out, ants_rc, 'inputspec.reference_head') @@ -2416,18 +2349,21 @@ def register_ANTs_EPI_to_template(wf, cfg, strat_pool, pipe_num, opt=None): return (wf, outputs) -def overwrite_transform_anat_to_template(wf, cfg, strat_pool, pipe_num, opt=None): +def overwrite_transform_anat_to_template(wf, cfg, strat_pool, pipe_num, + opt=None): ''' {"name": "overwrite_transform_anat_to_template", "config": "None", "switch": [["registration_workflows", "anatomical_registration", "run"], - ["registration_workflows", "anatomical_registration", "overwrite_transform", "run"]], + ["registration_workflows", "anatomical_registration", + "overwrite_transform", "run"]], "option_key": ["registration_workflows", "anatomical_registration", "overwrite_transform", "using"], "option_val": "FSL", "inputs": [("desc-restore-brain_T1w", ["desc-brain_T1w", "space-longitudinal_desc-brain_T1w"], - ["desc-restore_T1w", "desc-preproc_T1w", "desc-reorient_T1w", "T1w"], + ["desc-restore_T1w", "desc-preproc_T1w", "desc-reorient_T1w", + "T1w"], ["desc-preproc_T1w", "desc-reorient_T1w", "T1w"], "space-T1w_desc-brain_mask", "T1w-template", @@ -2458,17 +2394,27 @@ def overwrite_transform_anat_to_template(wf, cfg, strat_pool, pipe_num, opt=None # -t ${WD}/xfms/T1w_to_MNI_1Rigid.mat \ # -t ${WD}/xfms/T1w_to_MNI_0DerivedInitialMovingTranslation.mat \ # -o [${WD}/xfms/ANTs_CombinedWarp.nii.gz,1] - ants_apply_warp_t1_to_template = pe.Node(interface=ants.ApplyTransforms(), - name=f'ANTS-ABCD_T1_to_template_{pipe_num}') + ants_apply_warp_t1_to_template = pe.Node( + interface=ants.ApplyTransforms(), + name=f'ANTS-ABCD_T1_to_template_{pipe_num}') ants_apply_warp_t1_to_template.inputs.dimension = 3 ants_apply_warp_t1_to_template.inputs.print_out_composite_warp_file = True ants_apply_warp_t1_to_template.inputs.output_image = 'ANTs_CombinedWarp.nii.gz' - node, out = strat_pool.get_data(['desc-restore_T1w', 'desc-preproc_T1w', 'desc-reorient_T1w', 'T1w']) + guardrail_brain = registration_guardrail_node('guardrail_brain_T1w') + guardrail_head = registration_guardrail_node('guardrail_head_T1w') + guardrail_mask = registration_guardrail_node('guardrail_T1w_mask') + + node, out = strat_pool.get_data(['desc-restore_T1w', + 'desc-preproc_T1w', + 'desc-reorient_T1w', 'T1w']) wf.connect(node, out, ants_apply_warp_t1_to_template, 'input_image') node, out = strat_pool.get_data('T1w-template') - wf.connect(node, out, ants_apply_warp_t1_to_template, 'reference_image') + wf.connect(node, out, + ants_apply_warp_t1_to_template, 'reference_image') + for guardrail in (guardrail_brain, guardrail_head, guardrail_mask): + wf.connect(node, out, guardrail, 'reference') node, out = strat_pool.get_data('from-T1w_to-template_mode-image_xfm') wf.connect(node, out, ants_apply_warp_t1_to_template, 'transforms') @@ -2481,118 +2427,119 @@ def 
overwrite_transform_anat_to_template(wf, cfg, strat_pool, pipe_num, opt=None # -o [${WD}/xfms/ANTs_CombinedInvWarp.nii.gz,1] # T1wImage is ACPC aligned head - ants_apply_warp_template_to_t1 = pe.Node(interface=ants.ApplyTransforms(), - name=f'ANTS-ABCD_template_to_T1_{pipe_num}') + ants_apply_warp_template_to_t1 = pe.Node( + interface=ants.ApplyTransforms(), + name=f'ANTS-ABCD_template_to_T1_{pipe_num}') ants_apply_warp_template_to_t1.inputs.dimension = 3 ants_apply_warp_template_to_t1.inputs.print_out_composite_warp_file = True ants_apply_warp_template_to_t1.inputs.output_image = 'ANTs_CombinedInvWarp.nii.gz' - node, out = strat_pool.get_data(['desc-preproc_T1w', 'desc-reorient_T1w', 'T1w']) + node, out = strat_pool.get_data(['desc-preproc_T1w', + 'desc-reorient_T1w', 'T1w']) wf.connect(node, out, ants_apply_warp_template_to_t1, 'input_image') node, out = strat_pool.get_data('T1w-template') - wf.connect(node, out, ants_apply_warp_template_to_t1, 'reference_image') + wf.connect(node, out, + ants_apply_warp_template_to_t1, 'reference_image') node, out = strat_pool.get_data('from-template_to-T1w_mode-image_xfm') wf.connect(node, out, ants_apply_warp_template_to_t1, 'transforms') # c4d -mcs ${WD}/xfms/ANTs_CombinedWarp.nii.gz -oo ${WD}/xfms/e1.nii.gz ${WD}/xfms/e2.nii.gz ${WD}/xfms/e3.nii.gz # -mcs: -multicomponent-split, -oo: -output-multiple - split_combined_warp = pe.Node(util.Function(input_names=['input', - 'output_name'], - output_names=['output1', - 'output2', - 'output3'], - function=run_c4d), - name=f'split_combined_warp_{pipe_num}') + split_combined_warp = pe.Node(util.Function( + input_names=['input', 'output_name'], + output_names=['output1', 'output2', 'output3'], + function=run_c4d), name=f'split_combined_warp_{pipe_num}') split_combined_warp.inputs.output_name = 'e' wf.connect(ants_apply_warp_t1_to_template, 'output_image', - split_combined_warp, 'input') + split_combined_warp, 'input') # c4d -mcs ${WD}/xfms/ANTs_CombinedInvWarp.nii.gz -oo ${WD}/xfms/e1inv.nii.gz ${WD}/xfms/e2inv.nii.gz ${WD}/xfms/e3inv.nii.gz - split_combined_inv_warp = pe.Node(util.Function(input_names=['input', - 'output_name'], - output_names=['output1', - 'output2', - 'output3'], - function=run_c4d), - name=f'split_combined_inv_warp_{pipe_num}') + split_combined_inv_warp = pe.Node(util.Function( + input_names=['input', 'output_name'], + output_names=['output1', 'output2', 'output3'], + function=run_c4d), name=f'split_combined_inv_warp_{pipe_num}') split_combined_inv_warp.inputs.output_name = 'einv' wf.connect(ants_apply_warp_template_to_t1, 'output_image', - split_combined_inv_warp, 'input') + split_combined_inv_warp, 'input') # fslmaths ${WD}/xfms/e2.nii.gz -mul -1 ${WD}/xfms/e-2.nii.gz change_e2_sign = pe.Node(interface=fsl.maths.MathsCommand(), - name=f'change_e2_sign_{pipe_num}') + name=f'change_e2_sign_{pipe_num}') change_e2_sign.inputs.args = '-mul -1' - wf.connect(split_combined_warp, 'output2', - change_e2_sign, 'in_file') + wf.connect(split_combined_warp, 'output2', change_e2_sign, 'in_file') # fslmaths ${WD}/xfms/e2inv.nii.gz -mul -1 ${WD}/xfms/e-2inv.nii.gz change_e2inv_sign = pe.Node(interface=fsl.maths.MathsCommand(), - name=f'change_e2inv_sign_{pipe_num}') + name=f'change_e2inv_sign_{pipe_num}') change_e2inv_sign.inputs.args = '-mul -1' wf.connect(split_combined_inv_warp, 'output2', - change_e2inv_sign, 'in_file') + change_e2inv_sign, 'in_file') # fslmerge -t ${OutputTransform} ${WD}/xfms/e1.nii.gz ${WD}/xfms/e-2.nii.gz ${WD}/xfms/e3.nii.gz - merge_xfms_to_list = pe.Node(util.Merge(3), - 
name=f'merge_t1_to_template_xfms_to_list_{pipe_num}') + merge_xfms_to_list = pe.Node( + util.Merge(3), + name=f'merge_t1_to_template_xfms_to_list_{pipe_num}') wf.connect(split_combined_warp, 'output1', - merge_xfms_to_list, 'in1') + merge_xfms_to_list, 'in1') wf.connect(change_e2_sign, 'out_file', - merge_xfms_to_list, 'in2') + merge_xfms_to_list, 'in2') wf.connect(split_combined_warp, 'output3', - merge_xfms_to_list, 'in3') + merge_xfms_to_list, 'in3') merge_xfms = pe.Node(interface=fslMerge(), name=f'merge_t1_to_template_xfms_{pipe_num}') merge_xfms.inputs.dimension = 't' wf.connect(merge_xfms_to_list, 'out', - merge_xfms, 'in_files') + merge_xfms, 'in_files') # fslmerge -t ${OutputInvTransform} ${WD}/xfms/e1inv.nii.gz ${WD}/xfms/e-2inv.nii.gz ${WD}/xfms/e3inv.nii.gz - merge_inv_xfms_to_list = pe.Node(util.Merge(3), - name=f'merge_template_to_t1_xfms_to_list_{pipe_num}') + merge_inv_xfms_to_list = pe.Node( + util.Merge(3), + name=f'merge_template_to_t1_xfms_to_list_{pipe_num}') wf.connect(split_combined_inv_warp, 'output1', - merge_inv_xfms_to_list, 'in1') + merge_inv_xfms_to_list, 'in1') wf.connect(change_e2inv_sign, 'out_file', - merge_inv_xfms_to_list, 'in2') + merge_inv_xfms_to_list, 'in2') wf.connect(split_combined_inv_warp, 'output3', - merge_inv_xfms_to_list, 'in3') + merge_inv_xfms_to_list, 'in3') merge_inv_xfms = pe.Node(interface=fslMerge(), name=f'merge_template_to_t1_xfms_{pipe_num}') merge_inv_xfms.inputs.dimension = 't' wf.connect(merge_inv_xfms_to_list, 'out', - merge_inv_xfms, 'in_files') + merge_inv_xfms, 'in_files') # applywarp --rel --interp=spline -i ${T1wRestore} -r ${Reference} -w ${OutputTransform} -o ${OutputT1wImageRestore} fsl_apply_warp_t1_to_template = pe.Node(interface=fsl.ApplyWarp(), - name=f'FSL-ABCD_T1_to_template_{pipe_num}') + name='FSL-ABCD_T1_to_' + f'template_{pipe_num}') fsl_apply_warp_t1_to_template.inputs.relwarp = True fsl_apply_warp_t1_to_template.inputs.interp = 'spline' - node, out = strat_pool.get_data(['desc-restore_T1w', 'desc-preproc_T1w', 'desc-reorient_T1w', 'T1w']) + node, out = strat_pool.get_data(['desc-restore_T1w', + 'desc-preproc_T1w', + 'desc-reorient_T1w', 'T1w']) wf.connect(node, out, fsl_apply_warp_t1_to_template, 'in_file') node, out = strat_pool.get_data('T1w-template') wf.connect(node, out, fsl_apply_warp_t1_to_template, 'ref_file') - wf.connect(merge_xfms, 'merged_file', - fsl_apply_warp_t1_to_template, 'field_file') + fsl_apply_warp_t1_to_template, 'field_file') # applywarp --rel --interp=nn -i ${T1wRestoreBrain} -r ${Reference} -w ${OutputTransform} -o ${OutputT1wImageRestoreBrain} - fsl_apply_warp_t1_brain_to_template = pe.Node(interface=fsl.ApplyWarp(), - name=f'FSL-ABCD_T1_brain_to_template_{pipe_num}') + fsl_apply_warp_t1_brain_to_template = pe.Node( + interface=fsl.ApplyWarp(), + name=f'FSL-ABCD_T1_brain_to_template_{pipe_num}') fsl_apply_warp_t1_brain_to_template.inputs.relwarp = True fsl_apply_warp_t1_brain_to_template.inputs.interp = 'nn' @@ -2604,38 +2551,45 @@ def overwrite_transform_anat_to_template(wf, cfg, strat_pool, pipe_num, opt=None wf.connect(node, out, fsl_apply_warp_t1_brain_to_template, 'ref_file') wf.connect(merge_xfms, 'merged_file', - fsl_apply_warp_t1_brain_to_template, 'field_file') + fsl_apply_warp_t1_brain_to_template, 'field_file') - fsl_apply_warp_t1_brain_mask_to_template = pe.Node(interface=fsl.ApplyWarp(), - name=f'FSL-ABCD_T1_brain_mask_to_template_{pipe_num}') + fsl_apply_warp_t1_brain_mask_to_template = pe.Node( + interface=fsl.ApplyWarp(), + 
name=f'FSL-ABCD_T1_brain_mask_to_template_{pipe_num}') fsl_apply_warp_t1_brain_mask_to_template.inputs.relwarp = True fsl_apply_warp_t1_brain_mask_to_template.inputs.interp = 'nn' node, out = strat_pool.get_data('space-T1w_desc-brain_mask') - wf.connect(node, out, fsl_apply_warp_t1_brain_mask_to_template, 'in_file') + wf.connect(node, out, + fsl_apply_warp_t1_brain_mask_to_template, 'in_file') node, out = strat_pool.get_data('T1w-template') - wf.connect(node, out, fsl_apply_warp_t1_brain_mask_to_template, 'ref_file') - + wf.connect(node, out, + fsl_apply_warp_t1_brain_mask_to_template, 'ref_file') wf.connect(merge_xfms, 'merged_file', - fsl_apply_warp_t1_brain_mask_to_template, 'field_file') + fsl_apply_warp_t1_brain_mask_to_template, 'field_file') # fslmaths ${OutputT1wImageRestore} -mas ${OutputT1wImageRestoreBrain} ${OutputT1wImageRestoreBrain} apply_mask = pe.Node(interface=fsl.maths.ApplyMask(), - name=f'get_t1_brain_{pipe_num}') + name=f'get_t1_brain_{pipe_num}') wf.connect(fsl_apply_warp_t1_to_template, 'out_file', - apply_mask, 'in_file') - + apply_mask, 'in_file') wf.connect(fsl_apply_warp_t1_brain_to_template, 'out_file', - apply_mask, 'mask_file') + apply_mask, 'mask_file') + wf.connect(apply_mask, 'out_file', guardrail_brain, 'registered') + wf.connect(fsl_apply_warp_t1_to_template, 'out_file', + guardrail_head, 'registered') + wf.connect(fsl_apply_warp_t1_brain_mask_to_template, 'out_file', + guardrail_mask, 'registered') outputs = { - 'space-template_desc-brain_T1w': (apply_mask, 'out_file'), - 'space-template_desc-head_T1w': (fsl_apply_warp_t1_to_template, 'out_file'), - 'space-template_desc-T1w_mask': (fsl_apply_warp_t1_brain_mask_to_template, 'out_file'), + 'space-template_desc-brain_T1w': (guardrail_brain, 'registered'), + 'space-template_desc-head_T1w': (guardrail_head, 'registered'), + 'space-template_desc-T1w_mask': (guardrail_mask, 'registered'), 'from-T1w_to-template_mode-image_xfm': (merge_xfms, 'merged_file'), - 'from-template_to-T1w_mode-image_xfm': (merge_inv_xfms, 'merged_file') + 'from-template_to-T1w_mode-image_xfm': (merge_inv_xfms, + 'merged_file') } return (wf, outputs) @@ -3420,8 +3374,7 @@ def warp_timeseries_to_T1template_dcan_nhp(wf, cfg, strat_pool, pipe_num, opt=No # https://github.com/DCAN-Labs/dcan-macaque-pipeline/blob/master/fMRIVolume/scripts/OneStepResampling.sh#L131 # ${FSLDIR}/bin/flirt -interp spline -in ${T1wImage} -ref ${T1wImage} -applyisoxfm $FinalfMRIResolution -out ${WD}/${T1wImageFile}.${FinalfMRIResolution} anat_resample = pe.Node(interface=fsl.FLIRT(), - name=f'anat_resample_func_res_{pipe_num}' - ) + name=f'anat_resample_func_res_{pipe_num}') anat_resample.inputs.apply_isoxfm = float(cfg.registration_workflows['functional_registration']['func_registration_to_template']['output_resolution']['func_preproc_outputs'].replace("mm", "")) anat_resample.inputs.interp = 'spline' @@ -3429,9 +3382,12 @@ def warp_timeseries_to_T1template_dcan_nhp(wf, cfg, strat_pool, pipe_num, opt=No wf.connect(node, out, anat_resample, 'in_file') wf.connect(node, out, anat_resample, 'reference') + guardrail_brain = registration_guardrail_node('guardrail-brain_bold') + guardrail_mask = registration_guardrail_node('guardrail-bold_mask') + # ${FSLDIR}/bin/applywarp --rel --interp=spline -i ${T1wImage} -r ${ResampRefIm} --premat=$FSLDIR/etc/flirtsch/ident.mat -o ${WD}/${T1wImageFile}.${FinalfMRIResolution} applywarp_anat_res = pe.Node(interface=fsl.ApplyWarp(), - name=f'anat_func_res_{pipe_num}') + name=f'anat_func_res_{pipe_num}') 
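# anat_resample / anat_func_res produce the T1w image at the functional
# resolution; that image is the reference grid for the one-step resampling
# below and the reference image handed to the registration guardrails.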
applywarp_anat_res.inputs.relwarp = True applywarp_anat_res.inputs.interp = 'spline' @@ -3452,7 +3408,9 @@ def warp_timeseries_to_T1template_dcan_nhp(wf, cfg, strat_pool, pipe_num, opt=No node, out = strat_pool.get_data('space-template_desc-T1w_mask') wf.connect(node, out, applywarp_anat_mask_res, 'in_file') - wf.connect(applywarp_anat_res, 'out_file', applywarp_anat_mask_res, 'ref_file') + wf.connect(applywarp_anat_res, 'out_file', + applywarp_anat_mask_res, 'ref_file') + wf.connect(applywarp_anat_res, 'out_file', guardrail_mask, 'reference') # ${FSLDIR}/bin/fslmaths ${WD}/${T1wImageFile}.${FinalfMRIResolution} -mas ${WD}/${FreeSurferBrainMaskFile}.${FinalfMRIResolution}.nii.gz ${WD}/${FreeSurferBrainMaskFile}.${FinalfMRIResolution}.nii.gz T1_brain_res = pe.Node(interface=fsl.MultiImageMaths(), @@ -3494,8 +3452,8 @@ def warp_timeseries_to_T1template_dcan_nhp(wf, cfg, strat_pool, pipe_num, opt=No node, out = strat_pool.get_data('from-T1w_to-template_mode-image_warp') wf.connect(node, out, convert_func_to_standard_warp, 'warp2') - - wf.connect(applywarp_anat_res, 'out_file', convert_func_to_standard_warp, 'reference') + wf.connect(applywarp_anat_res, 'out_file', + convert_func_to_standard_warp, 'reference') # https://github.com/DCAN-Labs/dcan-macaque-pipeline/blob/master/fMRIVolume/GenericfMRIVolumeProcessingPipeline.sh#L157-L158 # fslroi "$fMRIFolder"/"$NameOffMRI"_gdc "$fMRIFolder"/"$NameOffMRI"_gdc_warp 0 3 @@ -3515,7 +3473,7 @@ def warp_timeseries_to_T1template_dcan_nhp(wf, cfg, strat_pool, pipe_num, opt=No multiply_func_roi_by_zero.inputs.args = '-mul 0' wf.connect(extract_func_roi, 'roi_file', - multiply_func_roi_by_zero, 'in_file') + multiply_func_roi_by_zero, 'in_file') # https://github.com/DCAN-Labs/dcan-macaque-pipeline/blob/master/fMRIVolume/scripts/OneStepResampling.sh#L173 # fslsplit ${InputfMRI} ${WD}/prevols/vol -t @@ -3537,10 +3495,9 @@ def warp_timeseries_to_T1template_dcan_nhp(wf, cfg, strat_pool, pipe_num, opt=No convert_motion_distortion_warp.inputs.relwarp = True wf.connect(multiply_func_roi_by_zero, 'out_file', - convert_motion_distortion_warp, 'warp1') - + convert_motion_distortion_warp, 'warp1') wf.connect(split_func, 'out_files', - convert_motion_distortion_warp, 'reference') + convert_motion_distortion_warp, 'reference') node, out = strat_pool.get_data('coordinate-transformation') wf.connect(node, out, convert_motion_distortion_warp, 'postmat') @@ -3553,13 +3510,12 @@ def warp_timeseries_to_T1template_dcan_nhp(wf, cfg, strat_pool, pipe_num, opt=No convert_registration_warp.inputs.out_relwarp = True convert_registration_warp.inputs.relwarp = True - wf.connect(applywarp_anat_res, 'out_file', convert_registration_warp, 'reference') - + wf.connect(applywarp_anat_res, 'out_file', + convert_registration_warp, 'reference') wf.connect(convert_motion_distortion_warp, 'out_file', - convert_registration_warp, 'warp1') - + convert_registration_warp, 'warp1') wf.connect(convert_func_to_standard_warp, 'out_file', - convert_registration_warp, 'warp2') + convert_registration_warp, 'warp2') # fslmaths ${WD}/prevols/vol${vnum}.nii.gz -mul 0 -add 1 ${WD}/prevols/vol${vnum}_mask.nii.gz generate_vol_mask = pe.MapNode(interface=fsl.maths.MathsCommand(), @@ -3568,8 +3524,7 @@ def warp_timeseries_to_T1template_dcan_nhp(wf, cfg, strat_pool, pipe_num, opt=No generate_vol_mask.inputs.args = '-mul 0 -add 1' - wf.connect(split_func, 'out_files', - generate_vol_mask, 'in_file') + wf.connect(split_func, 'out_files', generate_vol_mask, 'in_file') # applywarp --rel --interp=spline 
--in=${WD}/prevols/vol${vnum}.nii.gz --warp=${MotionMatrixFolder}/${MotionMatrixPrefix}${vnum}_all_warp.nii.gz --ref=${WD}/${T1wImageFile}.${FinalfMRIResolution} --out=${WD}/postvols/vol${vnum}.nii.gz applywarp_func_to_standard = pe.MapNode(interface=fsl.ApplyWarp(), @@ -3579,14 +3534,12 @@ def warp_timeseries_to_T1template_dcan_nhp(wf, cfg, strat_pool, pipe_num, opt=No applywarp_func_to_standard.inputs.relwarp = True applywarp_func_to_standard.inputs.interp = 'spline' - wf.connect(split_func, 'out_files', - applywarp_func_to_standard, 'in_file') - + wf.connect(split_func, 'out_files', applywarp_func_to_standard, 'in_file') wf.connect(convert_registration_warp, 'out_file', - applywarp_func_to_standard, 'field_file') - + applywarp_func_to_standard, 'field_file') wf.connect(applywarp_anat_res, 'out_file', - applywarp_func_to_standard, 'ref_file') + applywarp_func_to_standard, 'ref_file') + wf.connect(applywarp_anat_res, 'out_file', guardrail_brain, 'reference') # applywarp --rel --interp=nn --in=${WD}/prevols/vol${vnum}_mask.nii.gz --warp=${MotionMatrixFolder}/${MotionMatrixPrefix}${vnum}_all_warp.nii.gz --ref=${WD}/${T1wImageFile}.${FinalfMRIResolution} --out=${WD}/postvols/vol${vnum}_mask.nii.gz applywarp_func_mask_to_standard = pe.MapNode(interface=fsl.ApplyWarp(), @@ -3597,13 +3550,11 @@ def warp_timeseries_to_T1template_dcan_nhp(wf, cfg, strat_pool, pipe_num, opt=No applywarp_func_mask_to_standard.inputs.interp = 'nn' wf.connect(generate_vol_mask, 'out_file', - applywarp_func_mask_to_standard, 'in_file') - + applywarp_func_mask_to_standard, 'in_file') wf.connect(convert_registration_warp, 'out_file', - applywarp_func_mask_to_standard, 'field_file') - + applywarp_func_mask_to_standard, 'field_file') wf.connect(applywarp_anat_res, 'out_file', - applywarp_func_mask_to_standard, 'ref_file') + applywarp_func_mask_to_standard, 'ref_file') ### Loop ends! 
### @@ -3614,7 +3565,7 @@ def warp_timeseries_to_T1template_dcan_nhp(wf, cfg, strat_pool, pipe_num, opt=No merge_func_to_standard.inputs.dimension = 't' wf.connect(applywarp_func_to_standard, 'out_file', - merge_func_to_standard, 'in_files') + merge_func_to_standard, 'in_files') # fslmerge -tr ${OutputfMRI}_mask $FrameMergeSTRINGII $TR_vol merge_func_mask_to_standard = pe.Node(interface=fslMerge(), @@ -3624,7 +3575,7 @@ def warp_timeseries_to_T1template_dcan_nhp(wf, cfg, strat_pool, pipe_num, opt=No merge_func_mask_to_standard.inputs.dimension = 't' wf.connect(applywarp_func_mask_to_standard, 'out_file', - merge_func_mask_to_standard, 'in_files') + merge_func_mask_to_standard, 'in_files') # fslmaths ${OutputfMRI}_mask -Tmin ${OutputfMRI}_mask find_min_mask = pe.Node(interface=fsl.maths.MathsCommand(), @@ -3633,44 +3584,41 @@ def warp_timeseries_to_T1template_dcan_nhp(wf, cfg, strat_pool, pipe_num, opt=No find_min_mask.inputs.args = '-Tmin' wf.connect(merge_func_mask_to_standard, 'merged_file', - find_min_mask, 'in_file') + find_min_mask, 'in_file') # https://github.com/DCAN-Labs/dcan-macaque-pipeline/blob/master/fMRIVolume/scripts/IntensityNormalization.sh#L113-L119 # fslmaths ${InputfMRI} -div ${BiasField} $jacobiancom -mas ${BrainMask} -mas ${InputfMRI}_mask -ing 10000 ${OutputfMRI} -odt float merge_func_mask = pe.Node(util.Merge(3), - name=f'merge_operand_files_{pipe_num}') + name=f'merge_operand_files_{pipe_num}') wf.connect(biasfield_thr, 'out_file', merge_func_mask, 'in1') - wf.connect(applywarp_anat_mask_res, 'out_file', merge_func_mask, 'in2') - wf.connect(find_min_mask, 'out_file', merge_func_mask, 'in3') - extract_func_brain = pe.Node(interface=fsl.MultiImageMaths(), - name=f'extract_func_brain_{pipe_num}') + name=f'extract_func_brain_{pipe_num}') extract_func_brain.inputs.op_string = '-div %s -mas %s -mas %s -ing 10000' extract_func_brain.inputs.output_datatype = 'float' wf.connect(merge_func_to_standard, 'merged_file', - extract_func_brain, 'in_file') - + extract_func_brain, 'in_file') wf.connect(merge_func_mask, 'out', - extract_func_brain, 'operand_files') + extract_func_brain, 'operand_files') func_mask_final = pe.Node(interface=fsl.MultiImageMaths(), - name=f'func_mask_final_{pipe_num}') + name=f'func_mask_final_{pipe_num}') func_mask_final.inputs.op_string = "-mas %s " wf.connect(applywarp_anat_mask_res, 'out_file', func_mask_final, 'in_file') - wf.connect(find_min_mask, 'out_file', func_mask_final, 'operand_files') + wf.connect(extract_func_brain, 'out_file', guardrail_brain, 'registered') + wf.connect(func_mask_final, 'out_file', guardrail_mask, 'registered') outputs = { - 'space-template_desc-brain_bold': (extract_func_brain, 'out_file'), - 'space-template_desc-bold_mask': (func_mask_final, 'out_file') + 'space-template_desc-brain_bold': (guardrail_brain, 'registered'), + 'space-template_desc-bold_mask': (guardrail_mask, 'registered') } return (wf, outputs) @@ -3744,27 +3692,27 @@ def single_step_resample_timeseries_to_T1template(wf, cfg, strat_pool, output_names=['itk_transform'], function=run_c3d), name=f'convert_bbr2itk_{pipe_num}') - + guardrail_preproc = registration_guardrail_node() if cfg.registration_workflows['functional_registration'][ 'coregistration']['boundary_based_registration'][ 'reference'] == 'whole-head': node, out = strat_pool.get_data('T1w') - wf.connect(node, out, bbr2itk, 'reference_file') - elif cfg.registration_workflows['functional_registration'][ 'coregistration']['boundary_based_registration'][ 'reference'] == 'brain': node, out = 
strat_pool.get_data('desc-brain_T1w') - wf.connect(node, out, bbr2itk, 'reference_file') + wf.connect(node, out, bbr2itk, 'reference_file') + wf.connect(node, out, guardrail_preproc, 'reference') node, out = strat_pool.get_data(['desc-reginput_bold', 'desc-mean_bold']) wf.connect(node, out, bbr2itk, 'source_file') - node, out = strat_pool.get_data('from-bold_to-T1w_mode-image_desc-linear_xfm') + node, out = strat_pool.get_data('from-bold_to-T1w_mode-image_desc-linear_' + 'xfm') wf.connect(node, out, bbr2itk, 'transform_file') split_func = pe.Node(interface=fsl.Split(), - name=f'split_func_{pipe_num}') + name=f'split_func_{pipe_num}') split_func.inputs.dimension = 't' @@ -3807,9 +3755,7 @@ def single_step_resample_timeseries_to_T1template(wf, cfg, strat_pool, node, out = strat_pool.get_data('from-T1w_to-template_mode-image_xfm') wf.connect(node, out, collectxfm, 'in1') - - wf.connect(bbr2itk, 'itk_transform', - collectxfm, 'in2') + wf.connect(bbr2itk, 'itk_transform', collectxfm, 'in2') collectxfm.inputs.in3 = 'identity' @@ -3822,15 +3768,15 @@ def single_step_resample_timeseries_to_T1template(wf, cfg, strat_pool, applyxfm_func_to_standard.inputs.float = True applyxfm_func_to_standard.inputs.interpolation = 'LanczosWindowedSinc' + guardrail_brain = registration_guardrail_node() wf.connect(split_func, 'out_files', applyxfm_func_to_standard, 'input_image') node, out = strat_pool.get_data('T1w-brain-template-funcreg') wf.connect(node, out, applyxfm_func_to_standard, 'reference_image') - - wf.connect(collectxfm, 'out', - applyxfm_func_to_standard, 'transforms') + wf.connect(node, out, guardrail_brain, 'reference') + wf.connect(collectxfm, 'out', applyxfm_func_to_standard, 'transforms') ### Loop ends! ### @@ -3840,10 +3786,11 @@ def single_step_resample_timeseries_to_T1template(wf, cfg, strat_pool, merge_func_to_standard.inputs.dimension = 't' wf.connect(applyxfm_func_to_standard, 'output_image', - merge_func_to_standard, 'in_files') + merge_func_to_standard, 'in_files') applyxfm_func_mask_to_standard = pe.Node(interface=ants.ApplyTransforms(), - name=f'applyxfm_func_mask_to_standard_{pipe_num}') + name='applyxfm_func_mask_to_' + f'standard_{pipe_num}') applyxfm_func_mask_to_standard.inputs.interpolation = 'MultiLabel' @@ -3858,28 +3805,26 @@ def single_step_resample_timeseries_to_T1template(wf, cfg, strat_pool, node, out = strat_pool.get_data('from-T1w_to-template_mode-image_xfm') wf.connect(node, out, collectxfm_mask, 'in1') - - wf.connect(bbr2itk, 'itk_transform', - collectxfm_mask, 'in2') - + wf.connect(bbr2itk, 'itk_transform', collectxfm_mask, 'in2') wf.connect(collectxfm_mask, 'out', - applyxfm_func_mask_to_standard, 'transforms') + applyxfm_func_mask_to_standard, 'transforms') apply_mask = pe.Node(interface=fsl.maths.ApplyMask(), name=f'get_func_brain_to_standard_{pipe_num}') wf.connect(merge_func_to_standard, 'merged_file', - apply_mask, 'in_file') - + apply_mask, 'in_file') wf.connect(applyxfm_func_mask_to_standard, 'output_image', - apply_mask, 'mask_file') + apply_mask, 'mask_file') + wf.connect(merge_func_to_standard, 'merged_file', + guardrail_preproc, 'registered') + wf.connect(apply_mask, 'out_file', guardrail_brain, 'registered') outputs = { - 'space-template_desc-preproc_bold': (merge_func_to_standard, - 'merged_file'), - 'space-template_desc-brain_bold': (apply_mask, 'out_file'), + 'space-template_desc-preproc_bold': (guardrail_preproc, 'registered'), + 'space-template_desc-brain_bold': (guardrail_brain, 'registered'), 'space-template_desc-bold_mask': 
(applyxfm_func_mask_to_standard, - 'output_image'), + 'output_image'), } return (wf, outputs) From c12e7c4a48504480325b72c6920b099e91d1dd55 Mon Sep 17 00:00:00 2001 From: Jon Clucas Date: Tue, 4 Oct 2022 10:12:36 -0400 Subject: [PATCH 08/72] :recycle: Convert random.log to random.tsv --- CPAC/pipeline/nipype_pipeline_engine/engine.py | 7 +++---- CPAC/pipeline/random_state/seed.py | 5 +++-- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/CPAC/pipeline/nipype_pipeline_engine/engine.py b/CPAC/pipeline/nipype_pipeline_engine/engine.py index 12e8808f1f..8efa4324c2 100644 --- a/CPAC/pipeline/nipype_pipeline_engine/engine.py +++ b/CPAC/pipeline/nipype_pipeline_engine/engine.py @@ -399,10 +399,9 @@ def run(self, updatehash=False): if self.seed is not None: self._apply_random_seed() if self.seed_applied: - random_state_logger.info('%s', - '%s # (Atropos constant)' % - self.name if 'atropos' in - self.name else self.name) + random_state_logger.info('%s\t%s', '# (Atropos constant)' if + 'atropos' in self.name else + str(self.seed), self.name) return super().run(updatehash) diff --git a/CPAC/pipeline/random_state/seed.py b/CPAC/pipeline/random_state/seed.py index 370a9427dd..7bd74c3da5 100644 --- a/CPAC/pipeline/random_state/seed.py +++ b/CPAC/pipeline/random_state/seed.py @@ -186,5 +186,6 @@ def set_up_random_state_logger(log_dir): ---------- log_dir : str ''' - set_up_logger('random', level='info', log_dir=log_dir) - getLogger('random').info('seed: %s', random_seed()) + set_up_logger('random', filename='random.tsv', level='info', + log_dir=log_dir) + getLogger('random').info('seed\tnode') From b7fabf4703305f3194c48c6d4cd8915be84ee1fe Mon Sep 17 00:00:00 2001 From: Jon Clucas Date: Tue, 4 Oct 2022 14:04:00 -0400 Subject: [PATCH 09/72] :sparkles: Afford retrying registration with a different random seed --- CPAC/pipeline/nipype_pipeline_engine/utils.py | 28 +++ CPAC/registration/guardrails.py | 165 ++++++++++++++++-- 2 files changed, 179 insertions(+), 14 deletions(-) create mode 100644 CPAC/pipeline/nipype_pipeline_engine/utils.py diff --git a/CPAC/pipeline/nipype_pipeline_engine/utils.py b/CPAC/pipeline/nipype_pipeline_engine/utils.py new file mode 100644 index 0000000000..3542b4f70c --- /dev/null +++ b/CPAC/pipeline/nipype_pipeline_engine/utils.py @@ -0,0 +1,28 @@ +# Copyright (C) 2022 C-PAC Developers + +# This file is part of C-PAC. + +# C-PAC is free software: you can redistribute it and/or modify it under +# the terms of the GNU Lesser General Public License as published by the +# Free Software Foundation, either version 3 of the License, or (at your +# option) any later version. + +# C-PAC is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public +# License for more details. + +# You should have received a copy of the GNU Lesser General Public +# License along with C-PAC. If not, see . 
+"""Custom nipype utilities""" + + +def connect_from_spec(wf, spec, original_spec, exclude=None): + """Function to connect all original inputs to a new spec""" + for _item, _value in original_spec.items(): + if isinstance(exclude, (list, tuple)): + if _item not in exclude: + setattr(spec.inputs, _item, _value) + elif _item != exclude: + setattr(spec.inputs, _item, _value) + return wf diff --git a/CPAC/registration/guardrails.py b/CPAC/registration/guardrails.py index 6d436a1f73..5bfdddae47 100644 --- a/CPAC/registration/guardrails.py +++ b/CPAC/registration/guardrails.py @@ -15,11 +15,19 @@ # You should have received a copy of the GNU Lesser General Public # License along with C-PAC. If not, see . """Guardrails to protect against bad registrations""" +from copy import deepcopy +from nipype.interfaces.fsl import FLIRT from nipype.interfaces.utility import Function -from CPAC.pipeline.nipype_pipeline_engine import Node +from CPAC.pipeline.nipype_pipeline_engine import Node, Workflow +from CPAC.pipeline.nipype_pipeline_engine.utils import connect_from_spec from CPAC.qc import qc_masks, REGISTRATION_GUARDRAIL_THRESHOLDS +_SPEC_KEYS = { + FLIRT: {'reference': 'reference', 'registered': 'out_file'} +} + + class BadRegistrationError(ValueError): """Exception for when a QC measure for a registration falls below a specified threshold""" @@ -44,17 +52,8 @@ def __init__(self, *args, metric=None, value=None, threshold=None, super().__init__(msg, *args, **kwargs) -def registration_guardrail_node(name=None, ): - """Convenience method to get a new registration_guardrail Node""" - if name is None: - name = 'registration_guardrail' - return Node(Function(input_names=['registered_mask', - 'reference_mask'], - output_names=['registered_mask'], - function=registration_guardrail), name=name) - - -def registration_guardrail(registered: str, reference: str) -> str: +def registration_guardrail(registered: str, reference: str, retry: bool = False + ) -> str: """Check QC metrics post-registration and throw an exception if metrics are below given thresholds. @@ -71,6 +70,9 @@ def registration_guardrail(registered: str, reference: str) -> str: registered, reference : str path to mask + retry : bool + can retry? 
+ Returns ------- registered_mask : str @@ -81,6 +83,141 @@ def registration_guardrail(registered: str, reference: str) -> str: if threshold is not None: value = qc_metrics.get(metric) if value < threshold: - raise BadRegistrationError(metric=metric, value=value, - threshold=threshold) + with open(f'{registered}.failed_qc', 'w', + encoding='utf-8') as _f: + _f.write(True) + if retry: + registered = f'{registered}-failed' + else: + raise BadRegistrationError(metric=metric, value=value, + threshold=threshold) return registered + + +def registration_guardrail_node(name=None): + """Convenience method to get a new registration_guardrail Node + + Parameters + ---------- + name : str, optional + + Returns + ------- + Node + """ + if name is None: + name = 'registration_guardrail' + return Node(Function(input_names=['registered', + 'reference'], + output_names=['registered'], + function=registration_guardrail), name=name) + + +def registration_guardrail_workflow(name, registration_node, retry=False): + """A workflow to handle hitting a registration guardrail + + Parameters + ---------- + name : str + + registration_node : Node + + retry : bool, optional + + Returns + ------- + Workflow + """ + if name is None: + name = 'registration_guardrail_wf' + wf = Workflow(name=name) + inputspec = deepcopy(registration_node.inputs) + outputspec = deepcopy(registration_node.outputs) + guardrail = registration_guardrail_node(f'{name}_guardrail') + outkey = spec_key(registration_node, 'registered') + wf.connect([ + (inputspec, guardrail, [ + (spec_key(registration_node, 'reference'), 'reference')]), + (registration_node, guardrail, [(outkey, 'registered')])]) + if retry: + wf = retry_registration(wf, registration_node, + guardrail.outputs.registered) + else: + wf.connect(guardrail, 'registered', outputspec, outkey) + wf = connect_from_spec(wf, outputspec, registration_node, outkey) + return wf + + +def retry_registration(wf, registration_node, registered): + """Function conditionally retry registration if previous attempt failed + + Parameters + ---------- + wf : Workflow + + registration_node : Node + + registered : str + + Returns + ------- + Workflow + """ + name = f'retry_{registration_node.name}' + retry_node = Node(Function(function=retry_registration_node, + inputs=['registered', 'registration_node'], + outputs=['registered']), name=name) + retry_node.inputs.registration_node = registration_node + inputspec = deepcopy(registration_node.inputs) + outputspec = deepcopy(registration_node.outputs) + outkey = spec_key(registration_node, 'registered') + guardrail = registration_guardrail_node(f'{name}_guardrail') + wf = connect_from_spec(wf, inputspec, retry_node) + wf.connect([ + (inputspec, guardrail, [ + (spec_key(retry_node, 'reference'), 'reference')]), + (retry_node, guardrail, [(outkey, 'registered')]), + (guardrail, outputspec, [('registered', outkey)])]) + wf = connect_from_spec(wf, retry_node, outputspec, registered) + return wf + + +def retry_registration_node(registered, registration_node): + """Retry registration if previous attempt failed + + Parameters + ---------- + registered : str + + registration_node : Node + + Returns + ------- + Node + """ + from CPAC.pipeline.random_state.seed import random_seed + seed = random_seed() + if registered.endswith('-failed') and isinstance(seed, int): + retry_node = registration_node.clone( + name=f'{registration_node.name}-retry') + retry_node.seed = seed + 1 + return retry_node + return registration_node + + +def spec_key(interface, guardrail_key): + 
"""Function to get the canonical key to connect to a guardrail + + Parameters + ---------- + interface : Interface or Node + + guardrail_key : str + + Returns + ------- + str + """ + if isinstance(interface, Node): + interface = interface.interface + return _SPEC_KEYS.get(interface, {}).get(guardrail_key, guardrail_key) From d3bdfa89e5673dd6cc7e07f14e2f8a1e61fcb655 Mon Sep 17 00:00:00 2001 From: Jon Clucas Date: Tue, 4 Oct 2022 15:42:52 -0400 Subject: [PATCH 10/72] :goal_net: Add registration guardrails (with retries) to anatomical preproc --- CPAC/anat_preproc/anat_preproc.py | 143 ++++++++++++++++++------------ CPAC/registration/guardrails.py | 21 +++-- 2 files changed, 97 insertions(+), 67 deletions(-) diff --git a/CPAC/anat_preproc/anat_preproc.py b/CPAC/anat_preproc/anat_preproc.py index eac127c152..293897c2bf 100644 --- a/CPAC/anat_preproc/anat_preproc.py +++ b/CPAC/anat_preproc/anat_preproc.py @@ -15,6 +15,7 @@ wb_command, \ fslmaths_command, \ VolumeRemoveIslands +from CPAC.registration.guardrails import registration_guardrail_workflow from CPAC.utils.interfaces.fsl import Merge as fslMerge from CPAC.utils.interfaces.function.seg_preproc import \ pick_tissue_from_labels_file_interface @@ -792,6 +793,8 @@ def unet_brain_connector(wf, cfg, strat_pool, pipe_num, opt): native_brain_to_template_brain.inputs.interp = 'sinc' wf.connect(unet_masked_brain, 'out_file', native_brain_to_template_brain, 'in_file') + guardrail_native_brain_to_template_brain = registration_guardrail_workflow( + native_brain_to_template_brain) node, out = strat_pool.get_data('T1w-brain-template') wf.connect(node, out, native_brain_to_template_brain, 'reference') @@ -801,6 +804,8 @@ def unet_brain_connector(wf, cfg, strat_pool, pipe_num, opt): name=f'native_head_to_template_' f'head_{pipe_num}') native_head_to_template_head.inputs.apply_xfm = True + guardrail_native_head_to_template_head = registration_guardrail_workflow( + native_head_to_template_head) if strat_pool.check_rpool('desc-preproc_T1w') or \ strat_pool.check_rpool('desc-reorient_T1w') or \ @@ -811,10 +816,12 @@ def unet_brain_connector(wf, cfg, strat_pool, pipe_num, opt): elif strat_pool.check_rpool('desc-preproc_T2w') or \ strat_pool.check_rpool('desc-reorient_T2w') or \ strat_pool.check_rpool('T2w'): - node, out = strat_pool.get_data(['desc-preproc_T2w', 'desc-reorient_T2w','T2w']) + node, out = strat_pool.get_data([ + 'desc-preproc_T2w', 'desc-reorient_T2w', 'T2w']) wf.connect(node, out, native_head_to_template_head, 'in_file') - wf.connect(native_brain_to_template_brain, 'out_matrix_file', + wf.connect(guardrail_native_brain_to_template_brain, + 'outputspec.out_matrix_file', native_head_to_template_head, 'in_matrix_file') node, out = strat_pool.get_data('T1w-template') @@ -830,7 +837,7 @@ def unet_brain_connector(wf, cfg, strat_pool, pipe_num, opt): # ANTS 3 -m CC[head_rot2atl.nii.gz,NMT_0.5mm.nii.gz,1,5] -t SyN[0.25] -r Gauss[3,0] -o atl2T1rot -i 60x50x20 --use-Histogram-Matching --number-of-affine-iterations 10000x10000x10000x10000x10000 --MI-option 32x16000 ants_template_head_to_template = pe.Node(interface=ants.Registration(), - name=f'template_head_to_' + name='template_head_to_' f'template_{pipe_num}') ants_template_head_to_template.inputs.metric = ['CC'] ants_template_head_to_template.inputs.metric_weight = [1, 5] @@ -842,7 +849,10 @@ def unet_brain_connector(wf, cfg, strat_pool, pipe_num, opt): ants_template_head_to_template.inputs.smoothing_sigmas = [[0.6, 0.2, 0.0]] ants_template_head_to_template.inputs.shrink_factors = [[4, 2, 1]] 
ants_template_head_to_template.inputs.convergence_threshold = [1.e-8] - wf.connect(native_head_to_template_head, 'out_file', + guardrail_ants_template_head_to_template = registration_guardrail_workflow( + ants_template_head_to_template, retry=True) + + wf.connect(guardrail_native_head_to_template_head, 'outputspec.out_file', ants_template_head_to_template, 'fixed_image') node, out = strat_pool.get_data('T1w-brain-template') @@ -853,26 +863,33 @@ def unet_brain_connector(wf, cfg, strat_pool, pipe_num, opt): interface=ants.ApplyTransforms(), name=f'template_head_transform_to_template_{pipe_num}') template_head_transform_to_template.inputs.dimension = 3 + guardrail_template_head_transform_to_template = ( + registration_guardrail_workflow(template_head_transform_to_template)) wf.connect(template_brain_mask, 'out_file', template_head_transform_to_template, 'input_image') - wf.connect(native_brain_to_template_brain, 'out_file', + wf.connect(guardrail_native_brain_to_template_brain, 'outputspec.out_file', template_head_transform_to_template, 'reference_image') - wf.connect(ants_template_head_to_template, 'forward_transforms', + wf.connect(guardrail_ants_template_head_to_template, + 'outputspec.forward_transforms', template_head_transform_to_template, 'transforms') # convert_xfm -omat brain_rot2native.mat -inverse brain_rot2atl.mat  invt = pe.Node(interface=fsl.ConvertXFM(), name='convert_xfm') invt.inputs.invert_xfm = True - wf.connect(native_brain_to_template_brain, 'out_matrix_file', invt, - 'in_file') + wf.connect(guardrail_native_brain_to_template_brain, + 'outputspec.out_matrix_file', + invt, 'in_file') # flirt -in brain_rot2atl_mask.nii.gz -ref brain.nii.gz -o brain_mask.nii.gz -applyxfm -init brain_rot2native.mat template_brain_to_native_brain = pe.Node(interface=fsl.FLIRT(), name=f'template_brain_to_native_' f'brain_{pipe_num}') template_brain_to_native_brain.inputs.apply_xfm = True - wf.connect(template_head_transform_to_template, 'output_image', + guardrail_template_brain_to_native_brain = registration_guardrail_workflow( + template_brain_to_native_brain) + wf.connect(guardrail_template_head_transform_to_template, + 'outputspec.output_image', template_brain_to_native_brain, 'in_file') wf.connect(unet_masked_brain, 'out_file', template_brain_to_native_brain, 'reference') @@ -884,8 +901,9 @@ def unet_brain_connector(wf, cfg, strat_pool, pipe_num, opt): f'_{pipe_num}') refined_mask.inputs.thresh = 0.5 refined_mask.inputs.args = '-bin' - wf.connect(template_brain_to_native_brain, 'out_file', refined_mask, - 'in_file') + wf.connect(guardrail_template_brain_to_native_brain, + 'outputspec.out_file', + refined_mask, 'in_file') outputs = { 'space-T1w_desc-brain_mask': (refined_mask, 'out_file') @@ -896,9 +914,8 @@ def unet_brain_connector(wf, cfg, strat_pool, pipe_num, opt): def freesurfer_brain_connector(wf, cfg, strat_pool, pipe_num, opt): # register FS brain mask to native space - fs_brain_mask_to_native = pe.Node( - interface=freesurfer.ApplyVolTransform(), - name='fs_brain_mask_to_native') + fs_brain_mask_to_native = pe.Node(interface=freesurfer.ApplyVolTransform(), + name='fs_brain_mask_to_native') fs_brain_mask_to_native.inputs.reg_header = True node, out = strat_pool.get_data('space-T1w_desc-brain_mask') @@ -983,6 +1000,8 @@ def freesurfer_abcd_brain_connector(wf, cfg, strat_pool, pipe_num, opt): name=f'brain_mask_to_t1_restore_{pipe_num}') brain_mask_to_t1_restore.inputs.interp = 'nn' brain_mask_to_t1_restore.inputs.premat = 
cfg.registration_workflows['anatomical_registration']['registration']['FSL-FNIRT']['identity_matrix'] + guardrail_brain_mask_to_t1_restore = registration_guardrail_workflow( + brain_mask_to_t1_restore) wf.connect(binary_filled_mask, 'out_file', brain_mask_to_t1_restore, 'in_file') @@ -991,7 +1010,8 @@ def freesurfer_abcd_brain_connector(wf, cfg, strat_pool, pipe_num, opt): wf.connect(node, out, brain_mask_to_t1_restore, 'ref_file') outputs = { - 'space-T1w_desc-brain_mask': (brain_mask_to_t1_restore, 'out_file') + 'space-T1w_desc-brain_mask': (guardrail_brain_mask_to_t1_restore, + 'outputspec.out_file') } return (wf, outputs) @@ -1002,10 +1022,11 @@ def freesurfer_fsl_brain_connector(wf, cfg, strat_pool, pipe_num, opt): node_id = f'{opt.lower()}_{pipe_num}' # mri_convert -it mgz ${SUBJECTS_DIR}/${subject}/mri/brainmask.mgz -ot nii brainmask.nii.gz - convert_fs_brainmask_to_nifti = pe.Node(util.Function(input_names=['in_file'], - output_names=['out_file'], - function=mri_convert), - name=f'convert_fs_brainmask_to_nifti_{node_id}') + convert_fs_brainmask_to_nifti = pe.Node( + util.Function(input_names=['in_file'], + output_names=['out_file'], + function=mri_convert), + name=f'convert_fs_brainmask_to_nifti_{node_id}') node, out = strat_pool.get_data('brainmask') wf.connect(node, out, convert_fs_brainmask_to_nifti, 'in_file') @@ -1052,8 +1073,8 @@ def freesurfer_fsl_brain_connector(wf, cfg, strat_pool, pipe_num, opt): # flirt -in head_fs.nii.gz -ref ${FSLDIR}/data/standard/MNI152_T1_1mm.nii.gz \ # -out tmp_head_fs2standard.nii.gz -omat tmp_head_fs2standard.mat -bins 256 -cost corratio \ # -searchrx -90 90 -searchry -90 90 -searchrz -90 90 -dof 12 -interp trilinear - convert_head_to_template = pe.Node(interface=fsl.FLIRT(), - name=f'convert_head_to_template_{node_id}') + convert_head_to_template = pe.Node( + interface=fsl.FLIRT(), name=f'convert_head_to_template_{node_id}') convert_head_to_template.inputs.cost = 'corratio' convert_head_to_template.inputs.interp = 'trilinear' convert_head_to_template.inputs.bins = 256 @@ -1061,35 +1082,38 @@ def freesurfer_fsl_brain_connector(wf, cfg, strat_pool, pipe_num, opt): convert_head_to_template.inputs.searchr_x = [-90, 90] convert_head_to_template.inputs.searchr_y = [-90, 90] convert_head_to_template.inputs.searchr_z = [-90, 90] + guardrail_convert_head_to_template = registration_guardrail_workflow( + convert_head_to_template) wf.connect(reorient_fs_T1, 'out_file', - convert_head_to_template, 'in_file') + convert_head_to_template, 'in_file') node, out = strat_pool.get_data('T1w-ACPC-template') wf.connect(node, out, convert_head_to_template, 'reference') # convert_xfm -omat tmp_standard2head_fs.mat -inverse tmp_head_fs2standard.mat convert_xfm = pe.Node(interface=fsl_utils.ConvertXFM(), - name=f'convert_xfm_{node_id}') + name=f'convert_xfm_{node_id}') convert_xfm.inputs.invert_xfm = True - wf.connect(convert_head_to_template, 'out_matrix_file', - convert_xfm, 'in_file') + wf.connect(guardrail_convert_head_to_template, + 'outputspec.out_matrix_file', + convert_xfm, 'in_file') # bet tmp_head_fs2standard.nii.gz tmp.nii.gz -f ${bet_thr_tight} -m - skullstrip = pe.Node(interface=fsl.BET(), + skullstrip = pe.Node(interface=fsl.BET(), name=f'anat_BET_skullstrip_{node_id}') skullstrip.inputs.output_type = 'NIFTI_GZ' skullstrip.inputs.mask=True if opt == 'FreeSurfer-BET-Tight': - skullstrip.inputs.frac=0.3 + skullstrip.inputs.frac = 0.3 elif opt == 'FreeSurfer-BET-Loose': - skullstrip.inputs.frac=0.1 + skullstrip.inputs.frac = 0.1 + + 
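# BET's fractional intensity threshold (-f): 0.3 gives the tighter brain
# estimate and 0.1 the looser one, matching the FreeSurfer-BET-Tight and
# FreeSurfer-BET-Loose options handled above.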
wf.connect(guardrail_convert_head_to_template, 'outputspec.out_file', + skullstrip, 'in_file') - wf.connect(convert_head_to_template, 'out_file', - skullstrip, 'in_file') - # fslmaths tmp_mask.nii.gz -mas ${CCSDIR}/templates/MNI152_T1_1mm_first_brain_mask.nii.gz tmp_mask.nii.gz apply_mask = pe.Node(interface=fsl.maths.ApplyMask(), name=f'apply_mask_{node_id}') @@ -1102,23 +1126,26 @@ def freesurfer_fsl_brain_connector(wf, cfg, strat_pool, pipe_num, opt): # flirt -in tmp_mask.nii.gz -applyxfm -init tmp_standard2head_fs.mat -out brain_fsl_mask_tight.nii.gz \ # -paddingsize 0.0 -interp nearestneighbour -ref head_fs.nii.gz - convert_template_mask_to_native = pe.Node(interface=fsl.FLIRT(), - name=f'convert_template_mask_to_native_{node_id}') + convert_template_mask_to_native = pe.Node( + interface=fsl.FLIRT(), + name=f'convert_template_mask_to_native_{node_id}') convert_template_mask_to_native.inputs.apply_xfm = True convert_template_mask_to_native.inputs.padding_size = 0 convert_template_mask_to_native.inputs.interp = 'nearestneighbour' + guardrail_convert_template_mask_to_native = ( + registration_guardrail_workflow(convert_template_mask_to_native)) wf.connect(apply_mask, 'out_file', - convert_template_mask_to_native, 'in_file') + convert_template_mask_to_native, 'in_file') wf.connect(convert_xfm, 'out_file', - convert_template_mask_to_native, 'in_matrix_file') + convert_template_mask_to_native, 'in_matrix_file') wf.connect(reorient_fs_T1, 'out_file', - convert_template_mask_to_native, 'reference') + convert_template_mask_to_native, 'reference') # fslmaths brain_fs_mask.nii.gz -add brain_fsl_mask_tight.nii.gz -bin brain_mask_tight.nii.gz - # BinaryMaths doesn't use -bin! + # BinaryMaths doesn't use -bin! combine_mask = pe.Node(interface=fsl.BinaryMaths(), name=f'combine_mask_{node_id}') @@ -1128,10 +1155,11 @@ def freesurfer_fsl_brain_connector(wf, cfg, strat_pool, pipe_num, opt): combine_mask.inputs.operation = 'mul' wf.connect(binarize_fs_brain, 'out_file', - combine_mask, 'in_file') + combine_mask, 'in_file') - wf.connect(convert_template_mask_to_native, 'out_file', - combine_mask, 'operand_file') + wf.connect(guardrail_convert_template_mask_to_native, + 'outputspec.out_file', + combine_mask, 'operand_file') binarize_combined_mask = pe.Node(interface=fsl.maths.MathsCommand(), name=f'binarize_combined_mask_{node_id}') @@ -1141,8 +1169,9 @@ def freesurfer_fsl_brain_connector(wf, cfg, strat_pool, pipe_num, opt): binarize_combined_mask, 'in_file') # CCS brain mask is in FS space, transfer it back to native T1 space - fs_fsl_brain_mask_to_native = pe.Node(interface=freesurfer.ApplyVolTransform(), - name=f'fs_fsl_brain_mask_to_native_{node_id}') + fs_fsl_brain_mask_to_native = pe.Node( + interface=freesurfer.ApplyVolTransform(), + name=f'fs_fsl_brain_mask_to_native_{node_id}') fs_fsl_brain_mask_to_native.inputs.reg_header = True fs_fsl_brain_mask_to_native.inputs.interp = 'nearest' @@ -1157,12 +1186,12 @@ def freesurfer_fsl_brain_connector(wf, cfg, strat_pool, pipe_num, opt): if opt == 'FreeSurfer-BET-Tight': outputs = { - 'space-T1w_desc-tight_brain_mask': (fs_fsl_brain_mask_to_native, 'transformed_file') - } + 'space-T1w_desc-tight_brain_mask': ( + fs_fsl_brain_mask_to_native, 'transformed_file')} elif opt == 'FreeSurfer-BET-Loose': outputs = { - 'space-T1w_desc-loose_brain_mask': (fs_fsl_brain_mask_to_native, 'transformed_file') - } + 'space-T1w_desc-loose_brain_mask': ( + fs_fsl_brain_mask_to_native, 'outputspec.transformed_file')} return (wf, outputs) @@ -3155,11 +3184,12 @@ def 
correct_restore_brain_intensity_abcd(wf, cfg, strat_pool, pipe_num, opt=None # Ref: https://github.com/DCAN-Labs/DCAN-HCP/blob/master/PostFreeSurfer/scripts/CreateMyelinMaps.sh#L72-L73 # applywarp --rel --interp=spline -i "$BiasField" -r "$T1wImageBrain" -w "$AtlasTransform" -o "$BiasFieldOutput" - applywarp_biasfield = pe.Node(interface=fsl.ApplyWarp(), + applywarp_biasfield = pe.Node(interface=fsl.ApplyWarp(), name=f'applywarp_biasfield_{pipe_num}') - applywarp_biasfield.inputs.relwarp = True applywarp_biasfield.inputs.interp = 'spline' + guardrail_applywarp_biasfield = registration_guardrail_workflow( + applywarp_biasfield) node, out = strat_pool.get_data('desc-fast_biasfield') wf.connect(node, out, applywarp_biasfield, 'in_file') @@ -3175,32 +3205,33 @@ def correct_restore_brain_intensity_abcd(wf, cfg, strat_pool, pipe_num, opt=None name=f'threshold_biasfield_{pipe_num}') threshold_biasfield.inputs.op_string = '-thr 0.1' - wf.connect(applywarp_biasfield, 'out_file', - threshold_biasfield, 'in_file') + wf.connect(guardrail_applywarp_biasfield, 'outputspec.out_file', + threshold_biasfield, 'in_file') # Ref: https://github.com/DCAN-Labs/DCAN-HCP/blob/master/PostFreeSurfer/scripts/CreateMyelinMaps.sh#L67-L70 # applywarp --rel --interp=spline -i "$OrginalT1wImage" -r "$T1wImageBrain" -w "$OutputOrigT1wToT1w" -o "$OutputT1wImage" - applywarp_t1 = pe.Node(interface=fsl.ApplyWarp(), + applywarp_t1 = pe.Node(interface=fsl.ApplyWarp(), name=f'applywarp_t1_{pipe_num}') - applywarp_t1.inputs.relwarp = True applywarp_t1.inputs.interp = 'spline' - + guardrail_applywarp_t1 = registration_guardrail_workflow(applywarp_t1) + node, out = strat_pool.get_data('desc-n4_T1w') wf.connect(node, out, applywarp_t1, 'in_file') node, out = strat_pool.get_data('space-T1w_desc-brain_mask') wf.connect(node, out, applywarp_t1, 'ref_file') - + wf.connect(convertwarp_orig_t1_to_t1, 'out_file', - applywarp_t1, 'field_file') + applywarp_t1, 'field_file') # fslmaths "$OutputT1wImage" -abs "$OutputT1wImage" -odt float abs_t1 = pe.Node(interface=fsl.ImageMaths(), name=f'abs_t1_{pipe_num}') abs_t1.inputs.op_string = '-abs' - wf.connect(applywarp_t1, 'out_file', abs_t1, 'in_file') + wf.connect(guardrail_applywarp_t1, 'outputspec.out_file', + abs_t1, 'in_file') # fslmaths "$OutputT1wImage" -div "$BiasField" "$OutputT1wImageRestore" div_t1_by_biasfield = pe.Node(interface=fsl.ImageMaths(), diff --git a/CPAC/registration/guardrails.py b/CPAC/registration/guardrails.py index 5bfdddae47..288da178ef 100644 --- a/CPAC/registration/guardrails.py +++ b/CPAC/registration/guardrails.py @@ -16,6 +16,7 @@ # License along with C-PAC. If not, see . 
"""Guardrails to protect against bad registrations""" from copy import deepcopy +from nipype.interfaces.ants import Registration from nipype.interfaces.fsl import FLIRT from nipype.interfaces.utility import Function from CPAC.pipeline.nipype_pipeline_engine import Node, Workflow @@ -24,8 +25,8 @@ _SPEC_KEYS = { - FLIRT: {'reference': 'reference', 'registered': 'out_file'} -} + FLIRT: {'reference': 'reference', 'registered': 'out_file'}, + Registration: {'reference': 'reference', 'registered': 'out_file'}} class BadRegistrationError(ValueError): @@ -113,7 +114,7 @@ def registration_guardrail_node(name=None): function=registration_guardrail), name=name) -def registration_guardrail_workflow(name, registration_node, retry=False): +def registration_guardrail_workflow(registration_node, retry=False): """A workflow to handle hitting a registration guardrail Parameters @@ -128,15 +129,13 @@ def registration_guardrail_workflow(name, registration_node, retry=False): ------- Workflow """ - if name is None: - name = 'registration_guardrail_wf' - wf = Workflow(name=name) - inputspec = deepcopy(registration_node.inputs) + name = f'{registration_node.name}_guardrail' + wf = Workflow(name=f'{name}_wf') outputspec = deepcopy(registration_node.outputs) - guardrail = registration_guardrail_node(f'{name}_guardrail') + guardrail = registration_guardrail_node(name) outkey = spec_key(registration_node, 'registered') wf.connect([ - (inputspec, guardrail, [ + (registration_node, guardrail, [ (spec_key(registration_node, 'reference'), 'reference')]), (registration_node, guardrail, [(outkey, 'registered')])]) if retry: @@ -168,8 +167,8 @@ def retry_registration(wf, registration_node, registered): inputs=['registered', 'registration_node'], outputs=['registered']), name=name) retry_node.inputs.registration_node = registration_node - inputspec = deepcopy(registration_node.inputs) - outputspec = deepcopy(registration_node.outputs) + inputspec = registration_node.inputs + outputspec = registration_node.outputs outkey = spec_key(registration_node, 'registered') guardrail = registration_guardrail_node(f'{name}_guardrail') wf = connect_from_spec(wf, inputspec, retry_node) From db3d5552fba9c8f73e766d06683c4eeebf1a27f1 Mon Sep 17 00:00:00 2001 From: Jon Clucas Date: Tue, 4 Oct 2022 15:49:37 -0400 Subject: [PATCH 11/72] :page_facing_up: Add license notice to modified files [skip ci] --- CPAC/anat_preproc/anat_preproc.py | 17 +++++++++++++++++ CPAC/pipeline/random_state/seed.py | 19 +++++++++++++++++-- 2 files changed, 34 insertions(+), 2 deletions(-) diff --git a/CPAC/anat_preproc/anat_preproc.py b/CPAC/anat_preproc/anat_preproc.py index 293897c2bf..72e5bd873e 100644 --- a/CPAC/anat_preproc/anat_preproc.py +++ b/CPAC/anat_preproc/anat_preproc.py @@ -1,4 +1,21 @@ # -*- coding: utf-8 -*- + +# Copyright (C) 2012-2022 C-PAC Developers + +# This file is part of C-PAC. + +# C-PAC is free software: you can redistribute it and/or modify it under +# the terms of the GNU Lesser General Public License as published by the +# Free Software Foundation, either version 3 of the License, or (at your +# option) any later version. + +# C-PAC is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public +# License for more details. + +# You should have received a copy of the GNU Lesser General Public +# License along with C-PAC. If not, see . 
import os from nipype.interfaces import afni from nipype.interfaces import ants diff --git a/CPAC/pipeline/random_state/seed.py b/CPAC/pipeline/random_state/seed.py index 7bd74c3da5..9b5e6386cc 100644 --- a/CPAC/pipeline/random_state/seed.py +++ b/CPAC/pipeline/random_state/seed.py @@ -1,5 +1,20 @@ -'''Functions to set, check, and log random seed''' -import os +# Copyright (C) 2022 C-PAC Developers + +# This file is part of C-PAC. + +# C-PAC is free software: you can redistribute it and/or modify it under +# the terms of the GNU Lesser General Public License as published by the +# Free Software Foundation, either version 3 of the License, or (at your +# option) any later version. + +# C-PAC is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public +# License for more details. + +# You should have received a copy of the GNU Lesser General Public +# License along with C-PAC. If not, see . +"""Functions to set, check, and log random seed""" import random from logging import getLogger From b9a7ad0ffe7b6fa2406591f97531e356715fc1c9 Mon Sep 17 00:00:00 2001 From: Jon Clucas Date: Tue, 4 Oct 2022 16:25:17 -0400 Subject: [PATCH 12/72] :recycle: Plug in ants guardrail --- CPAC/registration/registration.py | 36 +++++++++++++++++-------------- 1 file changed, 20 insertions(+), 16 deletions(-) diff --git a/CPAC/registration/registration.py b/CPAC/registration/registration.py index c91d518f76..1b2f08c1ec 100644 --- a/CPAC/registration/registration.py +++ b/CPAC/registration/registration.py @@ -87,6 +87,10 @@ def apply_transform(wf_name, reg_tool, time_series=False, multi_input=False, apply_warp.inputs.dimension = 3 apply_warp.interface.num_threads = int(num_ants_cores) + # Guardrail: check QC metrics + guardrail = registration_guardrail_node() + wf.connect(inputNode, 'reference', guardrail, 'reference') + if time_series: apply_warp.inputs.input_image_type = 3 @@ -101,8 +105,7 @@ def apply_transform(wf_name, reg_tool, time_series=False, multi_input=False, interp_string.inputs.reg_tool = reg_tool wf.connect(inputNode, 'interpolation', interp_string, 'interpolation') - wf.connect(interp_string, 'interpolation', - apply_warp, 'interpolation') + wf.connect(interp_string, 'interpolation', apply_warp, 'interpolation') ants_xfm_list = \ pe.Node(util.Function(input_names=['transform'], @@ -122,9 +125,9 @@ def apply_transform(wf_name, reg_tool, time_series=False, multi_input=False, chunk = pe.Node(util.Function(input_names=['func_file', 'n_chunks', 'chunk_size'], - output_names=['TR_ranges'], - function=chunk_ts, - imports=chunk_imports), + output_names=['TR_ranges'], + function=chunk_ts, + imports=chunk_imports), name=f'chunk_{wf_name}', mem_gb=2.5) @@ -138,9 +141,9 @@ def apply_transform(wf_name, reg_tool, time_series=False, multi_input=False, split_imports = ['import os', 'import subprocess'] split = pe.Node(util.Function(input_names=['func_file', 'tr_ranges'], - output_names=['split_funcs'], - function=split_ts_chunks, - imports=split_imports), + output_names=['split_funcs'], + function=split_ts_chunks, + imports=split_imports), name=f'split_{wf_name}', mem_gb=2.5) @@ -154,13 +157,14 @@ def apply_transform(wf_name, reg_tool, time_series=False, multi_input=False, mem_gb=2.5) func_concat.inputs.outputtype = 'NIFTI_GZ' - wf.connect(apply_warp, 'output_image', func_concat, 'in_files') - + wf.connect(apply_warp, 'output_image', guardrail, 'registered') + 
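# This routes the warped time-series output through the guardrail before it
# reaches func_concat; a later commit in this series (PATCH 15/72, "Fix
# concat-then-guardrail logic") reorders this so the concatenated series is
# what gets checked.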
wf.connect(guardrail, 'registered', func_concat, 'in_files') wf.connect(func_concat, 'out_file', outputNode, 'output_image') else: wf.connect(inputNode, 'input_image', apply_warp, 'input_image') - wf.connect(apply_warp, 'output_image', outputNode, 'output_image') + wf.connect(apply_warp, 'output_image', guardrail, 'registered') + wf.connect(guardrail, 'registered', outputNode, 'output_image') elif reg_tool == 'fsl': @@ -221,9 +225,9 @@ def apply_transform(wf_name, reg_tool, time_series=False, multi_input=False, split_imports = ['import os', 'import subprocess'] split = pe.Node(util.Function(input_names=['func_file', 'tr_ranges'], - output_names=['split_funcs'], - function=split_ts_chunks, - imports=split_imports), + output_names=['split_funcs'], + function=split_ts_chunks, + imports=split_imports), name=f'split_{wf_name}', mem_gb=2.5) @@ -243,8 +247,8 @@ def apply_transform(wf_name, reg_tool, time_series=False, multi_input=False, wf.connect(inputNode, 'input_image', apply_warp, 'in_file') wf.connect(apply_warp, 'out_file', guardrail, 'registered') - # Pass output through guardrail before continuing - wf.connect(guardrail, 'registered', outputNode, 'output_image') + # Pass output through guardrail before continuing + wf.connect(guardrail, 'registered', outputNode, 'output_image') return wf From d9d0748fa154cce0893437b04ecf5b8c1b9a25c5 Mon Sep 17 00:00:00 2001 From: Jon Clucas Date: Wed, 5 Oct 2022 11:47:16 -0400 Subject: [PATCH 13/72] :fire: Remove empty file [skip ci] --- CPAC/registration/guardrail_thresholds.py | 18 ------------------ 1 file changed, 18 deletions(-) delete mode 100644 CPAC/registration/guardrail_thresholds.py diff --git a/CPAC/registration/guardrail_thresholds.py b/CPAC/registration/guardrail_thresholds.py deleted file mode 100644 index 704be8259b..0000000000 --- a/CPAC/registration/guardrail_thresholds.py +++ /dev/null @@ -1,18 +0,0 @@ -# Copyright (C) 2022 C-PAC Developers - -# This file is part of C-PAC. - -# C-PAC is free software: you can redistribute it and/or modify it under -# the terms of the GNU Lesser General Public License as published by the -# Free Software Foundation, either version 3 of the License, or (at your -# option) any later version. - -# C-PAC is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or -# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public -# License for more details. - -# You should have received a copy of the GNU Lesser General Public -# License along with C-PAC. If not, see <https://www.gnu.org/licenses/>.
-"""Global thresholds for guardrails""" - From d61569fe2a60929abb850d34658f749b04aeb0df Mon Sep 17 00:00:00 2001 From: Jon Clucas Date: Wed, 5 Oct 2022 11:53:08 -0400 Subject: [PATCH 14/72] :art: Initialize guardrail before conditional --- CPAC/registration/registration.py | 12 ++++-------- 1 file changed, 4 insertions(+), 8 deletions(-) diff --git a/CPAC/registration/registration.py b/CPAC/registration/registration.py index 1b2f08c1ec..b42ee4bac8 100644 --- a/CPAC/registration/registration.py +++ b/CPAC/registration/registration.py @@ -66,6 +66,10 @@ def apply_transform(wf_name, reg_tool, time_series=False, multi_input=False, # time series chunks multi_input = True + # Guardrail: check QC metrics + guardrail = registration_guardrail_node() + wf.connect(inputNode, 'reference', guardrail, 'reference') + if reg_tool == 'ants': if multi_input: @@ -87,10 +91,6 @@ def apply_transform(wf_name, reg_tool, time_series=False, multi_input=False, apply_warp.inputs.dimension = 3 apply_warp.interface.num_threads = int(num_ants_cores) - # Guardrail: check QC metrics - guardrail = registration_guardrail_node() - wf.connect(inputNode, 'reference', guardrail, 'reference') - if time_series: apply_warp.inputs.input_image_type = 3 @@ -197,10 +197,6 @@ def apply_transform(wf_name, reg_tool, time_series=False, multi_input=False, # go in as a warpfield file wf.connect(inputNode, 'transform', apply_warp, 'field_file') - # Guardrail: check QC metrics - guardrail = registration_guardrail_node() - wf.connect(inputNode, 'reference', guardrail, 'reference') - # parallelize the apply warp, if multiple CPUs, and it's a time # series! if int(num_cpus) > 1 and time_series: From 58811dff610db2f9a962f12a33edfbe5d2eb3fdf Mon Sep 17 00:00:00 2001 From: Jon Clucas Date: Wed, 5 Oct 2022 12:07:06 -0400 Subject: [PATCH 15/72] :bug: Fix concat-then-guardrail logic --- CPAC/registration/registration.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/CPAC/registration/registration.py b/CPAC/registration/registration.py index b42ee4bac8..e6dab1d0aa 100644 --- a/CPAC/registration/registration.py +++ b/CPAC/registration/registration.py @@ -157,9 +157,9 @@ def apply_transform(wf_name, reg_tool, time_series=False, multi_input=False, mem_gb=2.5) func_concat.inputs.outputtype = 'NIFTI_GZ' - wf.connect(apply_warp, 'output_image', guardrail, 'registered') - wf.connect(guardrail, 'registered', func_concat, 'in_files') - wf.connect(func_concat, 'out_file', outputNode, 'output_image') + wf.connect(apply_warp, 'output_image', func_concat, 'in_files') + wf.connect(func_concat, 'out_file', guardrail, 'registered') + wf.connect(guardrail, 'registered', outputNode, 'output_image') else: wf.connect(inputNode, 'input_image', apply_warp, 'input_image') From fe139ccb5f906fb5ca644e5f476201d16f38b74d Mon Sep 17 00:00:00 2001 From: Jon Clucas Date: Wed, 5 Oct 2022 14:32:30 -0400 Subject: [PATCH 16/72] :recycle: Give each guardrail a unique name Co-authored-by: Steve Giavasis --- CPAC/registration/registration.py | 16 +++++++++++------- 1 file changed, 9 insertions(+), 7 deletions(-) diff --git a/CPAC/registration/registration.py b/CPAC/registration/registration.py index e6dab1d0aa..e49b3f93ba 100644 --- a/CPAC/registration/registration.py +++ b/CPAC/registration/registration.py @@ -67,7 +67,7 @@ def apply_transform(wf_name, reg_tool, time_series=False, multi_input=False, multi_input = True # Guardrail: check QC metrics - guardrail = registration_guardrail_node() + guardrail = registration_guardrail_node(f'{wf_name}_guardrail')
wf.connect(inputNode, 'reference', guardrail, 'reference') if reg_tool == 'ants': @@ -331,7 +331,7 @@ def create_fsl_flirt_linear_reg(name='fsl_flirt_linear_reg'): linear_reg = pe.Node(interface=fsl.FLIRT(), name='linear_reg_0') linear_reg.inputs.cost = 'corratio' - guardrail = registration_guardrail_node() + guardrail = registration_guardrail_node(f'{name}_guardrail') inv_flirt_xfm = pe.Node(interface=fsl.utils.ConvertXFM(), name='inv_linear_reg0_xfm') @@ -710,7 +710,7 @@ def create_register_func_to_anat(config, phase_diff_distcor=False, name='outputspec') linear_reg = pe.Node(interface=fsl.FLIRT(), name='linear_func_to_anat') - guardrail = registration_guardrail_node() + guardrail = registration_guardrail_node(f'{name}_guardrail') linear_reg.inputs.interp = config.registration_workflows[ 'functional_registration']['coregistration']['interpolation'] @@ -1023,7 +1023,7 @@ def bbreg_args(bbreg_target): inputNode_echospacing, 'echospacing', bbreg_func_to_anat, 'echospacing') - guardrail = registration_guardrail_node() + guardrail = registration_guardrail_node(name=f'{name}_guardrail') register_bbregister_func_to_anat.connect(inputspec, 'anat', guardrail, 'reference') register_bbregister_func_to_anat.connect( @@ -1255,7 +1255,7 @@ def create_wf_calculate_ants_warp(name='create_wf_calculate_ants_warp', select_inverse_warp.inputs.selection = "Inverse" - guardrail = registration_guardrail_node() + guardrail = registration_guardrail_node(f'{name}_guardrail') calc_ants_warp_wf.connect(inputspec, 'moving_brain', calculate_ants_warp, 'moving_brain') calc_ants_warp_wf.connect(inputspec, 'reference_brain', @@ -3692,7 +3692,8 @@ def single_step_resample_timeseries_to_T1template(wf, cfg, strat_pool, output_names=['itk_transform'], function=run_c3d), name=f'convert_bbr2itk_{pipe_num}') - guardrail_preproc = registration_guardrail_node() + guardrail_preproc = registration_guardrail_node( + 'single-step-resampling-preproc_guardrail') if cfg.registration_workflows['functional_registration'][ 'coregistration']['boundary_based_registration'][ 'reference'] == 'whole-head': @@ -3768,7 +3769,8 @@ def single_step_resample_timeseries_to_T1template(wf, cfg, strat_pool, applyxfm_func_to_standard.inputs.float = True applyxfm_func_to_standard.inputs.interpolation = 'LanczosWindowedSinc' - guardrail_brain = registration_guardrail_node() + guardrail_brain = registration_guardrail_node( + 'single-step-resampling-brain_guardrail') wf.connect(split_func, 'out_files', applyxfm_func_to_standard, 'input_image') From 07a91cd2cd9534dc102acb1b2d11dd5e31774efc Mon Sep 17 00:00:00 2001 From: Jon Clucas Date: Wed, 5 Oct 2022 14:45:24 -0400 Subject: [PATCH 17/72] :art: Keep random seed in bounds --- CPAC/pipeline/random_state/__init__.py | 8 ++++---- CPAC/pipeline/random_state/seed.py | 7 ++++--- CPAC/pipeline/schema.py | 4 ++-- CPAC/registration/__init__.py | 27 +++++++++++++------------- CPAC/registration/guardrails.py | 7 +++++-- 5 files changed, 29 insertions(+), 24 deletions(-) diff --git a/CPAC/pipeline/random_state/__init__.py b/CPAC/pipeline/random_state/__init__.py index 5956f33416..b590912417 100644 --- a/CPAC/pipeline/random_state/__init__.py +++ b/CPAC/pipeline/random_state/__init__.py @@ -1,6 +1,6 @@ '''Random state for C-PAC''' -from .seed import random_seed, random_seed_flags, set_up_random_state, \ - set_up_random_state_logger +from .seed import MAX_SEED, random_seed, random_seed_flags, \ + set_up_random_state, set_up_random_state_logger -__all__ = ['random_seed', 'random_seed_flags', 'set_up_random_state', - 
'set_up_random_state_logger'] +__all__ = ['MAX_SEED', 'random_seed', 'random_seed_flags', + 'set_up_random_state', 'set_up_random_state_logger'] diff --git a/CPAC/pipeline/random_state/seed.py b/CPAC/pipeline/random_state/seed.py index 9b5e6386cc..662ee7dc17 100644 --- a/CPAC/pipeline/random_state/seed.py +++ b/CPAC/pipeline/random_state/seed.py @@ -29,6 +29,7 @@ from CPAC.utils.interfaces.ants import AI from CPAC.utils.monitoring.custom_logging import set_up_logger +MAX_SEED = np.iinfo(np.int32).max _seed = {'seed': None} @@ -45,10 +46,10 @@ def random_random_seed(): Examples -------- - >>> 0 < random_random_seed() <= np.iinfo(np.int32).max + >>> 0 < random_random_seed() <= MAX_SEED True ''' - return random.randint(1, np.iinfo(np.int32).max) + return random.randint(1, MAX_SEED) def random_seed(): @@ -183,7 +184,7 @@ def set_up_random_state(seed): seed = random_random_seed() if (seed != 'random' and not ( isinstance(seed, int) and - (0 < int(seed) <= np.iinfo(np.int32).max) + (0 < int(seed) <= MAX_SEED) )): raise ValueError('Valid random seeds are positive integers up to ' f'2147483647, "random", or None, not {seed}') diff --git a/CPAC/pipeline/schema.py b/CPAC/pipeline/schema.py index 2732201e8a..a69994dfea 100644 --- a/CPAC/pipeline/schema.py +++ b/CPAC/pipeline/schema.py @@ -17,11 +17,11 @@ """Validation schema for C-PAC pipeline configurations""" # pylint: disable=too-many-lines from itertools import chain, permutations -import numpy as np from voluptuous import All, ALLOW_EXTRA, Any, Capitalize, Coerce, \ ExactSequence, ExclusiveInvalid, In, Length, Lower, \ Match, Maybe, Optional, Range, Required, Schema from CPAC import docs_prefix +from CPAC.pipeline.random_state.seed import MAX_SEED from CPAC.utils.datatypes import ListFromItem from CPAC.utils.utils import delete_nested_value, lookup_nested_value, \ set_nested_value @@ -406,7 +406,7 @@ def _changes_1_8_0_to_1_8_1(config_dict): 'num_participants_at_once': int, 'random_seed': Maybe(Any( 'random', - All(int, Range(min=1, max=np.iinfo(np.int32).max)))), + All(int, Range(min=1, max=MAX_SEED)))), 'observed_usage': { 'callback_log': Maybe(str), 'buffer': Number, diff --git a/CPAC/registration/__init__.py b/CPAC/registration/__init__.py index 2d5b934e1c..1ab40d425a 100644 --- a/CPAC/registration/__init__.py +++ b/CPAC/registration/__init__.py @@ -1,17 +1,18 @@ from .registration import create_fsl_flirt_linear_reg, \ - create_fsl_fnirt_nonlinear_reg, \ - create_fsl_fnirt_nonlinear_reg_nhp, \ - create_register_func_to_anat, \ - create_register_func_to_anat_use_T2, \ - create_bbregister_func_to_anat, \ - create_wf_calculate_ants_warp + create_fsl_fnirt_nonlinear_reg, \ + create_fsl_fnirt_nonlinear_reg_nhp, \ + create_register_func_to_anat, \ + create_register_func_to_anat_use_T2, \ + create_bbregister_func_to_anat, \ + create_wf_calculate_ants_warp from .output_func_to_standard import output_func_to_standard -__all__ = ['create_fsl_flirt_linear_reg', \ - 'create_fsl_fnirt_nonlinear_reg', \ - 'create_fsl_fnirt_nonlinear_reg_nhp', \ - 'create_register_func_to_anat', \ - 'create_register_func_to_anat_use_T2', \ - 'create_bbregister_func_to_anat', \ - 'create_wf_calculate_ants_warp'] \ No newline at end of file +__all__ = ['create_fsl_flirt_linear_reg', + 'create_fsl_fnirt_nonlinear_reg', + 'create_fsl_fnirt_nonlinear_reg_nhp', + 'create_register_func_to_anat', + 'create_register_func_to_anat_use_T2', + 'create_bbregister_func_to_anat', + 'create_wf_calculate_ants_warp', + 'output_func_to_standard'] diff --git a/CPAC/registration/guardrails.py 
b/CPAC/registration/guardrails.py index 288da178ef..0917c10cbe 100644 --- a/CPAC/registration/guardrails.py +++ b/CPAC/registration/guardrails.py @@ -194,12 +194,15 @@ def retry_registration_node(registered, registration_node): ------- Node """ - from CPAC.pipeline.random_state.seed import random_seed + from CPAC.pipeline.random_state.seed import MAX_SEED, random_seed seed = random_seed() if registered.endswith('-failed') and isinstance(seed, int): retry_node = registration_node.clone( name=f'{registration_node.name}-retry') - retry_node.seed = seed + 1 + if seed < MAX_SEED: # increment random seed + retry_node.seed = seed + 1 + else: # loop back to minumum seed + retry_node.seed = 1 return retry_node return registration_node From cf8272bfafa182ab248daf24bbe2ca222aa60c50 Mon Sep 17 00:00:00 2001 From: Jon Clucas Date: Fri, 7 Oct 2022 09:28:23 -0400 Subject: [PATCH 18/72] :bug: Add imports to guardrail node initialization --- CPAC/registration/guardrails.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/CPAC/registration/guardrails.py b/CPAC/registration/guardrails.py index 0917c10cbe..aa8ef14761 100644 --- a/CPAC/registration/guardrails.py +++ b/CPAC/registration/guardrails.py @@ -111,6 +111,10 @@ def registration_guardrail_node(name=None): return Node(Function(input_names=['registered', 'reference'], output_names=['registered'], + imports=['from CPAC.qc import qc_masks, ' + 'REGISTRATION_GUARDRAIL_THRESHOLDS', + 'from CPAC.registration.guardrails ' + 'import BadRegistrationError'], function=registration_guardrail), name=name) From cfd7a91f975813282f0f608c2e4e5cc5b7777017 Mon Sep 17 00:00:00 2001 From: Jon Clucas Date: Fri, 7 Oct 2022 12:33:26 -0400 Subject: [PATCH 19/72] :necktie: Change guardrail retry default to `True` and retry even if not changing seed Co-authored-by: Mike Milham Co-authored-by: Steve Giavasis --- CPAC/registration/guardrails.py | 13 +++++++------ 1 file changed, 7 insertions(+), 6 deletions(-) diff --git a/CPAC/registration/guardrails.py b/CPAC/registration/guardrails.py index aa8ef14761..2a64fca294 100644 --- a/CPAC/registration/guardrails.py +++ b/CPAC/registration/guardrails.py @@ -118,7 +118,7 @@ def registration_guardrail_node(name=None): function=registration_guardrail), name=name) -def registration_guardrail_workflow(registration_node, retry=False): +def registration_guardrail_workflow(registration_node, retry=True): """A workflow to handle hitting a registration guardrail Parameters @@ -200,13 +200,14 @@ def retry_registration_node(registered, registration_node): """ from CPAC.pipeline.random_state.seed import MAX_SEED, random_seed seed = random_seed() - if registered.endswith('-failed') and isinstance(seed, int): + if registered.endswith('-failed'): retry_node = registration_node.clone( name=f'{registration_node.name}-retry') - if seed < MAX_SEED: # increment random seed - retry_node.seed = seed + 1 - else: # loop back to minumum seed - retry_node.seed = 1 + if isinstance(seed, int): + if seed < MAX_SEED: # increment random seed + retry_node.seed = seed + 1 + else: # loop back to minumum seed + retry_node.seed = 1 return retry_node return registration_node From ed81db07ed3bc914b512fb49a8271b097e30c7ed Mon Sep 17 00:00:00 2001 From: Jon Clucas Date: Fri, 7 Oct 2022 14:48:31 -0400 Subject: [PATCH 20/72] :bug: Handle list values returned from `qc_masks` --- CPAC/registration/guardrails.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/CPAC/registration/guardrails.py b/CPAC/registration/guardrails.py index 2a64fca294..825329e0d9 100644 --- 
a/CPAC/registration/guardrails.py +++ b/CPAC/registration/guardrails.py @@ -83,6 +83,8 @@ def registration_guardrail(registered: str, reference: str, retry: bool = False for metric, threshold in REGISTRATION_GUARDRAIL_THRESHOLDS.items(): if threshold is not None: value = qc_metrics.get(metric) + if isinstance(value, list): + value = value[0] if value < threshold: with open(f'{registered}.failed_qc', 'w', encoding='utf-8') as _f: From e9359c24483478f5374d5ebc9359427807fa480c Mon Sep 17 00:00:00 2001 From: Jon Clucas Date: Fri, 7 Oct 2022 17:21:57 -0400 Subject: [PATCH 21/72] :bug: Log failed metric instead of boolean --- CPAC/registration/guardrails.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CPAC/registration/guardrails.py b/CPAC/registration/guardrails.py index 825329e0d9..e53e7bca1f 100644 --- a/CPAC/registration/guardrails.py +++ b/CPAC/registration/guardrails.py @@ -88,7 +88,7 @@ def registration_guardrail(registered: str, reference: str, retry: bool = False if value < threshold: with open(f'{registered}.failed_qc', 'w', encoding='utf-8') as _f: - _f.write(True) + _f.write(f'{metric}: {value} < {threshold}') if retry: registered = f'{registered}-failed' else: From 6de7c6d6b716a29540b7e7ddc750963b8b14a75d Mon Sep 17 00:00:00 2001 From: Jon Clucas Date: Tue, 18 Oct 2022 15:52:41 +0000 Subject: [PATCH 22/72] :twisted_rightwards_arrows: Merge 'develop' into 'enh/guardrails' MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Squashed commit of the following: commit 356dc30cd61e62305818f37a761f5ebf1f93cc6b Merge: b1ce64b5 55667a2f Author: Steve Giavasis Date: Fri Oct 14 16:33:28 2022 -0400 Merge pull request #1778 from FCP-INDI/deeper-config-path 🚚 Move C-PAC generated configs into `log/sub-${sub}_ses-${ses}` subdirectory commit b1ce64b5976b18356268a0a61a37cdb33e2d67f2 Merge: 37a6874d c6ec0a0a Author: Steve Giavasis Date: Fri Oct 14 16:33:19 2022 -0400 Merge pull request #1769 from FCP-INDI/feature/total_readout_time Ingress and use TotalReadoutTime from epi fmap meta-data in FSL topup commit c6ec0a0ad333faa95f71b3797bc744f57fad813c Merge: 8301e3c0 37a6874d Author: Steve Giavasis Date: Fri Oct 14 16:03:28 2022 -0400 Merge branch 'develop' into feature/total_readout_time commit 8301e3c09994b2128d72b8394d91976903589c08 Author: Steve Giavasis Date: Fri Oct 14 15:56:44 2022 -0400 Revert to using dwell time first to maintain original DCAN script's functionality commit 55667a2f12cadda692cd0a4b404a71b71660ebc0 Author: Jon Clucas Date: Fri Oct 14 15:55:55 2022 -0400 :art: Include minimized pipeline config in hardlinking step commit a641d00d7afc97b810abe549530dd7edcb5f51c4 Author: Jon Clucas Date: Fri Oct 14 15:44:42 2022 -0400 :lock: Make generated config files read-only Co-authored-by: Steve Giavasis commit 37a6874da3c25e4e7695d8e753976c11a5239d80 Merge: b5d0d1f8 5b2465fc Author: Steve Giavasis Date: Fri Oct 14 15:46:39 2022 -0400 Merge pull request #1787 from diegoaper/main :bug: :white_check_mark: Group Runner 1.8 and MDMR Changes commit cb5bc42d7af48dbd42f36cb41a4d0c2009bc08ab Merge: 9c0a89c5 b5d0d1f8 Author: Jon Clucas Date: Fri Oct 14 15:37:53 2022 -0400 :twisted_rightwards_arrows: Merge develop into deeper-config-path commit b5d0d1f821fc094fdcdf35213b644ee4fd519e18 Merge: 7f50838c dc3f1136 Author: Steve Giavasis Date: Fri Oct 14 15:19:13 2022 -0400 Merge pull request #1796 from clane9/bugfix/random_seed :bug: fix type handling in `set_up_random_state` commit dc3f113674b9c4b6583f1d2faf22851e01de3180 Merge: 0ccc40d9 7f50838c 
Author: Steve Giavasis Date: Fri Oct 14 15:18:31 2022 -0400 Merge branch 'develop' into bugfix/random_seed commit 7f50838ce42add51e5ea954532baecc1e72fdddd Merge: 8ebf0a78 c2ba9d6e Author: Steve Giavasis Date: Fri Oct 14 15:17:39 2022 -0400 Merge pull request #1790 from FCP-INDI/backwards-compatibility/default_pipeline.yml ⚰️ Add placeholder default_pipeline.yml for backwards compatibility commit 8ebf0a784156dd254a3bf070234ea0040bc3da66 Merge: fd91a429 0182f98c Author: Steve Giavasis Date: Fri Oct 14 15:16:59 2022 -0400 Merge pull request #1788 from FCP-INDI/rbc-update 🔧 Base `rbc-options` on `fmriprep-options`' preproc commit fd91a4292ff35229cb8c5e1fa7e892b8c4a6720f Merge: 6b402b0d 76a4a751 Author: Steve Giavasis Date: Fri Oct 14 15:16:14 2022 -0400 Merge pull request #1781 from FCP-INDI/fix/FUGUE--dwell 🐛 Fix `FUGUE --dwell` parameterization commit 0ccc40d955956c66a714f4a10a0bc58e5332a40a Author: Connor Lane Date: Thu Oct 13 16:55:12 2022 -0400 move changelog entry to the unreleased section commit b5c742a12e42b68ff78e2f039a24cdeb714216e1 Author: Connor Lane Date: Wed Oct 12 17:43:26 2022 -0400 update changelog commit 6b402b0d8cc44e0b5ef70d8c0aeef768ca1d6375 Author: Jon Clucas Date: Wed Oct 12 16:12:59 2022 -0400 fixup! :fast_forward: Bring `develop` up to date with `main` commit 5867210016c5df683458847ac0c8cabed3a17112 Author: Connor Lane Date: Wed Oct 12 09:48:39 2022 -0400 :bug: fix type handling in `set_up_random_state` commit 5b2465fcc8a9986462db774046edbe55d4b91f7e Author: diegoaper <88197703+diegoaper@users.noreply.github.com> Date: Tue Oct 11 11:48:57 2022 -0400 Removed participant Configuration commit 92f9658a869432f86e7e2cbfcfb8f4a9a8675f9c Author: diegoaper <88197703+diegoaper@users.noreply.github.com> Date: Mon Oct 10 14:09:46 2022 -0400 Update cpac_group_runner.py commit a112d34b559d2991ac91472f7159cd08400370e5 Author: diegoaper <88197703+diegoaper@users.noreply.github.com> Date: Mon Oct 10 10:40:56 2022 -0400 Update CPAC/pipeline/cpac_group_runner.py Co-authored-by: Jon Clucas commit 031946af377930cd21e736768bd141f1ebbc5ad7 Author: diegoaper <88197703+diegoaper@users.noreply.github.com> Date: Mon Oct 10 10:40:35 2022 -0400 Update CPAC/pipeline/cpac_group_runner.py Co-authored-by: Jon Clucas commit df2bb336f2228bc5287f879dd9c9e4a33ccd64d2 Author: diegoaper <88197703+diegoaper@users.noreply.github.com> Date: Mon Oct 10 10:40:00 2022 -0400 Update CPAC/pipeline/cpac_group_runner.py Co-authored-by: Jon Clucas commit c5d306da403533bee48bfe189dd4df1c191e1503 Author: diegoaper <88197703+diegoaper@users.noreply.github.com> Date: Mon Oct 10 10:39:46 2022 -0400 Update CPAC/pipeline/cpac_group_runner.py Co-authored-by: Jon Clucas commit e903b982c02f0a8a9dae58e195f372ca1fee866f Author: diegoaper <88197703+diegoaper@users.noreply.github.com> Date: Mon Oct 10 10:39:16 2022 -0400 Update CPAC/pipeline/cpac_group_runner.py Co-authored-by: Jon Clucas commit 82e2ece25f52e9e553f2c27aae05463b011b2cf7 Author: diegoaper <88197703+diegoaper@users.noreply.github.com> Date: Mon Oct 10 10:38:50 2022 -0400 Update CPAC/pipeline/cpac_group_runner.py Co-authored-by: Jon Clucas commit d75dbe37fb9e19f9c189ee4abbe12383b6aec137 Author: diegoaper <88197703+diegoaper@users.noreply.github.com> Date: Mon Oct 10 10:38:32 2022 -0400 Update CPAC/pipeline/cpac_group_runner.py Co-authored-by: Jon Clucas commit 9407c63f4f0b5f77292d7b4c709d79aed6f09a85 Author: diegoaper <88197703+diegoaper@users.noreply.github.com> Date: Mon Oct 10 10:38:11 2022 -0400 Update CPAC/pipeline/cpac_group_runner.py Co-authored-by: Jon Clucas 
commit 61097e3a4e7724222575e9a078746d25fcffa7fe Merge: 0ba271af 750c9ecc Author: diegoaper <88197703+diegoaper@users.noreply.github.com> Date: Mon Oct 10 10:37:40 2022 -0400 Merge branch 'main' of https://github.com/diegoaper/C-PAC_MDMR-Group_Runner commit 0ba271af3f0bdfaaf8779f9a717863013a7d413d Author: diegoaper <88197703+diegoaper@users.noreply.github.com> Date: Mon Oct 10 10:37:29 2022 -0400 Update cpac_group_runner.py commit 750c9ecc80660d1cac404dcd82f58934d6da3b30 Author: diegoaper <88197703+diegoaper@users.noreply.github.com> Date: Mon Oct 10 10:37:04 2022 -0400 Update CPAC/pipeline/cpac_group_runner.py Co-authored-by: Jon Clucas commit cf81d08bd8cd897c38c6b97574461e7654e90a51 Author: diegoaper <88197703+diegoaper@users.noreply.github.com> Date: Fri Oct 7 17:36:52 2022 -0400 Update CPAC/pipeline/cpac_group_runner.py Co-authored-by: Jon Clucas commit e570fbfb480b69692ed58bfec277ddaa94abc805 Author: diegoaper <88197703+diegoaper@users.noreply.github.com> Date: Fri Oct 7 17:36:38 2022 -0400 Update CPAC/pipeline/cpac_group_runner.py Co-authored-by: Jon Clucas commit 2eea1f40f38ad0225d74ed71c7445a5f6b6d34e3 Author: diegoaper <88197703+diegoaper@users.noreply.github.com> Date: Fri Oct 7 17:35:59 2022 -0400 Update CPAC/pipeline/cpac_group_runner.py Co-authored-by: Jon Clucas commit 99620c621f498708e14b088d2a065b8b2f985aab Author: diegoaper <88197703+diegoaper@users.noreply.github.com> Date: Fri Oct 7 17:35:44 2022 -0400 Update CPAC/pipeline/cpac_group_runner.py Co-authored-by: Jon Clucas commit cf8f0d9acf89aa0721f461331cb4457520ee5cb4 Author: diegoaper <88197703+diegoaper@users.noreply.github.com> Date: Fri Oct 7 17:33:11 2022 -0400 Update CPAC/pipeline/cpac_group_runner.py Co-authored-by: Jon Clucas commit de6235f1677214172aee2516c187e3f8d4b4e7cc Author: diegoaper <88197703+diegoaper@users.noreply.github.com> Date: Fri Oct 7 17:32:40 2022 -0400 Update CPAC/pipeline/cpac_group_runner.py Co-authored-by: Jon Clucas commit ebe6a272b5f1b1f66db81c346f55cf07d642bf25 Author: diegoaper <88197703+diegoaper@users.noreply.github.com> Date: Fri Oct 7 17:32:26 2022 -0400 Update CPAC/pipeline/cpac_group_runner.py Co-authored-by: Jon Clucas commit c50db8dd8ad4618cb5366cc338e2a7b6abd5b5cb Author: diegoaper <88197703+diegoaper@users.noreply.github.com> Date: Fri Oct 7 17:30:13 2022 -0400 Update CPAC/pipeline/cpac_group_runner.py Co-authored-by: Jon Clucas commit 748f3766d7e4f559480bea63f973a9255d743487 Author: diegoaper <88197703+diegoaper@users.noreply.github.com> Date: Fri Oct 7 17:29:26 2022 -0400 Update CPAC/cwas/pipeline.py Co-authored-by: Jon Clucas commit 6d938f4916c51e0c508fe9ee36bd22c26662753b Author: diegoaper <88197703+diegoaper@users.noreply.github.com> Date: Fri Oct 7 17:28:55 2022 -0400 Update CPAC/cwas/pipeline.py Co-authored-by: Jon Clucas commit 0b2b868e4449af08a06dc143b6b4363748780f4c Author: diegoaper <88197703+diegoaper@users.noreply.github.com> Date: Fri Oct 7 17:28:20 2022 -0400 Update CPAC/cwas/pipeline.py Co-authored-by: Jon Clucas commit 281427d7756862d0db114608741fcaacf93e5698 Author: diegoaper <88197703+diegoaper@users.noreply.github.com> Date: Fri Oct 7 17:27:24 2022 -0400 Update CPAC/cwas/cwas.py Co-authored-by: Jon Clucas commit 63c3ad4f453c6027a172d7b3b68b3bdfbc953f6e Author: diegoaper <88197703+diegoaper@users.noreply.github.com> Date: Fri Oct 7 17:27:15 2022 -0400 Update CPAC/cwas/cwas.py Co-authored-by: Jon Clucas commit ca75679c7c76245fed77a4ea2000e188c8467413 Author: diegoaper <88197703+diegoaper@users.noreply.github.com> Date: Fri Oct 7 17:27:05 2022 -0400 Update 
CPAC/cwas/cwas.py Co-authored-by: Jon Clucas commit 22ed56abac40e808b150f88f05931269a0fb373b Author: diegoaper <88197703+diegoaper@users.noreply.github.com> Date: Fri Oct 7 17:26:54 2022 -0400 Update CPAC/cwas/cwas.py Co-authored-by: Jon Clucas commit 490e319acdbcd2901878c4ffff1149abbebd610b Author: diegoaper <88197703+diegoaper@users.noreply.github.com> Date: Fri Oct 7 17:26:32 2022 -0400 Update CPAC/cwas/cwas.py Co-authored-by: Jon Clucas commit 846286507c1c87f9a2124e1420693b9d77cbfb80 Author: diegoaper <88197703+diegoaper@users.noreply.github.com> Date: Fri Oct 7 17:25:57 2022 -0400 Update CPAC/resources/configs/group_config_template.yml Co-authored-by: Jon Clucas commit 3269c0ee924f414020ac066a583ee05c12a346a7 Author: diegoaper <88197703+diegoaper@users.noreply.github.com> Date: Fri Oct 7 17:25:14 2022 -0400 Update CPAC/pipeline/cpac_group_runner.py Co-authored-by: Jon Clucas commit bb6458b6516423bd366ede7bf11cd454bb4a065d Author: diegoaper <88197703+diegoaper@users.noreply.github.com> Date: Fri Oct 7 17:22:33 2022 -0400 Update CPAC/pipeline/cpac_group_runner.py Co-authored-by: Jon Clucas commit fe1dff4866db62227043e4567844b6a2cfee5dfd Author: diegoaper <88197703+diegoaper@users.noreply.github.com> Date: Fri Oct 7 17:22:25 2022 -0400 Update CPAC/cwas/cwas.py Co-authored-by: Jon Clucas commit 6fcb279a9b404215d3617ca5bc4c19e38c0d27eb Author: diegoaper <88197703+diegoaper@users.noreply.github.com> Date: Fri Oct 7 17:22:10 2022 -0400 Update CPAC/cwas/cwas.py Co-authored-by: Jon Clucas commit f25b604bf4ecefd7798a9167b3acb5fcb3591103 Author: diegoaper <88197703+diegoaper@users.noreply.github.com> Date: Fri Oct 7 17:22:04 2022 -0400 Update CPAC/cwas/cwas.py Co-authored-by: Jon Clucas commit 35318a58572b5d9c508c7f6b35e78808c2812b88 Author: diegoaper <88197703+diegoaper@users.noreply.github.com> Date: Fri Oct 7 17:21:52 2022 -0400 Update CPAC/cwas/cwas.py Co-authored-by: Jon Clucas commit 6ccce46f49e4840f1837cad8c130137e224f8730 Author: diegoaper <88197703+diegoaper@users.noreply.github.com> Date: Fri Oct 7 17:20:28 2022 -0400 Update CPAC/cwas/cwas.py Co-authored-by: Jon Clucas commit 6dc4a2b904529504a21a91252d4dbfa3b4418fc8 Author: diegoaper <88197703+diegoaper@users.noreply.github.com> Date: Fri Oct 7 10:55:53 2022 -0400 Update cpac_outputs.tsv Added 2 lines for MDMR group_analysis output commit b6ed208a307dde12be98df6a1b3f8b39163d5b1a Author: diegoaper <88197703+diegoaper@users.noreply.github.com> Date: Fri Oct 7 10:48:47 2022 -0400 Update group_config_template.yml commit 2ab1d648b08104e22a878dec2128f5430f61304e Author: diegoaper <88197703+diegoaper@users.noreply.github.com> Date: Fri Oct 7 10:48:09 2022 -0400 Fixed 1.8.4 image intake Fixed the intake for images to make the 1.8.4 BIDS convention. 
commit 68e34164d1392e457a2c181ea64fbf1e39cdc644 Merge: 32632fa3 9586d714 Author: Jon Clucas Date: Fri Oct 7 10:20:22 2022 -0400 :fast_forward: Bring `develop` up to date with `main` commit c2ba9d6e40d640f2ca62da1ac039fa80329aae94 Author: Jon Clucas Date: Wed Oct 5 15:16:39 2022 -0400 :coffin: Add placeholder default_pipeline.yml for backwards compatibility [skip ci] commit 76a4a751885a989ea5cfcdaa646532b8d81e5bde Author: Jon Clucas Date: Wed Oct 5 17:51:41 2022 +0000 :recycle: Send 'desc-mean_bold' as input to `distcor_blip_fsl_topup` Co-authored-by: Steve Giavasis commit 0182f98c61cb7fbb495c8300e6a6a7991c859240 Author: Jon Clucas Date: Wed Oct 5 11:12:03 2022 -0400 :memo: Add rbc-options update to CHANGELOG [skip ci] Co-authored-by: Greg Kiar commit 52089447e163efbba5248c430ce7defd1e281847 Author: Jon Clucas Date: Tue Oct 4 17:00:49 2022 -0400 :wrench: Base rbc-options on fmriprep-options Co-authored-by: Greg Kiar commit 1ab4086a07cc74d645624c56e404c4002ca527a9 Author: Jon Clucas Date: Tue Oct 4 20:48:57 2022 +0000 :recycle: Replumb to prevent wild TOPUP forking Co-authored-by: Steve Giavasis commit c3ac6ac0ef3e9d64e3bf54a0e1d2d94cb4399e85 Author: diegoaper <88197703+diegoaper@users.noreply.github.com> Date: Tue Oct 4 15:48:30 2022 -0400 MDMR Group Config Template Update to the scripts and config template commit eee5461c92410a30cb010b6ca958a5dd528b9c97 Author: Jon Clucas Date: Fri Sep 30 21:21:15 2022 +0000 :recycle: Fix cyclical imports commit db2785138f1a7c8af5b3adc61347c5d113dcc043 Author: Jon Clucas Date: Fri Sep 30 16:37:46 2022 -0400 :alien: Import typehints in function node commit c1481538fd0971aa03b6d7aabf331610663bbeba Author: Jon Clucas Date: Fri Sep 30 15:41:34 2022 +0000 :loud_sound: Add FSL-`DwellTime` fix to CHANGELOG [skip ci] Co-authored-by: Nathalia Esper Co-authored-by: Alex Franco commit e53581ba558b2265ad2ed2aaebda80073f2751be Author: Jon Clucas Date: Fri Sep 30 15:27:52 2022 +0000 :bug: Fix import path commit 233858594fe3fbe72c12156b5af34f19a3c78ff1 Author: Jon Clucas Date: Fri Sep 30 15:24:38 2022 +0000 :page_facing_up: Add/update LGPL-3-or-later notice to modified files [skip ci] commit 109d5362ff4ba303948fd77b6713a6b2ac1e30e7 Author: Jon Clucas Date: Fri Sep 30 15:11:39 2022 +0000 :recycle: Only look for `EffectiveEchoSpacing` in functional sidecar commit 695d29a38f54610e5f91ecb34085cb1934e42c11 Author: Jon Clucas Date: Fri Sep 30 14:41:04 2022 +0000 :rotating_light: Remove duplicate good-name [skip ci] commit c9bea1881402c104d4a1ceb57b3282fc682151df Author: Jon Clucas Date: Fri Sep 30 14:37:12 2022 +0000 :recycle: DRY and let `calc_delta_te_and_asym_ratio` determine EES source commit 55ca4d91270ec750a7ff5ab9e774f838e18b8146 Author: Jon Clucas Date: Thu Sep 29 14:09:12 2022 +0000 :bug: Fix logic checking for `diffphase-effectiveEchoSpacing` commit d41dc2922c3e9133ecccba84020e2b15ac728bdc Author: Jon Clucas Date: Wed Sep 28 12:42:26 2022 -0400 :bug: Only connect calc_delta_ratio if diff commit ca86b0b99671c66bb75874e010b192a4f445d4b3 Author: Jon Clucas Date: Wed Sep 28 09:56:32 2022 -0400 :bug: Fix plumbing for EffectiveEchoSpacing commit f7ca070e9a4dd3967f45f57b33155b9b6857d8bf Author: Jon Clucas Date: Tue Sep 27 17:26:49 2022 -0400 :recycle: Get EffectiveEchoSpacing from func metadata if possible commit 376d5508b7011008ab0c6b6857fe401f882eb7d1 Author: Jon Clucas Date: Tue Sep 27 15:19:59 2022 -0400 :bug: Update DwellTime → EffectiveEchoSpacing for FSL FUGUE Co-authored-by: Nathalia Esper Co-authored-by: Alex Franco commit 32632fa3dcf0c60f2f966df3b19b54ba7d66d687 
Merge: eed40c51 238f4991 Author: Steve Giavasis Date: Mon Sep 26 13:23:08 2022 -0400 Merge pull request #1777 from FCP-INDI/differentiate-sametime-data-configs 🐛 Differentiate sametime data and pipeline configs commit eed40c51d9c63914339422f966ec8da7cfe8744e Merge: 76eb34d2 a64541e0 Author: Steve Giavasis Date: Mon Sep 26 13:22:50 2022 -0400 Merge pull request #1775 from FCP-INDI/deeper-working-path 🚚 Use `f'pipeline_{pipeline_name}'` subdirectories for `log`, `working`, and `output` commit 76eb34d278004dcd2389012f93ab42fa7558089c Author: sgiavasis Date: Mon Sep 26 17:21:02 2022 +0000 Merge resolution for latest nuisance and native/template-space options changes. commit b30a2e0f5fa3016c0c2cbba1d24ac0cd8c71d011 Merge: 1e24c690 ea182429 Author: sgiavasis Date: Sun Sep 25 08:30:26 2022 +0000 Merge branch 'dev-fix' into develop commit ea182429bd4fb8e50d796f86b92eccd3098bd4c5 Author: sgiavasis Date: Sun Sep 25 08:27:49 2022 +0000 Staging changes from template-space derivatives for next release. commit 9c0a89c5b6fa0dbfa363aadd28f6c9bf8efa2cda Author: Jon Clucas Date: Fri Sep 23 12:48:29 2022 -0400 :bug: Create nested working directory before writing PID to disk commit d50f8daa167c4f1834617d6cc70fb436d78f5e5d Author: Jon Clucas Date: Fri Sep 23 12:26:11 2022 -0400 :recycle: Move PID on disk one level deeper in working directory commit 137c482ee9dfd269ec41377f7666512aeab6254c Author: Jon Clucas Date: Fri Sep 23 12:17:07 2022 -0400 :recycle: Modularize checking p_name commit 1899c9e56fa822aaece7cdbd6ef3c42edb313e0c Author: Jon Clucas Date: Fri Sep 23 10:50:41 2022 -0400 :truck: Nest `failedToStart.log` as deeply as possible commit ed554a69e195c4076be21399b9a477b44b16617d Merge: 04903949 a64541e0 Author: Jon Clucas Date: Fri Sep 23 10:33:30 2022 -0400 :twisted_rightwards_arrow: Merge develop into deeper-config-path commit 238f49919b4d55a0b2d4d71072183957275063e0 Merge: 68ac96ab 1e24c690 Author: Jon Clucas Date: Fri Sep 23 10:19:13 2022 -0400 :twisted_rightwards_arrows: Merge develop into differentiate-sametime-data-configs commit a64541e060c58ad94ed3d97c85953e8d05d19deb Merge: e5ec37c2 1e24c690 Author: Jon Clucas Date: Fri Sep 23 10:14:02 2022 -0400 :twisted_rightwards_arrows: Merge develop into deeper-working-path commit 04903949816ccfa74c64e14831f45112f75a5879 Author: Jon Clucas Date: Thu Sep 22 14:38:02 2022 -0400 :pencil2: Set c['subject_id'] commit f3aefca12e4d1c47d9a6251e52d05460a4fc4db9 Author: Jon Clucas Date: Thu Sep 22 12:20:57 2022 -0400 :truck: Move C-PAC-generated data and pipeline configs one level deeper commit 2ed4d6a5d27a4df4a9668a2cd20effdcc0c50cbc Author: Jon Clucas Date: Thu Sep 22 11:21:48 2022 -0400 :truck: Move C-PAC-generated config files into log dir commit 19c3890377d1bf6ed65d371b403dafbfa8b5c5fd Merge: e5ec37c2 68ac96ab Author: Jon Clucas Date: Thu Sep 22 10:59:04 2022 -0400 :twisted_rightwards_arrows: Merge differentiate-sametime-data-configs into deeper-config-path commit 68ac96ab2ccd9e509ad58ccabe5577b569b6b6bc Author: Jon Clucas Date: Wed Sep 21 17:37:02 2022 -0400 :white_check_mark: Add unit test for hash_data_config commit 600319493a9c48e5766743a08dc74f059271f95c Author: Jon Clucas Date: Wed Sep 21 17:07:57 2022 -0400 :loud_sound: Add config hash string update to CHANGELOG commit e5ec37c22e6b09776be134c2caa189877680625e Merge: e09cfa0c 14e1591a Author: Jon Clucas Date: Wed Sep 21 16:36:39 2022 -0400 :twisted_rightwards_arrows: Merge develop into deeper-working-path commit e09cfa0c523ff4fbcbfb8543dd02e8f848a644eb Author: Jon Clucas Date: Wed Sep 21 16:13:35 2022 -0400 
:bug: Change working path update location commit 9586d7146eff224200e18622523a56da6a9826a6 Merge: 12c4f74a 94d80829 Author: Steve Giavasis Date: Mon Sep 19 21:31:01 2022 -0400 Merge pull request #1764 from FCP-INDI/fix/pipeline-config-versions 🔖 Hotfix: Update version to 1.8.4 commit 94d80829eee156ead693da1bbacb1f7e2dca3fd0 Author: sgiavasis Date: Tue Sep 20 01:27:46 2022 +0000 Fix Neurostars link in the README. commit 8804f6180d5d5c6e69f3422691ab6c034348b23e Author: Jon Clucas Date: Fri Sep 16 15:23:00 2022 -0400 :art: Rename `f'cpac_{pipeline_name}'` directories to `f'pipeline_{pipeline_name}'` to match `output`, `log`, and `working` subdirectories Co-authored-by: Steve Giavasis commit 77a1c5c534f375a4b000e6adec569d7f9f8abc62 Author: Jon Clucas Date: Fri Sep 16 15:13:12 2022 -0400 :truck: Add pipeline-config-level directory for participant working directories commit 7718419c86d546923a48a659817200788db1fc88 Author: Jon Clucas Date: Fri Sep 16 14:49:31 2022 -0400 :goal_net: Sanitize `pipeline_name` during validation commit ce6dd09eea9a47b1d8fee0c1083205cd20c30ec4 Author: sgiavasis Date: Fri Sep 9 18:51:27 2022 +0000 Updated changelog. commit 7f42613670e58d2e14a4a1b475d05a34d09232f3 Author: sgiavasis Date: Fri Sep 9 18:45:47 2022 +0000 Improved the error message for missing epi field map meta-data. commit d43aa6c1b78b71bd4142d77891f0f1e9c596af0e Author: sgiavasis Date: Fri Sep 9 04:14:31 2022 +0000 Read in TotalReadoutTime from field map metadata, if it exists. Pass TotalReadoutTime to FSL TOPUP if present. commit 9453398fefcdc83bea2b82727c52b5d140a69989 Author: Theodore (Machine User) Date: Wed Aug 31 17:42:23 2022 +0000 :bookmark: Update version to 1.8.4 (:construction_worker: Differentiate between Git branches and tags for Docker tags) --- .pylintrc | 4 +- CHANGELOG.md | 11 +- CPAC/alff/alff.py | 6 +- CPAC/cwas/cwas.py | 48 +- CPAC/cwas/mdmr.py | 10 +- CPAC/cwas/pipeline.py | 31 +- .../distortion_correction.py | 203 ++++---- CPAC/distortion_correction/utils.py | 53 ++- CPAC/func_preproc/func_ingress.py | 194 +------- CPAC/image_utils/__init__.py | 24 +- CPAC/image_utils/spatial_smoothing.py | 28 +- CPAC/image_utils/statistical_transforms.py | 21 +- CPAC/info.py | 1 + .../longitudinal_workflow.py | 28 +- CPAC/nuisance/nuisance.py | 20 +- CPAC/pipeline/__init__.py | 1 + CPAC/pipeline/check_outputs.py | 2 +- CPAC/pipeline/cpac_group_runner.py | 308 ++++++------ CPAC/pipeline/cpac_pipeline.py | 22 +- CPAC/pipeline/cpac_runner.py | 161 ++++--- CPAC/pipeline/engine.py | 39 +- CPAC/pipeline/random_state/seed.py | 21 +- CPAC/pipeline/schema.py | 167 +------ CPAC/pipeline/test/test_schema_validation.py | 9 +- CPAC/qc/tests/test_qc.py | 11 +- CPAC/randomise/randomise.py | 2 +- CPAC/registration/registration.py | 44 +- CPAC/registration/tests/mocks.py | 7 +- .../configs/group_config_template.yml | 448 +++++++++--------- .../configs/pipeline_config_default.yml | 1 + .../configs/pipeline_config_rbc-options.yml | 34 +- CPAC/resources/cpac_outputs.tsv | 2 + CPAC/utils/__init__.py | 13 +- CPAC/utils/bids_utils.py | 6 +- CPAC/utils/build_data_config.py | 8 +- CPAC/utils/configuration/__init__.py | 8 +- CPAC/utils/configuration/configuration.py | 93 +++- CPAC/utils/datasource.py | 179 ++++--- CPAC/utils/strategy.py | 36 +- CPAC/utils/test_mocks.py | 7 +- CPAC/utils/utils.py | 224 +++++++-- CPAC/utils/yaml_template.py | 11 +- README.md | 2 +- dev/docker_data/default_pipeline.yml | 14 + dev/docker_data/run.py | 41 +- requirements.txt | 1 + 46 files changed, 1395 insertions(+), 1209 deletions(-) create mode 
100644 dev/docker_data/default_pipeline.yml diff --git a/.pylintrc b/.pylintrc index 052ca1eecc..41277323fd 100644 --- a/.pylintrc +++ b/.pylintrc @@ -291,7 +291,7 @@ contextmanager-decorators=contextlib.contextmanager # List of members which are set dynamically and missed by pylint inference # system, and so shouldn't trigger E1101 when accessed. Python regular # expressions are accepted. -generated-members= +generated-members=CPAC.utils.configuration.configuration.Configuration.* # Tells whether missing members accessed in mixin class should be ignored. A # mixin class is detected if its name ends with "mixin" (case insensitive). @@ -435,6 +435,7 @@ good-names=c, ex, nb, Run, + TR, v, wf, _, @@ -443,6 +444,7 @@ good-names=c, # they will always be accepted good-names-rgxs=^_version_(extra|m[a-n]{2}[or]{2})$, # version parts in info.py .*EPI.*, + .*TE.*, .*T1.*, .*T2.* diff --git a/CHANGELOG.md b/CHANGELOG.md index 2fb0bf1da2..4d58af9559 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -14,22 +14,31 @@ All notable changes to this project will be documented in this file. The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). -## [Unreleased] +## [unreleased] ### Added - Added the ability to downsample to 10K or 2K resolution for freesurfer runs +- Added the ability to ingress TotalReadoutTime from epi field map meta-data from the JSON sidecars. +- Added the ability to use TotalReadoutTime of epi field maps in the calculation of FSL topup distortion correction. - Added ability to set minimum quality measure thresholds to all registration steps - Difference method (``-``) for ``CPAC.utils.configuration.Configuration`` instances ### Changed +- Added a level of depth to `working` directories to match `log` and `output` directory structure +- Renamed participant-pipeline-level `output` directory prefix to `pipeline_` to match `log` and `working` paths - Changed the 1mm atlases chosen in the rbc-options preconfig to the 2mm versions - For Nilearn-generated correlation matrices, diagonals are now set to all `1`s (were all `0`s) - Added ability to apply nusiance correction to template-space BOLD images - Removed ability to run single-step-resampling on motion-corrected BOLD data - Moved default pipeline config into directory with other preconfigs - Added crash messages from during and before graph building to logs +- Added data-config-specific hash string to C-PAC-generated config files +- Updated `rbc-options` preconfig to use `fmriprep-options` preprocessing ### Fixed +- Fixed [bug](https://github.com/FCP-INDI/C-PAC/issues/1795) that was causing `cpac run` to fail when passing a manual random seed via `--random_seed`. +- Replaces ``DwellTime`` with ``EffectiveEchoSpacing`` for FSL usage of the term +- Fixed an issue that was causing some epi field maps to not be ingressed if the BIDS tags were not in the correct order. 
## [v1.8.4] - 2022-06-27 diff --git a/CPAC/alff/alff.py b/CPAC/alff/alff.py index 15a09c90f2..e0f692182c 100644 --- a/CPAC/alff/alff.py +++ b/CPAC/alff/alff.py @@ -246,8 +246,8 @@ def alff_falff(wf, cfg, strat_pool, pipe_num, opt=None): "switch": ["run"], "option_key": "None", "option_val": "None", - "inputs": [["desc-cleaned_bold", "desc-brain_bold", "desc-preproc_bold", - "bold"], + "inputs": [["desc-cleanedNofilt_bold", "desc-brain_bold", + "desc-preproc_bold", "bold"], "space-bold_desc-brain_mask"], "outputs": ["alff", "falff"]} @@ -262,7 +262,7 @@ def alff_falff(wf, cfg, strat_pool, pipe_num, opt=None): alff.get_node('hp_input').iterables = ('hp', alff.inputs.hp_input.hp) alff.get_node('lp_input').iterables = ('lp', alff.inputs.lp_input.lp) - node, out = strat_pool.get_data(["desc-cleaned_bold", "desc-brain_bold", + node, out = strat_pool.get_data(["desc-cleanedNofilt_bold", "desc-brain_bold", "desc-preproc_bold", "bold"]) wf.connect(node, out, alff, 'inputspec.rest_res') diff --git a/CPAC/cwas/cwas.py b/CPAC/cwas/cwas.py index 5264b07bd0..5513ed8cac 100644 --- a/CPAC/cwas/cwas.py +++ b/CPAC/cwas/cwas.py @@ -3,6 +3,9 @@ import nibabel as nb import numpy as np import pandas as pd +import scipy.stats +from scipy.stats import t +from numpy import inf from CPAC.cwas.mdmr import mdmr from CPAC.utils import correlation @@ -35,7 +38,6 @@ def joint_mask(subjects, mask_file=None): mask_file = os.path.join(os.getcwd(), 'joint_mask.nii.gz') create_merged_copefile(files, cope_file) create_merge_mask(cope_file, mask_file) - return mask_file @@ -48,7 +50,6 @@ def calc_mdmrs(D, regressor, cols, permutations): def calc_subdists(subjects_data, voxel_range): subjects, voxels, _ = subjects_data.shape D = np.zeros((len(voxel_range), subjects, subjects)) - for i, v in enumerate(voxel_range): profiles = np.zeros((subjects, voxels)) for si in range(subjects): @@ -67,6 +68,12 @@ def calc_cwas(subjects_data, regressor, regressor_selected_cols, permutations, v D, regressor, regressor_selected_cols, permutations) return F_set, p_set +def pval_to_zval(p_set, permu): + inv_pval = 1 - p_set + zvals = t.ppf(inv_pval, (len(p_set) - 1)) + zvals[zvals == -inf] = permu / (permu + 1) + zvals[zvals == inf] = permu / (permu + 1) + return zvals def nifti_cwas(subjects, mask_file, regressor_file, participant_column, columns_string, permutations, voxel_range): @@ -130,7 +137,7 @@ def nifti_cwas(subjects, mask_file, regressor_file, participant_column, for sub_id in subject_ids: if str(sub_id).lstrip('0') == str(pheno_sub_id): regressor_data.at[index, participant_column] = str(sub_id) - + regressor_data.index = regressor_data[participant_column] # Keep only data from specific subjects @@ -144,33 +151,27 @@ def nifti_cwas(subjects, mask_file, regressor_file, participant_column, if len(regressor_selected_cols) == 0: regressor_selected_cols = [i for i, c in enumerate(regressor_cols)] regressor_selected_cols = np.array(regressor_selected_cols) - # Remove participant id column from the dataframe and convert it to a numpy matrix regressor = ordered_regressor_data \ .drop(columns=[participant_column]) \ .reset_index(drop=True) \ .values \ .astype(np.float64) - if len(regressor.shape) == 1: regressor = regressor[:, np.newaxis] elif len(regressor.shape) != 2: raise ValueError('Bad regressor shape: %s' % str(regressor.shape)) - if len(subject_files) != regressor.shape[0]: raise ValueError('Number of subjects does not match regressor size') - - mask = nb.load(mask_file).get_data().astype('bool') + mask = 
nb.load(mask_file).get_fdata().astype('bool') mask_indices = np.where(mask) - subjects_data = np.array([ - nb.load(subject_file).get_data().astype('float64')[mask_indices].T + nb.load(subject_file).get_fdata().astype('float64')[mask_indices] for subject_file in subject_files ]) F_set, p_set = calc_cwas(subjects_data, regressor, regressor_selected_cols, permutations, voxel_range) - cwd = os.getcwd() F_file = os.path.join(cwd, 'pseudo_F.npy') p_file = os.path.join(cwd, 'significance_p.npy') @@ -182,7 +183,7 @@ def nifti_cwas(subjects, mask_file, regressor_file, participant_column, def create_cwas_batches(mask_file, batches): - mask = nb.load(mask_file).get_data().astype('bool') + mask = nb.load(mask_file).get_fdata().astype('bool') voxels = mask.sum(dtype=int) return np.array_split(np.arange(voxels), batches) @@ -198,7 +199,7 @@ def volumize(mask_image, data): ) -def merge_cwas_batches(cwas_batches, mask_file): +def merge_cwas_batches(cwas_batches, mask_file, z_score, permutations): _, _, voxel_range = zip(*cwas_batches) voxels = np.array(np.concatenate(voxel_range)) @@ -211,18 +212,37 @@ def merge_cwas_batches(cwas_batches, mask_file): p_set[voxel_range] = np.load(p_file) log_p_set = -np.log10(p_set) + one_p_set = 1 - p_set F_vol = volumize(mask_image, F_set) p_vol = volumize(mask_image, p_set) log_p_vol = volumize(mask_image, log_p_set) + one_p_vol = volumize(mask_image, one_p_set) cwd = os.getcwd() F_file = os.path.join(cwd, 'pseudo_F_volume.nii.gz') p_file = os.path.join(cwd, 'p_significance_volume.nii.gz') log_p_file = os.path.join(cwd, 'neglog_p_significance_volume.nii.gz') + one_p_file = os.path.join(cwd, 'one_minus_p_values.nii.gz') F_vol.to_filename(F_file) p_vol.to_filename(p_file) log_p_vol.to_filename(log_p_file) + one_p_vol.to_filename(one_p_file) + + if 1 in z_score: + zvals = pval_to_zval(p_set, permutations) + z_file = zstat_image(zvals, mask_file) + + return F_file, p_file, log_p_file, one_p_file, z_file + +def zstat_image(zvals, mask_file): + mask_image = nb.load(mask_file) - return F_file, p_file, log_p_file + z_vol = volumize(mask_image, zvals) + + cwd = os.getcwd() + z_file = os.path.join(cwd, 'zstat.nii.gz') + + z_vol.to_filename(z_file) + return z_file diff --git a/CPAC/cwas/mdmr.py b/CPAC/cwas/mdmr.py index 5aed0505ca..80524ff4d2 100644 --- a/CPAC/cwas/mdmr.py +++ b/CPAC/cwas/mdmr.py @@ -83,15 +83,15 @@ def mdmr(D, X, columns, permutations): Gs[:, di] = gower(D[di]).flatten() X1 = np.hstack((np.ones((subjects, 1)), X)) - columns = columns.copy() + 1 + columns = columns.copy() #removed a +1 regressors = X1.shape[1] - - permutation_indexes = np.zeros((permutations + 1, subjects), dtype=np.int) + + permutation_indexes = np.zeros((permutations, subjects), dtype=np.int) permutation_indexes[0, :] = range(subjects) - for i in range(1, permutations + 1): + for i in range(1, permutations): permutation_indexes[i,:] = np.random.permutation(subjects) - + H2perms = gen_h2_perms(X1, columns, permutation_indexes) IHperms = gen_ih_perms(X1, columns, permutation_indexes) diff --git a/CPAC/cwas/pipeline.py b/CPAC/cwas/pipeline.py index 401c7fa336..5dcab83340 100644 --- a/CPAC/cwas/pipeline.py +++ b/CPAC/cwas/pipeline.py @@ -2,6 +2,7 @@ import os from CPAC.pipeline import nipype_pipeline_engine as pe import nipype.interfaces.utility as util +from nipype import config from CPAC.utils.interfaces.function import Function @@ -11,6 +12,7 @@ create_cwas_batches, merge_cwas_batches, nifti_cwas, + zstat_image, ) @@ -54,6 +56,8 @@ def create_cwas(name='cwas', working_dir=None, crash_dir=None): 
Pseudo F values of CWAS outputspec.p_map : string (nifti file) Significance p values calculated from permutation tests + outputspec.z_map : string (nifti file) + Significance p values converted to z-scores CWAS Procedure: @@ -83,10 +87,9 @@ def create_cwas(name='cwas', working_dir=None, crash_dir=None): References ---------- - .. [1] Shehzad Z, Kelly C, Reiss PT, Emerson JW, McMahon K, Copland DA, Castellanos FX, Milham MP. An Analytic Framework for Connectome-Wide Association Studies. Under Review. + .. [1] Shehzad Z, Kelly C, Reiss PT, Cameron Craddock R, Emerson JW, McMahon K, Copland DA, Castellanos FX, Milham MP. A multivariate distance-based analytic framework for connectome-wide association studies. Neuroimage. 2014 Jun;93 Pt 1(0 1):74-94. doi: 10.1016/j.neuroimage.2014.02.024. Epub 2014 Feb 28. PMID: 24583255; PMCID: PMC4138049. """ - if not working_dir: working_dir = os.path.join(os.getcwd(), 'MDMR_work_dir') if not crash_dir: @@ -95,7 +98,8 @@ def create_cwas(name='cwas', working_dir=None, crash_dir=None): workflow = pe.Workflow(name=name) workflow.base_dir = working_dir workflow.config['execution'] = {'hash_method': 'timestamp', - 'crashdump_dir': os.path.abspath(crash_dir)} + 'crashdump_dir': os.path.abspath(crash_dir), + 'crashfile_format': 'txt'} inputspec = pe.Node(util.IdentityInterface(fields=['roi', 'subjects', @@ -103,12 +107,15 @@ def create_cwas(name='cwas', working_dir=None, crash_dir=None): 'participant_column', 'columns', 'permutations', - 'parallel_nodes']), + 'parallel_nodes', + 'z_score']), name='inputspec') outputspec = pe.Node(util.IdentityInterface(fields=['F_map', 'p_map', - 'neglog_p_map']), + 'neglog_p_map', + 'one_p_map', + 'z_map']), name='outputspec') ccb = pe.Node(Function(input_names=['mask_file', @@ -139,10 +146,14 @@ def create_cwas(name='cwas', working_dir=None, crash_dir=None): name='joint_mask') mcwasb = pe.Node(Function(input_names=['cwas_batches', - 'mask_file'], + 'mask_file', + 'z_score', + 'permutations'], output_names=['F_file', 'p_file', - 'neglog_p_file'], + 'neglog_p_file', + 'one_p_file', + 'z_file'], function=merge_cwas_batches, as_module=True), name='cwas_volumes') @@ -181,9 +192,15 @@ def create_cwas(name='cwas', working_dir=None, crash_dir=None): mcwasb, 'cwas_batches') workflow.connect(jmask, 'joint_mask', mcwasb, 'mask_file') + workflow.connect(inputspec, 'z_score', + mcwasb, 'z_score') + workflow.connect(inputspec, 'permutations', + mcwasb, 'permutations') workflow.connect(mcwasb, 'F_file', outputspec, 'F_map') workflow.connect(mcwasb, 'p_file', outputspec, 'p_map') workflow.connect(mcwasb, 'neglog_p_file', outputspec, 'neglog_p_map') + workflow.connect(mcwasb, 'one_p_file', outputspec, 'one_p_map') + workflow.connect(mcwasb, 'z_file', outputspec, 'z_map') return workflow diff --git a/CPAC/distortion_correction/distortion_correction.py b/CPAC/distortion_correction/distortion_correction.py index 33ff232b58..d47398438e 100644 --- a/CPAC/distortion_correction/distortion_correction.py +++ b/CPAC/distortion_correction/distortion_correction.py @@ -1,5 +1,21 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- +# Copyright (C) 2017-2022 C-PAC Developers + +# This file is part of C-PAC. + +# C-PAC is free software: you can redistribute it and/or modify it under +# the terms of the GNU Lesser General Public License as published by the +# Free Software Foundation, either version 3 of the License, or (at your +# option) any later version. 
+ +# C-PAC is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public +# License for more details. + +# You should have received a copy of the GNU Lesser General Public +# License along with C-PAC. If not, see . import os import subprocess @@ -104,8 +120,8 @@ def distcor_phasediff_fsl_fugue(wf, cfg, strat_pool, pipe_num, opt=None): "inputs": ["diffphase", "diffmag", "deltaTE", - "diffphase-dwell", - "dwell-asym-ratio"], + "effectiveEchoSpacing", + "ees-asym-ratio"], "outputs": ["despiked-fieldmap", "fieldmap-mask"]} ''' @@ -209,10 +225,10 @@ def distcor_phasediff_fsl_fugue(wf, cfg, strat_pool, pipe_num, opt=None): wf.connect(fslmath_mask, 'out_file', fugue1, 'mask_file') - node, out = strat_pool.get_data('diffphase-dwell') + # FSL calls EffectiveEchoSpacing "dwell_time" + node, out = strat_pool.get_data('effectiveEchoSpacing') wf.connect(node, out, fugue1, 'dwell_time') - - node, out = strat_pool.get_data('dwell-asym-ratio') + node, out = strat_pool.get_data('ees-asym-ratio') wf.connect(node, out, fugue1, 'dwell_to_asym_ratio') wf.connect(prepare, 'out_fieldmap', fugue1, 'fmap_in_file') @@ -467,8 +483,8 @@ def distcor_blip_afni_qwarp(wf, cfg, strat_pool, pipe_num, opt=None): } return (wf, outputs) - - + + def distcor_blip_fsl_topup(wf, cfg, strat_pool, pipe_num, opt=None): '''Execute FSL TOPUP to calculate the distortion "unwarp" for phase encoding direction EPI field map distortion correction. @@ -479,18 +495,19 @@ def distcor_blip_fsl_topup(wf, cfg, strat_pool, pipe_num, opt=None): "switch": ["run"], "option_key": "using", "option_val": "Blip-FSL-TOPUP", - "inputs": [(["desc-preproc_bold", "bold"], - "space-bold_desc-brain_mask"), + "inputs": [("desc-mean_bold", "space-bold_desc-brain_mask"), "pe-direction", "epi-1", "epi-1-pedir", "epi-1-TE", "epi-1-dwell", + "epi-1-total-readout", "epi-2", "epi-2-pedir", "epi-2-TE", - "epi-2-dwell"], - "outputs": ["desc-reginput_bold", + "epi-2-dwell", + "epi-2-total-readout"], + "outputs": ["desc-mean_bold", "space-bold_desc-brain_mask", "blip-warp"]} ''' @@ -531,12 +548,12 @@ def distcor_blip_fsl_topup(wf, cfg, strat_pool, pipe_num, opt=None): else: ''' - + create_list = pe.Node(interface=util.Merge(2), name="create_list") node, out = strat_pool.get_data('epi-1') wf.connect(node, out, create_list, 'in1') - + node, out = strat_pool.get_data('epi-2') wf.connect(node, out, create_list, 'in2') @@ -550,26 +567,28 @@ def distcor_blip_fsl_topup(wf, cfg, strat_pool, pipe_num, opt=None): Mask.inputs.operand_value = 0 Mask.inputs.operation = "mul" Mask.inputs.args = "-add 1" - + node, out = strat_pool.get_data('epi-1') wf.connect(node, out, Mask, 'in_file') - #zpad_phases = z_pad("zpad_phases") - #wf.connect(merge_image, "merged_file", zpad_phases, "inputspec.input_image") + # zpad_phases = z_pad("zpad_phases") + # wf.connect(merge_image, "merged_file", zpad_phases, "inputspec.input_image") - #zpad_mask = z_pad("zpad_mask") - #wf.connect(Mask, "out_file", zpad_mask, "inputspec.input_image") + # zpad_mask = z_pad("zpad_mask") + # wf.connect(Mask, "out_file", zpad_mask, "inputspec.input_image") # extrapolate existing values beyond the mask - extrap_vals = pe.Node(interface=fsl.maths.BinaryMaths(), + extrap_vals = pe.Node(interface=fsl.maths.BinaryMaths(), name="extrap_vals") extrap_vals.inputs.operation = "add" extrap_vals.inputs.operand_value = 1 extrap_vals.inputs.args = "-abs -dilM -dilM -dilM -dilM -dilM" - - 
#wf.connect(zpad_phases, "outputspec.output_image", extrap_vals, "in_file") - #wf.connect(zpad_mask, "outputspec.output_image", extrap_vals, "operand_file") - + + # wf.connect(zpad_phases, "outputspec.output_image", + # extrap_vals, "in_file") + # wf.connect(zpad_mask, "outputspec.output_image", + # extrap_vals, "operand_file") + wf.connect(merge_image, "merged_file", extrap_vals, "in_file") wf.connect(Mask, "out_file", extrap_vals, "operand_file") @@ -588,7 +607,9 @@ def distcor_blip_fsl_topup(wf, cfg, strat_pool, pipe_num, opt=None): "phase_one", "phase_two", "dwell_time_one", - "dwell_time_two" + "dwell_time_two", + "ro_time_one", + "ro_time_two" ], output_names=["acq_params"], function=phase_encode, @@ -598,18 +619,30 @@ def distcor_blip_fsl_topup(wf, cfg, strat_pool, pipe_num, opt=None): ) node, out = strat_pool.get_data('epi-1') wf.connect(node, out, phase_encoding, 'phase_one') - + node, out = strat_pool.get_data('epi-2') wf.connect(node, out, phase_encoding, 'phase_two') - + node, out = strat_pool.get_data('pe-direction') wf.connect(node, out, phase_encoding, 'unwarp_dir') - - node, out = strat_pool.get_data('epi-1-dwell') - wf.connect(node, out, phase_encoding, 'dwell_time_one') - node, out = strat_pool.get_data('epi-2-dwell') - wf.connect(node, out, phase_encoding, 'dwell_time_two') + if strat_pool.check_rpool('epi-1-dwell') and \ + strat_pool.check_rpool('epi-2-dwell'): + + node, out = strat_pool.get_data('epi-1-dwell') + wf.connect(node, out, phase_encoding, 'dwell_time_one') + + node, out = strat_pool.get_data('epi-2-dwell') + wf.connect(node, out, phase_encoding, 'dwell_time_two') + + if strat_pool.check_rpool('epi-1-total-readout') and \ + strat_pool.check_rpool('epi-2-total-readout'): + + node, out = strat_pool.get_data('epi-1-total-readout') + wf.connect(node, out, phase_encoding, 'ro_time_one') + + node, out = strat_pool.get_data('epi-2-total-readout') + wf.connect(node, out, phase_encoding, 'ro_time_two') topup_imports = ["import os", "import subprocess"] @@ -631,24 +664,23 @@ def distcor_blip_fsl_topup(wf, cfg, strat_pool, pipe_num, opt=None): wf.connect(phase_encoding, "acq_params", run_topup, "acqparams") choose_phase = pe.Node( - util.Function( - input_names=["phase_imgs", + util.Function( + input_names=["phase_imgs", "unwarp_dir"], output_names=["out_phase_image", "vnum"], function=choose_phase_image - ), - name="choose_phase", + ), name="choose_phase", ) - + wf.connect(create_list, 'out', choose_phase, 'phase_imgs') node, out = strat_pool.get_data("pe-direction") wf.connect(node, out, choose_phase, "unwarp_dir") vnum_base = pe.Node( - util.Function( - input_names=["vnum", + util.Function( + input_names=["vnum", "motion_mat_list", "jac_matrix_list", "warp_field_list"], @@ -656,72 +688,69 @@ def distcor_blip_fsl_topup(wf, cfg, strat_pool, pipe_num, opt=None): "out_jacobian", "out_warp_field"], function=find_vnum_base - ), - name="Motion_Jac_Warp_matrices", - ) + ), name="Motion_Jac_Warp_matrices", + ) wf.connect(choose_phase, 'vnum', vnum_base, 'vnum') wf.connect(run_topup, 'out_xfms', vnum_base, 'motion_mat_list') wf.connect(run_topup, 'out_jacs', vnum_base, 'jac_matrix_list') wf.connect(run_topup, 'out_warps', vnum_base, 'warp_field_list') - create_scout = pe.Node(interface=afni_utils.Calc(), - name="topupwf_create_scout") - create_scout.inputs.set( - expr='a', - single_idx=0, - outputtype='NIFTI_GZ' - ) + # create_scout = pe.Node(interface=afni_utils.Calc(), + # name="topupwf_create_scout") + # create_scout.inputs.set( + # expr='a', + # single_idx=0, + # 
outputtype='NIFTI_GZ' + # ) - node, out = strat_pool.get_data(["desc-preproc_bold", "bold"]) - wf.connect(node, out, create_scout, 'in_file_a') + mean_bold = strat_pool.node_data("desc-mean_bold") + # node, out = strat_pool.get_data(["desc-preproc_bold", "bold"]) + # wf.connect(node, out, create_scout, 'in_file_a') flirt = pe.Node(interface=fsl.FLIRT(), name="flirt") flirt.inputs.dof = 6 flirt.inputs.interp = 'spline' flirt.inputs.out_matrix_file = 'SBRef2PhaseTwo_gdc.mat' - wf.connect(create_scout, 'out_file', flirt, 'in_file') + wf.connect(mean_bold.node, mean_bold.out, flirt, 'in_file') wf.connect(choose_phase, 'out_phase_image', flirt, 'reference') - - #fsl_convert_xfm + + # fsl_convert_xfm convert_xfm = pe.Node(interface=fsl.ConvertXFM(), name="convert_xfm") convert_xfm.inputs.concat_xfm = True convert_xfm.inputs.out_file = 'SBRef2WarpField.mat' - wf.connect(flirt, 'out_matrix_file', convert_xfm,'in_file') - wf.connect(vnum_base, 'out_motion_mat', convert_xfm,'in_file2') + wf.connect(flirt, 'out_matrix_file', convert_xfm, 'in_file') + wf.connect(vnum_base, 'out_motion_mat', convert_xfm, 'in_file2') - #fsl_convert_warp + # fsl_convert_warp convert_warp = pe.Node(interface=fsl.ConvertWarp(), - name = "convert_warp") + name="convert_warp") convert_warp.inputs.relwarp = True convert_warp.inputs.out_relwarp = True convert_warp.inputs.out_file = 'WarpField.nii.gz' wf.connect(choose_phase, 'out_phase_image', convert_warp, 'reference') wf.connect(vnum_base, 'out_warp_field', convert_warp, 'warp1') - wf.connect(convert_xfm, 'out_file' ,convert_warp, 'premat') - - out_convert_warp = (convert_warp,'out_file') + wf.connect(convert_xfm, 'out_file', convert_warp, 'premat') - VolumeNumber = 1+1 + VolumeNumber = 1 + 1 vnum = str(VolumeNumber).zfill(2) name = "PhaseTwo_aw" vnum_base_two = pe.Node( - util.Function( + util.Function( input_names=["vnum", "motion_mat_list", "jac_matrix_list", "warp_field_list"], - output_names=["out_motion_mat", - "out_jacobian", + output_names=["out_motion_mat", + "out_jacobian", "out_warp_field"], function=find_vnum_base - ), - name=f"Motion_Jac_Warp_matrices_{name}", - ) + ), name=f"Motion_Jac_Warp_matrices_{name}", + ) vnum_base_two.inputs.vnum = vnum wf.connect(run_topup, 'out_xfms', vnum_base_two, 'motion_mat_list') @@ -732,7 +761,7 @@ def distcor_blip_fsl_topup(wf, cfg, strat_pool, pipe_num, opt=None): aw_two = pe.Node(interface=fsl.ApplyWarp(), name="aw_two") aw_two.inputs.relwarp = True aw_two.inputs.interp = 'spline' - + node, out = strat_pool.get_data('epi-2') wf.connect(node, out, aw_two, 'in_file') wf.connect(node, out, aw_two, 'ref_file') @@ -746,14 +775,15 @@ def distcor_blip_fsl_topup(wf, cfg, strat_pool, pipe_num, opt=None): wf.connect(aw_two, 'out_file', mul_phase_two, 'in_file') wf.connect(vnum_base_two, 'out_jacobian', mul_phase_two, 'operand_file') - - # PhaseOne (first vol) - warp and Jacobian modulate to get distortion corrected output - VolumeNumber= 0 + 1 + + # PhaseOne (first vol) - warp and Jacobian modulate to get + # distortion corrected output + VolumeNumber = 0 + 1 vnum = str(VolumeNumber).zfill(2) name = "PhaseOne_aw" vnum_base_one = pe.Node( - util.Function( + util.Function( input_names=["vnum", "motion_mat_list", "jac_matrix_list", @@ -762,9 +792,8 @@ def distcor_blip_fsl_topup(wf, cfg, strat_pool, pipe_num, opt=None): "out_jacobian", "out_warp_field"], function=find_vnum_base - ), - name=f"Motion_Jac_Warp_matrices_{name}", - ) + ), name=f"Motion_Jac_Warp_matrices_{name}", + ) vnum_base_one.inputs.vnum = vnum wf.connect(run_topup, 
'out_xfms', vnum_base_one, 'motion_mat_list') @@ -772,7 +801,7 @@ def distcor_blip_fsl_topup(wf, cfg, strat_pool, pipe_num, opt=None): wf.connect(run_topup, 'out_warps', vnum_base_one, 'warp_field_list') # fsl_applywarp to phaseOne - aw_one = pe.Node(interface=fsl.ApplyWarp(),name = "aw_one") + aw_one = pe.Node(interface=fsl.ApplyWarp(), name="aw_one") aw_one.inputs.relwarp = True aw_one.inputs.interp = 'spline' @@ -783,7 +812,7 @@ def distcor_blip_fsl_topup(wf, cfg, strat_pool, pipe_num, opt=None): wf.connect(vnum_base_one, 'out_motion_mat', aw_one, 'premat') wf.connect(vnum_base_one, 'out_warp_field', aw_one, 'field_file') - mul_phase_one = pe.Node(interface = fsl.BinaryMaths(), name="mul_phase_one") + mul_phase_one = pe.Node(interface=fsl.BinaryMaths(), name="mul_phase_one") mul_phase_one.inputs.operation = 'mul' wf.connect(aw_one, 'out_file', mul_phase_one, 'in_file') @@ -794,19 +823,20 @@ def distcor_blip_fsl_topup(wf, cfg, strat_pool, pipe_num, opt=None): aw_jac.inputs.relwarp = True aw_jac.inputs.interp = 'spline' - wf.connect(create_scout, 'out_file', aw_jac, 'in_file') #SBRef.nii.gz - wf.connect(create_scout, 'out_file', aw_jac, 'ref_file') #SBRef.nii.gz + wf.connect(mean_bold.node, mean_bold.out, aw_jac, 'in_file') # SBRef.nii.gz + wf.connect(mean_bold.node, mean_bold.out, + aw_jac, 'ref_file') # SBRef.nii.gz wf.connect(convert_warp, 'out_file', aw_jac, 'field_file') - mul_jac = pe.Node(interface = fsl.BinaryMaths(),name = "mul_jac") + mul_jac = pe.Node(interface=fsl.BinaryMaths(), name="mul_jac") mul_jac.inputs.operation = 'mul' mul_jac.inputs.out_file = "SBRef_dc_jac.nii.gz" wf.connect(aw_jac, 'out_file', mul_jac, 'in_file') wf.connect(vnum_base, 'out_jacobian', mul_jac, 'operand_file') - #Calculate Equivalent Field Map - tp_field_map = pe.Node(interface = fsl.BinaryMaths(),name = "tp_field_map") + # Calculate Equivalent Field Map + tp_field_map = pe.Node(interface=fsl.BinaryMaths(), name="tp_field_map") tp_field_map.inputs.operation = 'mul' tp_field_map.inputs.operand_value = 6.283 @@ -819,18 +849,17 @@ def distcor_blip_fsl_topup(wf, cfg, strat_pool, pipe_num, opt=None): wf.connect(run_topup, 'corrected_outfile', mag_field_map, 'in_file') - #fsl_bet - bet = pe.Node(interface = fsl.BET(), name="bet") + # fsl_bet + bet = pe.Node(interface=fsl.BET(), name="bet") bet.inputs.frac = 0.35 bet.inputs.mask = True wf.connect(mag_field_map, 'out_file', bet, 'in_file') outputs = { - 'desc-reginput_bold': (mul_jac, 'out_file'), + 'desc-mean_bold': (mul_jac, 'out_file'), 'space-bold_desc-brain_mask': (bet, 'out_file'), 'blip-warp': (convert_warp, 'out_file') } return (wf, outputs) - diff --git a/CPAC/distortion_correction/utils.py b/CPAC/distortion_correction/utils.py index e617bd6eb4..0649cc40c0 100644 --- a/CPAC/distortion_correction/utils.py +++ b/CPAC/distortion_correction/utils.py @@ -140,11 +140,9 @@ def gradient_distortion_correction(wf, inp_image, name): return (wf, out_warpmask, out_applywarp) -def phase_encode(unwarp_dir, phase_one, phase_two, dwell_time_one, - dwell_time_two): - """ - - Calculate readout time and populate parameter file +def phase_encode(unwarp_dir, phase_one, phase_two, dwell_time_one=None, + dwell_time_two=None, ro_time_one=None, ro_time_two=None): + """Calculate readout time and populate parameter file Parameters __________ @@ -159,8 +157,10 @@ def phase_encode(unwarp_dir, phase_one, phase_two, dwell_time_one, echo spacing of phase one dwell_time_two echo spacing of phase two - fsl_dir - FSL directory + ro_time_one + total readout time of phase one + 
ro_time_two + total readout time of phase two Returns _______ @@ -170,6 +170,13 @@ def phase_encode(unwarp_dir, phase_one, phase_two, dwell_time_one, """ + meta_data = [dwell_time_one, dwell_time_two, + ro_time_one, ro_time_two] + if not any(meta_data): + raise Exception("\n[!] Blip-FSL-TOPUP workflow: neither " + "TotalReadoutTime nor DwellTime is present in the " + "epi field map meta-data.") + # create text file acq_params = os.path.join(os.getcwd(), "acqparams.txt") @@ -177,22 +184,30 @@ def phase_encode(unwarp_dir, phase_one, phase_two, dwell_time_one, unwarp_dir = unwarp_dir.decode() if unwarp_dir in ["x", "x-", "-x","i","-i","i-"]: - dim = nibabel.load(phase_one).shape[0] - n_PE_steps = dim - 1 - ro_time_one = np.round(dwell_time_one * n_PE_steps, 6) - ro_time_two = np.round(dwell_time_two * n_PE_steps, 6) - ro_times = [f"-1 0 0 {ro_time_one}", f"1 0 0 {ro_time_two}"] + if dwell_time_one and dwell_time_two: + dim = nibabel.load(phase_one).shape[0] + n_PE_steps = dim - 1 + ro_time_one = np.round(dwell_time_one * n_PE_steps, 6) + ro_time_two = np.round(dwell_time_two * n_PE_steps, 6) + elif ro_time_one and ro_time_two: + ro_times = [f"-1 0 0 {ro_time_one}", f"1 0 0 {ro_time_two}"] + else: + raise Exception("[!] No dwell time or total readout time " + "present for the acq-fMRI EPI field maps.") elif unwarp_dir in ["y", "y-", "-y","j","-j","j-"]: - dim = nibabel.load(phase_one).shape[1] - n_PE_steps = dim - 1 - ro_time_one = np.round(dwell_time_one * n_PE_steps, 6) - ro_time_two = np.round(dwell_time_two * n_PE_steps, 6) - ro_times = [f"0 -1 0 {ro_time_one}", f"0 1 0 {ro_time_two}"] + if dwell_time_one and dwell_time_two: + dim = nibabel.load(phase_one).shape[1] + n_PE_steps = dim - 1 + ro_time_one = np.round(dwell_time_one * n_PE_steps, 6) + ro_time_two = np.round(dwell_time_two * n_PE_steps, 6) + elif ro_time_one and ro_time_two: + ro_times = [f"0 -1 0 {ro_time_one}", f"0 1 0 {ro_time_two}"] + else: + raise Exception("[!] No dwell time or total readout time " + "present for the acq-fMRI EPI field maps.") else: raise Exception(f"unwarp_dir={unwarp_dir} is unsupported.") - - # get number of volumes dims = [ int(subprocess.check_output([f"fslval", phase_one, "dim4"]).decode(sys.stdout.encoding)), diff --git a/CPAC/func_preproc/func_ingress.py b/CPAC/func_preproc/func_ingress.py index a7c46824c1..d34758ff58 100644 --- a/CPAC/func_preproc/func_ingress.py +++ b/CPAC/func_preproc/func_ingress.py @@ -1,26 +1,28 @@ -from nipype import logging -logger = logging.getLogger('nipype.workflow') +# Copyright (C) 2020-2022 C-PAC Developers -from CPAC.pipeline import nipype_pipeline_engine as pe +# This file is part of C-PAC. -import nipype.interfaces.afni as afni +# C-PAC is free software: you can redistribute it and/or modify it under +# the terms of the GNU Lesser General Public License as published by the +# Free Software Foundation, either version 3 of the License, or (at your +# option) any later version. -from CPAC.utils.interfaces.function import Function -from CPAC.utils.utils import ( - get_scan_params -) +# C-PAC is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public +# License for more details. +# You should have received a copy of the GNU Lesser General Public +# License along with C-PAC. If not, see . 
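# [Editor's sketch -- not part of this patch] phase_encode() above writes the
# TOPUP acqparams.txt rows, deriving total readout time as
# EffectiveEchoSpacing (dwell time) * (matrix size - 1) whenever no
# TotalReadoutTime is supplied. The helper below is a hypothetical,
# self-contained restatement of that arithmetic for an x/x- phase-encoding
# pair; it is not a C-PAC function.
import numpy as np

def build_acqparams_rows(n_pe, dwell_one=None, dwell_two=None,
                         ro_one=None, ro_two=None):
    if dwell_one and dwell_two:
        ro_one = np.round(dwell_one * (n_pe - 1), 6)
        ro_two = np.round(dwell_two * (n_pe - 1), 6)
    if ro_one is None or ro_two is None:
        raise ValueError("need either dwell times or total readout times")
    # one row per phase image: phase-encode direction unit vector + readout time
    return [f"-1 0 0 {ro_one}", f"1 0 0 {ro_two}"]

print("\n".join(build_acqparams_rows(n_pe=64, dwell_one=0.00058,
                                     dwell_two=0.00058)))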
+from nipype import logging from CPAC.utils.datasource import ( create_func_datasource, - create_fmap_datasource, - get_fmap_phasediff_metadata, - calc_deltaTE_and_asym_ratio -) + ingress_func_metadata) +logger = logging.getLogger('nipype.workflow') def connect_func_ingress(workflow, strat_list, c, sub_dict, subject_id, input_creds_path, unique_id=None): - for num_strat, strat in enumerate(strat_list): if 'func' in sub_dict: @@ -49,168 +51,8 @@ def connect_func_ingress(workflow, strat_list, c, sub_dict, subject_id, 'scan': (func_wf, 'outputspec.scan') }) - # Grab field maps - diff = False - blip = False - fmap_rp_list = [] - fmap_TE_list = [] - - if "fmap" in sub_dict: - for key in sub_dict["fmap"]: - - gather_fmap = create_fmap_datasource(sub_dict["fmap"], - "fmap_gather_" - "{0}".format(key)) - gather_fmap.inputs.inputnode.set( - subject=subject_id, - creds_path=input_creds_path, - dl_dir=c.pipeline_setup['working_directory']['path'] - ) - gather_fmap.inputs.inputnode.scan = key - strat.update_resource_pool({ - key: (gather_fmap, 'outputspec.rest'), - "{0}_scan_params".format(key): (gather_fmap, - 'outputspec.scan_params') - }) - - fmap_rp_list.append(key) - - if key == "diff_phase" or key == "diff_mag_one" or \ - key == "diff_mag_two": - diff = True - - get_fmap_metadata_imports = ['import json'] - get_fmap_metadata = pe.Node(Function( - input_names=['data_config_scan_params'], - output_names=['echo_time', - 'dwell_time', - 'pe_direction'], - function=get_fmap_phasediff_metadata, - imports=get_fmap_metadata_imports), - name='{0}_get_metadata_{1}'.format(key, - num_strat)) - - node, out_file = strat["{}_scan_params".format(key)] - workflow.connect(node, out_file, get_fmap_metadata, - 'data_config_scan_params') - - strat.update_resource_pool({ - "{}_TE".format(key): (get_fmap_metadata, - 'echo_time'), - "{}_dwell".format(key): (get_fmap_metadata, - 'dwell_time'), - "{}_pedir".format(key): (get_fmap_metadata, - 'pe_direction') - }) - fmap_TE_list.append("{}_TE".format(key)) - - if "epi_" in key: - blip = True - - if diff: - calc_delta_ratio = pe.Node(Function( - input_names=['dwell_time', - 'echo_time_one', - 'echo_time_two', - 'echo_time_three'], - output_names=['deltaTE', - 'dwell_asym_ratio'], - function=calc_deltaTE_and_asym_ratio), - name='diff_distcor_calc_delta_{}'.format(num_strat)) - - node, out_file = strat['diff_phase_dwell'] - workflow.connect(node, out_file, calc_delta_ratio, - 'dwell_time') - - node, out_file = strat[fmap_TE_list[0]] - workflow.connect(node, out_file, calc_delta_ratio, - 'echo_time_one') - - node, out_file = strat[fmap_TE_list[1]] - workflow.connect(node, out_file, calc_delta_ratio, - 'echo_time_two') - - if len(fmap_TE_list) > 2: - node, out_file = strat[fmap_TE_list[2]] - workflow.connect(node, out_file, calc_delta_ratio, - 'echo_time_three') - - strat.update_resource_pool({ - 'deltaTE': (calc_delta_ratio, 'deltaTE'), - 'dwell_asym_ratio': (calc_delta_ratio, - 'dwell_asym_ratio') - }) - - # Add in nodes to get parameters from configuration file - # a node which checks if scan_parameters are present for each scan - if unique_id is None: - workflow_name=f'scan_params_{num_strat}' - else: - workflow_name=f'scan_params_{unique_id}_{num_strat}' - - scan_params = \ - pe.Node(Function( - input_names=['data_config_scan_params', - 'subject_id', - 'scan', - 'pipeconfig_tr', - 'pipeconfig_tpattern', - 'pipeconfig_start_indx', - 'pipeconfig_stop_indx'], - output_names=['tr', - 'tpattern', - 'ref_slice', - 'start_indx', - 'stop_indx', - 'pe_direction'], - 
function=get_scan_params, - as_module=True - ), name=workflow_name) - - if "Selected Functional Volume" in c.functional_registration['1-coregistration']['func_input_prep']['input']: - get_func_volume = pe.Node(interface=afni.Calc(), - name='get_func_volume_{0}'.format( - num_strat)) - - get_func_volume.inputs.set( - expr='a', - single_idx=c.functional_registration['1-coregistration']['func_input_prep']['Selected Functional Volume']['func_reg_input_volume'], - outputtype='NIFTI_GZ' - ) - workflow.connect(func_wf, 'outputspec.rest', - get_func_volume, 'in_file_a') - - # wire in the scan parameter workflow - workflow.connect(func_wf, 'outputspec.scan_params', - scan_params, 'data_config_scan_params') - - workflow.connect(func_wf, 'outputspec.subject', - scan_params, 'subject_id') - - workflow.connect(func_wf, 'outputspec.scan', - scan_params, 'scan') - - # connect in constants - scan_params.inputs.set( - pipeconfig_start_indx=c.functional_preproc['truncation']['start_tr'], - pipeconfig_stop_indx=c.functional_preproc['truncation']['stop_tr'] - ) - - strat.update_resource_pool({ - 'raw_functional': (func_wf, 'outputspec.rest'), - 'scan_id': (func_wf, 'outputspec.scan'), - 'tr': (scan_params, 'tr'), - 'tpattern': (scan_params, 'tpattern'), - 'start_idx': (scan_params, 'start_indx'), - 'stop_idx': (scan_params, 'stop_indx'), - 'pe_direction': (scan_params, 'pe_direction'), - }) - - strat.set_leaf_properties(func_wf, 'outputspec.rest') - - if "Selected Functional Volume" in c.functional_registration['1-coregistration']['func_input_prep']['input']: - strat.update_resource_pool({ - 'selected_func_volume': (get_func_volume, 'out_file') - }) + (workflow, strat.rpool, diff, blip, fmap_rp_list + ) = ingress_func_metadata(workflow, c, strat.rpool, sub_dict, + subject_id, input_creds_path, unique_id) return (workflow, diff, blip, fmap_rp_list) diff --git a/CPAC/image_utils/__init__.py b/CPAC/image_utils/__init__.py index 3bb29507ac..6d79f91bd8 100644 --- a/CPAC/image_utils/__init__.py +++ b/CPAC/image_utils/__init__.py @@ -1,2 +1,22 @@ -from .spatial_smoothing import * -from .statistical_transforms import * +# Copyright (C) 2018-2022 C-PAC Developers + +# This file is part of C-PAC. + +# C-PAC is free software: you can redistribute it and/or modify it under +# the terms of the GNU Lesser General Public License as published by the +# Free Software Foundation, either version 3 of the License, or (at your +# option) any later version. + +# C-PAC is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public +# License for more details. + +# You should have received a copy of the GNU Lesser General Public +# License along with C-PAC. If not, see . +from .spatial_smoothing import set_gauss, spatial_smoothing +from .statistical_transforms import calc_avg, fisher_z_score_standardize, \ + z_score_standardize + +__all__ = ['calc_avg', 'fisher_z_score_standardize', 'set_gauss', + 'spatial_smoothing', 'z_score_standardize'] diff --git a/CPAC/image_utils/spatial_smoothing.py b/CPAC/image_utils/spatial_smoothing.py index de5aa81fe3..f14dff8d87 100644 --- a/CPAC/image_utils/spatial_smoothing.py +++ b/CPAC/image_utils/spatial_smoothing.py @@ -1,8 +1,22 @@ -import nipype.interfaces.fsl as fsl +# Copyright (C) 2018-2022 C-PAC Developers + +# This file is part of C-PAC. 
+ +# C-PAC is free software: you can redistribute it and/or modify it under +# the terms of the GNU Lesser General Public License as published by the +# Free Software Foundation, either version 3 of the License, or (at your +# option) any later version. + +# C-PAC is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public +# License for more details. + +# You should have received a copy of the GNU Lesser General Public +# License along with C-PAC. If not, see . +from nipype.interfaces import fsl, utility as util from nipype.interfaces.afni import preprocess as afni from CPAC.pipeline import nipype_pipeline_engine as pe -import nipype.interfaces.utility as util -from CPAC.utils import Outputs def set_gauss(fwhm): @@ -64,17 +78,17 @@ def spatial_smoothing(wf_name, fwhm, input_image_type='func_derivative', elif opt == 'AFNI': if input_image_type == 'func_derivative_multi': - output_smooth = pe.MapNode(interface= afni.BlurToFWHM(), + output_smooth = pe.MapNode(interface=afni.BlurToFWHM(), name='smooth_multi', iterfield=['in_file']) else: - output_smooth = pe.Node(interface= afni.BlurToFWHM(), + output_smooth = pe.Node(interface=afni.BlurToFWHM(), name='smooth', iterfield=['in_file']) output_smooth.inputs.outputtype = 'NIFTI_GZ' - if opt =='FSL': - # wire in the resource to be smoothed + if opt == 'FSL': + # wire in the resource to be smoothed wf.connect(inputnode, 'in_file', output_smooth, 'in_file') # get the parameters for fwhm wf.connect(inputnode_fwhm, ('fwhm', set_gauss), diff --git a/CPAC/image_utils/statistical_transforms.py b/CPAC/image_utils/statistical_transforms.py index 490fe593e4..c3b989931c 100644 --- a/CPAC/image_utils/statistical_transforms.py +++ b/CPAC/image_utils/statistical_transforms.py @@ -1,7 +1,22 @@ -from CPAC.pipeline import nipype_pipeline_engine as pe -import nipype.interfaces.utility as util -from nipype.interfaces.afni import preprocess +# Copyright (C) 2018-2022 C-PAC Developers + +# This file is part of C-PAC. + +# C-PAC is free software: you can redistribute it and/or modify it under +# the terms of the GNU Lesser General Public License as published by the +# Free Software Foundation, either version 3 of the License, or (at your +# option) any later version. +# C-PAC is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public +# License for more details. + +# You should have received a copy of the GNU Lesser General Public +# License along with C-PAC. If not, see . 
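# [Editor's sketch -- not part of this patch] set_gauss(fwhm) above prepares
# the smoothing kernel for the FSL branch of spatial_smoothing(); its body is
# not shown in this hunk. The standard conversion it would rely on -- FWHM to
# the Gaussian sigma that FSL smoothing expects -- is just:
import math

def fwhm_to_sigma(fwhm_mm):
    # FWHM = 2 * sqrt(2 * ln 2) * sigma  (~2.3548 * sigma)
    return fwhm_mm / (2.0 * math.sqrt(2.0 * math.log(2.0)))

print(round(fwhm_to_sigma(6.0), 4))  # 6 mm FWHM -> sigma ~ 2.548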
+from nipype.interfaces import utility as util +from nipype.interfaces.afni import preprocess +from CPAC.pipeline import nipype_pipeline_engine as pe from CPAC.utils import function from CPAC.utils.utils import ( extract_output_mean, diff --git a/CPAC/info.py b/CPAC/info.py index 8e43b41c4a..e3c966a850 100644 --- a/CPAC/info.py +++ b/CPAC/info.py @@ -185,6 +185,7 @@ def get_cpac_gitversion(): "nose==1.3.7", "numpy==1.16.4", "pandas==0.23.4", + "pathvalidate==2.5.2", "patsy==0.5.0", "prov==1.5.2", "psutil==5.4.6", diff --git a/CPAC/longitudinal_pipeline/longitudinal_workflow.py b/CPAC/longitudinal_pipeline/longitudinal_workflow.py index 08f399c4ef..a7ee45c210 100644 --- a/CPAC/longitudinal_pipeline/longitudinal_workflow.py +++ b/CPAC/longitudinal_pipeline/longitudinal_workflow.py @@ -1,4 +1,20 @@ # -*- coding: utf-8 -*- +# Copyright (C) 2020-2022 C-PAC Developers + +# This file is part of C-PAC. + +# C-PAC is free software: you can redistribute it and/or modify it under +# the terms of the GNU Lesser General Public License as published by the +# Free Software Foundation, either version 3 of the License, or (at your +# option) any later version. + +# C-PAC is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public +# License for more details. + +# You should have received a copy of the GNU Lesser General Public +# License along with C-PAC. If not, see . import os import copy import time @@ -44,14 +60,12 @@ subject_specific_template ) -from CPAC.utils import Strategy, find_files, function, Outputs -from CPAC.utils.utils import check_prov_for_regtool - +from CPAC.utils import find_files, function +from CPAC.utils.outputs import Outputs +from CPAC.utils.strategy import Strategy from CPAC.utils.utils import ( check_config_resources, - check_system_deps, - get_scan_params, - get_tr + check_prov_for_regtool ) logger = logging.getLogger('nipype.workflow') @@ -446,7 +460,7 @@ def anat_longitudinal_wf(subject_id, sub_list, config): workflow.run() - cpac_dir = os.path.join(out_dir, f'cpac_{orig_pipe_name}', + cpac_dir = os.path.join(out_dir, f'pipeline_{orig_pipe_name}', f'{subject_id}_{unique_id}') cpac_dirs.append(os.path.join(cpac_dir, 'anat')) diff --git a/CPAC/nuisance/nuisance.py b/CPAC/nuisance/nuisance.py index 82e75969bd..6699358cd5 100644 --- a/CPAC/nuisance/nuisance.py +++ b/CPAC/nuisance/nuisance.py @@ -14,7 +14,6 @@ from nipype.interfaces import afni from nipype.interfaces.afni import utils as afni_utils from scipy.fftpack import fft, ifft -from CPAC import utils from CPAC.utils.interfaces.function import Function from CPAC.utils.interfaces.masktool import MaskTool from CPAC.utils.interfaces.pc import PC @@ -2453,7 +2452,8 @@ def nuisance_regression(wf, cfg, strat_pool, pipe_num, opt, space): f'_{space}_{opt["Name"]}' f'_{pipe_num}') - desc_keys = ('desc-preproc_bold', 'desc-cleaned_bold') + desc_keys = ('desc-preproc_bold', 'desc-cleaned_bold', + 'desc-denoisedNofilt_bold') if space != 'native': desc_keys = tuple(f'space-{space}_{key}' for key in desc_keys) @@ -2507,6 +2507,7 @@ def nuisance_regression(wf, cfg, strat_pool, pipe_num, opt, space): outputs = { desc_keys[0]: (filt, 'outputspec.residual_file_path'), desc_keys[1]: (filt, 'outputspec.residual_file_path'), + desc_keys[2]: (nuis, 'outputspec.residual_file_path'), 'regressors': (filt, 'outputspec.residual_regressor') } @@ -2531,6 +2532,7 @@ def nuisance_regression(wf, cfg, strat_pool, 
pipe_num, opt, space): outputs = { desc_keys[0]: (nuis, 'outputspec.residual_file_path'), desc_keys[1]: (nuis, 'outputspec.residual_file_path'), + desc_keys[2]: (nuis, 'outputspec.residual_file_path') } return (wf, outputs) @@ -2553,10 +2555,13 @@ def nuisance_regression_native(wf, cfg, strat_pool, pipe_num, opt=None): "dvars"), "TR"], "outputs": {"desc-preproc_bold": { - "Description": "Preprocessed BOLD image that was nusiance-" + "Description": "Preprocessed BOLD image that was nuisance-" "regressed in native space"}, "desc-cleaned_bold": { - "Description": "Preprocessed BOLD image that was nusiance-" + "Description": "Preprocessed BOLD image that was nuisance-" + "regressed in native space"}, + "desc-denoisedNofilt_bold": { + "Description": "Preprocessed BOLD image that was nuisance-" "regressed in native space"}, "regressors": { "Description": "Regressors that were applied in native space"}}} @@ -2582,10 +2587,13 @@ def nuisance_regression_template(wf, cfg, strat_pool, pipe_num, opt=None): "dvars"), "TR"], "outputs": {"space-template_desc-preproc_bold": { - "Description": "Preprocessed BOLD image that was nusiance-" + "Description": "Preprocessed BOLD image that was nuisance-" "regressed in template space"}, "space-template_desc-cleaned_bold": { - "Description": "Preprocessed BOLD image that was nusiance-" + "Description": "Preprocessed BOLD image that was nuisance-" + "regressed in template space"}, + "space-template_desc-denoisedNofilt_bold": { + "Description": "Preprocessed BOLD image that was nuisance-" "regressed in template space"}, "regressors": { "Description": "Regressors that were applied in template space"}}} diff --git a/CPAC/pipeline/__init__.py b/CPAC/pipeline/__init__.py index 7fde15df3e..34d05e7728 100644 --- a/CPAC/pipeline/__init__.py +++ b/CPAC/pipeline/__init__.py @@ -28,4 +28,5 @@ if preconfig != 'benchmark-ANTS' and not preconfig.startswith('regtest-')] + __all__ = ['ALL_PIPELINE_CONFIGS', 'AVAILABLE_PIPELINE_CONFIGS'] diff --git a/CPAC/pipeline/check_outputs.py b/CPAC/pipeline/check_outputs.py index d35f5a30cc..380fc5783b 100644 --- a/CPAC/pipeline/check_outputs.py +++ b/CPAC/pipeline/check_outputs.py @@ -48,7 +48,7 @@ def check_outputs(output_dir, log_dir, pipe_name, unique_id): """ outputs_logger = getLogger(f'{unique_id}_expectedOutputs') missing_outputs = ExpectedOutputs() - container = os.path.join(f'cpac_{pipe_name}', unique_id) + container = os.path.join(f'pipeline_{pipe_name}', unique_id) if ( isinstance(outputs_logger, (Logger, MockLogger)) and len(outputs_logger.handlers) diff --git a/CPAC/pipeline/cpac_group_runner.py b/CPAC/pipeline/cpac_group_runner.py index 3e8b4082a2..5c27f0141f 100644 --- a/CPAC/pipeline/cpac_group_runner.py +++ b/CPAC/pipeline/cpac_group_runner.py @@ -16,7 +16,6 @@ License along with C-PAC. If not, see .""" import os import fnmatch -import pandas from CPAC.pipeline.nipype_pipeline_engine.plugins import MultiProcPlugin from CPAC.utils.monitoring import log_nodes_cb @@ -34,14 +33,13 @@ def load_config_yml(config_file, individual=False): import os import yaml import yamlordereddictloader - from CPAC.utils import Configuration try: config_path = os.path.realpath(config_file) config_dict = yaml.safe_load(open(config_path, 'r')) - config = Configuration(config_dict) + config = config_dict except Exception as e: err = "\n\n[!] 
CPAC says: Could not load or read the configuration " \ @@ -49,10 +47,10 @@ def load_config_yml(config_file, individual=False): raise Exception(err) if individual: - config.pipeline_setup['log_directory']['path'] = os.path.abspath(config.pipeline_setup['log_directory']['path']) - config.pipeline_setup['working_directory']['path'] = os.path.abspath(config.pipeline_setup['working_directory']['path']) - config.pipeline_setup['output_directory']['path'] = os.path.abspath(config.pipeline_setup['output_directory']['path']) - config.pipeline_setup['crash_log_directory']['path'] = os.path.abspath(config.pipeline_setup['crash_log_directory']['path']) + config.logDirectory = os.path.abspath(config["pipeline_setup"]["log_directory"]["path"]) + config.workingDirectory = os.path.abspath(config["pipeline_setup"]["working_directory"]["path"]) + config.outputDirectory = os.path.abspath(config["pipeline_setup"]["output_directory"]["output_path"]) + config.crashLogDirectory = os.path.abspath(config["pipeline_setup"]["crash_log_directory"]["path"]) return config @@ -133,29 +131,26 @@ def gather_nifti_globs(pipeline_output_folder, resource_list, import glob import pandas as pd import pkg_resources as p - from __builtin__ import any as b_any - ext = ".nii" + exts = ".nii" nifti_globs = [] - keys_csv = p.resource_filename('CPAC', 'resources/cpac_outputs.csv') + keys_tsv = p.resource_filename('CPAC', 'resources/cpac_outputs.tsv') try: - keys = pd.read_csv(keys_csv) + keys = pd.read_csv(keys_tsv, delimiter='\t') except Exception as e: - err = "\n[!] Could not access or read the cpac_outputs.csv " \ - "resource file:\n{0}\n\nError details {1}\n".format(keys_csv, e) + err = "\n[!] Could not access or read the cpac_outputs.tsv " \ + "resource file:\n{0}\n\nError details {1}\n".format(keys_tsv, e) raise Exception(err) derivative_list = list( - keys[keys['Derivative'] == 'yes'][keys['Space'] == 'template'][ - keys['Values'] == 'z-score']['Resource']) + keys[keys['Sub-Directory'] == 'func']['Resource']) derivative_list = derivative_list + list( - keys[keys['Derivative'] == 'yes'][keys['Space'] == 'template'][ - keys['Values'] == 'z-stat']['Resource']) + keys[keys['Sub-Directory'] == 'anat']['Resource']) if pull_func: derivative_list = derivative_list + list( - keys[keys['Functional timeseries'] == 'yes']['Resource']) + keys[keys['Space'] == 'functional']['Resource']) if len(resource_list) == 0: err = "\n\n[!] No derivatives selected!\n\n" @@ -176,25 +171,21 @@ def gather_nifti_globs(pipeline_output_folder, resource_list, dirs_to_grab.append(derivative_name) # grab MeanFD_Jenkinson just in case - dirs_to_grab.append("power_params") + dirs_to_grab.append("framewise-displacement-jenkinson") for resource_name in dirs_to_grab: - glob_string = os.path.join(pipeline_output_folder, "*", - resource_name, "*", "*") - + glob_string = os.path.join(pipeline_output_folder, "*", "*", + f"*{resource_name}*") # get all glob strings that result in a list of paths where every path # ends with a NIFTI file - prog_string = ".." - while len(glob.glob(glob_string)) != 0: - if b_any(ext in x for x in glob.glob(glob_string)) == True: + if any(exts in x for x in glob.glob(glob_string)) == True: nifti_globs.append(glob_string) glob_string = os.path.join(glob_string, "*") prog_string = prog_string + "." - print(prog_string) if len(nifti_globs) == 0: err = "\n\n[!] 
No output filepaths found in the pipeline output " \ @@ -339,23 +330,22 @@ def create_output_dict_list(nifti_globs, pipeline_output_folder, if derivatives is None: - keys_csv = p.resource_filename('CPAC', 'resources/cpac_outputs.csv') + keys_tsv = p.resource_filename('CPAC', 'resources/cpac_outputs.tsv') try: - keys = pd.read_csv(keys_csv) + keys = pd.read_csv(keys_tsv,delimiter='\t') except Exception as e: err = "\n[!] Could not access or read the cpac_outputs.csv " \ - "resource file:\n{0}\n\nError details {1}\n".format(keys_csv, e) + "resource file:\n{0}\n\nError details {1}\n".format(keys_tsv, e) raise Exception(err) derivatives = list( - keys[keys['Derivative'] == 'yes'][keys['Space'] == 'template'][ - keys['Values'] == 'z-score']['Resource']) + keys[keys['Sub-Directory'] == 'func']['Resource']) derivatives = derivatives + list( - keys[keys['Derivative'] == 'yes'][keys['Space'] == 'template'][ - keys['Values'] == 'z-stat']['Resource']) + keys[keys['Sub-Directory'] == 'anat']['Resource']) if pull_func: - derivatives = derivatives + list(keys[keys['Functional timeseries'] == 'yes']['Resource']) + derivatives = derivatives + list( + keys[keys['Space'] == 'functional']['Resource']) # remove any extra /'s pipeline_output_folder = pipeline_output_folder.rstrip("/") @@ -381,7 +371,7 @@ def create_output_dict_list(nifti_globs, pipeline_output_folder, ''' # grab MeanFD_Jenkinson just in case - search_dirs += ["power_params"] + search_dirs += ["framewise-displacement-jenkinson"] exts = ['.' + ext.lstrip('.') for ext in exts] @@ -392,23 +382,21 @@ def create_output_dict_list(nifti_globs, pipeline_output_folder, for filename in files: filepath = os.path.join(root, filename) - if not any(fnmatch.fnmatch(filepath, pattern) for pattern in nifti_globs): continue if not any(filepath.endswith(ext) for ext in exts): continue - relative_filepath = filepath.split(pipeline_output_folder)[1] filepath_pieces = [_f for _f in relative_filepath.split("/") if _f] - resource_id = filepath_pieces[1] + resource_id = '_'.join(filepath_pieces[2].split(".")[0].split("_")[3:]) if resource_id not in search_dirs: continue - series_id_string = filepath_pieces[2] - strat_info = "_".join(filepath_pieces[3:])[:-len(ext)] + series_id_string = filepath_pieces[2].split("_")[1] + strat_info = "_".join(filepath_pieces[2].split("_")[2:3]) unique_resource_id = (resource_id, strat_info) @@ -429,7 +417,7 @@ def create_output_dict_list(nifti_globs, pipeline_output_folder, new_row_dict["Filepath"] = filepath print('{0} - {1} - {2}'.format( - unique_id, + unique_id.split("_")[0], series_id, resource_id )) @@ -525,14 +513,12 @@ def pheno_sessions_to_repeated_measures(pheno_df, sessions_list): More info: https://fsl.fmrib.ox.ac.uk/fsl/fslwiki/FEAT/ UserGuide#Paired_Two-Group_Difference_.28Two-Sample_Paired_T-Test.29 - Sample input: pheno_df sub01 sub02 sessions_list [ses01, ses02] - Expected output: pheno_df Sessions participant_sub01 participant_sub02 sub01 ses01 1 0 @@ -718,12 +704,12 @@ def prep_feat_inputs(group_config_file): import pandas as pd import pkg_resources as p - keys_csv = p.resource_filename('CPAC', 'resources/cpac_outputs.csv') + keys_tsv = p.resource_filename('CPAC', 'resources/cpac_outputs.tsv') try: - keys = pd.read_csv(keys_csv) + keys = pd.read_csv(keys_tsv, delimiter='\t') except Exception as e: - err = "\n[!] Could not access or read the cpac_outputs.csv " \ - "resource file:\n{0}\n\nError details {1}\n".format(keys_csv, e) + err = "\n[!] 
Could not access or read the cpac_outputs.tsv " \ + "resource file:\n{0}\n\nError details {1}\n".format(keys_tsv, e) raise Exception(err) derivatives = list(keys[keys['Derivative'] == 'yes'][keys['Space'] == 'template'][keys['Values'] == 'z-score']['Resource']) @@ -1113,9 +1099,9 @@ def run_feat(group_config_file, feat=True): # get group pipeline config loaded c = load_config_yml(group_config_file) - pipeline_dir = c.pipeline_dir - model_name = c.model_name - out_dir = c.output_dir + pipeline_dir = c["pipeline_setup"]["output_directory"]["source_outputs_path"] + model_name = c["fsl_feat"]["model_name"] + out_dir = c["pipeline_setup"]["output_directory"]["output_path"] pipeline_name = pipeline_dir.rstrip('/').split('/')[-1] @@ -1178,7 +1164,7 @@ def run_feat(group_config_file, feat=True): models[id_tuple]['dir_path'].replace(out_dir, '').lstrip('/')) work_dir = work_dir.replace('cpac_group_analysis', 'cpac_group_analysis_workdir') work_dir = work_dir.replace('model_files/', '') - log_dir = os.path.join(c.log_dir, + log_dir = os.path.join(c["pipeline_setup"]["log_directory"]["path"], models[id_tuple]['dir_path'].replace(out_dir, '').lstrip('/')) log_dir = log_dir.replace('cpac_group_analysis', 'cpac_group_analysis_logdir') log_dir = log_dir.replace('model_files/', '') @@ -1204,9 +1190,9 @@ def run_feat(group_config_file, feat=True): design_matrix.columns, None, None, custom_contrasts_csv, - None, c.group_sep, + None, c["fsl_feat"]["group_sep"], grp_vector, - c.coding_scheme, + c["fsl_feat"]["coding_scheme"], model_name, id_tuple[0], input_files_dir) @@ -1245,17 +1231,18 @@ def run_feat(group_config_file, feat=True): f_test, mat, con, grp, model_out_dir, work_dir, log_dir, model_name, fts))) - manage_processes(procss, out_dir, c.num_models_at_once) + manage_processes(procss, out_dir, c["fsl_feat"]["num_models_at_once"]) def run_cwas_group(pipeline_dir, out_dir, working_dir, crash_dir, roi_file, regressor_file, participant_column, columns, - permutations, parallel_nodes, inclusion=None): + permutations, parallel_nodes, plugin_args, z_score, inclusion=None): import os import numpy as np from multiprocessing import pool from CPAC.cwas.pipeline import create_cwas + from nipype import config pipeline_dir = os.path.abspath(pipeline_dir) @@ -1269,12 +1256,13 @@ def run_cwas_group(pipeline_dir, out_dir, working_dir, crash_dir, roi_file, os.path.basename(pipeline_dir)) inclusion_list = None + if inclusion: inclusion_list = load_text_file(inclusion, "MDMR participant " "inclusion list") output_df_dct = gather_outputs(pipeline_dir, - ["functional_to_standard"], + ['space-template_desc-preproc_bold'], inclusion_list, False, False, get_func=True) @@ -1293,7 +1281,6 @@ def run_cwas_group(pipeline_dir, out_dir, working_dir, crash_dir, roi_file, df_dct[strat_scan] = strat_df[strat_df["Series"] == strat_scan] else: df_dct[list(set(strat_df["Series"]))[0]] = strat_df - for df_scan in df_dct.keys(): func_paths = { p.split("_")[0]: f @@ -1303,7 +1290,12 @@ def run_cwas_group(pipeline_dir, out_dir, working_dir, crash_dir, roi_file, df_dct[df_scan].Filepath ) } - + + if plugin_args['n_procs'] == 1: + plugin = 'Linear' + else: + plugin = 'MultiProc' + cwas_wf = create_cwas(name="MDMR_{0}".format(df_scan), working_dir=working_dir, crash_dir=crash_dir) @@ -1314,7 +1306,8 @@ def run_cwas_group(pipeline_dir, out_dir, working_dir, crash_dir, roi_file, cwas_wf.inputs.inputspec.columns = columns cwas_wf.inputs.inputspec.permutations = permutations cwas_wf.inputs.inputspec.parallel_nodes = parallel_nodes - cwas_wf.run() + 
cwas_wf.inputs.inputspec.z_score = z_score + cwas_wf.run(plugin=plugin, plugin_args=plugin_args) def run_cwas(pipeline_config): @@ -1325,37 +1318,44 @@ def run_cwas(pipeline_config): pipeline_config = os.path.abspath(pipeline_config) pipeconfig_dct = yaml.safe_load(open(pipeline_config, 'r')) - - pipeline = pipeconfig_dct["pipeline_dir"] - output_dir = pipeconfig_dct["output_dir"] - working_dir = pipeconfig_dct["work_dir"] - crash_dir = pipeconfig_dct["log_dir"] - - roi_file = pipeconfig_dct["mdmr_roi_file"] - regressor_file = pipeconfig_dct["mdmr_regressor_file"] - participant_column = pipeconfig_dct["mdmr_regressor_participant_column"] - columns = pipeconfig_dct["mdmr_regressor_columns"] - permutations = pipeconfig_dct["mdmr_permutations"] - parallel_nodes = pipeconfig_dct["mdmr_parallel_nodes"] - inclusion = pipeconfig_dct["participant_list"] + + num_cpus = pipeconfig_dct["pipeline_setup"]["system_config"]["num_cpus"] + mem_gb = pipeconfig_dct["pipeline_setup"]["system_config"]["num_memory"] + + plugin_args = {'n_procs' : num_cpus, 'memory_gb' : mem_gb} + + pipeline = pipeconfig_dct["pipeline_setup"]["output_directory"]["source_outputs_path"] + output_dir = pipeconfig_dct["pipeline_setup"]["output_directory"]["output_path"] + working_dir = pipeconfig_dct["pipeline_setup"]["working_directory"]["path"] + crash_dir = pipeconfig_dct["pipeline_setup"]["log_directory"]["path"] + + roi_file = pipeconfig_dct["mdmr"]["roi_file"] + regressor_file = pipeconfig_dct["mdmr"]["regressor_file"] + participant_column = pipeconfig_dct["mdmr"]["regressor_participant_column"] + columns = pipeconfig_dct["mdmr"]["regressor_columns"] + permutations = pipeconfig_dct["mdmr"]["permutations"] + parallel_nodes = pipeconfig_dct["mdmr"]["parallel_nodes"] + inclusion = pipeconfig_dct["mdmr"]["inclusion_list"] + z_score = pipeconfig_dct["mdmr"]["zscore"] if not inclusion or "None" in inclusion or "none" in inclusion: inclusion = None run_cwas_group(pipeline, output_dir, working_dir, crash_dir, roi_file, regressor_file, participant_column, columns, - permutations, parallel_nodes, + permutations, parallel_nodes, plugin_args, z_score, inclusion=inclusion) def find_other_res_template(template_path, new_resolution): - """Find the same template/standard file in another resolution, if it + """ + Find the same template/standard file in another resolution, if it exists. - template_path: file path to the template NIfTI file + new_resolution: (int) the resolution of the template file you need - NOTE: Makes an assumption regarding the filename format of the files. + """ # TODO: this is assuming there is a mm resolution in the file path - not @@ -1373,8 +1373,8 @@ def find_other_res_template(template_path, new_resolution): template_parts[0] = str(new_resolution).join(template_parts[0].rsplit(template_parts[0][-1], 1)) ref_file = "{0}{1}".format(template_parts[0], template_parts[1]) - elif "${func_resolution}" in template_path: - ref_file = template_path.replace("${func_resolution}", + elif "${resolution_for_func_preproc}" in template_path: + ref_file = template_path.replace("${resolution_for_func_preproc}", "{0}mm".format(new_resolution)) if ref_file: @@ -1474,11 +1474,11 @@ def launch_PyBASC(pybasc_config): def run_basc(pipeline_config): - """Run the PyBASC module. - + """ + Run the PyBASC module. + PyBASC is a separate Python package built and maintained by Aki Nikolaidis which implements the BASC analysis via Python. - PyBASC is based off of the following work: - Garcia-Garcia, M., Nikolaidis, A., Bellec, P., Craddock, R. 
C., Cheung, B., Castellanos, F. X., & Milham, M. P. (2017). Detecting stable individual differences in the functional organization of the human basal ganglia. NeuroImage. @@ -1486,16 +1486,12 @@ def run_basc(pipeline_config): Multi-level bootstrap analysis of stable clusters in resting-state fMRI. Neuroimage, 51(3), 1126-1139. - Bellec, P., Marrelec, G., & Benali, H. (2008). A bootstrap test to investigate changes in brain connectivity for functional MRI. Statistica Sinica, 1253-1268. - PyBASC GitHub repository: https://github.com/AkiNikolaidis/PyBASC - PyBASC author: https://www.researchgate.net/profile/Aki_Nikolaidis - Inputs pipeline_config: path to C-PAC pipeline configuration YAML file - Steps (of the C-PAC interface for PyBASC, not PyBASC itself) 1. Read in the PyBASC-relevant pipeline config items and create a new PyBASC config dictionary. @@ -1508,7 +1504,7 @@ def run_basc(pipeline_config): selected to run PyBASC for (preprocessed and template-space functional time series are pulled from each pipeline output directory, for input into PyBASC). - 6. Gather functional_to_standard outputs from each pipeline. + 6. Gather space-template_bold outputs from each pipeline. 7. Create further sub-directories for each nuisance regression strategy and functional scan within each C-PAC pipeline, and separate the functional outputs by strategy and scan as well. @@ -1518,7 +1514,6 @@ def run_basc(pipeline_config): into a config YAML file for each pipeline-strategy-scan we are running. 10. Launch PyBASC for each configuration generated. - """ import os @@ -1530,27 +1525,28 @@ def run_basc(pipeline_config): pipeconfig_dct = yaml.safe_load(open(pipeline_config, 'r')) - output_dir = os.path.abspath(pipeconfig_dct["output_dir"]) - working_dir = os.path.abspath(pipeconfig_dct['work_dir']) - if pipeconfig_dct['pipeline_setup']['Amazon-AWS']['aws_output_bucket_credentials']: - creds_path = os.path.abspath(pipeconfig_dct['pipeline_setup']['Amazon-AWS']['aws_output_bucket_credentials']) + output_dir = os.path.abspath(pipeconfig_dct["pipeline_setup"]["output_directory"]["output_path"]) + working_dir = os.path.abspath(pipeconfig_dct["pipeline_setup"]["working_directory"]["path"]) + if pipeconfig_dct["pipeline_setup"]["Amazon-AWS"]['aws_output_bucket_credentials']: + creds_path = os.path.abspath( + pipeconfig_dct["pipeline_setup"]["Amazon-AWS"]['aws_output_bucket_credentials']) - func_template = pipeconfig_dct["template_brain_only_for_func"] + func_template = pipeconfig_dct["basc"]["template_brain_only_for_func"] if '$FSLDIR' in func_template: if os.environ.get('FSLDIR'): func_template = func_template.replace('$FSLDIR', os.environ['FSLDIR']) - basc_inclusion = pipeconfig_dct["participant_list"] - basc_scan_inclusion = pipeconfig_dct["basc_scan_inclusion"] - basc_resolution = pipeconfig_dct["basc_resolution"] + basc_inclusion = pipeconfig_dct["pipeline_setup"]["output_directory"]["participant_list"] + basc_scan_inclusion = pipeconfig_dct["basc"]["scan_inclusion"] + basc_resolution = pipeconfig_dct["basc"]["resolution"] basc_config_dct = {'run': True, 'reruns': 1} for key in pipeconfig_dct.keys(): if 'basc' in key: - basc_config_dct[key.replace('basc_', '')] = pipeconfig_dct[key] + basc_config_dct = pipeconfig_dct[key] iterables = ['dataset_bootstrap_list', 'timeseries_bootstrap_list', 'blocklength_list', 'n_clusters_list', 'output_sizes'] @@ -1610,7 +1606,8 @@ def run_basc(pipeline_config): roi_file_two = resample_cpac_output_image(roi_two_cmd_args) basc_config_dct['cross_cluster_mask_file'] = roi_file_two - 
pipeline_dir = os.path.abspath(pipeconfig_dct["pipeline_dir"]) + pipeline_dir = os.path.abspath(pipeconfig_dct["pipeline_setup"] + ["output_directory"]["source_outputs_path"]) out_dir = os.path.join(output_dir, 'cpac_group_analysis', 'PyBASC', '{0}mm_resolution'.format(basc_resolution), @@ -1638,8 +1635,7 @@ def run_basc(pipeline_config): # - each dataframe will contain output filepaths and their associated # information, and each dataframe will include ALL SERIES/SCANS output_df_dct = gather_outputs(pipeline_dir, - ["functional_to_standard", - "functional_mni"], + ["space-template_bold"], inclusion_list, False, False, get_func=True) @@ -1671,8 +1667,8 @@ def run_basc(pipeline_config): if df_scan not in scan_inclusion: continue - basc_config_dct['analysis_ID'] = '{0}_{1}'.format(os.path.basename(pipeline_dir), - df_scan) + basc_config_dct['analysis_ID'] = '{0}_{1}'.format( + os.path.basename(pipeline_dir), df_scan) # add scan label and nuisance regression strategy label to the # output directory path @@ -1688,7 +1684,7 @@ def run_basc(pipeline_config): # affinity threshold is an iterable, and must match the number of # functional file paths for the MapNodes - affinity_thresh = pipeconfig_dct['basc_affinity_thresh'] * len(func_paths) + affinity_thresh = pipeconfig_dct["basc"]["affinity_thresh"] * len(func_paths) # resampling if necessary # each run should take the file, resample it and write it @@ -1746,11 +1742,11 @@ def run_isc_group(pipeline_dir, out_dir, working_dir, crash_dir, output_df_dct = gather_outputs( pipeline_dir, - ["functional_to_standard", "roi_timeseries"], + ["space-template_bold", "desc-Mean_timeseries"], inclusion_list=None, get_motion=False, get_raw_score=False, get_func=True, - derivatives=["functional_to_standard", "roi_timeseries"], - exts=['nii', 'nii.gz', 'csv'] + derivatives=["space-template_bold", "desc-Mean_timeseries"], + #exts=['nii', 'nii.gz', 'csv'] ) iteration_ids = [] @@ -1759,13 +1755,13 @@ def run_isc_group(pipeline_dir, out_dir, working_dir, crash_dir, derivative, _ = preproc_strat - if "voxel" not in levels and derivative == "functional_to_standard": + if "voxel" not in levels and derivative == "space-template_bold": continue - if "roi" not in levels and derivative == "roi_timeseries": + if "roi" not in levels and derivative == "desc-Mean_timeseries": continue - if derivative == "roi_timeseries": + if derivative == "desc-Mean_timeseries": if roi_inclusion: # backwards because ROI labels embedded as substrings for roi_label in roi_inclusion: @@ -1776,7 +1772,6 @@ def run_isc_group(pipeline_dir, out_dir, working_dir, crash_dir, "{1}/{2}\n".format(roi_label, derivative, _)) continue - df_dct = {} strat_df = output_df_dct[preproc_strat] @@ -1803,10 +1798,12 @@ def run_isc_group(pipeline_dir, out_dir, working_dir, crash_dir, ) } - unique_out_dir = os.path.join(out_dir, "ISC", derivative, _, df_scan) + unique_out_dir = os.path.join(out_dir, "ISC", derivative, _, + df_scan) it_id = "ISC_{0}_{1}_{2}".format(df_scan, derivative, - _.replace('.', '').replace('+', '')) + _.replace('.', '').replace( + '+', '')) isc_wf = create_isc(name=it_id, output_dir=unique_out_dir, @@ -1816,10 +1813,8 @@ def run_isc_group(pipeline_dir, out_dir, working_dir, crash_dir, isc_wf.inputs.inputspec.permutations = permutations isc_wf.inputs.inputspec.std = std_filter isc_wf.inputs.inputspec.collapse_subj = False - plugin_args = {'n_procs': num_cpus, - 'status_callback': log_nodes_cb} - isc_wf.run(plugin=MultiProcPlugin(plugin_args), - plugin_args=plugin_args) + 
isc_wf.run(plugin='MultiProc', + plugin_args={'n_procs': num_cpus}) if isfc: for df_scan in df_dct.keys(): @@ -1835,10 +1830,12 @@ def run_isc_group(pipeline_dir, out_dir, working_dir, crash_dir, ) } - unique_out_dir = os.path.join(out_dir, "ISFC", derivative, _, df_scan) + unique_out_dir = os.path.join(out_dir, "ISFC", derivative, _, + df_scan) it_id = "ISFC_{0}_{1}_{2}".format(df_scan, derivative, - _.replace('.', '').replace('+', '')) + _.replace('.', '').replace( + '+', '')) isfc_wf = create_isfc(name=it_id, output_dir=unique_out_dir, @@ -1848,10 +1845,8 @@ def run_isc_group(pipeline_dir, out_dir, working_dir, crash_dir, isfc_wf.inputs.inputspec.permutations = permutations isfc_wf.inputs.inputspec.std = std_filter isfc_wf.inputs.inputspec.collapse_subj = False - plugin_args = {'n_procs': num_cpus, - 'status_callback': log_nodes_cb} - isfc_wf.run(plugin=MultiProcPlugin(plugin_args), - plugin_args=plugin_args) + isfc_wf.run(plugin='MultiProc', + plugin_args={'n_procs': num_cpus}) def run_isc(pipeline_config): @@ -1863,23 +1858,23 @@ def run_isc(pipeline_config): pipeconfig_dct = yaml.safe_load(open(pipeline_config, 'r')) - pipeline_dir = pipeconfig_dct["pipeline_dir"] + pipeline_dir = pipeconfig_dct["pipeline_setup"]["output_directory"]["source_outputs_path"] - output_dir = pipeconfig_dct["output_dir"] - working_dir = pipeconfig_dct["work_dir"] - crash_dir = pipeconfig_dct["log_dir"] + output_dir = pipeconfig_dct["pipeline_setup"]["output_directory"]["output_path"] + working_dir = pipeconfig_dct["pipeline_setup"]["working_directory"]["path"] + crash_dir = pipeconfig_dct["pipeline_setup"]["log_directory"]["path"] scan_inclusion = None if "scan_inclusion" in pipeconfig_dct.keys(): - scan_inclusion = pipeconfig_dct["scan_inclusion"] + scan_inclusion = pipeconfig_dct["pipeline_setup"]["system_config"]["scan_inclusion"] roi_inclusion = None if "isc_roi_inclusion" in pipeconfig_dct.keys(): - roi_inclusion = pipeconfig_dct["isc_roi_inclusion"] + roi_inclusion = pipeconfig_dct["isc_isfc"]["roi_inclusion"] num_cpus = 1 if "num_cpus" in pipeconfig_dct.keys(): - num_cpus = pipeconfig_dct["num_cpus"] + num_cpus = pipeconfig_dct["pipeline_setup"]["system_config"]["num_cpus"] isc = 1 in pipeconfig_dct.get("runISC", []) isfc = 1 in pipeconfig_dct.get("runISFC", []) @@ -1933,12 +1928,12 @@ def run_qpp(group_config_file): c = load_config_yml(group_config_file) - pipeline_dir = os.path.abspath(c.pipeline_dir) - out_dir = os.path.join(c.output_dir, 'cpac_group_analysis', 'QPP', + pipeline_dir = os.path.abspath(c["pipeline_setup"]["output_directory"]["source_outputs_path"]) + out_dir = os.path.join(c["pipeline_setup"]["output_directory"]["output_path"], 'cpac_group_analysis', 'QPP', os.path.basename(pipeline_dir)) - working_dir = os.path.join(c.work_dir, 'cpac_group_analysis', 'QPP', + working_dir = os.path.join(c["pipeline_setup"]["working_directory"]["path"], 'cpac_group_analysis', 'QPP', os.path.basename(pipeline_dir)) - crash_dir = os.path.join(c.log_dir, 'cpac_group_analysis', 'QPP', + crash_dir = os.path.join(c["pipeline_setup"]["crash_log_directory"]["path"], 'cpac_group_analysis', 'QPP', os.path.basename(pipeline_dir)) try: @@ -1950,28 +1945,29 @@ def run_qpp(group_config_file): outputs = gather_outputs( pipeline_dir, - ["functional_to_standard"], - inclusion_list=c.participant_list, + ["space-template_bold"], + inclusion_list=c["pipeline_setup"]["output_directory"] + ["participant_list"], get_motion=False, get_raw_score=False, get_func=True, - derivatives=["functional_to_standard"], - exts=['nii', 
'nii.gz'] + derivatives=["space-template_bold"], + #exts=['nii', 'nii.gz'] ) - if c.qpp_stratification == 'Scan': + if c["qpp"]["stratification"] == 'Scan': qpp_stratification = ['Series'] - elif c.qpp_stratification == 'Session': + elif c["qpp"]["stratification"] == 'Session': qpp_stratification = ['Sessions'] - elif c.qpp_stratification in ['Session and Scan', 'Scan and Session']: + elif c["qpp"]["stratification"] in ['Session and Scan', 'Scan and Session']: qpp_stratification = ['Sessions', 'Series'] else: qpp_stratification = [] for (resource_id, strat_info), output_df in outputs.items(): - if c.qpp_session_inclusion: - output_df = output_df[output_df["Sessions"].isin(c.qpp_session_inclusion)] - if c.qpp_scan_inclusion: - output_df = output_df[output_df["Series"].isin(c.qpp_scan_inclusion)] + if c["qpp"]["session_inclusion"]: + output_df = output_df[output_df["Sessions"].isin(c["qpp"]["session_inclusion"])] + if c["qpp"]["scan_inclusion"]: + output_df = output_df[output_df["Series"].isin(c["qpp"]["scan_inclusion"])] if qpp_stratification: output_df_groups = output_df.groupby(by=qpp_stratification) @@ -2000,12 +1996,12 @@ def run_qpp(group_config_file): wf = create_qpp(name="QPP", working_dir=group_working_dir, crash_dir=group_crash_dir) - wf.inputs.inputspec.window_length = c.qpp_window - wf.inputs.inputspec.permutations = c.qpp_permutations - wf.inputs.inputspec.lower_correlation_threshold = c.qpp_initial_threshold - wf.inputs.inputspec.higher_correlation_threshold = c.qpp_final_threshold - wf.inputs.inputspec.iterations = c.qpp_iterations - wf.inputs.inputspec.correlation_threshold_iteration = c.qpp_initial_threshold_iterations + wf.inputs.inputspec.window_length = c["qpp"]["window"] + wf.inputs.inputspec.permutations = c["qpp"]["permutations"] + wf.inputs.inputspec.lower_correlation_threshold = c["qpp"]["initial_threshold"] + wf.inputs.inputspec.higher_correlation_threshold = c["qpp"]["final_threshold"] + wf.inputs.inputspec.iterations = c["qpp"]["iterations"] + wf.inputs.inputspec.correlation_threshold_iteration = c["qpp"]["initial_threshold_iterations"] wf.inputs.inputspec.convergence_iterations = 1 wf.inputs.inputspec.datasets = output_df_group.Filepath.tolist() @@ -2069,25 +2065,25 @@ def run(config_file): c = load_config_yml(config_file) # Run MDMR, if selected - if 1 in c.runMDMR: + if 1 in c["mdmr"]["run"]: run_cwas(config_file) # Run ISC, if selected - if 1 in c.runISC or 1 in c.runISFC: + if 1 in c["isc_isfc"]["runISC"] or 1 in c["isc_isfc"]["runISFC"]: run_isc(config_file) # Run PyBASC, if selected - if 1 in c.run_basc: + if 1 in c["basc"]["run"]: run_basc(config_file) # Run FSL FEAT group analysis, if selected - if 1 in c.run_fsl_feat: + if 1 in c["fsl_feat"]["run"]: run_feat(config_file) # Run randomise, if selected - if 1 in c.run_randomise: + if 1 in c["fsl_randomise"]["run"]: run_feat(config_file, feat=False) #Run QPP, if selected - if 1 in c.run_qpp: - run_qpp(config_file) + if 1 in c["qpp"]["run"]: + run_qpp(config_file) \ No newline at end of file diff --git a/CPAC/pipeline/cpac_pipeline.py b/CPAC/pipeline/cpac_pipeline.py index 58ba949961..370b00c418 100644 --- a/CPAC/pipeline/cpac_pipeline.py +++ b/CPAC/pipeline/cpac_pipeline.py @@ -197,10 +197,9 @@ ) from CPAC.pipeline.random_state import set_up_random_state_logger -from CPAC.utils.datasource import bidsier_prefix, gather_extraction_maps from CPAC.pipeline.schema import valid_options from CPAC.utils.trimmer import the_trimmer -from CPAC.utils import Configuration +from CPAC.utils import Configuration, 
set_subject from CPAC.qc.pipeline import create_qc_workflow from CPAC.qc.xcp import qc_xcp @@ -248,6 +247,8 @@ def run_workflow(sub_dict, c, run, pipeline_timing_info=None, p_name=None, the prepared nipype workflow object containing the parameters specified in the config ''' + from CPAC.utils.datasource import bidsier_prefix + if plugin is not None and not isinstance(plugin, str): raise TypeError( 'CPAC.pipeline.cpac_pipeline.run_workflow requires a ' @@ -258,18 +259,9 @@ def run_workflow(sub_dict, c, run, pipeline_timing_info=None, p_name=None, # Assure that changes on config will not affect other parts c = copy.copy(c) - subject_id = sub_dict['subject_id'] - if sub_dict['unique_id']: - subject_id += "_" + sub_dict['unique_id'] - + subject_id, p_name, log_dir = set_subject(sub_dict, c) c['subject_id'] = subject_id - log_dir = os.path.join(c.pipeline_setup['log_directory']['path'], - f'pipeline_{c.pipeline_setup["pipeline_name"]}', - subject_id) - if not os.path.exists(log_dir): - os.makedirs(os.path.join(log_dir)) - set_up_logger(f'{subject_id}_expectedOutputs', filename=f'{bidsier_prefix(c["subject_id"])}_' 'expectedOutputs.yml', @@ -430,8 +422,9 @@ def run_workflow(sub_dict, c, run, pipeline_timing_info=None, p_name=None, check_centrality_lfcd=check_centrality_lfcd) # absolute paths of the dirs - c.pipeline_setup['working_directory']['path'] = os.path.abspath( - c.pipeline_setup['working_directory']['path']) + c.pipeline_setup['working_directory']['path'] = os.path.join( + os.path.abspath(c.pipeline_setup['working_directory']['path']), + p_name) if 's3://' not in c.pipeline_setup['output_directory']['path']: c.pipeline_setup['output_directory']['path'] = os.path.abspath( c.pipeline_setup['output_directory']['path']) @@ -1088,6 +1081,7 @@ def connect_pipeline(wf, cfg, rpool, pipeline_blocks): def build_workflow(subject_id, sub_dict, cfg, pipeline_name=None, num_ants_cores=1): + from CPAC.utils.datasource import gather_extraction_maps # Workflow setup wf = initialize_nipype_wf(cfg, sub_dict) diff --git a/CPAC/pipeline/cpac_runner.py b/CPAC/pipeline/cpac_runner.py index def55e9e96..7191942aa7 100644 --- a/CPAC/pipeline/cpac_runner.py +++ b/CPAC/pipeline/cpac_runner.py @@ -17,18 +17,15 @@ import os import sys import warnings -import yaml from multiprocessing import Process from time import strftime +import yaml from voluptuous.error import Invalid -from CPAC.utils.configuration import Configuration +from CPAC.utils.configuration import check_pname, Configuration, set_subject from CPAC.utils.ga import track_run from CPAC.utils.monitoring import failed_to_start, log_nodes_cb -from CPAC.longitudinal_pipeline.longitudinal_workflow import ( - anat_longitudinal_wf, - func_preproc_longitudinal_wf, - func_longitudinal_template_wf -) +from CPAC.longitudinal_pipeline.longitudinal_workflow import \ + anat_longitudinal_wf from CPAC.utils.yaml_template import upgrade_pipeline_to_1_8 @@ -324,10 +321,11 @@ def run(subject_list_file, config_file=None, p_name=None, plugin=None, warnings.warn("We recommend that the working directory full path " "should have less then 70 characters. 
" "Long paths might not work in your operating system.") - warnings.warn("Current working directory: %s" % c.pipeline_setup['working_directory']['path']) + warnings.warn("Current working directory: " + f"{c.pipeline_setup['working_directory']['path']}") # Get the pipeline name - p_name = p_name or c.pipeline_setup['pipeline_name'] + p_name = check_pname(p_name, c) # Load in subject list try: @@ -577,82 +575,89 @@ def replace_index(target1, target2, file_path): p_name, plugin, plugin_args, test_config) except Exception as exception: # pylint: disable=broad-except exitcode = 1 - failed_to_start(c['pipeline_setup', 'log_directory', - 'path'], exception) + failed_to_start(set_subject(sub, c)[2], exception) return exitcode - pid = open(os.path.join( - c.pipeline_setup['working_directory']['path'], 'pid.txt' - ), 'w') - # Init job queue job_queue = [] # Allocate processes - processes = [ - Process(target=run_workflow, - args=(sub, c, True, pipeline_timing_info, - p_name, plugin, plugin_args, test_config)) - for sub in sublist - ] - - # If we're allocating more processes than are subjects, run them all - if len(sublist) <= c.pipeline_setup['system_config']['num_participants_at_once']: - for p in processes: - try: - p.start() - print(p.pid, file=pid) - except Exception: # pylint: disable=broad-except - exitcode = 1 - failed_to_start(c['pipeline_setup', 'log_directory', - 'path']) - - # Otherwise manage resources to run processes incrementally - else: - idx = 0 - while idx < len(sublist): - # If the job queue is empty and we haven't started indexing - if len(job_queue) == 0 and idx == 0: - # Init subject process index - idc = idx - # Launch processes (one for each subject) - for p in processes[idc: idc + c.pipeline_setup[ - 'system_config']['num_participants_at_once']]: - try: - p.start() - print(p.pid, file=pid) - job_queue.append(p) - idx += 1 - except Exception: # pylint: disable=broad-except - exitcode = 1 - failed_to_start(c['pipeline_setup', - 'log_directory', 'path']) - # Otherwise, jobs are running - check them - else: - # Check every job in the queue's status - for job in job_queue: - # If the job is not alive - if not job.is_alive(): - # Find job and delete it from queue - print('found dead job ', job) - loc = job_queue.index(job) - del job_queue[loc] - # ...and start the next available process - # (subject) + processes = [Process(target=run_workflow, + args=(sub, c, True, pipeline_timing_info, p_name, + plugin, plugin_args, test_config)) for + sub in sublist] + working_dir = os.path.join(c['pipeline_setup', 'working_directory', + 'path'], p_name) + # Create pipeline-specific working dir if not exists + if not os.path.exists(working_dir): + os.makedirs(working_dir) + # Set PID context to pipeline-specific file + with open(os.path.join(working_dir, 'pid.txt'), 'w', encoding='utf-8' + ) as pid: + # If we're allocating more processes than are subjects, run + # them all + if len(sublist) <= c.pipeline_setup['system_config'][ + 'num_participants_at_once']: + for i, _p in enumerate(processes): + try: + _p.start() + print(_p.pid, file=pid) + # pylint: disable=broad-except + except Exception as exception: + exitcode = 1 + failed_to_start(set_subject(sublist[i], c)[2], + exception) + # Otherwise manage resources to run processes incrementally + else: + idx = 0 + while idx < len(sublist): + # If the job queue is empty and we haven't started indexing + if len(job_queue) == 0 and idx == 0: + # Init subject process index + idc = idx + # Launch processes (one for each subject) + for _p in 
processes[idc: idc + c.pipeline_setup[ + 'system_config']['num_participants_at_once']]: try: - processes[idx].start() - # Append this to job queue and increment index - job_queue.append(processes[idx]) + _p.start() + print(_p.pid, file=pid) + job_queue.append(_p) idx += 1 - except Exception: # pylint: disable=broad-except + # pylint: disable=broad-except + except Exception as exception: exitcode = 1 - failed_to_start(c['pipeline_setup', - 'log_directory', 'path']) - # Add sleep so while loop isn't consuming 100% of CPU - time.sleep(2) - # set exitcode to 1 if any exception - if hasattr(pid, 'exitcode'): - exitcode = exitcode or pid.exitcode - # Close PID txt file to indicate finish - pid.close() + failed_to_start(set_subject(sublist[idx], + c)[2], exception) + # Otherwise, jobs are running - check them + else: + # Check every job in the queue's status + for job in job_queue: + # If the job is not alive + if not job.is_alive(): + # Find job and delete it from queue + print('found dead job ', job) + loc = job_queue.index(job) + del job_queue[loc] + # ...and start the next available + # process (subject) + try: + processes[idx].start() + # Append this to job queue and + # increment index + # pylint: disable=modified-iterating-list + job_queue.append(processes[idx]) + idx += 1 + # pylint: disable=broad-except + except Exception as exception: + exitcode = 1 + failed_to_start(set_subject(sublist[idx], + c)[2], + exception) + # Add sleep so while loop isn't consuming 100% of CPU + time.sleep(2) + # set exitcode to 1 if any exception + if hasattr(pid, 'exitcode'): + exitcode = exitcode or pid.exitcode + # Close PID txt file to indicate finish + pid.close() sys.exit(exitcode) diff --git a/CPAC/pipeline/engine.py b/CPAC/pipeline/engine.py index e836aa61d4..f58a4da299 100644 --- a/CPAC/pipeline/engine.py +++ b/CPAC/pipeline/engine.py @@ -1,19 +1,19 @@ -"""Copyright (C) 2022 C-PAC Developers +# Copyright (C) 2021-2022 C-PAC Developers -This file is part of C-PAC. +# This file is part of C-PAC. -C-PAC is free software: you can redistribute it and/or modify it under -the terms of the GNU Lesser General Public License as published by the -Free Software Foundation, either version 3 of the License, or (at your -option) any later version. +# C-PAC is free software: you can redistribute it and/or modify it under +# the terms of the GNU Lesser General Public License as published by the +# Free Software Foundation, either version 3 of the License, or (at your +# option) any later version. -C-PAC is distributed in the hope that it will be useful, but WITHOUT -ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or -FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public -License for more details. +# C-PAC is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public +# License for more details. -You should have received a copy of the GNU Lesser General Public -License along with C-PAC. If not, see .""" +# You should have received a copy of the GNU Lesser General Public +# License along with C-PAC. If not, see . 
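For orientation, here is a minimal sketch of the bounded-concurrency pattern the runner refactor above moves to: start one process per participant, never more than a fixed number at once, record child PIDs as they start, and poll the queue for finished jobs. This is illustrative only, not C-PAC code; run_bounded, targets, max_concurrent and pid_path are made-up names, and on platforms that spawn processes it should be called under an if __name__ == '__main__' guard.

import time
from multiprocessing import Process


def run_bounded(targets, max_concurrent, pid_path):
    """Run each callable in `targets` in its own process, at most
    `max_concurrent` alive at a time, recording child PIDs as they start."""
    processes = [Process(target=target) for target in targets]
    job_queue, idx = [], 0
    with open(pid_path, 'w', encoding='utf-8') as pid_file:
        while idx < len(processes) or job_queue:
            # top up the queue while there is capacity and work left
            while idx < len(processes) and len(job_queue) < max_concurrent:
                proc = processes[idx]
                proc.start()
                print(proc.pid, file=pid_file)
                job_queue.append(proc)
                idx += 1
            # drop finished jobs so new ones can start
            job_queue = [job for job in job_queue if job.is_alive()]
            time.sleep(2)  # avoid spinning at 100% CPU while waiting

For example, run_bounded([work] * 5, 2, 'pid.txt') would keep at most two participant workflows alive at any moment while still launching all five.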
import ast import logging import os @@ -31,7 +31,7 @@ fisher_z_score_standardize from CPAC.pipeline.check_outputs import ExpectedOutputs from CPAC.registration.registration import transform_derivative -from CPAC.utils import Outputs +from CPAC.utils.outputs import Outputs from CPAC.utils.datasource import ( create_anat_datasource, create_func_datasource, @@ -853,7 +853,7 @@ def gather_pipes(self, wf, cfg, all=False, add_incl=None, add_excl=None): out_dir = cfg.pipeline_setup['output_directory']['path'] pipe_name = cfg.pipeline_setup['pipeline_name'] - container = os.path.join(f'cpac_{pipe_name}', unique_id) + container = os.path.join(f'pipeline_{pipe_name}', unique_id) filename = f'{unique_id}_{resource}' out_path = os.path.join(out_dir, container, subdir, filename) @@ -1580,9 +1580,9 @@ def ingress_output_dir(cfg, rpool, unique_id, creds_path=None): print(f"\nOutput directory {out_dir} does not exist yet, " f"initializing.") return rpool - - cpac_dir = os.path.join(out_dir, - f'cpac_{cfg.pipeline_setup["pipeline_name"]}', + + cpac_dir = os.path.join(out_dir, 'pipeline_' + f'{cfg.pipeline_setup["pipeline_name"]}', unique_id) else: if os.path.isdir(out_dir): @@ -1947,9 +1947,6 @@ def initiate_rpool(wf, cfg, data_paths=None, part_id=None): ingress_raw_func_data(wf, rpool, cfg, data_paths, unique_id, part_id, ses_id) - # grab already-processed data from the output directory - rpool = ingress_output_dir(cfg, rpool, unique_id, creds_path) - # grab any file paths from the pipeline config YAML rpool = ingress_pipeconfig_paths(cfg, rpool, unique_id, creds_path) @@ -1959,7 +1956,7 @@ def initiate_rpool(wf, cfg, data_paths=None, part_id=None): def run_node_blocks(blocks, data_paths, cfg=None): import os from CPAC.pipeline import nipype_pipeline_engine as pe - from CPAC.utils.strategy import NodeBlock + from CPAC.pipeline.engine import NodeBlock if not cfg: cfg = { diff --git a/CPAC/pipeline/random_state/seed.py b/CPAC/pipeline/random_state/seed.py index 662ee7dc17..7839bf69be 100644 --- a/CPAC/pipeline/random_state/seed.py +++ b/CPAC/pipeline/random_state/seed.py @@ -25,7 +25,6 @@ from nipype.interfaces.fsl.maths import MathsCommand from nipype.interfaces.fsl.utils import ImageMaths -from CPAC.registration.utils import hardcoded_reg from CPAC.utils.interfaces.ants import AI from CPAC.utils.monitoring.custom_logging import set_up_logger @@ -108,6 +107,7 @@ def random_seed_flags(): ... 'functions', 'interfaces']]) True ''' + from CPAC.registration.utils import hardcoded_reg seed = random_seed() if seed is None: return {'functions': {}, 'interfaces': {}} @@ -182,16 +182,15 @@ def set_up_random_state(seed): if seed is not None: if seed == 'random': seed = random_random_seed() - if (seed != 'random' and not ( - isinstance(seed, int) and - (0 < int(seed) <= MAX_SEED) - )): - raise ValueError('Valid random seeds are positive integers up to ' - f'2147483647, "random", or None, not {seed}') - try: - _seed['seed'] = int(seed) - except (TypeError, ValueError): - _seed['seed'] = seed + else: + try: + seed = int(seed) + assert 0 < seed <= MAX_SEED + except(ValueError, TypeError, AssertionError): + raise ValueError('Valid random seeds are positive integers up to ' + f'2147483647, "random", or None, not {seed}') + + _seed['seed'] = seed return random_seed() diff --git a/CPAC/pipeline/schema.py b/CPAC/pipeline/schema.py index a69994dfea..edd5dee4ae 100644 --- a/CPAC/pipeline/schema.py +++ b/CPAC/pipeline/schema.py @@ -16,7 +16,10 @@ # License along with C-PAC. If not, see . 
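The set_up_random_state rewrite above narrows seed handling to three cases: None, the literal string 'random', or a positive integer no larger than 2147483647, with everything else raising ValueError. A minimal standalone sketch of that contract (illustrative names, not the C-PAC function):

import random

MAX_SEED = 2 ** 31 - 1  # 2147483647


def validate_seed(seed):
    """Return a concrete integer seed, or None if seeding is disabled."""
    if seed is None:
        return None
    if seed == 'random':
        return random.randint(1, MAX_SEED)
    try:
        seed = int(seed)
        assert 0 < seed <= MAX_SEED
    except (ValueError, TypeError, AssertionError) as error:
        raise ValueError('Valid random seeds are positive integers up to '
                         f'2147483647, "random", or None, not {seed}'
                         ) from error
    return seed

So validate_seed(77) returns 77, validate_seed('random') returns a fresh positive integer, and validate_seed(0) or validate_seed('seven') raises ValueError.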
"""Validation schema for C-PAC pipeline configurations""" # pylint: disable=too-many-lines +import re from itertools import chain, permutations +import numpy as np +from pathvalidate import sanitize_filename from voluptuous import All, ALLOW_EXTRA, Any, Capitalize, Coerce, \ ExactSequence, ExclusiveInvalid, In, Length, Lower, \ Match, Maybe, Optional, Range, Required, Schema @@ -201,170 +204,15 @@ def permutation_message(key, options): ''' -def _combine_labels(config_dict, list_to_combine, new_key): - ''' - Helper function to combine formerly separate keys into a - combined key. - - Parameters - ---------- - config_dict: dict - - key_sequence: iterable of lists or tuples - - new_key: list or tuple - - Returns - ------- - updated_config_dict: dict - ''' - new_value = [] - any_old_values = False - for _to_combine in list_to_combine: - try: - old_value = lookup_nested_value(config_dict, _to_combine) - except KeyError: - old_value = None - if old_value is not None: - any_old_values = True - if isinstance(old_value, (list, set, tuple)): - for value in old_value: - new_value.append(value) - else: - new_value.append(old_value) - config_dict = delete_nested_value(config_dict, _to_combine) - if any_old_values: - return set_nested_value(config_dict, new_key, new_value) - return config_dict - - -def _now_runswitch(config_dict, key_sequence): - ''' - Helper function to convert a formerly forkable value to a - runswitch. - - Parameters - ---------- - config_dict: dict - - key_sequence: list or tuple - - Returns - ------- - updated_config_dict: dict - ''' - try: - old_forkable = lookup_nested_value(config_dict, key_sequence) - except KeyError: - return config_dict - if isinstance(old_forkable, bool) or isinstance(old_forkable, list): - return set_nested_value( - config_dict, key_sequence, {'run': old_forkable}) - return config_dict - - -def _changes_1_8_0_to_1_8_1(config_dict): - ''' - Examples - -------- - Starting with 1.8.0 - >>> zero = {'anatomical_preproc': { - ... 'non_local_means_filtering': True, - ... 'n4_bias_field_correction': True - ... }, 'functional_preproc': { - ... 'motion_estimates_and_correction': { - ... 'calculate_motion_first': False - ... } - ... }, 'segmentation': { - ... 'tissue_segmentation': { - ... 'ANTs_Prior_Based': { - ... 'CSF_label': 0, - ... 'left_GM_label': 1, - ... 'right_GM_label': 2, - ... 'left_WM_label': 3, - ... 'right_WM_label': 4}}}} - >>> updated_apb = _changes_1_8_0_to_1_8_1(zero)[ - ... 'segmentation']['tissue_segmentation']['ANTs_Prior_Based'] - >>> updated_apb['CSF_label'] - [0] - >>> updated_apb['GM_label'] - [1, 2] - >>> updated_apb['WM_label'] - [3, 4] - - Starting with 1.8.1 - >>> one = {'anatomical_preproc': { - ... 'non_local_means_filtering': True, - ... 'n4_bias_field_correction': True - ... }, 'functional_preproc': { - ... 'motion_estimates_and_correction': { - ... 'calculate_motion_first': False - ... } - ... }, 'segmentation': { - ... 'tissue_segmentation': { - ... 'ANTs_Prior_Based': { - ... 'CSF_label': [0], - ... 'GM_label': [1, 2], - ... 'WM_label': [3, 4]}}}} - >>> updated_apb = _changes_1_8_0_to_1_8_1(one)[ - ... 
'segmentation']['tissue_segmentation']['ANTs_Prior_Based'] - >>> updated_apb['CSF_label'] - [0] - >>> updated_apb['GM_label'] - [1, 2] - >>> updated_apb['WM_label'] - [3, 4] - ''' - for key_sequence in { - ('anatomical_preproc', 'non_local_means_filtering'), - ('anatomical_preproc', 'n4_bias_field_correction') - }: - config_dict = _now_runswitch(config_dict, key_sequence) - for combiners in { - (( - ('segmentation', 'tissue_segmentation', 'ANTs_Prior_Based', - 'CSF_label'), - ), ('segmentation', 'tissue_segmentation', 'ANTs_Prior_Based', - 'CSF_label')), - (( - ('segmentation', 'tissue_segmentation', 'ANTs_Prior_Based', - 'left_GM_label'), - ('segmentation', 'tissue_segmentation', 'ANTs_Prior_Based', - 'right_GM_label') - ), ('segmentation', 'tissue_segmentation', 'ANTs_Prior_Based', - 'GM_label')), - (( - ('segmentation', 'tissue_segmentation', 'ANTs_Prior_Based', - 'left_WM_label'), - ('segmentation', 'tissue_segmentation', 'ANTs_Prior_Based', - 'right_WM_label') - ), ('segmentation', 'tissue_segmentation', 'ANTs_Prior_Based', - 'WM_label')) - }: - config_dict = _combine_labels(config_dict, *combiners) - try: - calculate_motion_first = lookup_nested_value( - config_dict, - ['functional_preproc', 'motion_estimates_and_correction', - 'calculate_motion_first'] - ) - except KeyError: - calculate_motion_first = None - if calculate_motion_first is not None: - del config_dict['functional_preproc'][ - 'motion_estimates_and_correction']['calculate_motion_first'] - config_dict = set_nested_value(config_dict, [ - 'functional_preproc', 'motion_estimates_and_correction', - 'motion_estimates', 'calculate_motion_first' - ], calculate_motion_first) - - return config_dict +def sanitize(filename): + '''Sanitize a filename and replace whitespaces with underscores''' + return re.sub(r'\s+', '_', sanitize_filename(filename)) latest_schema = Schema({ 'FROM': Maybe(str), 'pipeline_setup': { - 'pipeline_name': All(str, Length(min=1)), + 'pipeline_name': All(str, Length(min=1), sanitize), 'output_directory': { 'path': str, 'source_outputs_dir': Maybe(str), @@ -1106,6 +954,7 @@ def schema(config_dict): ------- dict ''' + from CPAC.utils.utils import _changes_1_8_0_to_1_8_1 partially_validated = latest_schema(_changes_1_8_0_to_1_8_1(config_dict)) try: if (partially_validated['registration_workflows'][ diff --git a/CPAC/pipeline/test/test_schema_validation.py b/CPAC/pipeline/test/test_schema_validation.py index 97f3946313..092f51c6b7 100644 --- a/CPAC/pipeline/test/test_schema_validation.py +++ b/CPAC/pipeline/test/test_schema_validation.py @@ -1,8 +1,7 @@ '''Tests for schema.py''' import pytest - -from CPAC.utils.configuration import Configuration from voluptuous.error import Invalid +from CPAC.utils.configuration import Configuration @pytest.mark.parametrize('run_value', [ @@ -26,3 +25,9 @@ def test_motion_estimates_and_correction(run_value): assert "func#motion_estimate_filter_valid_options" in str(e.value) else: Configuration(d) + + +def test_pipeline_name(): + '''Test that pipeline_name sucessfully sanitizes''' + c = Configuration({'pipeline_setup': {'pipeline_name': ':va:lid name'}}) + assert c['pipeline_setup', 'pipeline_name'] == 'valid_name' diff --git a/CPAC/qc/tests/test_qc.py b/CPAC/qc/tests/test_qc.py index 9407da8004..0abf56132d 100644 --- a/CPAC/qc/tests/test_qc.py +++ b/CPAC/qc/tests/test_qc.py @@ -1,14 +1,13 @@ import os import pytest - +from nipype.interfaces import utility as util from CPAC.pipeline import nipype_pipeline_engine as pe -import nipype.interfaces.utility as util - +from 
CPAC.pipeline.cpac_group_runner import gather_outputs from CPAC.qc.pipeline import create_qc_workflow from CPAC.qc.utils import generate_qc_pages -from CPAC.utils import Configuration, Strategy, Outputs - -from CPAC.pipeline.cpac_group_runner import gather_outputs +from CPAC.utils.configuration import Configuration +from CPAC.utils.outputs import Outputs +from CPAC.utils.strategy import Strategy def file_node(path): diff --git a/CPAC/randomise/randomise.py b/CPAC/randomise/randomise.py index 77ee89e7c4..188d0fd436 100644 --- a/CPAC/randomise/randomise.py +++ b/CPAC/randomise/randomise.py @@ -1,6 +1,5 @@ from CPAC.pipeline import nipype_pipeline_engine as pe -from CPAC.pipeline.cpac_group_runner import load_config_yml def select(input_list): @@ -122,6 +121,7 @@ def run(group_config_path): import sys import pickle import yaml + from CPAC.pipeline.cpac_group_runner import load_config_yml group_config_obj = load_config_yml(group_config_path) pipeline_output_folder = group_config_obj.pipeline_dir diff --git a/CPAC/registration/registration.py b/CPAC/registration/registration.py index e49b3f93ba..a6eef5f1af 100644 --- a/CPAC/registration/registration.py +++ b/CPAC/registration/registration.py @@ -105,7 +105,8 @@ def apply_transform(wf_name, reg_tool, time_series=False, multi_input=False, interp_string.inputs.reg_tool = reg_tool wf.connect(inputNode, 'interpolation', interp_string, 'interpolation') - wf.connect(interp_string, 'interpolation', apply_warp, 'interpolation') + wf.connect(interp_string, 'interpolation', + apply_warp, 'interpolation') ants_xfm_list = \ pe.Node(util.Function(input_names=['transform'], @@ -899,8 +900,9 @@ def create_register_func_to_anat_use_T2(config, linear_reg_func_to_t1, 'out_matrix_file', outputspec, 'func_to_anat_linear_xfm_nobbreg') register_func_to_anat_use_T2.connect(convert_warp, 'out_file', - outputspec, 'func_to_anat_linear_' - 'warp_nobbreg') + outputspec, + 'func_to_anat_linear_warp_nobbreg') + register_func_to_anat_use_T2.connect(linear_reg_func_to_t1, 'out_file', guardrail_t1, 'registered') register_func_to_anat_use_T2.connect(guardrail_t1, 'registered', @@ -2710,7 +2712,9 @@ def coregistration(wf, cfg, strat_pool, pipe_num, opt=None): "desc-motion_bold", "space-bold_label-WM_mask", "despiked-fieldmap", - "fieldmap-mask"), + "fieldmap-mask", + "effectiveEchoSpacing", + "diffphase-pedir"), ("desc-brain_T1w", "desc-restore-brain_T1w", "desc-preproc_T2w", @@ -2718,9 +2722,7 @@ def coregistration(wf, cfg, strat_pool, pipe_num, opt=None): "T2w", ["label-WM_probseg", "label-WM_mask"], ["label-WM_pveseg", "label-WM_mask"], - "T1w"), - "diffphase-dwell", - "diffphase-pedir"], + "T1w")], "outputs": ["space-T1w_desc-mean_bold", "from-bold_to-T1w_mode-image_desc-linear_xfm", "from-bold_to-T1w_mode-image_desc-linear_warp"]} @@ -2783,9 +2785,8 @@ def coregistration(wf, cfg, strat_pool, pipe_num, opt=None): node, out = strat_pool.get_data('desc-restore-brain_T1w') wf.connect(node, out, func_to_anat, 'inputspec.anat') - if diff_complete: - node, out = strat_pool.get_data('diffphase-dwell') + node, out = strat_pool.get_data('effectiveEchoSpacing') wf.connect(node, out, func_to_anat, 'echospacing_input.echospacing') node, out = strat_pool.get_data('diffphase-pedir') @@ -2866,7 +2867,7 @@ def coregistration(wf, cfg, strat_pool, pipe_num, opt=None): func_to_anat_bbreg, 'inputspec.anat_wm_segmentation') if diff_complete: - node, out = strat_pool.get_data('diffphase-dwell') + node, out = strat_pool.get_data('effectiveEchoSpacing') wf.connect(node, out, func_to_anat_bbreg, 
'echospacing_input.echospacing') @@ -3692,18 +3693,18 @@ def single_step_resample_timeseries_to_T1template(wf, cfg, strat_pool, output_names=['itk_transform'], function=run_c3d), name=f'convert_bbr2itk_{pipe_num}') - guardrail_preproc = registration_guardrail_node( - 'single-step-resampling-preproc_guardrail') + if cfg.registration_workflows['functional_registration'][ 'coregistration']['boundary_based_registration'][ 'reference'] == 'whole-head': node, out = strat_pool.get_data('T1w') + wf.connect(node, out, bbr2itk, 'reference_file') + elif cfg.registration_workflows['functional_registration'][ 'coregistration']['boundary_based_registration'][ 'reference'] == 'brain': node, out = strat_pool.get_data('desc-brain_T1w') - wf.connect(node, out, bbr2itk, 'reference_file') - wf.connect(node, out, guardrail_preproc, 'reference') + wf.connect(node, out, bbr2itk, 'reference_file') node, out = strat_pool.get_data(['desc-reginput_bold', 'desc-mean_bold']) wf.connect(node, out, bbr2itk, 'source_file') @@ -3769,16 +3770,15 @@ def single_step_resample_timeseries_to_T1template(wf, cfg, strat_pool, applyxfm_func_to_standard.inputs.float = True applyxfm_func_to_standard.inputs.interpolation = 'LanczosWindowedSinc' - guardrail_brain = registration_guardrail_node( - 'single-step-resampling-brain_guardrail') wf.connect(split_func, 'out_files', applyxfm_func_to_standard, 'input_image') node, out = strat_pool.get_data('T1w-brain-template-funcreg') wf.connect(node, out, applyxfm_func_to_standard, 'reference_image') - wf.connect(node, out, guardrail_brain, 'reference') - wf.connect(collectxfm, 'out', applyxfm_func_to_standard, 'transforms') + + wf.connect(collectxfm, 'out', + applyxfm_func_to_standard, 'transforms') ### Loop ends! ### @@ -3818,13 +3818,11 @@ def single_step_resample_timeseries_to_T1template(wf, cfg, strat_pool, apply_mask, 'in_file') wf.connect(applyxfm_func_mask_to_standard, 'output_image', apply_mask, 'mask_file') - wf.connect(merge_func_to_standard, 'merged_file', - guardrail_preproc, 'registered') - wf.connect(apply_mask, 'out_file', guardrail_brain, 'registered') outputs = { - 'space-template_desc-preproc_bold': (guardrail_preproc, 'registered'), - 'space-template_desc-brain_bold': (guardrail_brain, 'registered'), + 'space-template_desc-preproc_bold': (merge_func_to_standard, + 'merged_file'), + 'space-template_desc-brain_bold': (apply_mask, 'out_file'), 'space-template_desc-bold_mask': (applyxfm_func_mask_to_standard, 'output_image'), } diff --git a/CPAC/registration/tests/mocks.py b/CPAC/registration/tests/mocks.py index b460fb5675..f3d19bda14 100644 --- a/CPAC/registration/tests/mocks.py +++ b/CPAC/registration/tests/mocks.py @@ -1,9 +1,10 @@ import os +from nipype.interfaces import utility as util from CPAC.pipeline import nipype_pipeline_engine as pe -import nipype.interfaces.utility as util -from CPAC.utils import Configuration, Strategy -from CPAC.utils.interfaces.function import Function +from CPAC.utils.configuration import Configuration from CPAC.utils.datasource import resolve_resolution +from CPAC.utils.interfaces.function import Function +from CPAC.utils.strategy import Strategy def file_node(path, file_node_num=0): input_node = pe.Node( diff --git a/CPAC/resources/configs/group_config_template.yml b/CPAC/resources/configs/group_config_template.yml index 544b996dc5..ec5b223410 100644 --- a/CPAC/resources/configs/group_config_template.yml +++ b/CPAC/resources/configs/group_config_template.yml @@ -7,334 +7,328 @@ # General Group-Level Analysis Settings 
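The group configuration template that follows is restructured from flat top-level keys (pipeline_dir, output_dir, qpp_window, ...) into nested sections (pipeline_setup, fsl_feat, fsl_randomise, basc, mdmr, isc_isfc, qpp), matching the nested lookups the updated group runner performs, e.g. cfg["pipeline_setup"]["output_directory"]["source_outputs_path"]. A minimal sketch of reading such nested keys from YAML, with a readable error when one is missing (the lookup helper and the inline YAML are illustrative, not C-PAC code):

import yaml


def lookup(config, *keys):
    """Walk nested dict `config` along `keys`, raising a readable KeyError."""
    value = config
    for key in keys:
        try:
            value = value[key]
        except (KeyError, TypeError) as error:
            raise KeyError(
                f'Missing group-config key: {" -> ".join(keys)}') from error
    return value


cfg = yaml.safe_load("""
pipeline_setup:
  output_directory:
    source_outputs_path: /source_output
    output_path: /output
""")
pipeline_dir = lookup(cfg, 'pipeline_setup', 'output_directory',
                      'source_outputs_path')
assert pipeline_dir == '/source_output'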
-############################################################################## -# The main input of group-level analysis- the output directory of your individual-level analysis pipeline run (pre-processing & derivatives for each participant). This should be a path to your C-PAC individual-level run's pipeline folder, which includes the sub-directories labeled with the participant IDs. -pipeline_dir: /path/to/output_dir +pipeline_setup: + # Name for this pipeline configuration - useful for identification. + pipeline_name: cpac-group-template -# (Optional) Full path to a list of participants to be included in the model. You can use this to easily prune participants from your model. In group-level analyses involving phenotype files, this allows you to prune participants without removing them from the phenotype CSV/TSV file. This should be a text file with one subject per line. An easy way to manually create this file is to copy the participant ID column from your phenotype file. -participant_list: None + output_directory: + # The main input of group-level analysis- the output directory of your individual-level analysis pipeline run (pre-processing & derivatives for each participant). This should be a path to your C-PAC individual-level run's pipeline folder, which includes the sub-directories labeled with the participant IDs. + source_outputs_path : /source_output -# Full path to the directory where CPAC should place group-level analysis outputs and any applicable statistical model files. -output_dir: /path/to/output/dir + # (Optional) Full path to a list of participants to be included in the model. You can use this to easily prune participants from your model. In group-level analyses involving phenotype files, this allows you to prune participants without removing them from the phenotype CSV/TSV file. This should be a text file with one subject per line. An easy way to manually create this file is to copy the participant ID column from your phenotype file. + participant_list: None + # Full path to the directory where CPAC should place group-level analysis outputs and any applicable statistical model files. + output_path: /output -#Much like the working directory for individual-level analysis, this is where the intermediate and working files will be stored during your run. This directory can be deleted later on. However, saving this directory allows the group analysis run to skip steps that have been already completed, in the case of re-runs. -work_dir: /path/to/work/dir + working_directory: + #Much like the working directory for individual-level analysis, this is where the intermediate and working files will be stored during your run. This directory can be deleted later on. However, saving this directory allows the group analysis run to skip steps that have been already completed, in the case of re-runs. + path: /tmp -#Where to write out log information for your group analysis run. -log_dir: /path/to/log/dir + #Deletes the contents of the Working Directory after running. + # This saves disk space, but any additional preprocessing or analysis will have to be completely re-run. + remove_working_dir: True + log_directory: + + # Whether to write log details of the pipeline run to the logging files. + run_logging: True -# The path to your FSL installation directory. This can be left as 'FSLDIR' to grab your system's default FSL installation. However, if you prefer to use a specific install of FSL, you can enter the path here. 
-FSLDIR: FSLDIR + #Where to write out log information for your group analysis run. + path: /logs + crash_log_directory: -# Number of CPUs to dedicate to the group-level analysis run. Parallelizes the pipeline where applicable. -num_cpus: 1 + # Directory where CPAC should write crash logs. + path: /crash + system_config: -# Scan inclusion list. For most group-level analyses, a separate model is run for each scan/series in your individual-level analysis pipeline directory. -# Use this list to prune your run to only specific scans. -# Example: -# scan_inclusion: ['rest_run-1', 'rest_run-2'] -scan_inclusion: [] + # The path to your FSL installation directory. This can be left as 'FSLDIR' to grab your system's default FSL installation. However, if you prefer to use a specific install of FSL, you can enter the path here. + FSLDIR: /usr/share/fsl/5.0 + # Number of CPUs to dedicate to the group-level analysis run. Parallelizes the pipeline where applicable. + num_cpus: 1 -# FSL-FEAT -############################################################################## - -# Run FSL FEAT group-level analysis. -run_fsl_feat : [1] - - -# How many statistical models to run in parallel. This number depends on computing resources. -num_models_at_once : 1 - - -# Specify a name for the new model. -model_name: model_name_here - + # The maximum amount of memory each participant's workflow can allocate. + # Use this to place an upper bound of memory usage. + # - Warning: 'Memory Per Participant' multiplied by 'Number of Participants to Run Simultaneously' + # must not be more than the total amount of RAM. + # - Conversely, using too little RAM can impede the speed of a pipeline run. + # - It is recommended that you set this to a value that when multiplied by + # 'Number of Participants to Run Simultaneously' is as much RAM you can safely allocate. + num_memory: 10 -# Phenotype file -# Full path to a .csv or .tsv file containing EV/regressor information for each subject. -pheno_file: /path/to/phenotypic/file.csv + # Scan inclusion list. For most group-level analyses, a separate model is run for each scan/series in your individual-level analysis pipeline directory. + # Use this list to prune your run to only specific scans. + # Example: + # scan_inclusion: ['rest_run-1', 'rest_run-2'] + scan_inclusion: [] + Amazon-AWS: -# Name of the participants column in your phenotype file. -participant_id_label: Participant + # If setting the 'Output Directory' to an S3 bucket, insert the path to your AWS credentials file here. + aws_output_bucket_credentials: + # Enable server-side 256-AES encryption on data to the S3 bucket + s3_encryption: False -# Specify which EVs from your phenotype are categorical or numerical. Of those which are numerical, specify which are to be demeaned. -# ev_selections: {'demean': ['Age'], 'categorical': ['Sex', 'Diagnosis']} -ev_selections: {'demean': [], 'categorical': []} + Debugging: - -# Specify the formula to describe your model design. Essentially, including EVs in this formula inserts them into the model. The most basic format to include each EV you select would be 'EV + EV + EV + ..', etc. You can also select to include MeanFD, Measure_Mean, and Custom_ROI_Mean here. See the C-PAC User Guide for more detailed information regarding formatting your design formula. -# design_formula: Sex + Diagnosis + Age + MeanFD_Jenkinson + Custom_ROI_Mean -design_formula: - - -# Choose the derivatives to run the group model on. 
-# -# These must be written out as a list, and must be one of the options listed below. -# -# For z-scored analyses: -# 'alff_to_standard_zstd', 'alff_to_standard_smooth_zstd', 'falff_to_standard_zstd', 'falff_to_standard_smooth_zstd', 'reho_to_standard_zstd', 'reho_to_standard_smooth_zstd', 'sca_roi_files_to_standard_fisher_zstd', 'sca_roi_files_to_standard_smooth_fisher_zstd', 'vmhc_fisher_zstd_zstat_map', 'dr_tempreg_maps_zstat_files_to_standard', 'dr_tempreg_maps_zstat_files_to_standard_smooth', 'sca_tempreg_maps_zstat_files', 'sca_tempreg_maps_zstat_files_smooth', 'centrality_outputs_zstd', 'centrality_outputs_smoothed_zstd' -# -# Example input: derivative_list : ['alff_to_standard_smooth_zstd', 'sca_roi_files_to_standard_smooth_fisher_zstd'] -# -derivative_list: [] + # Verbose developer messages. + verbose: Off -# Choose whether to use a group mask or individual-specific mask when calculating the output means to be used as a regressor. -# -# This only takes effect if you include the 'Measure_Mean' regressor in your Design Matrix Formula. -mean_mask: ['Group Mask'] +# FSL-FEAT +fsl_feat: -# Full path to a NIFTI file containing one or more ROI masks. The means of the masked regions will then be computed for each subject's output and will be included in the model as regressors (one for each ROI in the mask file) if you include 'Custom_ROI_Mean' in the Design Matrix Formula. -# custom_roi_mask: /path/to/mask.nii.gz -custom_roi_mask: None + # Run FSL FEAT group-level analysis. + run: Off + # How many statistical models to run in parallel. This number depends on computing resources. + num_models_at_once: 1 -# Choose the coding scheme to use when generating your model. 'Treatment' encoding is generally considered the typical scheme. Consult the User Guide for more information. -# -# Available options: -# 'Treatment', 'Sum' -# -coding_scheme: ['Treatment'] + # Specify a name for the new model. + model_name: model_name_here + # Phenotype file + # Full path to a .csv or .tsv file containing EV/regressor information for each subject. + pheno_file: /path -# Specify whether FSL should model the variance for each group separately. -# -# If this option is enabled, you must specify a grouping variable below. -group_sep: Off - + # Name of the participants column in your phenotype file. + participant_id_label: Participant -# The name of the EV that should be used to group subjects when modeling variances. -# -# If you do not wish to model group variances separately, set this value to None. -grouping_var: None + # Specify which EVs from your phenotype are categorical or numerical. Of those which are numerical, specify which are to be demeaned. + # ev_selections: {'demean': ['Age'], 'categorical': ['Sex', 'Diagnosis']} + ev_selections: {'demean': [], 'categorical': []} + # Specify the formula to describe your model design. Essentially, including EVs in this formula inserts them into the model. The most basic format to include each EV you select would be 'EV + EV + EV + ..', etc. You can also select to include MeanFD, Measure_Mean, and Custom_ROI_Mean here. See the C-PAC User Guide for more detailed information regarding formatting your design formula. + # design_formula: Sex + Diagnosis + Age + MeanFD_Jenkinson + Custom_ROI_Mean + design_formula: -# Only voxels with a Z-score higher than this value will be considered significant. -z_threshold: ['2.3'] + # Choose the derivatives to run the group model on. + # + # These must be written out as a list, and must be one of the options listed below. 
+ # + # For z-scored analyses: + # 'desc-zstd_alff', 'desc-sm-zstd_alff', 'desc-zstd_falff', 'desc-sm-zstd_falff', 'desc-zstd_reho', 'desc-sm-zstd_reho', 'desc-zstd_sca_roi', 'desc-sm-zstd_sca_roi', 'desc-zstd_vmhc', 'desc-zstd_dr_tempreg_maps', 'desc-sm-zstd_dr_tempreg_maps', 'desc-zstd_sca_tempreg_maps', 'desc-sm-zstd_sca_tempreg_maps', 'desc-zstd_centrality', 'desc-sm-zstd_centrality' + # + # Example input: derivative_list : ['desc-sm-zstd_alff', 'desc-sm-zstd_sca_roi'] + # + derivative_list: [] + # Choose whether to use a group mask or individual-specific mask when calculating the output means to be used as a regressor. + # + # This only takes effect if you include the 'Measure_Mean' regressor in your Design Matrix Formula. + mean_mask: ['Group Mask'] -# Significance threshold (P-value) to use when doing cluster correction for multiple comparisons. -p_threshold: ['0.05'] + # Full path to a NIFTI file containing one or more ROI masks. The means of the masked regions will then be computed for each subject's output and will be included in the model as regressors (one for each ROI in the mask file) if you include 'Custom_ROI_Mean' in the Design Matrix Formula. + # custom_roi_mask: /path/to/mask.nii.gz + custom_roi_mask: None + # Choose the coding scheme to use when generating your model. 'Treatment' encoding is generally considered the typical scheme. Consult the User Guide for more information. + # + # Available options: + # 'Treatment', 'Sum' + # + coding_scheme: ['Treatment'] -# For repeated measures only. Enter the session names in your dataset that you wish to include within the same model (this is for repeated measures / within-subject designs).\n\nTip: These will be the names listed as "unique_id" in the original individual-level participant list, or the labels in the original data directories you marked as {session} while creating the CPAC participant list. -# sessions_list: ['ses-01', 'ses-02'] -sessions_list: [] + # Specify whether FSL should model the variance for each group separately. + # + # If this option is enabled, you must specify a grouping variable below. + group_sep: Off + # The name of the EV that should be used to group subjects when modeling variances. + # + # If you do not wish to model group variances separately, set this value to None. + grouping_var: None -# For repeated measures only. Enter the series names in your dataset that you wish to include within the same model (this is for repeated measures / within-subject designs).\n\nTip: These will be the labels listed under "func:" in the original individual-level participant list, or the labels in the original data directories you marked as {series} while creating the CPAC participant list. -# series_list: ['task-rest_run-1', 'task-rest_run-2'] -series_list: [] + # Only voxels with a Z-score higher than this value will be considered significant. + z_threshold: ['2.3'] + # Significance threshold (P-value) to use when doing cluster correction for multiple comparisons. + p_threshold: ['0.05'] -# Specify your contrasts here. For example, if two of your available contrasts are EV1 and EV0, you can enter contrast descriptions such as 'EV1 - EV0 = 0' or 'EV1 = 0'. Consult the User Guide for more information about describing contrasts. Alternatively, you can provide your own custom-written contrasts matrix in a CSV file in the 'Custom Contrasts Matrix' field below. 
-# contrasts: ['C(Diagnosis)[T.ADHD] - C(Diagnosis)[T.Typical] = 0', 'C(Diagnosis)[T.Typical] - C(Diagnosis)[T.ADHD] = 0'] -contrasts: [] + # For repeated measures only. Enter the session names in your dataset that you wish to include within the same model (this is for repeated measures / within-subject designs).\n\nTip: These will be the names listed as "unique_id" in the original individual-level participant list, or the labels in the original data directories you marked as {session} while creating the CPAC participant list. + # sessions_list: ['ses-01', 'ses-02'] + sessions_list: [] + # For repeated measures only. Enter the series names in your dataset that you wish to include within the same model (this is for repeated measures / within-subject designs).\n\nTip: These will be the labels listed under "func:" in the original individual-level participant list, or the labels in the original data directories you marked as {series} while creating the CPAC participant list. + # series_list: ['task-rest_run-1', 'task-rest_run-2'] + series_list: [] -# Optional: A list of f-test strings containing contrasts. If you do not wish to run f-tests, leave this blank. -f_tests: [] + # Specify your contrasts here. For example, if two of your available contrasts are EV1 and EV0, you can enter contrast descriptions such as 'EV1 - EV0 = 0' or 'EV1 = 0'. Consult the User Guide for more information about describing contrasts. Alternatively, you can provide your own custom-written contrasts matrix in a CSV file in the 'Custom Contrasts Matrix' field below. + # contrasts: ['C(Diagnosis)[T.ADHD] - C(Diagnosis)[T.Typical] = 0', 'C(Diagnosis)[T.Typical] - C(Diagnosis)[T.ADHD] = 0'] + contrasts: [] + # Optional: A list of f-test strings containing contrasts. If you do not wish to run f-tests, leave this blank. + f_tests: [] -# Optional: Full path to a CSV file which specifies the contrasts you wish to run in group analysis. Consult the User Guide for proper formatting. -# If you wish to use the standard contrast builder, leave this field blank. If you provide a path for this option, CPAC will use your custom contrasts matrix instead, and will use the f-tests described in this custom file only (ignoring those you have input in the f-tests field above). -# If you wish to include f-tests, create a new column in your CSV file for each f-test named 'f_test_1', 'f_test_2', .. etc. Then, mark the contrasts you would like to include in each f-test with a 1, and mark the rest 0. Note that you must select at least two contrasts per f-test. -custom_contrasts: None - + # Optional: Full path to a CSV file which specifies the contrasts you wish to run in group analysis. Consult the User Guide for proper formatting. + # If you wish to use the standard contrast builder, leave this field blank. If you provide a path for this option, CPAC will use your custom contrasts matrix instead, and will use the f-tests described in this custom file only (ignoring those you have input in the f-tests field above). + # If you wish to include f-tests, create a new column in your CSV file for each f-test named 'f_test_1', 'f_test_2', .. etc. Then, mark the contrasts you would like to include in each f-test with a 1, and mark the rest 0. Note that you must select at least two contrasts per f-test. 
+ custom_contrasts: None # FSL-Randomise -############################################################################## - -# Run Randomise -run_randomise : [0] - +fsl_randomise: -# Number of permutations you would like to use when building up the null distribution to test against. -randomise_permutation : 500 + # Run Randomise + run: [0] + # Number of permutations you would like to use when building up the null distribution to test against. + permutation: 500 -# Cluster-based thresholding corrected for multiple comparisons by using the null distribution of the max (across the image) cluster mask. -randomise_thresh : 5 + # Cluster-based thresholding corrected for multiple comparisons by using the null distribution of the max (across the image) cluster mask. + thresh: 5 + # Demean data temporally before model fitting. + demean: True -# Demean data temporally before model fitting. -randomise_demean : True - - -# From the FMRIB FSL-Randomise user guide: TFCE (Threshold-Free Cluster Enhancement) is a new method for finding 'clusters' in your data without having to define clusters in a binary way. Cluster-like structures are enhanced but the image remains fundamentally voxelwise. -randomise_tfce : True + # From the FMRIB FSL-Randomise user guide: TFCE (Threshold-Free Cluster Enhancement) is a new method for finding 'clusters' in your data without having to define clusters in a binary way. Cluster-like structures are enhanced but the image remains fundamentally voxelwise. + tfce: True # Bootstrap Analysis of Stable Clusters (BASC) - via PyBASC -############################################################################## - -# Run Bootstrap Analysis of Stable Clusters -run_basc : [0] +basc: + # Run Bootstrap Analysis of Stable Clusters + run: [0] -# If there are multiple series or scans in any of the pipeline outputs for which PyBASC is being run, and you only want to run for some of them, you can list them here - scan labels separated by commas (ex. 'rest_run-1, rest_run-3'). -# If nothing is listed, all available pipelines will be run. -basc_scan_inclusion : None + # If there are multiple series or scans in any of the pipeline outputs for which PyBASC is being run, and you only want to run for some of them, you can list them here - scan labels separated by commas (ex. 'rest_run-1, rest_run-3'). + # If nothing is listed, all available pipelines will be run. + scan_inclusion: None + # The resolution to run PyBASC with. + resolution: 4mm -# The resolution to run PyBASC with. -basc_resolution : 4mm + # Maximum amount of processors to use while performing BASC. + proc: 2 + # Maximum amount of RAM (in GB) to be used when running BASC. + memory: 4 -# Maximum amount of processors to use while performing BASC. -basc_proc : 2 + # Standard FSL Skull Stripped Template. + template_brain_only_for_func: $FSLDIR/data/standard/MNI152_T1_${basc_resolution}_brain.nii.gz + # Full path to a mask file to be used when running BASC. Voxels outside this mask will be excluded from analysis. This is the region that you’d like to parcellate. + # If you do not wish to use a mask, set this field to None. + # Note: BASC is very computationally intensive, we strongly recommend you limit your analysis to specific brain areas of interest. + roi_mask_file: None -# Maximum amount of RAM (in GB) to be used when running BASC. -basc_memory : 4 + # If cross clustering is enabled, then clustering of the first region will be calculated based on pairwise similarity between the timeseries of the ROI Mask File, and this second ROI. 
+ cross_cluster_mask_file: None + # The metric used to compare similarity between voxel timeseries. + # Options: ['correlation', 'euclidean', 'cityblock', 'cosine'] + similarity_metric_list: ['correlation'] -# Standard FSL Skull Stripped Template. -template_brain_only_for_func : $FSLDIR/data/standard/MNI152_T1_${basc_resolution}_brain.nii.gz + # How many times individual level circular block bootstrapping of the timeseries will be applied. + timeseries_bootstrap_list: 100 + # Number of bootstraps to apply to the original dataset. + dataset_bootstrap_list: 30 -# Full path to a mask file to be used when running BASC. Voxels outside this mask will be excluded from analysis. This is the region that you’d like to parcellate. -# If you do not wish to use a mask, set this field to None. -# Note: BASC is very computationally intensive, we strongly recommend you limit your analysis to specific brain areas of interest. -basc_roi_mask_file : None + # Number of clusters to create during clustering at both the individual and group levels. + n_clusters_list: 2 + # The similarity threshold at which the similarity matrices will be set to 0. + affinity_thresh: [0.0] -# If cross clustering is enabled, then clustering of the first region will be calculated based on pairwise similarity between the timeseries of the ROI Mask File, and this second ROI. -basc_cross_cluster_mask_file : None + # This is the amount of feature agglomeration that will be applied. Smaller values mean more feature agglomeration. + output_sizes: 800 + # If set to true, then the ROI Mask file parcellation will be based on the similarity between ROI Mask file voxels based on their connectivity to each voxel in ROI mask file for cross-clustering. + cross_cluster: True -# The metric used to compare similarity between voxel timeseries. -# Options: ['correlation', 'euclidean', 'cityblock', 'cosine'] -basc_similarity_metric_list : ['correlation'] + # This parameter determines the width of the time window used in the circular block bootstrap. + blocklength_list: 1 - -# How many times individual level circular block bootstrapping of the timeseries will be applied. -basc_timeseries_bootstrap_list : 100 - - -# Number of bootstraps to apply to the original dataset. -basc_dataset_bootstrap_list : 30 - - -# Number of clusters to create during clustering at both the individual and group levels. -basc_n_clusters_list : 2 - - -# The similarity threshold at which the similarity matrices will be set to 0. -basc_affinity_thresh : [0.0] - - -# This is the amount of feature agglomeration that will be applied. Smaller values mean more feature agglomeration. -basc_output_sizes : 800 - - -# If set to true, then the ROI Mask file parcellation will be based on the similarity between ROI Mask file voxels based on their connectivity to each voxel in ROI mask file for cross-clustering. -basc_cross_cluster : True - - -# This parameter determines the width of the time window used in the circular block bootstrap. -basc_blocklength_list : 1 - - -# If this is set to true, the all individuals will have feature agglomeration applied together, resulting in the same mapping across subjects. Use this only when memory demands limit ability to process ROIs with a high number of voxels. -basc_group_dim_reduce : False + # If this is set to true, the all individuals will have feature agglomeration applied together, resulting in the same mapping across subjects. Use this only when memory demands limit ability to process ROIs with a high number of voxels. 
+ group_dim_reduce: False # Multivariate Distance Matrix Regression (MDMR) -############################################################################## - -# Used to determine if Multivariate Distance Matrix Regression (MDMR) will be added to the pipeline or not. -runMDMR : [0] - - -# Inclusion list text file listing the participant IDs you wish to include in the MDMR analysis. If left as None, will include all subjects. -mdmr_inclusion : None - - -# Path to a mask file. Voxels outside of the mask will be excluded from MDMR. -mdmr_roi_file : - +mdmr: -# Path to a CSV file containing the phenotypic regressor. -mdmr_regressor_file : + # Used to determine if Multivariate Distance Matrix Regression (MDMR) will be added to the pipeline or not. + run: [0] + # Inclusion list text file listing the participant IDs you wish to include in the MDMR analysis. If left as None, will include all subjects. + inclusion_list : None -# Name of the participants column in your regressor file. -mdmr_regressor_participant_column : + # Path to a mask file. Voxels outside of the mask will be excluded from MDMR. + roi_file: /path + # Path to a CSV file containing the phenotypic regressor. + regressor_file: -# Columns from the CSV file indicating factor variables. Other columns will be handled as covariates. Separated by commas. -mdmr_regressor_columns : + # Name of the participants column in your regressor file. + regressor_participant_column: '' + # Columns from the CSV file indicating factor variables. Other columns will be handled as covariates. Separated by commas. + regressor_columns: '' -# Number of permutation tests to run on the Pseudo-F statistics. -mdmr_permutations : 500 + # Number of permutation tests to run on the Pseudo-F statistics. + permutations: 15000 + # Number of Nipype nodes created while computing MDMR. Dependent upon computing resources. + parallel_nodes: 10 -# Number of Nipype nodes created while computing MDMR. Dependent upon computing resources. -mdmr_parallel_nodes : 1 + # If you want to create zstat maps + zscore: [1] # Inter-Subject Correlation (ISC) & Inter-Subject Functional Correlation (ISFC) -############################################################################### - -# Used to determine if Inter-subject Correlation (ISC) will be added to the pipeline or not. -runISC : [0] - - -# Used to determine if Inter-subject Functional Correlation (ISFC) will be added to the pipeline or not. -runISFC : [0] - - -# Used to determine if the ISC and ISFC will run in the ROI level. -isc_level_roi : [0] - +isc_isfc: -# Used to determine if the ISC and ISFC will run in the voxel level. Depending on the image resolution, it may take several hours and consume a great amount of available memory. -isc_level_voxel : [0] + # Used to determine if Inter-subject Correlation (ISC) will be added to the pipeline or not. + runISC: [0] + # Used to determine if Inter-subject Functional Correlation (ISFC) will be added to the pipeline or not. + runISFC: [0] -# Filter out voxels that, in the correlation distribution, is greater then the informed standard deviation. Zero value will disable the filter. -isc_level_voxel_std_filter : 0.0 + # Used to determine if the ISC and ISFC will run in the ROI level. + level_roi: [0] + # Used to determine if the ISC and ISFC will run in the voxel level. Depending on the image resolution, it may take several hours and consume a great amount of available memory. + level_voxel: [0] -# Number of permutation tests to compute the statistics. 
-isc_permutations : 1000 + # Filter out voxels that, in the correlation distribution, is greater then the informed standard deviation. Zero value will disable the filter. + level_voxel_std_filter: 0.0 + # Number of permutation tests to compute the statistics. + permutations: 1000 -# ROI/atlases to include in the analysis. For ROI-level ISC/ISFC runs. -# This should be a list of names/strings of the ROI names used in individual-level analysis, if ROI timeseries extraction was performed. -isc_roi_inclusion: [""] + # ROI/atlases to include in the analysis. For ROI-level ISC/ISFC runs. + # This should be a list of names/strings of the ROI names used in individual-level analysis, if ROI timeseries extraction was performed. + roi_inclusion: [""] #Quasi Periodic Patterns (QPP) -############################### +qpp: -# Run Quasi Periodic Pattern Analysis -runQPP : [1] + # Run Quasi Periodic Pattern Analysis + run: [1] + scan_inclusion: -qpp_scan_inclusion : -qpp_session_inclusion : -qpp_stratification : + session_inclusion: + + stratification: -qpp_permutations: 100 -qpp_window: 30 + permutations: 100 + + window: 30 -qpp_initial_threshold: 0.2 -qpp_final_threshold: 0.3 -qpp_initial_threshold_iterations : 20 + initial_threshold: 0.2 + + final_threshold: 0.3 + + initial_threshold_iterations : 20 -qpp_iterations : 15 + qpp_iterations : 15 diff --git a/CPAC/resources/configs/pipeline_config_default.yml b/CPAC/resources/configs/pipeline_config_default.yml index 4dfcba571c..72ec972c16 100644 --- a/CPAC/resources/configs/pipeline_config_default.yml +++ b/CPAC/resources/configs/pipeline_config_default.yml @@ -10,6 +10,7 @@ pipeline_setup: # Name for this pipeline configuration - useful for identification. + # This string will be sanitized and used in filepaths pipeline_name: cpac-default-pipeline output_directory: diff --git a/CPAC/resources/configs/pipeline_config_rbc-options.yml b/CPAC/resources/configs/pipeline_config_rbc-options.yml index b8950b759c..0385026393 100644 --- a/CPAC/resources/configs/pipeline_config_rbc-options.yml +++ b/CPAC/resources/configs/pipeline_config_rbc-options.yml @@ -7,7 +7,7 @@ # # Tip: This file can be edited manually with a text editor for quick modifications. -FROM: fx-options +FROM: fmriprep-options pipeline_setup: # Name for this pipeline configuration - useful for identification. @@ -33,24 +33,12 @@ pipeline_setup: # This saves disk space, but any additional preprocessing or analysis will have to be completely re-run. 
remove_working_dir: On -anatomical_preproc: - # N4 bias field correction via ANTs - n4_bias_field_correction: - # this is a fork option - run: - - On - registration_workflows: anatomical_registration: - T1w_brain_template: /usr/share/fsl/5.0/data/standard/MNI152_T1_${resolution_for_anat}_brain.nii.gz - T1w_brain_template_mask: /usr/share/fsl/5.0/data/standard/MNI152_T1_${resolution_for_anat}_brain_mask.nii.gz - T1w_template: /usr/share/fsl/5.0/data/standard/MNI152_T1_${resolution_for_anat}.nii.gz - + T1w_brain_template: $FSLDIR/data/standard/MNI152_T1_${resolution_for_anat}_brain.nii.gz + T1w_brain_template_mask: $FSLDIR/data/standard/MNI152_T1_${resolution_for_anat}_brain_mask.nii.gz + T1w_template: $FSLDIR/data/standard/MNI152_T1_${resolution_for_anat}.nii.gz functional_registration: - coregistration: - func_input_prep: - input: [fmriprep_reference] - func_registration_to_template: # these options modify the application (to the functional data), not the calculation, of the # T1-to-template and EPI-to-template transforms calculated earlier during registration @@ -73,11 +61,10 @@ registration_workflows: target_template: T1_template: - T1w_brain_template_funcreg: /usr/share/fsl/5.0/data/standard/MNI152_T1_${func_resolution}_brain.nii.gz - T1w_brain_template_mask_funcreg: /usr/share/fsl/5.0/data/standard/MNI152_T1_${func_resolution}_brain_mask.nii.gz - T1w_template_for_resample: - T1w_template_funcreg: /usr/share/fsl/5.0/data/standard/MNI152_T1_${func_resolution}.nii.gz - + T1w_brain_template_funcreg: $FSLDIR/data/standard/MNI152_T1_${func_resolution}_brain.nii.gz + T1w_brain_template_mask_funcreg: $FSLDIR/data/standard/MNI152_T1_${func_resolution}_brain_mask.nii.gz + T1w_template_for_resample: $FSLDIR/data/standard/MNI152_T1_${func_resolution}_brain.nii.gz + T1w_template_funcreg: $FSLDIR/data/standard/MNI152_T1_${func_resolution}.nii.gz functional_preproc: truncation: @@ -94,10 +81,6 @@ functional_preproc: run: - On - motion_estimates_and_correction: - motion_correction: - using: [3dvolreg] - distortion_correction: using: - PhaseDiff @@ -186,7 +169,6 @@ nuisance_corrections: # CSF erosion default is using millimeter erosion method when use erosion for CSF. csf_mask_erosion_mm: - timeseries_extraction: run: On connectivity_matrix: diff --git a/CPAC/resources/cpac_outputs.tsv b/CPAC/resources/cpac_outputs.tsv index ac6c1e6cb5..989eef7fbf 100644 --- a/CPAC/resources/cpac_outputs.tsv +++ b/CPAC/resources/cpac_outputs.tsv @@ -218,3 +218,5 @@ space-template_label-GM_mask mask template anat NIfTI space-EPItemplate_label-CSF_mask mask template func NIfTI space-EPItemplate_label-WM_mask mask template func NIfTI space-EPItemplate_label-GM_mask mask template func NIfTI +mdmr group functional group_analysis NIfTI +desc-zstd-mdmr group functional group_analysis NIfTI Yes \ No newline at end of file diff --git a/CPAC/utils/__init__.py b/CPAC/utils/__init__.py index 9d4106cbb2..c5c791ec03 100644 --- a/CPAC/utils/__init__.py +++ b/CPAC/utils/__init__.py @@ -4,14 +4,8 @@ from . 
import build_data_config from .interfaces import function, masktool from .extract_data import run -from .datasource import create_anat_datasource -from .datasource import create_func_datasource -from .datasource import create_fmap_datasource -from .datasource import create_roi_mask_dataflow -from .datasource import create_grp_analysis_dataflow -from .datasource import create_spatial_map_dataflow from .datatypes import ListFromItem -from .configuration import Configuration +from .configuration import check_pname, Configuration, set_subject from .strategy import Strategy from .outputs import Outputs @@ -43,6 +37,5 @@ repickle, ) -__all__ = [ - 'function', 'ListFromItem' -] +__all__ = ['check_pname', 'Configuration', 'function', 'ListFromItem', + 'set_subject'] diff --git a/CPAC/utils/bids_utils.py b/CPAC/utils/bids_utils.py index 5e91585228..ad7be177d9 100755 --- a/CPAC/utils/bids_utils.py +++ b/CPAC/utils/bids_utils.py @@ -702,7 +702,7 @@ def collect_bids_files_configs(bids_dir, aws_input_creds=''): file_paths = [] config_dict = {} - suffixes = ['T1w', 'T2w', 'bold', 'acq-fMRI_epi', 'phasediff', 'magnitude', + suffixes = ['T1w', 'T2w', 'bold', 'epi', 'phasediff', 'magnitude', 'magnitude1', 'magnitude2'] if bids_dir.lower().startswith("s3://"): @@ -724,6 +724,8 @@ def collect_bids_files_configs(bids_dir, aws_input_creds=''): for s3_obj in bucket.objects.filter(Prefix=prefix): for suf in suffixes: if suf in str(s3_obj.key): + if suf == 'epi' and 'acq-fMRI' not in s3_obj.key: + continue if str(s3_obj.key).endswith("json"): try: config_dict[s3_obj.key.replace(prefix, "") @@ -743,6 +745,8 @@ def collect_bids_files_configs(bids_dir, aws_input_creds=''): if files: for f in files: for suf in suffixes: + if suf == 'epi' and 'acq-fMRI' not in f: + continue if 'nii' in f and suf in f: file_paths += [os.path.join(root, f) .replace(bids_dir, '').lstrip('/')] diff --git a/CPAC/utils/build_data_config.py b/CPAC/utils/build_data_config.py index c21ae8ba26..684562c149 100644 --- a/CPAC/utils/build_data_config.py +++ b/CPAC/utils/build_data_config.py @@ -564,11 +564,11 @@ def get_BIDS_data_dct(bids_base_dir, file_list=None, anat_scan=None, fmap_pedir_sess = os.path.join(bids_base_dir, "sub-{participant}/ses-{session}/fmap/" "sub-{participant}_ses-{session}/" - "dir-*_acq-fMRI_epi.nii.gz") + "*acq-fMR*_epi.nii.gz") fmap_pedir = os.path.join(bids_base_dir, "sub-{participant}/fmap/sub-{participant}" - "_dir-*_acq-fMRI_epi.nii.gz") + "*acq-fMR*_epi.nii.gz") sess_glob = os.path.join(bids_base_dir, "sub-*/ses-*/*") @@ -582,7 +582,7 @@ def get_BIDS_data_dct(bids_base_dir, file_list=None, anat_scan=None, fmap_pedir_scan_glob = os.path.join(bids_base_dir, "sub-*fmap/" - "sub-*_dir-*_acq-fMRI_epi.nii.gz") + "sub-*_*acq-fMR*_epi.nii.gz") part_tsv_glob = os.path.join(bids_base_dir, "*participants.tsv") @@ -648,7 +648,7 @@ def get_BIDS_data_dct(bids_base_dir, file_list=None, anat_scan=None, fmap_mag = os.path.join(bids_base_dir, "sub-{participant}/fmap/sub-{participant}" "*magnitud*.nii.gz") - + ''' if fnmatch.fnmatch(filepath, fmap_pedir_scan_glob): # check if there is a scan level for the fmap magnitude files diff --git a/CPAC/utils/configuration/__init__.py b/CPAC/utils/configuration/__init__.py index afed809515..58a524d89d 100644 --- a/CPAC/utils/configuration/__init__.py +++ b/CPAC/utils/configuration/__init__.py @@ -16,9 +16,9 @@ You should have received a copy of the GNU Lesser General Public License along with C-PAC. 
If not, see .""" -from .configuration import Configuration, DEFAULT_PIPELINE_FILE, \ - Preconfiguration +from .configuration import check_pname, Configuration, DEFAULT_PIPELINE_FILE, \ + Preconfiguration, set_subject from . import configuration, diff -__all__ = ['Configuration', 'configuration', 'DEFAULT_PIPELINE_FILE', 'diff', - 'Preconfiguration'] +__all__ = ['check_pname', 'Configuration', 'configuration', + 'DEFAULT_PIPELINE_FILE', 'diff', 'Preconfiguration', 'set_subject'] diff --git a/CPAC/utils/configuration/configuration.py b/CPAC/utils/configuration/configuration.py index 80c46e89e1..2df09feb93 100644 --- a/CPAC/utils/configuration/configuration.py +++ b/CPAC/utils/configuration/configuration.py @@ -19,12 +19,10 @@ import re import os import warnings - from itertools import repeat +from typing import Optional, Tuple from warnings import warn - import yaml - from CPAC.qc import REGISTRATION_GUARDRAIL_THRESHOLDS from CPAC.utils.utils import load_preconfig from .diff import dct_diff @@ -362,6 +360,43 @@ def key_type_error(self, key): ])) +def check_pname(p_name: str, pipe_config: Configuration) -> str: + '''Function to check / set `p_name`, the string representation of a + pipeline for use in filetrees + + Parameters + ---------- + p_name : str or None + + pipe_config : Configuration + + Returns + ------- + p_name + + Examples + -------- + >>> c = Configuration() + >>> check_pname(None, c) + 'pipeline_cpac-default-pipeline' + >>> check_pname('cpac-default-pipeline', c) + 'pipeline_cpac-default-pipeline' + >>> check_pname('pipeline_cpac-default-pipeline', c) + 'pipeline_cpac-default-pipeline' + >>> check_pname('different-name', Configuration()) + 'pipeline_different-name' + >>> p_name = check_pname(None, Preconfiguration('blank')) + Loading the 'blank' pre-configured pipeline. + >>> p_name + 'pipeline_cpac-blank-template' + ''' + if p_name is None: + p_name = f'pipeline_{pipe_config["pipeline_setup", "pipeline_name"]}' + elif not p_name.startswith('pipeline_'): + p_name = f'pipeline_{p_name}' + return p_name + + def collect_key_list(config_dict): '''Function to return a list of lists of keys for a nested dictionary @@ -459,3 +494,55 @@ def set_from_ENV(conf): # pylint: disable=invalid-name conf = re.sub( _pattern, os.environ.get(_match, f'${_match}'), conf) return conf + + +def set_subject(sub_dict: dict, pipe_config: 'Configuration', + p_name: Optional[str] = None) -> Tuple[str, str, str]: + '''Function to set pipeline name and log directory path for a given + sub_dict + + Parameters + ---------- + sub_dict : dict + + pipe_config : CPAC.utils.configuration.Configuration + + p_name : str, optional + pipeline name string + + Returns + ------- + subject_id : str + + p_name : str + pipeline name string + + log_dir : str + path to subject log directory + + Examples + -------- + >>> from tempfile import TemporaryDirectory + >>> from CPAC.utils.configuration import Configuration + >>> sub_dict = {'site_id': 'site1', 'subject_id': 'sub1', + ... 'unique_id': 'uid1'} + >>> with TemporaryDirectory() as tmpdir: + ... subject_id, p_name, log_dir = set_subject( + ... sub_dict, Configuration({'pipeline_setup': {'log_directory': + ... 
{'path': tmpdir}}})) + >>> subject_id + 'sub1_uid1' + >>> p_name + 'pipeline_cpac-default-pipeline' + >>> log_dir.endswith(f'{p_name}/{subject_id}') + True + ''' + subject_id = sub_dict['subject_id'] + if sub_dict.get('unique_id'): + subject_id += f'_{sub_dict["unique_id"]}' + p_name = check_pname(p_name, pipe_config) + log_dir = os.path.join(pipe_config.pipeline_setup['log_directory']['path'], + p_name, subject_id) + if not os.path.exists(log_dir): + os.makedirs(os.path.join(log_dir)) + return subject_id, p_name, log_dir diff --git a/CPAC/utils/datasource.py b/CPAC/utils/datasource.py index bb660d1ab7..5b9271922b 100644 --- a/CPAC/utils/datasource.py +++ b/CPAC/utils/datasource.py @@ -1,11 +1,26 @@ +# Copyright (C) 2012-2022 C-PAC Developers + +# This file is part of C-PAC. + +# C-PAC is free software: you can redistribute it and/or modify it under +# the terms of the GNU Lesser General Public License as published by the +# Free Software Foundation, either version 3 of the License, or (at your +# option) any later version. + +# C-PAC is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public +# License for more details. + +# You should have received a copy of the GNU Lesser General Public +# License along with C-PAC. If not, see . import csv import json -import nipype.interfaces.utility as util +import re +from typing import Optional, Tuple from nipype import logging -# pylint: disable=ungrouped-imports, wrong-import-order +from nipype.interfaces import utility as util from CPAC.pipeline import nipype_pipeline_engine as pe -import nipype.interfaces.afni as afni - from CPAC.utils import function from CPAC.utils.interfaces.function import Function from CPAC.utils.utils import get_scan_params @@ -327,20 +342,46 @@ def create_fmap_datasource(fmap_dct, wf_name='fmap_datasource'): def get_fmap_phasediff_metadata(data_config_scan_params): - if not isinstance(data_config_scan_params, dict) and \ - ".json" in data_config_scan_params: - with open(data_config_scan_params, 'r') as f: - data_config_scan_params = json.load(f) + if (not isinstance(data_config_scan_params, dict) and + ".json" in data_config_scan_params): + with open(data_config_scan_params, 'r', encoding='utf-8') as _f: + data_config_scan_params = json.load(_f) echo_time = data_config_scan_params.get("EchoTime") dwell_time = data_config_scan_params.get("DwellTime") pe_direction = data_config_scan_params.get("PhaseEncodingDirection") + total_readout = data_config_scan_params.get("TotalReadoutTime") + + return (echo_time, dwell_time, pe_direction, total_readout) + + +def calc_delta_te_and_asym_ratio(effective_echo_spacing: float, + echo_time_one: float, echo_time_two: float, + echo_time_three: Optional[float] = None + ) -> Tuple[float, float]: + """Calcluate ``deltaTE`` and ``ees_asym_ratio`` from given metadata + + Parameters + ---------- + effective_echo_spacing : float + EffectiveEchoSpacing from sidecar JSON - return (echo_time, dwell_time, pe_direction) + echo_time_one : float + echo_time_two : float -def calc_deltaTE_and_asym_ratio(dwell_time, echo_time_one, echo_time_two, - echo_time_three=None): + echo_time_three : float, optional + + Returns + ------- + deltaTE : float + + ees_asym_ratio : float + """ + if not isinstance(effective_echo_spacing, float): + raise LookupError('C-PAC could not find `EffectiveEchoSpacing` in ' + 'either fmap or func sidecar JSON, but that field ' + 'is 
required for PhaseDiff distortion correction.') echo_times = [echo_time_one, echo_time_two] if echo_time_three: # get only the two different ones @@ -354,9 +395,8 @@ def calc_deltaTE_and_asym_ratio(dwell_time, echo_time_one, echo_time_two, echo_times[1] = echo_times[1] * 1000 deltaTE = abs(echo_times[0] - echo_times[1]) - dwell_asym_ratio = (dwell_time / deltaTE) - - return (deltaTE, dwell_asym_ratio) + ees_asym_ratio = (effective_echo_spacing / deltaTE) + return deltaTE, ees_asym_ratio def match_epi_fmaps(bold_pedir, epi_fmap_one, epi_fmap_params_one, @@ -405,24 +445,24 @@ def match_epi_fmaps(bold_pedir, epi_fmap_one, epi_fmap_params_one, def ingress_func_metadata(wf, cfg, rpool, sub_dict, subject_id, - input_creds_path, unique_id=None): + input_creds_path, unique_id=None, num_strat=None): + name_suffix = '' + for suffix_part in (unique_id, num_strat): + if suffix_part is not None: + name_suffix += f'_{suffix_part}' # Grab field maps diff = False blip = False fmap_rp_list = [] fmap_TE_list = [] - if "fmap" in sub_dict: second = False for key in sub_dict["fmap"]: - gather_fmap = create_fmap_datasource(sub_dict["fmap"], - f"fmap_gather_{key}_" - f"{subject_id}") + gather_fmap = create_fmap_datasource( + sub_dict["fmap"], f"fmap_gather_{key}_{subject_id}") gather_fmap.inputs.inputnode.set( - subject=subject_id, - creds_path=input_creds_path, - dl_dir=cfg.pipeline_setup['working_directory']['path'] - ) + subject=subject_id, creds_path=input_creds_path, + dl_dir=cfg.pipeline_setup['working_directory']['path']) gather_fmap.inputs.inputnode.scan = key orig_key = key @@ -445,10 +485,11 @@ def ingress_func_metadata(wf, cfg, rpool, sub_dict, subject_id, input_names=['data_config_scan_params'], output_names=['echo_time', 'dwell_time', - 'pe_direction'], + 'pe_direction', + 'total_readout'], function=get_fmap_phasediff_metadata, imports=get_fmap_metadata_imports), - name=f'{key}_get_metadata') + name=f'{key}_get_metadata{name_suffix}') wf.connect(gather_fmap, 'outputspec.scan_params', get_fmap_metadata, 'data_config_scan_params') @@ -459,33 +500,32 @@ def ingress_func_metadata(wf, cfg, rpool, sub_dict, subject_id, 'dwell_time', {}, "", "fmap_dwell_ingress") rpool.set_data(f'{key}-pedir', get_fmap_metadata, 'pe_direction', {}, "", "fmap_pedir_ingress") + rpool.set_data(f'{key}-total-readout', get_fmap_metadata, + 'total_readout', {}, "", "fmap_readout_ingress") fmap_TE_list.append(f"{key}-TE") - keywords = ['diffphase', 'diffmag'] - if key in keywords: + if re.search('diff.*(phase|mag)', key): diff = True - if orig_key == "epi_AP" or orig_key == "epi_PA": + if re.match('epi_[AP]{2}', orig_key): blip = True if diff: calc_delta_ratio = pe.Node(Function( - input_names=['dwell_time', + input_names=['effective_echo_spacing', 'echo_time_one', 'echo_time_two', 'echo_time_three'], output_names=['deltaTE', - 'dwell_asym_ratio'], - function=calc_deltaTE_and_asym_ratio), - name='diff_distcor_calc_delta') - - node, out_file = rpool.get('diffphase-dwell')[ - "['diffphase-dwell:fmap_dwell_ingress']"]['data'] # <--- there will only be one pipe_idx - wf.connect(node, out_file, calc_delta_ratio, 'dwell_time') + 'ees_asym_ratio'], + function=calc_delta_te_and_asym_ratio, + imports=['from typing import Optional, Tuple']), + name=f'diff_distcor_calc_delta{name_suffix}') node, out_file = rpool.get(f'{fmap_TE_list[0]}')[ - f"['{fmap_TE_list[0]}:fmap_TE_ingress']"]['data'] + f"['{fmap_TE_list[0]}:fmap_TE_ingress']" + ]['data'] # <--- there will only be one pipe_idx wf.connect(node, out_file, calc_delta_ratio, 
'echo_time_one') node, out_file = rpool.get(f'{fmap_TE_list[1]}')[ @@ -498,40 +538,31 @@ def ingress_func_metadata(wf, cfg, rpool, sub_dict, subject_id, wf.connect(node, out_file, calc_delta_ratio, 'echo_time_three') - rpool.set_data('deltaTE', calc_delta_ratio, 'deltaTE', {}, "", - "deltaTE_ingress") - rpool.set_data('dwell-asym-ratio', - calc_delta_ratio, 'dwell_asym_ratio', {}, "", - "dwell_asym_ratio_ingress") - # Add in nodes to get parameters from configuration file # a node which checks if scan_parameters are present for each scan - scan_params_imports = ['from CPAC.utils.utils import check, ' - 'try_fetch_parameter'] - scan_params = \ - pe.Node(Function( - input_names=['data_config_scan_params', - 'subject_id', - 'scan', - 'pipeconfig_tr', - 'pipeconfig_tpattern', - 'pipeconfig_start_indx', - 'pipeconfig_stop_indx'], - output_names=['tr', - 'tpattern', - 'ref_slice', - 'start_indx', - 'stop_indx', - 'pe_direction'], - function=get_scan_params, - imports=scan_params_imports - ), name=f"bold_scan_params_{subject_id}_{unique_id}") + scan_params = pe.Node(Function( + input_names=['data_config_scan_params', + 'subject_id', + 'scan', + 'pipeconfig_tr', + 'pipeconfig_tpattern', + 'pipeconfig_start_indx', + 'pipeconfig_stop_indx'], + output_names=['tr', + 'tpattern', + 'ref_slice', + 'start_indx', + 'stop_indx', + 'pe_direction', + 'effective_echo_spacing'], + function=get_scan_params, + imports=['from CPAC.utils.utils import check, try_fetch_parameter'] + ), name=f"bold_scan_params_{subject_id}{name_suffix}") scan_params.inputs.subject_id = subject_id scan_params.inputs.set( pipeconfig_start_indx=cfg.functional_preproc['truncation'][ 'start_tr'], - pipeconfig_stop_indx=cfg.functional_preproc['truncation']['stop_tr'] - ) + pipeconfig_stop_indx=cfg.functional_preproc['truncation']['stop_tr']) # wire in the scan parameter workflow node, out = rpool.get('scan-params')[ @@ -551,7 +582,21 @@ def ingress_func_metadata(wf, cfg, rpool, sub_dict, subject_id, rpool.set_data('pe-direction', scan_params, 'pe_direction', {}, "", "func_metadata_ingress") - return (wf, rpool, diff, blip, fmap_rp_list) + if diff: + # Connect EffectiveEchoSpacing from functional metadata + rpool.set_data('effectiveEchoSpacing', scan_params, + 'effective_echo_spacing', {}, '', + 'func_metadata_ingress') + node, out_file = rpool.get('effectiveEchoSpacing')[ + "['effectiveEchoSpacing:func_metadata_ingress']"]['data'] + wf.connect(node, out_file, calc_delta_ratio, 'effective_echo_spacing') + rpool.set_data('deltaTE', calc_delta_ratio, 'deltaTE', {}, '', + 'deltaTE_ingress') + rpool.set_data('ees-asym-ratio', calc_delta_ratio, + 'ees_asym_ratio', {}, '', + 'ees_asym_ratio_ingress') + + return wf, rpool, diff, blip, fmap_rp_list def create_general_datasource(wf_name): @@ -836,7 +881,7 @@ def res_string_to_tuple(resolution): def resolve_resolution(resolution, template, template_name, tag=None): - import nipype.interfaces.afni as afni + from nipype.interfaces import afni from CPAC.pipeline import nipype_pipeline_engine as pe from CPAC.utils.datasource import check_for_s3 diff --git a/CPAC/utils/strategy.py b/CPAC/utils/strategy.py index 8eab5caf19..b176b06073 100644 --- a/CPAC/utils/strategy.py +++ b/CPAC/utils/strategy.py @@ -1,15 +1,29 @@ -import os -import six -import warnings -import logging +# Copyright (C) 2018-2022 C-PAC Developers + +# This file is part of C-PAC. 
+ +# C-PAC is free software: you can redistribute it and/or modify it under +# the terms of the GNU Lesser General Public License as published by the +# Free Software Foundation, either version 3 of the License, or (at your +# option) any later version. + +# C-PAC is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public +# License for more details. +# You should have received a copy of the GNU Lesser General Public +# License along with C-PAC. If not, see . +import logging +import six +from CPAC.pipeline.engine import ResourcePool logger = logging.getLogger('nipype.workflow') -class Strategy(object): +class Strategy: def __init__(self): - self.resource_pool = {} + self._resource_pool = ResourcePool({}) self.leaf_node = None self.leaf_out_file = None self.name = [] @@ -42,6 +56,16 @@ def get_node_from_resource_pool(self, resource_key): logger.error('No node for output: %s', resource_key) raise + @property + def resource_pool(self): + '''Strategy's ResourcePool dict''' + return self._resource_pool.get_entire_rpool() + + @property + def rpool(self): + '''Strategy's ResourcePool''' + return self._resource_pool + def update_resource_pool(self, resources, override=False): for key, value in resources.items(): if key in self.resource_pool and not override: diff --git a/CPAC/utils/test_mocks.py b/CPAC/utils/test_mocks.py index e76ea96750..2c88a10a2f 100644 --- a/CPAC/utils/test_mocks.py +++ b/CPAC/utils/test_mocks.py @@ -1,9 +1,10 @@ import os +from nipype.interfaces import utility as util from CPAC.pipeline import nipype_pipeline_engine as pe -import nipype.interfaces.utility as util -from CPAC.utils import Configuration, Strategy -from CPAC.utils.interfaces.function import Function +from CPAC.utils.configuration import Configuration from CPAC.utils.datasource import resolve_resolution +from CPAC.utils.interfaces.function import Function +from CPAC.utils.strategy import Strategy def file_node(path, file_node_num=0): diff --git a/CPAC/utils/utils.py b/CPAC/utils/utils.py index 5a8f65ac0e..1bd6953a40 100644 --- a/CPAC/utils/utils.py +++ b/CPAC/utils/utils.py @@ -1,19 +1,19 @@ -"""Copyright (C) 2022 C-PAC Developers +# Copyright (C) 2012-2022 C-PAC Developers -This file is part of C-PAC. +# This file is part of C-PAC. -C-PAC is free software: you can redistribute it and/or modify it under -the terms of the GNU Lesser General Public License as published by the -Free Software Foundation, either version 3 of the License, or (at your -option) any later version. +# C-PAC is free software: you can redistribute it and/or modify it under +# the terms of the GNU Lesser General Public License as published by the +# Free Software Foundation, either version 3 of the License, or (at your +# option) any later version. -C-PAC is distributed in the hope that it will be useful, but WITHOUT -ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or -FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public -License for more details. +# C-PAC is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public +# License for more details. -You should have received a copy of the GNU Lesser General Public -License along with C-PAC. 
If not, see .""" +# You should have received a copy of the GNU Lesser General Public +# License along with C-PAC. If not, see . import os import collections.abc import fnmatch @@ -715,6 +715,8 @@ def get_scan_params(subject_id, scan, pipeconfig_start_indx, starting TR or starting volume index last_tr : an integer ending TR or ending volume index + pe_direction : str + effective_echo_spacing : float """ import os @@ -733,6 +735,7 @@ def get_scan_params(subject_id, scan, pipeconfig_start_indx, last_tr = '' unit = 's' pe_direction = '' + effective_echo_spacing = None if isinstance(pipeconfig_stop_indx, str): if "End" in pipeconfig_stop_indx or "end" in pipeconfig_stop_indx: @@ -755,16 +758,20 @@ def get_scan_params(subject_id, scan, pipeconfig_start_indx, # TODO: better handling of errant key values!!! if "RepetitionTime" in params_dct.keys(): TR = float(check(params_dct, subject_id, scan, - 'RepetitionTime', False)) + "RepetitionTime", False)) if "SliceTiming" in params_dct.keys(): pattern = str(check(params_dct, subject_id, scan, - 'SliceTiming', False)) + "SliceTiming", False)) elif "SliceAcquisitionOrder" in params_dct.keys(): pattern = str(check(params_dct, subject_id, scan, - 'SliceAcquisitionOrder', False)) + "SliceAcquisitionOrder", False)) if "PhaseEncodingDirection" in params_dct.keys(): pe_direction = str(check(params_dct, subject_id, scan, - 'PhaseEncodingDirection', False)) + "PhaseEncodingDirection", False)) + if "EffectiveEchoSpacing" in params_dct.keys(): + effective_echo_spacing = float( + check(params_dct, subject_id, scan, + "EffectiveEchoSpacing", False)) elif len(data_config_scan_params) > 0 and \ isinstance(data_config_scan_params, dict): @@ -808,16 +815,20 @@ def get_scan_params(subject_id, scan, pipeconfig_start_indx, pe_direction = check(params_dct, subject_id, scan, 'PhaseEncodingDirection', False) + effective_echo_spacing = float( + try_fetch_parameter(params_dct, subject_id, scan, + ["EffectiveEchoSpacing"])) + else: err = "\n\n[!] Could not read the format of the scan parameters " \ "information included in the data configuration file for " \ - "the participant {0}.\n\n".format(subject_id) + f"the participant {subject_id}.\n\n" raise Exception(err) - if first_tr == '' or first_tr == None: + if first_tr == '' or first_tr is None: first_tr = pipeconfig_start_indx - if last_tr == '' or last_tr == None: + if last_tr == '' or last_tr is None: last_tr = pipeconfig_stop_indx unit = 's' @@ -920,14 +931,13 @@ def get_scan_params(subject_id, scan, pipeconfig_start_indx, start_indx = first_tr stop_indx = last_tr - return ( - tr if tr else None, - tpattern if tpattern else None, - ref_slice, - start_indx, - stop_indx, - pe_direction - ) + return (tr if tr else None, + tpattern if tpattern else None, + ref_slice, + start_indx, + stop_indx, + pe_direction, + effective_echo_spacing) def get_tr(tr): @@ -1608,6 +1618,141 @@ def _pickle2(p, z=False): return False +def _changes_1_8_0_to_1_8_1(config_dict): + ''' + Examples + -------- + Starting with 1.8.0 + >>> zero = {'anatomical_preproc': { + ... 'non_local_means_filtering': True, + ... 'n4_bias_field_correction': True + ... }, 'functional_preproc': { + ... 'motion_estimates_and_correction': { + ... 'calculate_motion_first': False + ... } + ... }, 'segmentation': { + ... 'tissue_segmentation': { + ... 'ANTs_Prior_Based': { + ... 'CSF_label': 0, + ... 'left_GM_label': 1, + ... 'right_GM_label': 2, + ... 'left_WM_label': 3, + ... 'right_WM_label': 4}}}} + >>> updated_apb = _changes_1_8_0_to_1_8_1(zero)[ + ... 
'segmentation']['tissue_segmentation']['ANTs_Prior_Based'] + >>> updated_apb['CSF_label'] + [0] + >>> updated_apb['GM_label'] + [1, 2] + >>> updated_apb['WM_label'] + [3, 4] + + Starting with 1.8.1 + >>> one = {'anatomical_preproc': { + ... 'non_local_means_filtering': True, + ... 'n4_bias_field_correction': True + ... }, 'functional_preproc': { + ... 'motion_estimates_and_correction': { + ... 'calculate_motion_first': False + ... } + ... }, 'segmentation': { + ... 'tissue_segmentation': { + ... 'ANTs_Prior_Based': { + ... 'CSF_label': [0], + ... 'GM_label': [1, 2], + ... 'WM_label': [3, 4]}}}} + >>> updated_apb = _changes_1_8_0_to_1_8_1(one)[ + ... 'segmentation']['tissue_segmentation']['ANTs_Prior_Based'] + >>> updated_apb['CSF_label'] + [0] + >>> updated_apb['GM_label'] + [1, 2] + >>> updated_apb['WM_label'] + [3, 4] + ''' + for key_sequence in { + ('anatomical_preproc', 'non_local_means_filtering'), + ('anatomical_preproc', 'n4_bias_field_correction') + }: + config_dict = _now_runswitch(config_dict, key_sequence) + for combiners in { + (( + ('segmentation', 'tissue_segmentation', 'ANTs_Prior_Based', + 'CSF_label'), + ), ('segmentation', 'tissue_segmentation', 'ANTs_Prior_Based', + 'CSF_label')), + (( + ('segmentation', 'tissue_segmentation', 'ANTs_Prior_Based', + 'left_GM_label'), + ('segmentation', 'tissue_segmentation', 'ANTs_Prior_Based', + 'right_GM_label') + ), ('segmentation', 'tissue_segmentation', 'ANTs_Prior_Based', + 'GM_label')), + (( + ('segmentation', 'tissue_segmentation', 'ANTs_Prior_Based', + 'left_WM_label'), + ('segmentation', 'tissue_segmentation', 'ANTs_Prior_Based', + 'right_WM_label') + ), ('segmentation', 'tissue_segmentation', 'ANTs_Prior_Based', + 'WM_label')) + }: + config_dict = _combine_labels(config_dict, *combiners) + try: + calculate_motion_first = lookup_nested_value( + config_dict, + ['functional_preproc', 'motion_estimates_and_correction', + 'calculate_motion_first'] + ) + except KeyError: + calculate_motion_first = None + if calculate_motion_first is not None: + del config_dict['functional_preproc'][ + 'motion_estimates_and_correction']['calculate_motion_first'] + config_dict = set_nested_value(config_dict, [ + 'functional_preproc', 'motion_estimates_and_correction', + 'motion_estimates', 'calculate_motion_first' + ], calculate_motion_first) + + return config_dict + + +def _combine_labels(config_dict, list_to_combine, new_key): + ''' + Helper function to combine formerly separate keys into a + combined key. + + Parameters + ---------- + config_dict: dict + + key_sequence: iterable of lists or tuples + + new_key: list or tuple + + Returns + ------- + updated_config_dict: dict + ''' + new_value = [] + any_old_values = False + for _to_combine in list_to_combine: + try: + old_value = lookup_nested_value(config_dict, _to_combine) + except KeyError: + old_value = None + if old_value is not None: + any_old_values = True + if isinstance(old_value, (list, set, tuple)): + for value in old_value: + new_value.append(value) + else: + new_value.append(old_value) + config_dict = delete_nested_value(config_dict, _to_combine) + if any_old_values: + return set_nested_value(config_dict, new_key, new_value) + return config_dict + + def concat_list(in_list1=None, in_list2=None): """ Parameters @@ -1708,6 +1853,31 @@ def lookup_nested_value(d, keys): raise +def _now_runswitch(config_dict, key_sequence): + ''' + Helper function to convert a formerly forkable value to a + runswitch. 
+ + Parameters + ---------- + config_dict: dict + + key_sequence: list or tuple + + Returns + ------- + updated_config_dict: dict + ''' + try: + old_forkable = lookup_nested_value(config_dict, key_sequence) + except KeyError: + return config_dict + if isinstance(old_forkable, (bool, list)): + return set_nested_value( + config_dict, key_sequence, {'run': old_forkable}) + return config_dict + + def _remove_somethings(value, things_to_remove): '''Helper function to remove instances of any in a given set of values from a list. diff --git a/CPAC/utils/yaml_template.py b/CPAC/utils/yaml_template.py index 1800c03dfd..d3ac3ef6ab 100644 --- a/CPAC/utils/yaml_template.py +++ b/CPAC/utils/yaml_template.py @@ -21,7 +21,7 @@ import yaml from click import BadParameter from CPAC.utils.configuration import Configuration, DEFAULT_PIPELINE_FILE -from CPAC.utils.configuration.diff import dct_diff, diff_dict +from CPAC.utils.configuration.diff import dct_diff from CPAC.utils.utils import load_preconfig, \ lookup_nested_value, update_config_dict, \ update_pipeline_values_1_8 @@ -346,6 +346,15 @@ def hash_data_config(sub_list): Returns ------- data_config_hash : str, len(8) + + Examples + -------- + >>> sub_list = [{'site_id': f'site{i}', 'subject_id': f'sub{i}', + ... 'unique_id': f'uid{i}'} for i in range(1, 4)] + >>> sub_list[0] + {'site_id': 'site1', 'subject_id': 'sub1', 'unique_id': 'uid1'} + >>> hash_data_config(sub_list) + '6f49a278' ''' return sha1('_'.join([','.join([run.get(key, '') for run in sub_list]) for key in ['site_id', 'subject_id', diff --git a/README.md b/README.md index e753f1625b..137bc57972 100644 --- a/README.md +++ b/README.md @@ -40,7 +40,7 @@ Documentation pertaining to this latest release can be found here: https://fcp-i Discussion Forum --------------- -If you are stuck and need help or have any other questions or comments about C-PAC, there is a C-PAC discussion forum here: ttps://neurostars.org/tag/cpac +If you are stuck and need help or have any other questions or comments about C-PAC, there is a C-PAC discussion forum here: https://neurostars.org/tag/cpac Issue Tracker and Bugs ---------------------- diff --git a/dev/docker_data/default_pipeline.yml b/dev/docker_data/default_pipeline.yml new file mode 100644 index 0000000000..afe3bd37f8 --- /dev/null +++ b/dev/docker_data/default_pipeline.yml @@ -0,0 +1,14 @@ +%YAML 1.1 +--- +# CPAC Pipeline Configuration YAML file +# Version 1.8.5.dev +# +# http://fcp-indi.github.io for more info. +# +# Tip: This file can be edited manually with a text editor for quick modifications. + +# The default C-PAC pipeline was relocated from `dev/docker_data/default_pipeline.yml` to `CPAC/resources/configs/pipeline_config_default.yml` +# This file (`dev/docker_data/default_pipeline.yml`) is included for backwards-compatibility and will be removed in a future version. 
+ +# import full default pipeline +FROM: default diff --git a/dev/docker_data/run.py b/dev/docker_data/run.py index 22ebc4222e..99107dcf72 100755 --- a/dev/docker_data/run.py +++ b/dev/docker_data/run.py @@ -32,7 +32,8 @@ load_cpac_data_config, \ load_yaml_config, \ sub_list_filter_by_labels -from CPAC.utils.configuration import Configuration, DEFAULT_PIPELINE_FILE +from CPAC.utils.configuration import Configuration, DEFAULT_PIPELINE_FILE, \ + set_subject from CPAC.utils.docs import DOCS_URL_PREFIX from CPAC.utils.monitoring import failed_to_start, log_nodes_cb from CPAC.utils.yaml_template import create_yaml_from_template, \ @@ -515,6 +516,7 @@ def run_main(): else: c['pipeline_setup']['log_directory']['path'] = os.path.join( DEFAULT_TMP_DIR, "log") + log_dir = c['pipeline_setup']['log_directory']['path'] if args.mem_gb: c['pipeline_setup']['system_config'][ @@ -714,14 +716,11 @@ def run_main(): sys.exit(1) else: data_hash = hash_data_config(sub_list) - # write out the data configuration file data_config_file = (f"cpac_data_config_{data_hash}_{st}.yml") - if not output_dir_is_s3: - data_config_file = os.path.join(output_dir, data_config_file) - else: - data_config_file = os.path.join(DEFAULT_TMP_DIR, data_config_file) - + sublogdirs = [set_subject(sub, c)[2] for sub in sub_list] + # write out the data configuration file + data_config_file = os.path.join(sublogdirs[0], data_config_file) with open(data_config_file, 'w', encoding='utf-8') as _f: noalias_dumper = yaml.dumper.SafeDumper noalias_dumper.ignore_aliases = lambda self, data: True @@ -729,18 +728,28 @@ def run_main(): Dumper=noalias_dumper) # update and write out pipeline config file - if not output_dir_is_s3: - pipeline_config_file = os.path.join( - output_dir, f"cpac_pipeline_config_{data_hash}_{st}.yml") - else: - pipeline_config_file = os.path.join( - DEFAULT_TMP_DIR, f"cpac_pipeline_config_{data_hash}_{st}.yml") - + pipeline_config_file = os.path.join( + sublogdirs[0], f"cpac_pipeline_config_{data_hash}_{st}.yml") with open(pipeline_config_file, 'w', encoding='utf-8') as _f: _f.write(create_yaml_from_template(c, DEFAULT_PIPELINE_FILE, True)) - with open(f'{pipeline_config_file[:-4]}_min.yml', 'w', + minimized_config = f'{pipeline_config_file[:-4]}_min.yml' + with open(minimized_config, 'w', encoding='utf-8') as _f: - _f.write(create_yaml_from_template(c, DEFAULT_PIPELINE_FILE, False)) + _f.write(create_yaml_from_template(c, DEFAULT_PIPELINE_FILE, + False)) + for config_file in (data_config_file, pipeline_config_file, + minimized_config): + os.chmod(config_file, 0x444) # Make config files readonly + + if len(sublogdirs) > 1: + # If more than one run is included in the given data config + # file, an identical copy of the data and pipeline config + # will be included in the log directory for each run + for sublogdir in sublogdirs[1:]: + for config_file in (data_config_file, pipeline_config_file, + minimized_config): + os.link(config_file, config_file.replace( + sublogdirs[0], sublogdir)) if args.analysis_level in ["participant", "test_config"]: # build pipeline easy way diff --git a/requirements.txt b/requirements.txt index c37cfe918f..b4cc92d941 100644 --- a/requirements.txt +++ b/requirements.txt @@ -12,6 +12,7 @@ nipype==1.5.1 nose==1.3.7 numpy==1.21.0 pandas==0.23.4 +pathvalidate==2.5.2 patsy==0.5.0 prov==1.5.2 psutil==5.6.6 From f6b85402e86cf054dce3a07181f8a8218201b384 Mon Sep 17 00:00:00 2001 From: Jon Clucas Date: Tue, 18 Oct 2022 13:54:31 -0400 Subject: [PATCH 23/72] :pencil2: Fix typo (octal, not hexadecimal) [skip ci] 
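
`0x444` is a hexadecimal literal (decimal 1092), so the previous call set an
unintended mode (0o2104: setgid, owner-execute, other-read) instead of
read-only; the octal literal `0o444` corresponds to `r--r--r--`. A quick
doctest-style illustration of the difference (for explanation only, not part
of the change itself):

    >>> oct(0x444)  # hex 0x444 == decimal 1092 == mode 0o2104
    '0o2104'
    >>> oct(0o444)  # octal 444 == read-only for owner, group, and other
    '0o444'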
--- dev/docker_data/run.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dev/docker_data/run.py b/dev/docker_data/run.py index 99107dcf72..db3ec4aaf5 100755 --- a/dev/docker_data/run.py +++ b/dev/docker_data/run.py @@ -739,7 +739,7 @@ def run_main(): False)) for config_file in (data_config_file, pipeline_config_file, minimized_config): - os.chmod(config_file, 0x444) # Make config files readonly + os.chmod(config_file, 0o444) # Make config files readonly if len(sublogdirs) > 1: # If more than one run is included in the given data config From 6c81eec1af49a1c8ab7fea0f89345bbd6c446109 Mon Sep 17 00:00:00 2001 From: Jon Clucas Date: Wed, 19 Oct 2022 21:14:10 +0000 Subject: [PATCH 24/72] :construction: WIP :goal_net: Iterate guardrail installation --- CPAC/pipeline/nipype_pipeline_engine/utils.py | 3 +- CPAC/pipeline/random_state/seed.py | 20 ++- CPAC/pipeline/schema.py | 13 +- CPAC/registration/__init__.py | 2 - CPAC/registration/guardrails.py | 42 +++--- CPAC/registration/registration.py | 122 ++++++++++++------ .../configs/pipeline_config_default.yml | 3 +- 7 files changed, 137 insertions(+), 68 deletions(-) diff --git a/CPAC/pipeline/nipype_pipeline_engine/utils.py b/CPAC/pipeline/nipype_pipeline_engine/utils.py index 3542b4f70c..d49be0c5ca 100644 --- a/CPAC/pipeline/nipype_pipeline_engine/utils.py +++ b/CPAC/pipeline/nipype_pipeline_engine/utils.py @@ -17,7 +17,7 @@ """Custom nipype utilities""" -def connect_from_spec(wf, spec, original_spec, exclude=None): +def connect_from_spec(spec, original_spec, exclude=None): """Function to connect all original inputs to a new spec""" for _item, _value in original_spec.items(): if isinstance(exclude, (list, tuple)): @@ -25,4 +25,3 @@ def connect_from_spec(wf, spec, original_spec, exclude=None): setattr(spec.inputs, _item, _value) elif _item != exclude: setattr(spec.inputs, _item, _value) - return wf diff --git a/CPAC/pipeline/random_state/seed.py b/CPAC/pipeline/random_state/seed.py index 7839bf69be..f5cf2d87db 100644 --- a/CPAC/pipeline/random_state/seed.py +++ b/CPAC/pipeline/random_state/seed.py @@ -62,7 +62,7 @@ def random_seed(): ------- seed : int or None ''' - if _seed['seed'] == 'random': + if _seed['seed'] in ['random', None]: _seed['seed'] = random_random_seed() return _seed['seed'] @@ -153,6 +153,24 @@ def _reusable_flags(): } +def seed_plus_1(seed=None): + '''Increment seed, looping back to 1 at MAX_SEED + + Parameters + ---------- + seed : int, optional + Uses configured seed if not specified + + Returns + ------- + int + ''' + seed = random_seed() if seed is None else int(seed) + if seed < MAX_SEED: # increment random seed + return seed + 1 + return 1 # loop back to 1 + + def set_up_random_state(seed): '''Set global random seed diff --git a/CPAC/pipeline/schema.py b/CPAC/pipeline/schema.py index edd5dee4ae..e68a4f07ea 100644 --- a/CPAC/pipeline/schema.py +++ b/CPAC/pipeline/schema.py @@ -18,9 +18,8 @@ # pylint: disable=too-many-lines import re from itertools import chain, permutations -import numpy as np from pathvalidate import sanitize_filename -from voluptuous import All, ALLOW_EXTRA, Any, Capitalize, Coerce, \ +from voluptuous import All, ALLOW_EXTRA, Any, Capitalize, Coerce, Equal, \ ExactSequence, ExclusiveInvalid, In, Length, Lower, \ Match, Maybe, Optional, Range, Required, Schema from CPAC import docs_prefix @@ -484,7 +483,6 @@ def sanitize(filename): 'interpolation': In({'trilinear', 'sinc', 'spline'}), 'using': str, 'input': str, - 'interpolation': str, 'cost': str, 'dof': int, 'arguments': Maybe(str), @@ 
-502,11 +500,14 @@ def sanitize(filename): }, }, 'boundary_based_registration': { - 'run': forkable, + 'run': All(Coerce(ListFromItem), + [Any(bool, All(Lower, Equal('fallback')))], + Length(max=3)), 'bbr_schedule': str, - 'bbr_wm_map': In({'probability_map', 'partial_volume_map'}), + 'bbr_wm_map': In(('probability_map', + 'partial_volume_map')), 'bbr_wm_mask_args': str, - 'reference': In({'whole-head', 'brain'}) + 'reference': In(('whole-head', 'brain')) }, }, 'EPI_registration': { diff --git a/CPAC/registration/__init__.py b/CPAC/registration/__init__.py index 1ab40d425a..2faacbda44 100644 --- a/CPAC/registration/__init__.py +++ b/CPAC/registration/__init__.py @@ -3,7 +3,6 @@ create_fsl_fnirt_nonlinear_reg_nhp, \ create_register_func_to_anat, \ create_register_func_to_anat_use_T2, \ - create_bbregister_func_to_anat, \ create_wf_calculate_ants_warp from .output_func_to_standard import output_func_to_standard @@ -13,6 +12,5 @@ 'create_fsl_fnirt_nonlinear_reg_nhp', 'create_register_func_to_anat', 'create_register_func_to_anat_use_T2', - 'create_bbregister_func_to_anat', 'create_wf_calculate_ants_warp', 'output_func_to_standard'] diff --git a/CPAC/registration/guardrails.py b/CPAC/registration/guardrails.py index e53e7bca1f..da173dfe3d 100644 --- a/CPAC/registration/guardrails.py +++ b/CPAC/registration/guardrails.py @@ -15,6 +15,7 @@ # You should have received a copy of the GNU Lesser General Public # License along with C-PAC. If not, see . """Guardrails to protect against bad registrations""" +import logging from copy import deepcopy from nipype.interfaces.ants import Registration from nipype.interfaces.fsl import FLIRT @@ -23,7 +24,7 @@ from CPAC.pipeline.nipype_pipeline_engine.utils import connect_from_spec from CPAC.qc import qc_masks, REGISTRATION_GUARDRAIL_THRESHOLDS - +logger = logging.getLogger('nipype.workflow') _SPEC_KEYS = { FLIRT: {'reference': 'reference', 'registered': 'out_file'}, Registration: {'reference': 'reference', 'registered': 'out_file'}} @@ -54,7 +55,7 @@ def __init__(self, *args, metric=None, value=None, threshold=None, def registration_guardrail(registered: str, reference: str, retry: bool = False - ) -> str: + ): """Check QC metrics post-registration and throw an exception if metrics are below given thresholds. 
@@ -78,23 +79,29 @@ def registration_guardrail(registered: str, reference: str, retry: bool = False ------- registered_mask : str path to mask + + failed_qc : int + metrics met specified thresholds?, used as index for selecting + outputs """ qc_metrics = qc_masks(registered, reference) + failed_qc = 0 for metric, threshold in REGISTRATION_GUARDRAIL_THRESHOLDS.items(): if threshold is not None: value = qc_metrics.get(metric) if isinstance(value, list): value = value[0] if value < threshold: + failed_qc = 1 with open(f'{registered}.failed_qc', 'w', encoding='utf-8') as _f: _f.write(f'{metric}: {value} < {threshold}') if retry: registered = f'{registered}-failed' else: - raise BadRegistrationError(metric=metric, value=value, - threshold=threshold) - return registered + logger.error(str(BadRegistrationError( + metric=metric, value=value, threshold=threshold))) + return registered, failed_qc def registration_guardrail_node(name=None): @@ -112,7 +119,8 @@ def registration_guardrail_node(name=None): name = 'registration_guardrail' return Node(Function(input_names=['registered', 'reference'], - output_names=['registered'], + output_names=['registered', + 'failed_qc'], imports=['from CPAC.qc import qc_masks, ' 'REGISTRATION_GUARDRAIL_THRESHOLDS', 'from CPAC.registration.guardrails ' @@ -146,10 +154,10 @@ def registration_guardrail_workflow(registration_node, retry=True): (registration_node, guardrail, [(outkey, 'registered')])]) if retry: wf = retry_registration(wf, registration_node, - guardrail.outputs.registered) + guardrail.outputs.registered)[0] else: wf.connect(guardrail, 'registered', outputspec, outkey) - wf = connect_from_spec(wf, outputspec, registration_node, outkey) + connect_from_spec(outputspec, registration_node, outkey) return wf @@ -167,6 +175,8 @@ def retry_registration(wf, registration_node, registered): Returns ------- Workflow + + Node """ name = f'retry_{registration_node.name}' retry_node = Node(Function(function=retry_registration_node, @@ -177,14 +187,14 @@ def retry_registration(wf, registration_node, registered): outputspec = registration_node.outputs outkey = spec_key(registration_node, 'registered') guardrail = registration_guardrail_node(f'{name}_guardrail') - wf = connect_from_spec(wf, inputspec, retry_node) + connect_from_spec(inputspec, retry_node) wf.connect([ (inputspec, guardrail, [ (spec_key(retry_node, 'reference'), 'reference')]), (retry_node, guardrail, [(outkey, 'registered')]), (guardrail, outputspec, [('registered', outkey)])]) - wf = connect_from_spec(wf, retry_node, outputspec, registered) - return wf + connect_from_spec(retry_node, outputspec, registered) + return wf, retry_node def retry_registration_node(registered, registration_node): @@ -200,16 +210,12 @@ def retry_registration_node(registered, registration_node): ------- Node """ - from CPAC.pipeline.random_state.seed import MAX_SEED, random_seed - seed = random_seed() + from CPAC.pipeline.random_state.seed import seed_plus_1 if registered.endswith('-failed'): retry_node = registration_node.clone( name=f'{registration_node.name}-retry') - if isinstance(seed, int): - if seed < MAX_SEED: # increment random seed - retry_node.seed = seed + 1 - else: # loop back to minumum seed - retry_node.seed = 1 + if isinstance(retry_node.seed, int): + retry_node.seed = seed_plus_1() return retry_node return registration_node diff --git a/CPAC/registration/registration.py b/CPAC/registration/registration.py index a6eef5f1af..60c585c16f 100644 --- a/CPAC/registration/registration.py +++ 
b/CPAC/registration/registration.py @@ -21,7 +21,7 @@ from CPAC.pipeline import nipype_pipeline_engine as pe from nipype.interfaces import afni, ants, c3, fsl, utility as util from nipype.interfaces.afni import utils as afni_utils - +from nipype.interfaces.utility import Merge, Select from CPAC.anat_preproc.lesion_preproc import create_lesion_preproc from CPAC.func_preproc.utils import chunk_ts, split_ts_chunks from CPAC.registration.guardrails import registration_guardrail_node @@ -912,8 +912,8 @@ def create_register_func_to_anat_use_T2(config, def create_bbregister_func_to_anat(phase_diff_distcor=False, - name='bbregister_func_to_anat'): - + name='bbregister_func_to_anat', + retry=False): """ Registers a functional scan in native space to structural. This is meant to be used after create_nonlinear_register() has been run and @@ -926,6 +926,8 @@ def create_bbregister_func_to_anat(phase_diff_distcor=False, take in the appropriate field map-related inputs. name : string, optional Name of the workflow. + retry : bool + Is this a second attempt? Returns ------- @@ -954,7 +956,7 @@ def create_bbregister_func_to_anat(phase_diff_distcor=False, outputspec.anat_func : string (nifti file) Functional data in anatomical space """ - + from CPAC.pipeline.random_state.seed import seed_plus_1 register_bbregister_func_to_anat = pe.Workflow(name=name) inputspec = pe.Node(util.IdentityInterface(fields=['func', @@ -979,6 +981,9 @@ def create_bbregister_func_to_anat(phase_diff_distcor=False, wm_bb_mask = pe.Node(interface=fsl.ImageMaths(), name='wm_bb_mask') + if retry: + seed = seed_plus_1() + wm_bb_mask.seed = seed register_bbregister_func_to_anat.connect( inputspec, 'bbr_wm_mask_args', @@ -994,6 +999,8 @@ def bbreg_args(bbreg_target): bbreg_func_to_anat = pe.Node(interface=fsl.FLIRT(), name='bbreg_func_to_anat') bbreg_func_to_anat.inputs.dof = 6 + if retry: + bbreg_func_to_anat.seed = seed register_bbregister_func_to_anat.connect( inputspec, 'bbr_schedule', @@ -2706,8 +2713,8 @@ def coregistration(wf, cfg, strat_pool, pipe_num, opt=None): "config": ["registration_workflows", "functional_registration", "coregistration"], "switch": ["run"], - "option_key": "None", - "option_val": "None", + "option_key": ["boundary_based_registration", "run"], + "option_val": [True, False, "fallback"], "inputs": [("desc-reginput_bold", "desc-motion_bold", "space-bold_label-WM_mask", @@ -2727,17 +2734,14 @@ def coregistration(wf, cfg, strat_pool, pipe_num, opt=None): "from-bold_to-T1w_mode-image_desc-linear_xfm", "from-bold_to-T1w_mode-image_desc-linear_warp"]} ''' - - diff_complete = False - if strat_pool.check_rpool("despiked-fieldmap") and \ - strat_pool.check_rpool("fieldmap-mask"): - diff_complete = True - + diff_complete = (strat_pool.check_rpool("despiked-fieldmap") and + strat_pool.check_rpool("fieldmap-mask")) + bbreg_status = "On" if opt is True else "Off" if isinstance( + opt, bool) else opt.title() + subwfname = f'func_to_anat_FLIRT_bbreg{bbreg_status}_{pipe_num}' if strat_pool.check_rpool('T2w') and cfg.anatomical_preproc['run_t2']: # monkey data - func_to_anat = create_register_func_to_anat_use_T2(cfg, - f'func_to_anat_FLIRT_' - f'{pipe_num}') + func_to_anat = create_register_func_to_anat_use_T2(cfg, subwfname) # https://github.com/DCAN-Labs/dcan-macaque-pipeline/blob/master/fMRIVolume/GenericfMRIVolumeProcessingPipeline.sh#L177 # fslmaths "$fMRIFolder"/"$NameOffMRI"_mc -Tmean "$fMRIFolder"/"$ScoutName"_gdc @@ -2765,8 +2769,7 @@ def coregistration(wf, cfg, strat_pool, pipe_num, opt=None): # if field map-based 
distortion correction is on, but BBR is off, # send in the distortion correction files here func_to_anat = create_register_func_to_anat(cfg, diff_complete, - f'func_to_anat_FLIRT_' - f'{pipe_num}') + subwfname) func_to_anat.inputs.inputspec.dof = cfg.registration_workflows[ 'functional_registration']['coregistration']['dof'] @@ -2815,22 +2818,19 @@ def coregistration(wf, cfg, strat_pool, pipe_num, opt=None): (func_to_anat, 'outputspec.func_to_anat_linear_xfm_nobbreg') } - if True in cfg.registration_workflows['functional_registration'][ - 'coregistration']["boundary_based_registration"]["run"]: - - func_to_anat_bbreg = create_bbregister_func_to_anat(diff_complete, - f'func_to_anat_' - f'bbreg_' - f'{pipe_num}') + if opt in [True, "fallback"]: + func_to_anat_bbreg = create_bbregister_func_to_anat( + diff_complete, f'func_to_anat_bbreg{bbreg_status}_{pipe_num}') func_to_anat_bbreg.inputs.inputspec.bbr_schedule = \ cfg.registration_workflows['functional_registration'][ 'coregistration']['boundary_based_registration'][ 'bbr_schedule'] - func_to_anat_bbreg.inputs.inputspec.bbr_wm_mask_args = \ cfg.registration_workflows['functional_registration'][ 'coregistration']['boundary_based_registration'][ 'bbr_wm_mask_args'] + bbreg_guardrail = registration_guardrail_node('bbreg_guardrail_' + f'{pipe_num}') node, out = strat_pool.get_data('desc-reginput_bold') wf.connect(node, out, func_to_anat_bbreg, 'inputspec.func') @@ -2840,12 +2840,14 @@ def coregistration(wf, cfg, strat_pool, pipe_num, opt=None): 'reference'] == 'whole-head': node, out = strat_pool.get_data('T1w') wf.connect(node, out, func_to_anat_bbreg, 'inputspec.anat') + wf.connect(node, out, bbreg_guardrail, 'reference') elif cfg.registration_workflows['functional_registration'][ 'coregistration']['boundary_based_registration'][ 'reference'] == 'brain': node, out = strat_pool.get_data('desc-brain_T1w') wf.connect(node, out, func_to_anat_bbreg, 'inputspec.anat') + wf.connect(node, out, bbreg_guardrail, 'reference') wf.connect(func_to_anat, 'outputspec.func_to_anat_linear_xfm_nobbreg', func_to_anat_bbreg, 'inputspec.linear_reg_matrix') @@ -2855,12 +2857,14 @@ def coregistration(wf, cfg, strat_pool, pipe_num, opt=None): wf.connect(node, out, func_to_anat_bbreg, 'inputspec.anat_wm_segmentation') else: - if cfg.registration_workflows['functional_registration'][ - 'coregistration']['boundary_based_registration']['bbr_wm_map'] == 'probability_map': + if cfg['registration_workflows', 'functional_registration', + 'coregistration', 'boundary_based_registration', + 'bbr_wm_map'] == 'probability_map': node, out = strat_pool.get_data(["label-WM_probseg", "label-WM_mask"]) - elif cfg.registration_workflows['functional_registration'][ - 'coregistration']['boundary_based_registration']['bbr_wm_map'] == 'partial_volume_map': + elif cfg['registration_workflows', 'functional_registration', + 'coregistration', 'boundary_based_registration', + 'bbr_wm_map'] == 'partial_volume_map': node, out = strat_pool.get_data(["label-WM_pveseg", "label-WM_mask"]) wf.connect(node, out, @@ -2881,14 +2885,56 @@ def coregistration(wf, cfg, strat_pool, pipe_num, opt=None): wf.connect(node, out, func_to_anat_bbreg, 'inputspec.fieldmapmask') + wf.connect(func_to_anat_bbreg, 'outputspec.anat_func', + bbreg_guardrail, 'registered') + + mean_bolds = pe.Node(Merge(2), run_without_submitting=True, + name=f'bbreg_mean_bold_choices_{pipe_num}') + xfms = pe.Node(Merge(2), run_without_submitting=True, + name=f'bbreg_xfm_choices_{pipe_num}') + fallback_mean_bolds = pe.Node(Select, 
run_without_submitting=True, + name='bbreg_choose_mean_bold_' + f'{pipe_num}') + fallback_xfms = pe.Node(Select, run_without_submitting=True, + name=f'bbreg_choose_xfm_{pipe_num}') + if opt is True: + # Retry once on failure + retry_node = create_bbregister_func_to_anat(diff_complete, + f'retry_func_to_anat_' + f'bbreg_{pipe_num}', + retry=True) + wf.connect([ + (func_to_anat_bbreg, mean_bolds, ['outputspec.anat_func', + 'in1']), + (retry_node, mean_bolds, ['outputspec.anat_func_nobbreg', + 'in1']), + (func_to_anat_bbreg, xfms, [ + 'outputspec.func_to_anat_linear_xfm', 'in2']), + (retry_node, xfms, [ + 'outputspec.func_to_anat_linear_xfm_nobbreg', 'in2'])]) + else: + # Fall back to no-BBReg + wf.connect([ + (func_to_anat_bbreg, mean_bolds, ['outputspec.anat_func', + 'in1']), + (func_to_anat, mean_bolds, ['outputspec.anat_func_nobbreg', + 'in1']), + (func_to_anat_bbreg, xfms, [ + 'outputspec.func_to_anat_linear_xfm', 'in2']), + (func_to_anat, xfms, [ + 'outputspec.func_to_anat_linear_xfm_nobbreg', 'in2'])]) + wf.connect([ + (mean_bolds, fallback_mean_bolds, ['out', 'inlist']), + (xfms, fallback_xfms, ['out', 'inlist']), + (bbreg_guardrail, fallback_mean_bolds, ['failed_qc', 'index']), + (bbreg_guardrail, fallback_xfms, ['failed_qc', 'index'])]) outputs = { - 'space-T1w_desc-mean_bold': - (func_to_anat_bbreg, 'outputspec.anat_func'), - 'from-bold_to-T1w_mode-image_desc-linear_xfm': - (func_to_anat_bbreg, 'outputspec.func_to_anat_linear_xfm') - } - - return (wf, outputs) + 'space-T1w_desc-mean_bold': (fallback_mean_bolds, 'out'), + 'from-bold_to-T1w_mode-image_desc-linear_xfm': (fallback_xfms, + 'out')} + print(2938) + print(outputs) + return wf, outputs def create_func_to_T1template_xfm(wf, cfg, strat_pool, pipe_num, opt=None): @@ -3101,10 +3147,10 @@ def warp_timeseries_to_T1template_abcd(wf, cfg, strat_pool, pipe_num, opt=None): convert_func_to_anat_linear_warp.inputs.out_relwarp = True convert_func_to_anat_linear_warp.inputs.relwarp = True - + node, out = strat_pool.get_data('desc-preproc_T1w') wf.connect(node, out, convert_func_to_anat_linear_warp, 'reference') - + if strat_pool.check_rpool('blip-warp'): node, out = strat_pool.get_data('from-bold_to-T1w_mode-image_desc-linear_xfm') wf.connect(node, out, convert_func_to_anat_linear_warp, 'postmat') diff --git a/CPAC/resources/configs/pipeline_config_default.yml b/CPAC/resources/configs/pipeline_config_default.yml index 72ec972c16..fbb1d3d00b 100644 --- a/CPAC/resources/configs/pipeline_config_default.yml +++ b/CPAC/resources/configs/pipeline_config_default.yml @@ -794,7 +794,8 @@ registration_workflows: boundary_based_registration: # this is a fork point - # run: [On, Off] - this will run both and fork the pipeline + # run: [On, Off, fallback] - this will run both and fork the pipeline + # if 'fallback' is one of the selected options, BBR will run and, if its output fails quality_thresholds, the pipeline will fallback to BBR's input image run: [On] # Standard FSL 5.0 Scheduler used for Boundary Based Registration. 
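
A note on how the fallback plumbing above is intended to work: the guardrail
node returns `failed_qc` as an integer (0 if every configured threshold in
`REGISTRATION_GUARDRAIL_THRESHOLDS` is met, 1 otherwise), and that integer is
used as the `index` of a `Select` node choosing between the merged candidate
outputs (the preferred registration at index 0, the fallback at index 1).
A minimal plain-Python sketch of that decision logic follows; the metric name
and threshold value here are hypothetical placeholders, not C-PAC's
configured values:

    def guardrail_index(qc_metrics, thresholds):
        """Return 0 if all thresholded metrics pass, else 1.

        Mirrors how ``failed_qc`` indexes a [preferred, fallback] pair.
        """
        for metric, threshold in thresholds.items():
            if threshold is None:
                continue
            value = qc_metrics.get(metric)
            if isinstance(value, list):
                value = value[0]
            if value is None or value < threshold:
                return 1  # failed QC: select the fallback entry
        return 0  # passed QC: keep the preferred registration


    # Hypothetical usage: a Dice of 0.5 fails an assumed 0.8 threshold,
    # so the fallback image (index 1) would be selected.
    candidates = ['bbreg_mean_bold.nii.gz', 'fallback_mean_bold.nii.gz']
    chosen = candidates[guardrail_index({'Dice': [0.5]}, {'Dice': 0.8})]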
From 492c8236187b0e824edc4e0d2f1d12ab973a2566 Mon Sep 17 00:00:00 2001 From: Jon Clucas Date: Thu, 20 Oct 2022 13:47:19 +0000 Subject: [PATCH 25/72] :recycle: Rewire coreg for bbreg fallback options --- CPAC/registration/registration.py | 76 +++++++++++++++++++++---------- 1 file changed, 53 insertions(+), 23 deletions(-) diff --git a/CPAC/registration/registration.py b/CPAC/registration/registration.py index 60c585c16f..8dbcd35b10 100644 --- a/CPAC/registration/registration.py +++ b/CPAC/registration/registration.py @@ -2829,11 +2829,29 @@ def coregistration(wf, cfg, strat_pool, pipe_num, opt=None): cfg.registration_workflows['functional_registration'][ 'coregistration']['boundary_based_registration'][ 'bbr_wm_mask_args'] - bbreg_guardrail = registration_guardrail_node('bbreg_guardrail_' - f'{pipe_num}') + bbreg_guardrail = registration_guardrail_node( + f'bbreg{bbreg_status}_guardrail_{pipe_num}') + if opt is True: + # Retry once on failure + retry_node = create_bbregister_func_to_anat(diff_complete, + f'retry_func_to_anat_' + f'bbreg_{pipe_num}', + retry=True) + retry_node.inputs.inputspec.bbr_schedule = cfg[ + 'registration_workflows', 'functional_registration', + 'coregistration', 'boundary_based_registration', + 'bbr_schedule'] + retry_node.inputs.inputspec.bbr_wm_mask_args = cfg[ + 'registration_workflows', 'functional_registration', + 'coregistration', 'boundary_based_registration', + 'bbr_wm_mask_args'] + retry_guardrail = registration_guardrail_node( + f'retry_bbreg_guardrail_{pipe_num}') node, out = strat_pool.get_data('desc-reginput_bold') wf.connect(node, out, func_to_anat_bbreg, 'inputspec.func') + if opt is True: + wf.connect(node, out, retry_node, 'inputspec.func') if cfg.registration_workflows['functional_registration'][ 'coregistration']['boundary_based_registration'][ @@ -2841,6 +2859,9 @@ def coregistration(wf, cfg, strat_pool, pipe_num, opt=None): node, out = strat_pool.get_data('T1w') wf.connect(node, out, func_to_anat_bbreg, 'inputspec.anat') wf.connect(node, out, bbreg_guardrail, 'reference') + if opt is True: + wf.connect(node, out, retry_node, 'inputspec.anat') + wf.connect(node, out, retry_guardrail, 'reference') elif cfg.registration_workflows['functional_registration'][ 'coregistration']['boundary_based_registration'][ @@ -2848,14 +2869,19 @@ def coregistration(wf, cfg, strat_pool, pipe_num, opt=None): node, out = strat_pool.get_data('desc-brain_T1w') wf.connect(node, out, func_to_anat_bbreg, 'inputspec.anat') wf.connect(node, out, bbreg_guardrail, 'reference') + if opt is True: + wf.connect(node, out, retry_node, 'inputspec.anat') + wf.connect(node, out, retry_guardrail, 'reference') wf.connect(func_to_anat, 'outputspec.func_to_anat_linear_xfm_nobbreg', func_to_anat_bbreg, 'inputspec.linear_reg_matrix') + if opt is True: + wf.connect(func_to_anat, + 'outputspec.func_to_anat_linear_xfm_nobbreg', + retry_node, 'inputspec.linear_reg_matrix') if strat_pool.check_rpool('space-bold_label-WM_mask'): node, out = strat_pool.get_data(["space-bold_label-WM_mask"]) - wf.connect(node, out, - func_to_anat_bbreg, 'inputspec.anat_wm_segmentation') else: if cfg['registration_workflows', 'functional_registration', 'coregistration', 'boundary_based_registration', @@ -2867,47 +2893,54 @@ def coregistration(wf, cfg, strat_pool, pipe_num, opt=None): 'bbr_wm_map'] == 'partial_volume_map': node, out = strat_pool.get_data(["label-WM_pveseg", "label-WM_mask"]) - wf.connect(node, out, - func_to_anat_bbreg, 'inputspec.anat_wm_segmentation') + wf.connect(node, out, + func_to_anat_bbreg, 
'inputspec.anat_wm_segmentation') + if opt is True: + wf.connect(node, out, retry_node, 'inputspec.anat_wm_segmentation') if diff_complete: node, out = strat_pool.get_data('effectiveEchoSpacing') wf.connect(node, out, func_to_anat_bbreg, 'echospacing_input.echospacing') + if opt is True: + wf.connect(node, out, + retry_node, 'echospacing_input.echospacing') node, out = strat_pool.get_data('diffphase-pedir') wf.connect(node, out, func_to_anat_bbreg, 'pedir_input.pedir') + if opt is True: + wf.connect(node, out, retry_node, 'pedir_input.pedir') node, out = strat_pool.get_data("despiked-fieldmap") wf.connect(node, out, func_to_anat_bbreg, 'inputspec.fieldmap') + if opt is True: + wf.connect(node, out, retry_node, 'inputspec.fieldmap') node, out = strat_pool.get_data("fieldmap-mask") wf.connect(node, out, func_to_anat_bbreg, 'inputspec.fieldmapmask') + if opt is True: + wf.connect(node, out, retry_node, 'inputspec.fieldmapmask') wf.connect(func_to_anat_bbreg, 'outputspec.anat_func', bbreg_guardrail, 'registered') + if opt is True: + wf.connect(func_to_anat_bbreg, 'outputspec.anat_func', + retry_guardrail, 'registered') mean_bolds = pe.Node(Merge(2), run_without_submitting=True, - name=f'bbreg_mean_bold_choices_{pipe_num}') + name=f'bbreg_mean_bold_choices_{pipe_num}') xfms = pe.Node(Merge(2), run_without_submitting=True, - name=f'bbreg_xfm_choices_{pipe_num}') + name=f'bbreg_xfm_choices_{pipe_num}') fallback_mean_bolds = pe.Node(Select, run_without_submitting=True, - name='bbreg_choose_mean_bold_' - f'{pipe_num}') + name=f'bbreg_choose_mean_bold_{pipe_num}' + ) fallback_xfms = pe.Node(Select, run_without_submitting=True, name=f'bbreg_choose_xfm_{pipe_num}') if opt is True: - # Retry once on failure - retry_node = create_bbregister_func_to_anat(diff_complete, - f'retry_func_to_anat_' - f'bbreg_{pipe_num}', - retry=True) wf.connect([ - (func_to_anat_bbreg, mean_bolds, ['outputspec.anat_func', - 'in1']), - (retry_node, mean_bolds, ['outputspec.anat_func_nobbreg', - 'in1']), + (bbreg_guardrail, mean_bolds, ['registered', 'in1']), + (retry_guardrail, mean_bolds, ['registered', 'in1']), (func_to_anat_bbreg, xfms, [ 'outputspec.func_to_anat_linear_xfm', 'in2']), (retry_node, xfms, [ @@ -2915,8 +2948,7 @@ def coregistration(wf, cfg, strat_pool, pipe_num, opt=None): else: # Fall back to no-BBReg wf.connect([ - (func_to_anat_bbreg, mean_bolds, ['outputspec.anat_func', - 'in1']), + (bbreg_guardrail, mean_bolds, ['registered', 'in1']), (func_to_anat, mean_bolds, ['outputspec.anat_func_nobbreg', 'in1']), (func_to_anat_bbreg, xfms, [ @@ -2932,8 +2964,6 @@ def coregistration(wf, cfg, strat_pool, pipe_num, opt=None): 'space-T1w_desc-mean_bold': (fallback_mean_bolds, 'out'), 'from-bold_to-T1w_mode-image_desc-linear_xfm': (fallback_xfms, 'out')} - print(2938) - print(outputs) return wf, outputs From e8f0c324a106b58c818cdaa0bd1ac9215ae7417e Mon Sep 17 00:00:00 2001 From: Jon Clucas Date: Thu, 20 Oct 2022 14:10:24 +0000 Subject: [PATCH 26/72] :adhesive_bandage: ~~el~~if --- CPAC/distortion_correction/utils.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/CPAC/distortion_correction/utils.py b/CPAC/distortion_correction/utils.py index 0649cc40c0..ed962a35c5 100644 --- a/CPAC/distortion_correction/utils.py +++ b/CPAC/distortion_correction/utils.py @@ -189,7 +189,7 @@ def phase_encode(unwarp_dir, phase_one, phase_two, dwell_time_one=None, n_PE_steps = dim - 1 ro_time_one = np.round(dwell_time_one * n_PE_steps, 6) ro_time_two = np.round(dwell_time_two * n_PE_steps, 6) - elif ro_time_one and 
ro_time_two: + if ro_time_one and ro_time_two: ro_times = [f"-1 0 0 {ro_time_one}", f"1 0 0 {ro_time_two}"] else: raise Exception("[!] No dwell time or total readout time " @@ -200,7 +200,7 @@ def phase_encode(unwarp_dir, phase_one, phase_two, dwell_time_one=None, n_PE_steps = dim - 1 ro_time_one = np.round(dwell_time_one * n_PE_steps, 6) ro_time_two = np.round(dwell_time_two * n_PE_steps, 6) - elif ro_time_one and ro_time_two: + if ro_time_one and ro_time_two: ro_times = [f"0 -1 0 {ro_time_one}", f"0 1 0 {ro_time_two}"] else: raise Exception("[!] No dwell time or total readout time " From 1190804a5a3709f8fa0ee594ef662e3348a395fe Mon Sep 17 00:00:00 2001 From: Jon Clucas Date: Thu, 20 Oct 2022 14:11:26 +0000 Subject: [PATCH 27/72] :rotating_light: Minor linting --- CPAC/distortion_correction/utils.py | 4 ++-- CPAC/registration/registration.py | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/CPAC/distortion_correction/utils.py b/CPAC/distortion_correction/utils.py index ed962a35c5..8df4ff6f6e 100644 --- a/CPAC/distortion_correction/utils.py +++ b/CPAC/distortion_correction/utils.py @@ -183,7 +183,7 @@ def phase_encode(unwarp_dir, phase_one, phase_two, dwell_time_one=None, if isinstance(unwarp_dir, bytes): unwarp_dir = unwarp_dir.decode() - if unwarp_dir in ["x", "x-", "-x","i","-i","i-"]: + if unwarp_dir in ["x", "x-", "-x", "i", "-i", "i-"]: if dwell_time_one and dwell_time_two: dim = nibabel.load(phase_one).shape[0] n_PE_steps = dim - 1 @@ -194,7 +194,7 @@ def phase_encode(unwarp_dir, phase_one, phase_two, dwell_time_one=None, else: raise Exception("[!] No dwell time or total readout time " "present for the acq-fMRI EPI field maps.") - elif unwarp_dir in ["y", "y-", "-y","j","-j","j-"]: + elif unwarp_dir in ["y", "y-", "-y", "j", "-j", "j-"]: if dwell_time_one and dwell_time_two: dim = nibabel.load(phase_one).shape[1] n_PE_steps = dim - 1 diff --git a/CPAC/registration/registration.py b/CPAC/registration/registration.py index 8dbcd35b10..993a27c4aa 100644 --- a/CPAC/registration/registration.py +++ b/CPAC/registration/registration.py @@ -1573,7 +1573,7 @@ def ANTs_registration_connector(wf_name, cfg, params, orig="T1w", output_names=['checked_transform_list', 'list_length'], function=check_transforms), - name=f'check_transforms', + name='check_transforms', mem_gb=6) wf.connect(collect_transforms, 'out', check_transform, 'transform_list') From 66e4ce614ca1c33856459b2888f342f44167a687 Mon Sep 17 00:00:00 2001 From: Jon Clucas Date: Thu, 20 Oct 2022 18:23:17 +0000 Subject: [PATCH 28/72] :recycle: Move logger import + declaration into guardrail function --- CPAC/registration/guardrails.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/CPAC/registration/guardrails.py b/CPAC/registration/guardrails.py index da173dfe3d..0ae4c4d2fc 100644 --- a/CPAC/registration/guardrails.py +++ b/CPAC/registration/guardrails.py @@ -15,7 +15,6 @@ # You should have received a copy of the GNU Lesser General Public # License along with C-PAC. If not, see . 
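# Editorial aside on the `~~el~~if` patch above (CPAC/distortion_correction/utils.py):
# with `elif`, readout times derived from the dwell times were computed but never used,
# because the branch that builds `ro_times` was skipped. A simplified, self-contained
# sketch of the corrected flow (not the actual function; names and values illustrative):
def pick_readout_line(dwell_time=None, ro_time=None, n_pe_steps=71):
    if dwell_time:                  # derive the readout time from the dwell time
        ro_time = round(dwell_time * n_pe_steps, 6)
    if ro_time:                     # `elif` here would skip this branch for derived values
        return f"-1 0 0 {ro_time}"
    raise Exception("[!] No dwell time or total readout time present")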
"""Guardrails to protect against bad registrations""" -import logging from copy import deepcopy from nipype.interfaces.ants import Registration from nipype.interfaces.fsl import FLIRT @@ -24,7 +23,6 @@ from CPAC.pipeline.nipype_pipeline_engine.utils import connect_from_spec from CPAC.qc import qc_masks, REGISTRATION_GUARDRAIL_THRESHOLDS -logger = logging.getLogger('nipype.workflow') _SPEC_KEYS = { FLIRT: {'reference': 'reference', 'registered': 'out_file'}, Registration: {'reference': 'reference', 'registered': 'out_file'}} @@ -84,6 +82,8 @@ def registration_guardrail(registered: str, reference: str, retry: bool = False metrics met specified thresholds?, used as index for selecting outputs """ + import logging + logger = logging.getLogger('nipype.workflow') qc_metrics = qc_masks(registered, reference) failed_qc = 0 for metric, threshold in REGISTRATION_GUARDRAIL_THRESHOLDS.items(): From 328b569738af1b6908889bdf6fad3ed745964df3 Mon Sep 17 00:00:00 2001 From: Jon Clucas Date: Thu, 20 Oct 2022 18:34:34 +0000 Subject: [PATCH 29/72] :twisted_rightwards_arrows: Merge dotgraph into enh/guardrails MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Squashed commit of the following: commit ba782c10dcd9219bffd835f97bc4d4204c01a3d2 Author: Jon Clucas Date: Tue Oct 18 14:36:38 2022 -0400 :truck: Include graph2use in graph filenames commit b132f10ba943d0546e84a6b3a42271ed33423bea Author: Jon Clucas Date: Tue Oct 18 12:32:44 2022 -0400 :wrench: Add graphviz options to pipeline config commit a72f1a6b695fc973cab650e8f03243bca6f37453 Author: Jon Clucas Date: Mon Oct 17 17:25:27 2022 -0400 :art: Update dotgraph building to quote strings (to allow `-`s) commit 356dc30cd61e62305818f37a761f5ebf1f93cc6b Merge: b1ce64b5 55667a2f Author: Steve Giavasis Date: Fri Oct 14 16:33:28 2022 -0400 Merge pull request #1778 from FCP-INDI/deeper-config-path 🚚 Move C-PAC generated configs into `log/sub-${sub}_ses-${ses}` subdirectory commit b1ce64b5976b18356268a0a61a37cdb33e2d67f2 Merge: 37a6874d c6ec0a0a Author: Steve Giavasis Date: Fri Oct 14 16:33:19 2022 -0400 Merge pull request #1769 from FCP-INDI/feature/total_readout_time Ingress and use TotalReadoutTime from epi fmap meta-data in FSL topup commit c6ec0a0ad333faa95f71b3797bc744f57fad813c Merge: 8301e3c0 37a6874d Author: Steve Giavasis Date: Fri Oct 14 16:03:28 2022 -0400 Merge branch 'develop' into feature/total_readout_time commit 8301e3c09994b2128d72b8394d91976903589c08 Author: Steve Giavasis Date: Fri Oct 14 15:56:44 2022 -0400 Revert to using dwell time first to maintain original DCAN script's functionality commit 55667a2f12cadda692cd0a4b404a71b71660ebc0 Author: Jon Clucas Date: Fri Oct 14 15:55:55 2022 -0400 :art: Include minimized pipeline config in hardlinking step commit a641d00d7afc97b810abe549530dd7edcb5f51c4 Author: Jon Clucas Date: Fri Oct 14 15:44:42 2022 -0400 :lock: Make generated config files read-only Co-authored-by: Steve Giavasis commit 37a6874da3c25e4e7695d8e753976c11a5239d80 Merge: b5d0d1f8 5b2465fc Author: Steve Giavasis Date: Fri Oct 14 15:46:39 2022 -0400 Merge pull request #1787 from diegoaper/main :bug: :white_check_mark: Group Runner 1.8 and MDMR Changes commit cb5bc42d7af48dbd42f36cb41a4d0c2009bc08ab Merge: 9c0a89c5 b5d0d1f8 Author: Jon Clucas Date: Fri Oct 14 15:37:53 2022 -0400 :twisted_rightwards_arrows: Merge develop into deeper-config-path commit b5d0d1f821fc094fdcdf35213b644ee4fd519e18 Merge: 7f50838c dc3f1136 Author: Steve Giavasis Date: Fri Oct 14 15:19:13 2022 -0400 Merge pull request #1796 from 
clane9/bugfix/random_seed :bug: fix type handling in `set_up_random_state` commit dc3f113674b9c4b6583f1d2faf22851e01de3180 Merge: 0ccc40d9 7f50838c Author: Steve Giavasis Date: Fri Oct 14 15:18:31 2022 -0400 Merge branch 'develop' into bugfix/random_seed commit 7f50838ce42add51e5ea954532baecc1e72fdddd Merge: 8ebf0a78 c2ba9d6e Author: Steve Giavasis Date: Fri Oct 14 15:17:39 2022 -0400 Merge pull request #1790 from FCP-INDI/backwards-compatibility/default_pipeline.yml ⚰️ Add placeholder default_pipeline.yml for backwards compatibility commit 8ebf0a784156dd254a3bf070234ea0040bc3da66 Merge: fd91a429 0182f98c Author: Steve Giavasis Date: Fri Oct 14 15:16:59 2022 -0400 Merge pull request #1788 from FCP-INDI/rbc-update 🔧 Base `rbc-options` on `fmriprep-options`' preproc commit fd91a4292ff35229cb8c5e1fa7e892b8c4a6720f Merge: 6b402b0d 76a4a751 Author: Steve Giavasis Date: Fri Oct 14 15:16:14 2022 -0400 Merge pull request #1781 from FCP-INDI/fix/FUGUE--dwell 🐛 Fix `FUGUE --dwell` parameterization commit 0ccc40d955956c66a714f4a10a0bc58e5332a40a Author: Connor Lane Date: Thu Oct 13 16:55:12 2022 -0400 move changelog entry to the unreleased section commit b5c742a12e42b68ff78e2f039a24cdeb714216e1 Author: Connor Lane Date: Wed Oct 12 17:43:26 2022 -0400 update changelog commit 6b402b0d8cc44e0b5ef70d8c0aeef768ca1d6375 Author: Jon Clucas Date: Wed Oct 12 16:12:59 2022 -0400 fixup! :fast_forward: Bring `develop` up to date with `main` commit 5867210016c5df683458847ac0c8cabed3a17112 Author: Connor Lane Date: Wed Oct 12 09:48:39 2022 -0400 :bug: fix type handling in `set_up_random_state` commit 5b2465fcc8a9986462db774046edbe55d4b91f7e Author: diegoaper <88197703+diegoaper@users.noreply.github.com> Date: Tue Oct 11 11:48:57 2022 -0400 Removed participant Configuration commit 92f9658a869432f86e7e2cbfcfb8f4a9a8675f9c Author: diegoaper <88197703+diegoaper@users.noreply.github.com> Date: Mon Oct 10 14:09:46 2022 -0400 Update cpac_group_runner.py commit a112d34b559d2991ac91472f7159cd08400370e5 Author: diegoaper <88197703+diegoaper@users.noreply.github.com> Date: Mon Oct 10 10:40:56 2022 -0400 Update CPAC/pipeline/cpac_group_runner.py Co-authored-by: Jon Clucas commit 031946af377930cd21e736768bd141f1ebbc5ad7 Author: diegoaper <88197703+diegoaper@users.noreply.github.com> Date: Mon Oct 10 10:40:35 2022 -0400 Update CPAC/pipeline/cpac_group_runner.py Co-authored-by: Jon Clucas commit df2bb336f2228bc5287f879dd9c9e4a33ccd64d2 Author: diegoaper <88197703+diegoaper@users.noreply.github.com> Date: Mon Oct 10 10:40:00 2022 -0400 Update CPAC/pipeline/cpac_group_runner.py Co-authored-by: Jon Clucas commit c5d306da403533bee48bfe189dd4df1c191e1503 Author: diegoaper <88197703+diegoaper@users.noreply.github.com> Date: Mon Oct 10 10:39:46 2022 -0400 Update CPAC/pipeline/cpac_group_runner.py Co-authored-by: Jon Clucas commit e903b982c02f0a8a9dae58e195f372ca1fee866f Author: diegoaper <88197703+diegoaper@users.noreply.github.com> Date: Mon Oct 10 10:39:16 2022 -0400 Update CPAC/pipeline/cpac_group_runner.py Co-authored-by: Jon Clucas commit 82e2ece25f52e9e553f2c27aae05463b011b2cf7 Author: diegoaper <88197703+diegoaper@users.noreply.github.com> Date: Mon Oct 10 10:38:50 2022 -0400 Update CPAC/pipeline/cpac_group_runner.py Co-authored-by: Jon Clucas commit d75dbe37fb9e19f9c189ee4abbe12383b6aec137 Author: diegoaper <88197703+diegoaper@users.noreply.github.com> Date: Mon Oct 10 10:38:32 2022 -0400 Update CPAC/pipeline/cpac_group_runner.py Co-authored-by: Jon Clucas commit 9407c63f4f0b5f77292d7b4c709d79aed6f09a85 Author: diegoaper 
<88197703+diegoaper@users.noreply.github.com> Date: Mon Oct 10 10:38:11 2022 -0400 Update CPAC/pipeline/cpac_group_runner.py Co-authored-by: Jon Clucas commit 61097e3a4e7724222575e9a078746d25fcffa7fe Merge: 0ba271af 750c9ecc Author: diegoaper <88197703+diegoaper@users.noreply.github.com> Date: Mon Oct 10 10:37:40 2022 -0400 Merge branch 'main' of https://github.com/diegoaper/C-PAC_MDMR-Group_Runner commit 0ba271af3f0bdfaaf8779f9a717863013a7d413d Author: diegoaper <88197703+diegoaper@users.noreply.github.com> Date: Mon Oct 10 10:37:29 2022 -0400 Update cpac_group_runner.py commit 750c9ecc80660d1cac404dcd82f58934d6da3b30 Author: diegoaper <88197703+diegoaper@users.noreply.github.com> Date: Mon Oct 10 10:37:04 2022 -0400 Update CPAC/pipeline/cpac_group_runner.py Co-authored-by: Jon Clucas commit cf81d08bd8cd897c38c6b97574461e7654e90a51 Author: diegoaper <88197703+diegoaper@users.noreply.github.com> Date: Fri Oct 7 17:36:52 2022 -0400 Update CPAC/pipeline/cpac_group_runner.py Co-authored-by: Jon Clucas commit e570fbfb480b69692ed58bfec277ddaa94abc805 Author: diegoaper <88197703+diegoaper@users.noreply.github.com> Date: Fri Oct 7 17:36:38 2022 -0400 Update CPAC/pipeline/cpac_group_runner.py Co-authored-by: Jon Clucas commit 2eea1f40f38ad0225d74ed71c7445a5f6b6d34e3 Author: diegoaper <88197703+diegoaper@users.noreply.github.com> Date: Fri Oct 7 17:35:59 2022 -0400 Update CPAC/pipeline/cpac_group_runner.py Co-authored-by: Jon Clucas commit 99620c621f498708e14b088d2a065b8b2f985aab Author: diegoaper <88197703+diegoaper@users.noreply.github.com> Date: Fri Oct 7 17:35:44 2022 -0400 Update CPAC/pipeline/cpac_group_runner.py Co-authored-by: Jon Clucas commit cf8f0d9acf89aa0721f461331cb4457520ee5cb4 Author: diegoaper <88197703+diegoaper@users.noreply.github.com> Date: Fri Oct 7 17:33:11 2022 -0400 Update CPAC/pipeline/cpac_group_runner.py Co-authored-by: Jon Clucas commit de6235f1677214172aee2516c187e3f8d4b4e7cc Author: diegoaper <88197703+diegoaper@users.noreply.github.com> Date: Fri Oct 7 17:32:40 2022 -0400 Update CPAC/pipeline/cpac_group_runner.py Co-authored-by: Jon Clucas commit ebe6a272b5f1b1f66db81c346f55cf07d642bf25 Author: diegoaper <88197703+diegoaper@users.noreply.github.com> Date: Fri Oct 7 17:32:26 2022 -0400 Update CPAC/pipeline/cpac_group_runner.py Co-authored-by: Jon Clucas commit c50db8dd8ad4618cb5366cc338e2a7b6abd5b5cb Author: diegoaper <88197703+diegoaper@users.noreply.github.com> Date: Fri Oct 7 17:30:13 2022 -0400 Update CPAC/pipeline/cpac_group_runner.py Co-authored-by: Jon Clucas commit 748f3766d7e4f559480bea63f973a9255d743487 Author: diegoaper <88197703+diegoaper@users.noreply.github.com> Date: Fri Oct 7 17:29:26 2022 -0400 Update CPAC/cwas/pipeline.py Co-authored-by: Jon Clucas commit 6d938f4916c51e0c508fe9ee36bd22c26662753b Author: diegoaper <88197703+diegoaper@users.noreply.github.com> Date: Fri Oct 7 17:28:55 2022 -0400 Update CPAC/cwas/pipeline.py Co-authored-by: Jon Clucas commit 0b2b868e4449af08a06dc143b6b4363748780f4c Author: diegoaper <88197703+diegoaper@users.noreply.github.com> Date: Fri Oct 7 17:28:20 2022 -0400 Update CPAC/cwas/pipeline.py Co-authored-by: Jon Clucas commit 281427d7756862d0db114608741fcaacf93e5698 Author: diegoaper <88197703+diegoaper@users.noreply.github.com> Date: Fri Oct 7 17:27:24 2022 -0400 Update CPAC/cwas/cwas.py Co-authored-by: Jon Clucas commit 63c3ad4f453c6027a172d7b3b68b3bdfbc953f6e Author: diegoaper <88197703+diegoaper@users.noreply.github.com> Date: Fri Oct 7 17:27:15 2022 -0400 Update CPAC/cwas/cwas.py Co-authored-by: Jon Clucas commit 
ca75679c7c76245fed77a4ea2000e188c8467413 Author: diegoaper <88197703+diegoaper@users.noreply.github.com> Date: Fri Oct 7 17:27:05 2022 -0400 Update CPAC/cwas/cwas.py Co-authored-by: Jon Clucas commit 22ed56abac40e808b150f88f05931269a0fb373b Author: diegoaper <88197703+diegoaper@users.noreply.github.com> Date: Fri Oct 7 17:26:54 2022 -0400 Update CPAC/cwas/cwas.py Co-authored-by: Jon Clucas commit 490e319acdbcd2901878c4ffff1149abbebd610b Author: diegoaper <88197703+diegoaper@users.noreply.github.com> Date: Fri Oct 7 17:26:32 2022 -0400 Update CPAC/cwas/cwas.py Co-authored-by: Jon Clucas commit 846286507c1c87f9a2124e1420693b9d77cbfb80 Author: diegoaper <88197703+diegoaper@users.noreply.github.com> Date: Fri Oct 7 17:25:57 2022 -0400 Update CPAC/resources/configs/group_config_template.yml Co-authored-by: Jon Clucas commit 3269c0ee924f414020ac066a583ee05c12a346a7 Author: diegoaper <88197703+diegoaper@users.noreply.github.com> Date: Fri Oct 7 17:25:14 2022 -0400 Update CPAC/pipeline/cpac_group_runner.py Co-authored-by: Jon Clucas commit bb6458b6516423bd366ede7bf11cd454bb4a065d Author: diegoaper <88197703+diegoaper@users.noreply.github.com> Date: Fri Oct 7 17:22:33 2022 -0400 Update CPAC/pipeline/cpac_group_runner.py Co-authored-by: Jon Clucas commit fe1dff4866db62227043e4567844b6a2cfee5dfd Author: diegoaper <88197703+diegoaper@users.noreply.github.com> Date: Fri Oct 7 17:22:25 2022 -0400 Update CPAC/cwas/cwas.py Co-authored-by: Jon Clucas commit 6fcb279a9b404215d3617ca5bc4c19e38c0d27eb Author: diegoaper <88197703+diegoaper@users.noreply.github.com> Date: Fri Oct 7 17:22:10 2022 -0400 Update CPAC/cwas/cwas.py Co-authored-by: Jon Clucas commit f25b604bf4ecefd7798a9167b3acb5fcb3591103 Author: diegoaper <88197703+diegoaper@users.noreply.github.com> Date: Fri Oct 7 17:22:04 2022 -0400 Update CPAC/cwas/cwas.py Co-authored-by: Jon Clucas commit 35318a58572b5d9c508c7f6b35e78808c2812b88 Author: diegoaper <88197703+diegoaper@users.noreply.github.com> Date: Fri Oct 7 17:21:52 2022 -0400 Update CPAC/cwas/cwas.py Co-authored-by: Jon Clucas commit 6ccce46f49e4840f1837cad8c130137e224f8730 Author: diegoaper <88197703+diegoaper@users.noreply.github.com> Date: Fri Oct 7 17:20:28 2022 -0400 Update CPAC/cwas/cwas.py Co-authored-by: Jon Clucas commit 6dc4a2b904529504a21a91252d4dbfa3b4418fc8 Author: diegoaper <88197703+diegoaper@users.noreply.github.com> Date: Fri Oct 7 10:55:53 2022 -0400 Update cpac_outputs.tsv Added 2 lines for MDMR group_analysis output commit b6ed208a307dde12be98df6a1b3f8b39163d5b1a Author: diegoaper <88197703+diegoaper@users.noreply.github.com> Date: Fri Oct 7 10:48:47 2022 -0400 Update group_config_template.yml commit 2ab1d648b08104e22a878dec2128f5430f61304e Author: diegoaper <88197703+diegoaper@users.noreply.github.com> Date: Fri Oct 7 10:48:09 2022 -0400 Fixed 1.8.4 image intake Fixed the intake for images to make the 1.8.4 BIDS convention. 
commit 68e34164d1392e457a2c181ea64fbf1e39cdc644 Merge: 32632fa3 9586d714 Author: Jon Clucas Date: Fri Oct 7 10:20:22 2022 -0400 :fast_forward: Bring `develop` up to date with `main` commit c2ba9d6e40d640f2ca62da1ac039fa80329aae94 Author: Jon Clucas Date: Wed Oct 5 15:16:39 2022 -0400 :coffin: Add placeholder default_pipeline.yml for backwards compatibility [skip ci] commit 76a4a751885a989ea5cfcdaa646532b8d81e5bde Author: Jon Clucas Date: Wed Oct 5 17:51:41 2022 +0000 :recycle: Send 'desc-mean_bold' as input to `distcor_blip_fsl_topup` Co-authored-by: Steve Giavasis commit 0182f98c61cb7fbb495c8300e6a6a7991c859240 Author: Jon Clucas Date: Wed Oct 5 11:12:03 2022 -0400 :memo: Add rbc-options update to CHANGELOG [skip ci] Co-authored-by: Greg Kiar commit 52089447e163efbba5248c430ce7defd1e281847 Author: Jon Clucas Date: Tue Oct 4 17:00:49 2022 -0400 :wrench: Base rbc-options on fmriprep-options Co-authored-by: Greg Kiar commit 1ab4086a07cc74d645624c56e404c4002ca527a9 Author: Jon Clucas Date: Tue Oct 4 20:48:57 2022 +0000 :recycle: Replumb to prevent wild TOPUP forking Co-authored-by: Steve Giavasis commit c3ac6ac0ef3e9d64e3bf54a0e1d2d94cb4399e85 Author: diegoaper <88197703+diegoaper@users.noreply.github.com> Date: Tue Oct 4 15:48:30 2022 -0400 MDMR Group Config Template Update to the scripts and config template commit eee5461c92410a30cb010b6ca958a5dd528b9c97 Author: Jon Clucas Date: Fri Sep 30 21:21:15 2022 +0000 :recycle: Fix cyclical imports commit db2785138f1a7c8af5b3adc61347c5d113dcc043 Author: Jon Clucas Date: Fri Sep 30 16:37:46 2022 -0400 :alien: Import typehints in function node commit c1481538fd0971aa03b6d7aabf331610663bbeba Author: Jon Clucas Date: Fri Sep 30 15:41:34 2022 +0000 :loud_sound: Add FSL-`DwellTime` fix to CHANGELOG [skip ci] Co-authored-by: Nathalia Esper Co-authored-by: Alex Franco commit e53581ba558b2265ad2ed2aaebda80073f2751be Author: Jon Clucas Date: Fri Sep 30 15:27:52 2022 +0000 :bug: Fix import path commit 233858594fe3fbe72c12156b5af34f19a3c78ff1 Author: Jon Clucas Date: Fri Sep 30 15:24:38 2022 +0000 :page_facing_up: Add/update LGPL-3-or-later notice to modified files [skip ci] commit 109d5362ff4ba303948fd77b6713a6b2ac1e30e7 Author: Jon Clucas Date: Fri Sep 30 15:11:39 2022 +0000 :recycle: Only look for `EffectiveEchoSpacing` in functional sidecar commit 695d29a38f54610e5f91ecb34085cb1934e42c11 Author: Jon Clucas Date: Fri Sep 30 14:41:04 2022 +0000 :rotating_light: Remove duplicate good-name [skip ci] commit c9bea1881402c104d4a1ceb57b3282fc682151df Author: Jon Clucas Date: Fri Sep 30 14:37:12 2022 +0000 :recycle: DRY and let `calc_delta_te_and_asym_ratio` determine EES source commit 55ca4d91270ec750a7ff5ab9e774f838e18b8146 Author: Jon Clucas Date: Thu Sep 29 14:09:12 2022 +0000 :bug: Fix logic checking for `diffphase-effectiveEchoSpacing` commit d41dc2922c3e9133ecccba84020e2b15ac728bdc Author: Jon Clucas Date: Wed Sep 28 12:42:26 2022 -0400 :bug: Only connect calc_delta_ratio if diff commit ca86b0b99671c66bb75874e010b192a4f445d4b3 Author: Jon Clucas Date: Wed Sep 28 09:56:32 2022 -0400 :bug: Fix plumbing for EffectiveEchoSpacing commit f7ca070e9a4dd3967f45f57b33155b9b6857d8bf Author: Jon Clucas Date: Tue Sep 27 17:26:49 2022 -0400 :recycle: Get EffectiveEchoSpacing from func metadata if possible commit 376d5508b7011008ab0c6b6857fe401f882eb7d1 Author: Jon Clucas Date: Tue Sep 27 15:19:59 2022 -0400 :bug: Update DwellTime → EffectiveEchoSpacing for FSL FUGUE Co-authored-by: Nathalia Esper Co-authored-by: Alex Franco commit 32632fa3dcf0c60f2f966df3b19b54ba7d66d687 
Merge: eed40c51 238f4991 Author: Steve Giavasis Date: Mon Sep 26 13:23:08 2022 -0400 Merge pull request #1777 from FCP-INDI/differentiate-sametime-data-configs 🐛 Differentiate sametime data and pipeline configs commit eed40c51d9c63914339422f966ec8da7cfe8744e Merge: 76eb34d2 a64541e0 Author: Steve Giavasis Date: Mon Sep 26 13:22:50 2022 -0400 Merge pull request #1775 from FCP-INDI/deeper-working-path 🚚 Use `f'pipeline_{pipeline_name}'` subdirectories for `log`, `working`, and `output` commit 76eb34d278004dcd2389012f93ab42fa7558089c Author: sgiavasis Date: Mon Sep 26 17:21:02 2022 +0000 Merge resolution for latest nuisance and native/template-space options changes. commit b30a2e0f5fa3016c0c2cbba1d24ac0cd8c71d011 Merge: 1e24c690 ea182429 Author: sgiavasis Date: Sun Sep 25 08:30:26 2022 +0000 Merge branch 'dev-fix' into develop commit ea182429bd4fb8e50d796f86b92eccd3098bd4c5 Author: sgiavasis Date: Sun Sep 25 08:27:49 2022 +0000 Staging changes from template-space derivatives for next release. commit 9c0a89c5b6fa0dbfa363aadd28f6c9bf8efa2cda Author: Jon Clucas Date: Fri Sep 23 12:48:29 2022 -0400 :bug: Create nested working directory before writing PID to disk commit d50f8daa167c4f1834617d6cc70fb436d78f5e5d Author: Jon Clucas Date: Fri Sep 23 12:26:11 2022 -0400 :recycle: Move PID on disk one level deeper in working directory commit 137c482ee9dfd269ec41377f7666512aeab6254c Author: Jon Clucas Date: Fri Sep 23 12:17:07 2022 -0400 :recycle: Modularize checking p_name commit 1899c9e56fa822aaece7cdbd6ef3c42edb313e0c Author: Jon Clucas Date: Fri Sep 23 10:50:41 2022 -0400 :truck: Nest `failedToStart.log` as deeply as possible commit ed554a69e195c4076be21399b9a477b44b16617d Merge: 04903949 a64541e0 Author: Jon Clucas Date: Fri Sep 23 10:33:30 2022 -0400 :twisted_rightwards_arrow: Merge develop into deeper-config-path commit 238f49919b4d55a0b2d4d71072183957275063e0 Merge: 68ac96ab 1e24c690 Author: Jon Clucas Date: Fri Sep 23 10:19:13 2022 -0400 :twisted_rightwards_arrows: Merge develop into differentiate-sametime-data-configs commit a64541e060c58ad94ed3d97c85953e8d05d19deb Merge: e5ec37c2 1e24c690 Author: Jon Clucas Date: Fri Sep 23 10:14:02 2022 -0400 :twisted_rightwards_arrows: Merge develop into deeper-working-path commit 04903949816ccfa74c64e14831f45112f75a5879 Author: Jon Clucas Date: Thu Sep 22 14:38:02 2022 -0400 :pencil2: Set c['subject_id'] commit f3aefca12e4d1c47d9a6251e52d05460a4fc4db9 Author: Jon Clucas Date: Thu Sep 22 12:20:57 2022 -0400 :truck: Move C-PAC-generated data and pipeline configs one level deeper commit 2ed4d6a5d27a4df4a9668a2cd20effdcc0c50cbc Author: Jon Clucas Date: Thu Sep 22 11:21:48 2022 -0400 :truck: Move C-PAC-generated config files into log dir commit 19c3890377d1bf6ed65d371b403dafbfa8b5c5fd Merge: e5ec37c2 68ac96ab Author: Jon Clucas Date: Thu Sep 22 10:59:04 2022 -0400 :twisted_rightwards_arrows: Merge differentiate-sametime-data-configs into deeper-config-path commit 68ac96ab2ccd9e509ad58ccabe5577b569b6b6bc Author: Jon Clucas Date: Wed Sep 21 17:37:02 2022 -0400 :white_check_mark: Add unit test for hash_data_config commit 600319493a9c48e5766743a08dc74f059271f95c Author: Jon Clucas Date: Wed Sep 21 17:07:57 2022 -0400 :loud_sound: Add config hash string update to CHANGELOG commit e5ec37c22e6b09776be134c2caa189877680625e Merge: e09cfa0c 14e1591a Author: Jon Clucas Date: Wed Sep 21 16:36:39 2022 -0400 :twisted_rightwards_arrows: Merge develop into deeper-working-path commit e09cfa0c523ff4fbcbfb8543dd02e8f848a644eb Author: Jon Clucas Date: Wed Sep 21 16:13:35 2022 -0400 
:bug: Change working path update location commit 9586d7146eff224200e18622523a56da6a9826a6 Merge: 12c4f74a 94d80829 Author: Steve Giavasis Date: Mon Sep 19 21:31:01 2022 -0400 Merge pull request #1764 from FCP-INDI/fix/pipeline-config-versions 🔖 Hotfix: Update version to 1.8.4 commit 94d80829eee156ead693da1bbacb1f7e2dca3fd0 Author: sgiavasis Date: Tue Sep 20 01:27:46 2022 +0000 Fix Neurostars link in the README. commit 8804f6180d5d5c6e69f3422691ab6c034348b23e Author: Jon Clucas Date: Fri Sep 16 15:23:00 2022 -0400 :art: Rename `f'cpac_{pipeline_name}'` directories to `f'pipeline_{pipeline_name}'` to match `output`, `log`, and `working` subdirectories Co-authored-by: Steve Giavasis commit 77a1c5c534f375a4b000e6adec569d7f9f8abc62 Author: Jon Clucas Date: Fri Sep 16 15:13:12 2022 -0400 :truck: Add pipeline-config-level directory for participant working directories commit 7718419c86d546923a48a659817200788db1fc88 Author: Jon Clucas Date: Fri Sep 16 14:49:31 2022 -0400 :goal_net: Sanitize `pipeline_name` during validation commit ce6dd09eea9a47b1d8fee0c1083205cd20c30ec4 Author: sgiavasis Date: Fri Sep 9 18:51:27 2022 +0000 Updated changelog. commit 7f42613670e58d2e14a4a1b475d05a34d09232f3 Author: sgiavasis Date: Fri Sep 9 18:45:47 2022 +0000 Improved the error message for missing epi field map meta-data. commit d43aa6c1b78b71bd4142d77891f0f1e9c596af0e Author: sgiavasis Date: Fri Sep 9 04:14:31 2022 +0000 Read in TotalReadoutTime from field map metadata, if it exists. Pass TotalReadoutTime to FSL TOPUP if present. commit 9453398fefcdc83bea2b82727c52b5d140a69989 Author: Theodore (Machine User) Date: Wed Aug 31 17:42:23 2022 +0000 :bookmark: Update version to 1.8.4 (:construction_worker: Differentiate between Git branches and tags for Docker tags) --- CPAC/pipeline/cpac_pipeline.py | 17 + .../nipype_pipeline_engine/__init__.py | 43 +- .../pipeline/nipype_pipeline_engine/engine.py | 399 ++++++++++++++++-- CPAC/pipeline/schema.py | 12 +- .../configs/pipeline_config_default.yml | 13 + dev/docker_data/run.py | 7 +- 6 files changed, 435 insertions(+), 56 deletions(-) diff --git a/CPAC/pipeline/cpac_pipeline.py b/CPAC/pipeline/cpac_pipeline.py index 370b00c418..432e9ab0d4 100644 --- a/CPAC/pipeline/cpac_pipeline.py +++ b/CPAC/pipeline/cpac_pipeline.py @@ -440,6 +440,23 @@ def run_workflow(sub_dict, c, run, pipeline_timing_info=None, p_name=None, logger.exception('Building workflow failed') raise exception + wf_graph = c['pipeline_setup', 'log_directory', 'graphviz', + 'entire_workflow'] + if wf_graph.get('generate'): + for graph2use in wf_graph.get('graph2use'): + dotfilename = os.path.join(log_dir, f'{p_name}_{graph2use}.dot') + for graph_format in wf_graph.get('format'): + try: + workflow.write_graph(dotfilename=dotfilename, + graph2use=graph2use, + format=graph_format, + simple_form=wf_graph.get( + 'simple_form', True)) + except Exception as exception: + raise RuntimeError(f'Failed to visualize {p_name} (' + f'{graph2use}, {graph_format})' + ) from exception + if test_config: logger.info('This has been a test of the pipeline configuration ' 'file, the pipeline was built successfully, but was ' diff --git a/CPAC/pipeline/nipype_pipeline_engine/__init__.py b/CPAC/pipeline/nipype_pipeline_engine/__init__.py index 48b445241b..fc346b5068 100644 --- a/CPAC/pipeline/nipype_pipeline_engine/__init__.py +++ b/CPAC/pipeline/nipype_pipeline_engine/__init__.py @@ -1,35 +1,34 @@ -'''Module to import Nipype Pipeline engine and override some Classes. 
-See https://fcp-indi.github.io/docs/developer/nodes -for C-PAC-specific documentation. -See https://nipype.readthedocs.io/en/latest/api/generated/nipype.pipeline.engine.html -for Nipype's documentation. - -Copyright (C) 2022 C-PAC Developers +# Copyright (C) 2022 C-PAC Developers -This file is part of C-PAC. +# This file is part of C-PAC. -C-PAC is free software: you can redistribute it and/or modify it under -the terms of the GNU Lesser General Public License as published by the -Free Software Foundation, either version 3 of the License, or (at your -option) any later version. +# C-PAC is free software: you can redistribute it and/or modify it under +# the terms of the GNU Lesser General Public License as published by the +# Free Software Foundation, either version 3 of the License, or (at your +# option) any later version. -C-PAC is distributed in the hope that it will be useful, but WITHOUT -ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or -FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public -License for more details. +# C-PAC is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public +# License for more details. -You should have received a copy of the GNU Lesser General Public -License along with C-PAC. If not, see .''' # noqa: E501 +# You should have received a copy of the GNU Lesser General Public +# License along with C-PAC. If not, see . +'''Module to import Nipype Pipeline engine and override some Classes. +See https://fcp-indi.github.io/docs/developer/nodes +for C-PAC-specific documentation. +See https://nipype.readthedocs.io/en/latest/api/generated/nipype.pipeline.engine.html +for Nipype's documentation.''' # noqa: E501 # pylint: disable=line-too-long from nipype.pipeline import engine as pe # import everything in nipype.pipeline.engine.__all__ from nipype.pipeline.engine import * # noqa: F401,F403 # import our DEFAULT_MEM_GB and override Node, MapNode -from .engine import DEFAULT_MEM_GB, get_data_size, Node, MapNode, \ - UNDEFINED_SIZE, Workflow +from .engine import DEFAULT_MEM_GB, export_graph, get_data_size, Node, \ + MapNode, UNDEFINED_SIZE, Workflow __all__ = [ interface for interface in dir(pe) if not interface.startswith('_') -] + ['DEFAULT_MEM_GB', 'get_data_size', 'Node', 'MapNode', 'UNDEFINED_SIZE', - 'Workflow'] +] + ['DEFAULT_MEM_GB', 'export_graph', 'get_data_size', 'Node', 'MapNode', + 'UNDEFINED_SIZE', 'Workflow'] del pe diff --git a/CPAC/pipeline/nipype_pipeline_engine/engine.py b/CPAC/pipeline/nipype_pipeline_engine/engine.py index 8efa4324c2..0629aed193 100644 --- a/CPAC/pipeline/nipype_pipeline_engine/engine.py +++ b/CPAC/pipeline/nipype_pipeline_engine/engine.py @@ -1,48 +1,71 @@ -'''Module to import Nipype Pipeline engine and override some Classes. -See https://fcp-indi.github.io/docs/developer/nodes -for C-PAC-specific documentation. -See https://nipype.readthedocs.io/en/latest/api/generated/nipype.pipeline.engine.html -for Nipype's documentation. +# STATEMENT OF CHANGES: +# This file is derived from sources licensed under the Apache-2.0 terms, +# and this file has been changed. 
+ +# CHANGES: +# * Supports just-in-time dynamic memory allocation +# * Skips doctests that require files that we haven't copied over +# * Applies a random seed +# * Supports overriding memory estimates via a log file and a buffer +# * Adds quotation marks around strings in dotfiles -STATEMENT OF CHANGES: - This file is derived from sources licensed under the Apache-2.0 terms, - and this file has been changed. +# ORIGINAL WORK'S ATTRIBUTION NOTICE: +# Copyright (c) 2009-2016, Nipype developers -CHANGES: - * Supports just-in-time dynamic memory allocation - * Skips doctests that require files that we haven't copied over - * Applies a random seed - * Supports overriding memory estimates via a log file and a buffer +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at -ORIGINAL WORK'S ATTRIBUTION NOTICE: - Copyright (c) 2009-2016, Nipype developers +# http://www.apache.org/licenses/LICENSE-2.0 - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. - http://www.apache.org/licenses/LICENSE-2.0 +# Prior to release 0.12, Nipype was licensed under a BSD license. - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. +# Modifications Copyright (C) 2022 C-PAC Developers - Prior to release 0.12, Nipype was licensed under a BSD license. +# This file is part of C-PAC. -Modifications Copyright (C) 2022 C-PAC Developers +# C-PAC is free software: you can redistribute it and/or modify it under +# the terms of the GNU Lesser General Public License as published by the +# Free Software Foundation, either version 3 of the License, or (at your +# option) any later version. -This file is part of C-PAC.''' # noqa: E501 +# C-PAC is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public +# License for more details. + +# You should have received a copy of the GNU Lesser General Public +# License along with C-PAC. If not, see . +'''Module to import Nipype Pipeline engine and override some Classes. +See https://fcp-indi.github.io/docs/developer/nodes +for C-PAC-specific documentation. 
+See https://nipype.readthedocs.io/en/latest/api/generated/nipype.pipeline.engine.html +for Nipype's documentation.''' # noqa: E501 # pylint: disable=line-too-long import os import re +from copy import deepcopy from logging import getLogger from inspect import Parameter, Signature, signature from nibabel import load from nipype import logging from nipype.interfaces.utility import Function from nipype.pipeline import engine as pe -from nipype.pipeline.engine.utils import load_resultfile as _load_resultfile +from nipype.pipeline.engine.utils import ( + _create_dot_graph, + format_dot, + generate_expanded_graph, + get_print_name, + load_resultfile as _load_resultfile, + _replacefunk, + _run_dot +) +from nipype.utils.filemanip import fname_presuffix from nipype.utils.functions import getsource from numpy import prod from traits.trait_base import Undefined @@ -53,6 +76,7 @@ UNDEFINED_SIZE = (42, 42, 42, 1200) random_state_logger = getLogger('random') +logger = getLogger("nipype.workflow") def _check_mem_x_path(mem_x_path): @@ -482,6 +506,119 @@ def _configure_exec_nodes(self, graph): TypeError): self._handle_just_in_time_exception(node) + def _get_dot( + self, prefix=None, hierarchy=None, colored=False, simple_form=True, + level=0 + ): + """Create a dot file with connection info""" + # pylint: disable=invalid-name,protected-access + import networkx as nx + + if prefix is None: + prefix = " " + if hierarchy is None: + hierarchy = [] + colorset = [ + "#FFFFC8", # Y + "#0000FF", + "#B4B4FF", + "#E6E6FF", # B + "#FF0000", + "#FFB4B4", + "#FFE6E6", # R + "#00A300", + "#B4FFB4", + "#E6FFE6", # G + "#0000FF", + "#B4B4FF", + ] # loop B + if level > len(colorset) - 2: + level = 3 # Loop back to blue + quoted_prefix = f'"{prefix}"' if len(prefix.strip()) else prefix + dotlist = [f'{quoted_prefix}label="{self.name}";'] + for node in nx.topological_sort(self._graph): + fullname = ".".join(hierarchy + [node.fullname]) + nodename = fullname.replace(".", "_") + if not isinstance(node, Workflow): + node_class_name = get_print_name(node, simple_form=simple_form) + if not simple_form: + node_class_name = ".".join(node_class_name.split(".")[1:]) + if hasattr(node, "iterables") and node.iterables: + dotlist.append(f'"{nodename}"[label="{node_class_name}", ' + "shape=box3d, style=filled, color=black, " + "colorscheme=greys7 fillcolor=2];") + else: + if colored: + dotlist.append(f'"{nodename}"[label="' + f'{node_class_name}", style=filled,' + f' fillcolor="{colorset[level]}"];') + else: + dotlist.append(f'"{nodename}"[label="' + f'{node_class_name}"];') + + for node in nx.topological_sort(self._graph): + if isinstance(node, Workflow): + fullname = ".".join(hierarchy + [node.fullname]) + nodename = fullname.replace(".", "_") + dotlist.append(f"subgraph \"cluster_{nodename}\" {{") + if colored: + dotlist.append(f'{prefix}{prefix}edge [color="' + f'{colorset[level + 1]}"];') + dotlist.append(f"{prefix}{prefix}style=filled;") + dotlist.append(f'{prefix}{prefix}fillcolor=' + f'"{colorset[level + 2]}";') + dotlist.append( + node._get_dot( + prefix=prefix + prefix, + hierarchy=hierarchy + [self.name], + colored=colored, + simple_form=simple_form, + level=level + 3, + ) + ) + dotlist.append("}") + else: + for subnode in self._graph.successors(node): + if node._hierarchy != subnode._hierarchy: + continue + if not isinstance(subnode, Workflow): + nodefullname = ".".join(hierarchy + [node.fullname]) + subnodefullname = ".".join( + hierarchy + [subnode.fullname]) + nodename = nodefullname.replace(".", "_") + subnodename = 
subnodefullname.replace(".", "_") + for _ in self._graph.get_edge_data( + node, subnode + )["connect"]: + dotlist.append(f'"{nodename}" -> "{subnodename}";') + logger.debug("connection: %s", dotlist[-1]) + # add between workflow connections + for u, v, d in self._graph.edges(data=True): + uname = ".".join(hierarchy + [u.fullname]) + vname = ".".join(hierarchy + [v.fullname]) + for src, dest in d["connect"]: + uname1 = uname + vname1 = vname + if isinstance(src, tuple): + srcname = src[0] + else: + srcname = src + if "." in srcname: + uname1 += "." + ".".join(srcname.split(".")[:-1]) + if "." in dest and "@" not in dest: + if not isinstance(v, Workflow): + if "datasink" not in str( + v._interface.__class__ + ).lower(): + vname1 += "." + ".".join(dest.split(".")[:-1]) + else: + vname1 += "." + ".".join(dest.split(".")[:-1]) + if uname1.split(".")[:-1] != vname1.split(".")[:-1]: + dotlist.append(f'"{uname1.replace(".", "_")}" -> ' + f'"{vname1.replace(".", "_")}";') + logger.debug("cross connection: %s", dotlist[-1]) + return ("\n" + prefix).join(dotlist) + def _handle_just_in_time_exception(self, node): # pylint: disable=protected-access if hasattr(self, '_local_func_scans'): @@ -491,6 +628,75 @@ def _handle_just_in_time_exception(self, node): # TODO: handle S3 files node._apply_mem_x(UNDEFINED_SIZE) # noqa: W0212 + def write_graph( + self, + dotfilename="graph.dot", + graph2use="hierarchical", + format="png", + simple_form=True, + ): + graphtypes = ["orig", "flat", "hierarchical", "exec", "colored"] + if graph2use not in graphtypes: + raise ValueError( + "Unknown graph2use keyword. Must be one of: " + str(graphtypes) + ) + base_dir, dotfilename = os.path.split(dotfilename) + if base_dir == "": + if self.base_dir: + base_dir = self.base_dir + if self.name: + base_dir = os.path.join(base_dir, self.name) + else: + base_dir = os.getcwd() + os.makedirs(base_dir, exist_ok=True) + if graph2use in ["hierarchical", "colored"]: + if self.name[:1].isdigit(): # these graphs break if int + raise ValueError(f"{graph2use} graph failed, workflow name " + "cannot begin with a number") + dotfilename = os.path.join(base_dir, dotfilename) + self.write_hierarchical_dotfile( + dotfilename=dotfilename, + colored=graph2use == "colored", + simple_form=simple_form, + ) + outfname = format_dot(dotfilename, format=format) + else: + graph = self._graph + if graph2use in ["flat", "exec"]: + graph = self._create_flat_graph() + if graph2use == "exec": + graph = generate_expanded_graph(deepcopy(graph)) + outfname = export_graph( + graph, + base_dir, + dotfilename=dotfilename, + format=format, + simple_form=simple_form, + ) + + logger.info("Generated workflow graph: %s " + "(graph2use=%s, simple_form=%s).", + outfname, graph2use, simple_form) + return outfname + + write_graph.__doc__ = pe.Workflow.write_graph.__doc__ + + def write_hierarchical_dotfile( + self, dotfilename=None, colored=False, simple_form=True + ): + # pylint: disable=invalid-name + dotlist = [f"digraph \"{self.name}\"{{"] + dotlist.append(self._get_dot(prefix=" ", colored=colored, + simple_form=simple_form)) + dotlist.append("}") + dotstr = "\n".join(dotlist) + if dotfilename: + with open(dotfilename, "wt", encoding="utf-8") as fp: + fp.writelines(dotstr) + fp.close() + else: + logger.info(dotstr) + def get_data_size(filepath, mode='xyzt'): """Function to return the size of a functional image (x * y * z * t) @@ -526,3 +732,140 @@ def get_data_size(filepath, mode='xyzt'): if mode == 'xyz': return prod(data_shape[0:3]).item() return prod(data_shape).item() + 
+ +def export_graph( + graph_in, + base_dir=None, + show=False, + use_execgraph=False, + show_connectinfo=False, + dotfilename="graph.dot", + format="png", + simple_form=True, +): + """Displays the graph layout of the pipeline + This function requires that pygraphviz and matplotlib are available on + the system. + Parameters + ---------- + show : boolean + Indicate whether to generate pygraphviz output fromn + networkx. default [False] + use_execgraph : boolean + Indicates whether to use the specification graph or the + execution graph. default [False] + show_connectioninfo : boolean + Indicates whether to show the edge data on the graph. This + makes the graph rather cluttered. default [False] + """ + import networkx as nx + + graph = deepcopy(graph_in) + if use_execgraph: + graph = generate_expanded_graph(graph) + logger.debug("using execgraph") + else: + logger.debug("using input graph") + if base_dir is None: + base_dir = os.getcwd() + + os.makedirs(base_dir, exist_ok=True) + out_dot = fname_presuffix(dotfilename, suffix="_detailed.dot", + use_ext=False, newpath=base_dir) + _write_detailed_dot(graph, out_dot) + + # Convert .dot if format != 'dot' + outfname, res = _run_dot(out_dot, format_ext=format) + if res is not None and res.runtime.returncode: + logger.warning("dot2png: %s", res.runtime.stderr) + + pklgraph = _create_dot_graph(graph, show_connectinfo, simple_form) + simple_dot = fname_presuffix(dotfilename, suffix=".dot", use_ext=False, + newpath=base_dir) + nx.drawing.nx_pydot.write_dot(pklgraph, simple_dot) + + # Convert .dot if format != 'dot' + simplefname, res = _run_dot(simple_dot, format_ext=format) + if res is not None and res.runtime.returncode: + logger.warning("dot2png: %s", res.runtime.stderr) + + if show: + pos = nx.graphviz_layout(pklgraph, prog="dot") + nx.draw(pklgraph, pos) + if show_connectinfo: + nx.draw_networkx_edge_labels(pklgraph, pos) + + return simplefname if simple_form else outfname + + +def _write_detailed_dot(graph, dotfilename): + r""" + Create a dot file with connection info :: + digraph structs { + node [shape=record]; + struct1 [label=" left| middle| right"]; + struct2 [label=" one| two"]; + struct3 [label="hello\nworld |{ b |{c| d|e}| f}| g | h"]; + struct1:f1 -> struct2:f0; + struct1:f0 -> struct2:f1; + struct1:f2 -> struct3:here; + } + """ + # pylint: disable=invalid-name + import networkx as nx + + text = ["digraph structs {", "node [shape=record];"] + # write nodes + edges = [] + for n in nx.topological_sort(graph): + nodename = n.itername + inports = [] + for u, v, d in graph.in_edges(nbunch=n, data=True): + for cd in d["connect"]: + if isinstance(cd[0], (str, bytes)): + outport = cd[0] + else: + outport = cd[0][0] + inport = cd[1] + ipstrip = f"in{_replacefunk(inport)}" + opstrip = f"out{_replacefunk(outport)}" + edges.append(f'"{u.itername.replace(".", "")}":' + f'"{opstrip}":e -> ' + f'"{v.itername.replace(".", "")}":' + f'"{ipstrip}":w;') + if inport not in inports: + inports.append(inport) + inputstr = (["{IN"] + + [f"| {ip}" for + ip in sorted(inports)] + ["}"]) + outports = [] + for u, v, d in graph.out_edges(nbunch=n, data=True): + for cd in d["connect"]: + if isinstance(cd[0], (str, bytes)): + outport = cd[0] + else: + outport = cd[0][0] + if outport not in outports: + outports.append(outport) + outputstr = ( + ["{OUT"] + + [f"| {oport}" for + oport in sorted(outports)] + ["}"]) + srcpackage = "" + if hasattr(n, "_interface"): + pkglist = n.interface.__class__.__module__.split(".") + if len(pkglist) > 2: + srcpackage = pkglist[2] + 
srchierarchy = ".".join(nodename.split(".")[1:-1]) + nodenamestr = (f"{{ {nodename.split('.')[-1]} | {srcpackage} | " + f"{srchierarchy} }}") + text += [f'"{nodename.replace(".", "")}" [label=' + f'"{"".join(inputstr)}|{nodenamestr}|{"".join(outputstr)}"];'] + # write edges + for edge in sorted(edges): + text.append(edge) + text.append("}") + with open(dotfilename, "wt", encoding="utf-8") as filep: + filep.write("\n".join(text)) + return text diff --git a/CPAC/pipeline/schema.py b/CPAC/pipeline/schema.py index e68a4f07ea..969c0b02f2 100644 --- a/CPAC/pipeline/schema.py +++ b/CPAC/pipeline/schema.py @@ -25,8 +25,6 @@ from CPAC import docs_prefix from CPAC.pipeline.random_state.seed import MAX_SEED from CPAC.utils.datatypes import ListFromItem -from CPAC.utils.utils import delete_nested_value, lookup_nested_value, \ - set_nested_value # 1 or more digits, optional decimal, 'e', optional '-', 1 or more digits scientific_notation_str_regex = r'^([0-9]+(\.[0-9]*)*(e)-{0,1}[0-9]+)*$' @@ -231,6 +229,16 @@ def sanitize(filename): 'log_directory': { 'run_logging': bool, 'path': str, + 'graphviz': { + 'entire_workflow': { + 'generate': bool, + 'graph2use': Maybe(All(Coerce(ListFromItem), + [All(Lower, + In(('orig', 'hierarchical', 'flat', + 'exec', 'colored')))])), + 'format': Maybe(All(Coerce(ListFromItem), + [All(Lower, In(('png', 'svg')))])), + 'simple_form': Maybe(bool)}} }, 'crash_log_directory': { 'path': Maybe(str), diff --git a/CPAC/resources/configs/pipeline_config_default.yml b/CPAC/resources/configs/pipeline_config_default.yml index fbb1d3d00b..3a4be49e18 100644 --- a/CPAC/resources/configs/pipeline_config_default.yml +++ b/CPAC/resources/configs/pipeline_config_default.yml @@ -79,6 +79,19 @@ pipeline_setup: path: /outputs/logs + # Configuration options for logging visualizations of the workflow graph + graphviz: + # Configuration for a graphviz visualization of the entire workflow. See https://fcp-indi.github.io/docs/developer/nodes#CPAC.pipeline.nipype_pipeline_engine.Workflow.write_graph for details about the various options + entire_workflow: + # Whether to generate the graph visualization + generate: Off + # Options: [orig, hierarchical, flat, exec, colored] + graph2use: [] + # Options: [svg, png] + format: [] + # The node name will be displayed in the form `nodename (package)` when On or `nodename.Class.package` when Off + simple_form: On + crash_log_directory: # Directory where CPAC should write crash logs. 
diff --git a/dev/docker_data/run.py b/dev/docker_data/run.py index db3ec4aaf5..0d0d549516 100755 --- a/dev/docker_data/run.py +++ b/dev/docker_data/run.py @@ -467,9 +467,9 @@ def run_main(): _url = (f'{DOCS_URL_PREFIX}/user/pipelines/' '1.7-1.8-nesting-mappings') - warn('\nC-PAC changed its pipeline configuration format in ' - f'v1.8.0.\nSee {_url} for details.\n', - category=DeprecationWarning) + logger.warning('\nC-PAC changed its pipeline configuration ' + 'format in v1.8.0.\nSee %s for details.\n', _url, + category=DeprecationWarning) updated_config = os.path.join( output_dir, @@ -516,7 +516,6 @@ def run_main(): else: c['pipeline_setup']['log_directory']['path'] = os.path.join( DEFAULT_TMP_DIR, "log") - log_dir = c['pipeline_setup']['log_directory']['path'] if args.mem_gb: c['pipeline_setup']['system_config'][ From 837533e1d997fd70469523edece21807887c2259 Mon Sep 17 00:00:00 2001 From: Jon Clucas Date: Fri, 21 Oct 2022 13:48:15 +0000 Subject: [PATCH 30/72] :recycle: Better handling of global thresholds --- CPAC/qc/__init__.py | 6 ++++-- CPAC/qc/globals.py | 26 ++++++++++++++++++++++- CPAC/registration/guardrails.py | 11 +++++----- CPAC/utils/configuration/configuration.py | 5 ++--- 4 files changed, 37 insertions(+), 11 deletions(-) diff --git a/CPAC/qc/__init__.py b/CPAC/qc/__init__.py index 1602803627..810d06aedc 100644 --- a/CPAC/qc/__init__.py +++ b/CPAC/qc/__init__.py @@ -15,6 +15,8 @@ # You should have received a copy of the GNU Lesser General Public # License along with C-PAC. If not, see . """Quality control utilities for C-PAC""" -from CPAC.qc.globals import REGISTRATION_GUARDRAIL_THRESHOLDS +from CPAC.qc.globals import registration_guardrail_thresholds, \ + update_thresholds from CPAC.qc.qcmetrics import qc_masks -__all__ = ['qc_masks', 'REGISTRATION_GUARDRAIL_THRESHOLDS'] +__all__ = ['qc_masks', 'registration_guardrail_thresholds', + 'update_thresholds'] diff --git a/CPAC/qc/globals.py b/CPAC/qc/globals.py index 5fa3606a85..e4a05d8d9d 100644 --- a/CPAC/qc/globals.py +++ b/CPAC/qc/globals.py @@ -15,4 +15,28 @@ # You should have received a copy of the GNU Lesser General Public # License along with C-PAC. If not, see . """Global QC values""" -REGISTRATION_GUARDRAIL_THRESHOLDS = {} +_REGISTRATION_GUARDRAIL_THRESHOLDS = {'thresholds': {}} + + +def registration_guardrail_thresholds() -> dict: + """Get registration guardrail thresholds + + Returns + ------- + dict + """ + return _REGISTRATION_GUARDRAIL_THRESHOLDS['thresholds'] + + +def update_thresholds(thresholds) -> None: + """Set a registration guardrail threshold + + Parameters + ---------- + thresholds : dict of {str: float or int} + + Returns + ------- + None + """ + _REGISTRATION_GUARDRAIL_THRESHOLDS['thresholds'].update(thresholds) diff --git a/CPAC/registration/guardrails.py b/CPAC/registration/guardrails.py index 0ae4c4d2fc..1a93aa3610 100644 --- a/CPAC/registration/guardrails.py +++ b/CPAC/registration/guardrails.py @@ -15,13 +15,14 @@ # You should have received a copy of the GNU Lesser General Public # License along with C-PAC. If not, see . 
"""Guardrails to protect against bad registrations""" +import logging from copy import deepcopy from nipype.interfaces.ants import Registration from nipype.interfaces.fsl import FLIRT from nipype.interfaces.utility import Function from CPAC.pipeline.nipype_pipeline_engine import Node, Workflow from CPAC.pipeline.nipype_pipeline_engine.utils import connect_from_spec -from CPAC.qc import qc_masks, REGISTRATION_GUARDRAIL_THRESHOLDS +from CPAC.qc import qc_masks, registration_guardrail_thresholds _SPEC_KEYS = { FLIRT: {'reference': 'reference', 'registered': 'out_file'}, @@ -82,11 +83,10 @@ def registration_guardrail(registered: str, reference: str, retry: bool = False metrics met specified thresholds?, used as index for selecting outputs """ - import logging logger = logging.getLogger('nipype.workflow') qc_metrics = qc_masks(registered, reference) failed_qc = 0 - for metric, threshold in REGISTRATION_GUARDRAIL_THRESHOLDS.items(): + for metric, threshold in registration_guardrail_thresholds().items(): if threshold is not None: value = qc_metrics.get(metric) if isinstance(value, list): @@ -121,8 +121,9 @@ def registration_guardrail_node(name=None): 'reference'], output_names=['registered', 'failed_qc'], - imports=['from CPAC.qc import qc_masks, ' - 'REGISTRATION_GUARDRAIL_THRESHOLDS', + imports=['import logging', + 'from CPAC.qc import qc_masks, ' + 'registration_guardrail_thresholds', 'from CPAC.registration.guardrails ' 'import BadRegistrationError'], function=registration_guardrail), name=name) diff --git a/CPAC/utils/configuration/configuration.py b/CPAC/utils/configuration/configuration.py index 2df09feb93..c0312d4a45 100644 --- a/CPAC/utils/configuration/configuration.py +++ b/CPAC/utils/configuration/configuration.py @@ -23,7 +23,7 @@ from typing import Optional, Tuple from warnings import warn import yaml -from CPAC.qc import REGISTRATION_GUARDRAIL_THRESHOLDS +from CPAC.qc import update_thresholds from CPAC.utils.utils import load_preconfig from .diff import dct_diff @@ -152,8 +152,7 @@ def __init__(self, config_map=None): setattr(self, key, set_from_ENV(config_map[key])) # set global QC thresholds - REGISTRATION_GUARDRAIL_THRESHOLDS.update(self[ - 'registration_workflows', 'quality_thresholds']) + update_thresholds(self['registration_workflows', 'quality_thresholds']) self.__update_attr() From e32644b399b6e11cfc0e7583df6e796866521c89 Mon Sep 17 00:00:00 2001 From: Jon Clucas Date: Fri, 21 Oct 2022 14:07:02 +0000 Subject: [PATCH 31/72] :goal_net: Guardrail: Log error on first try, raise on second --- CPAC/registration/guardrails.py | 19 ++++++++++++++----- 1 file changed, 14 insertions(+), 5 deletions(-) diff --git a/CPAC/registration/guardrails.py b/CPAC/registration/guardrails.py index 1a93aa3610..e371009dd2 100644 --- a/CPAC/registration/guardrails.py +++ b/CPAC/registration/guardrails.py @@ -16,6 +16,7 @@ # License along with C-PAC. If not, see . """Guardrails to protect against bad registrations""" import logging +from typing import Tuple from copy import deepcopy from nipype.interfaces.ants import Registration from nipype.interfaces.fsl import FLIRT @@ -53,8 +54,9 @@ def __init__(self, *args, metric=None, value=None, threshold=None, super().__init__(msg, *args, **kwargs) -def registration_guardrail(registered: str, reference: str, retry: bool = False - ): +def registration_guardrail(registered: str, reference: str, + retry: bool = False, retry_num: int = 0 + ) -> Tuple[str, int]: """Check QC metrics post-registration and throw an exception if metrics are below given thresholds. 
@@ -71,9 +73,12 @@ def registration_guardrail(registered: str, reference: str, retry: bool = False registered, reference : str path to mask - retry : bool + retry : bool, optional can retry? + retry_num : int, optional + how many previous tries? + Returns ------- registered_mask : str @@ -99,8 +104,11 @@ def registration_guardrail(registered: str, reference: str, retry: bool = False if retry: registered = f'{registered}-failed' else: - logger.error(str(BadRegistrationError( - metric=metric, value=value, threshold=threshold))) + bad_registration = BadRegistrationError( + metric=metric, value=value, threshold=threshold) + logger.error(str(bad_registration)) + if retry_num: # if we've already retried, raise the error + raise bad_registration return registered, failed_qc @@ -122,6 +130,7 @@ def registration_guardrail_node(name=None): output_names=['registered', 'failed_qc'], imports=['import logging', + 'from typing import Tuple', 'from CPAC.qc import qc_masks, ' 'registration_guardrail_thresholds', 'from CPAC.registration.guardrails ' From 88b558952c2fcce3f8405be15a0c35f9594f7c8a Mon Sep 17 00:00:00 2001 From: Jon Clucas Date: Fri, 21 Oct 2022 20:20:34 +0000 Subject: [PATCH 32/72] :technologist: Add decorator for retry __doc__s --- CPAC/registration/guardrails.py | 37 +++++++++++++++++++++++++-------- CPAC/utils/__init__.py | 2 -- CPAC/utils/docs.py | 37 +++++++++++++++++++++++++++++++++ 3 files changed, 65 insertions(+), 11 deletions(-) diff --git a/CPAC/registration/guardrails.py b/CPAC/registration/guardrails.py index e371009dd2..cbcd46ed8c 100644 --- a/CPAC/registration/guardrails.py +++ b/CPAC/registration/guardrails.py @@ -22,8 +22,10 @@ from nipype.interfaces.fsl import FLIRT from nipype.interfaces.utility import Function from CPAC.pipeline.nipype_pipeline_engine import Node, Workflow -from CPAC.pipeline.nipype_pipeline_engine.utils import connect_from_spec +# from CPAC.pipeline.nipype_pipeline_engine.utils import connect_from_spec from CPAC.qc import qc_masks, registration_guardrail_thresholds +from CPAC.registration.utils import hardcoded_reg +from CPAC.utils.docs import retry_docstring _SPEC_KEYS = { FLIRT: {'reference': 'reference', 'registered': 'out_file'}, @@ -112,23 +114,24 @@ def registration_guardrail(registered: str, reference: str, return registered, failed_qc -def registration_guardrail_node(name=None): +def registration_guardrail_node(name=None, retry_num=0): """Convenience method to get a new registration_guardrail Node Parameters ---------- name : str, optional + retry_num : int, optional + how many previous tries? 
+ Returns ------- Node """ if name is None: name = 'registration_guardrail' - return Node(Function(input_names=['registered', - 'reference'], - output_names=['registered', - 'failed_qc'], + node = Node(Function(input_names=['registered', 'reference', 'retry_num'], + output_names=['registered', 'failed_qc'], imports=['import logging', 'from typing import Tuple', 'from CPAC.qc import qc_masks, ' @@ -136,6 +139,9 @@ def registration_guardrail_node(name=None): 'from CPAC.registration.guardrails ' 'import BadRegistrationError'], function=registration_guardrail), name=name) + if retry_num: + node.inputs.retry_num = retry_num + return node def registration_guardrail_workflow(registration_node, retry=True): @@ -167,7 +173,7 @@ def registration_guardrail_workflow(registration_node, retry=True): guardrail.outputs.registered)[0] else: wf.connect(guardrail, 'registered', outputspec, outkey) - connect_from_spec(outputspec, registration_node, outkey) + # connect_from_spec(outputspec, registration_node, outkey) return wf @@ -197,16 +203,29 @@ def retry_registration(wf, registration_node, registered): outputspec = registration_node.outputs outkey = spec_key(registration_node, 'registered') guardrail = registration_guardrail_node(f'{name}_guardrail') - connect_from_spec(inputspec, retry_node) + # connect_from_spec(inputspec, retry_node) wf.connect([ (inputspec, guardrail, [ (spec_key(retry_node, 'reference'), 'reference')]), (retry_node, guardrail, [(outkey, 'registered')]), (guardrail, outputspec, [('registered', outkey)])]) - connect_from_spec(retry_node, outputspec, registered) + # connect_from_spec(retry_node, outputspec, registered) return wf, retry_node +@retry_docstring(hardcoded_reg) +def retry_hardcoded_reg(moving_brain, reference_brain, moving_skull, + reference_skull, ants_para, moving_mask=None, + reference_mask=None, fixed_image_mask=None, + interp=None, reg_with_skull=0, previous_failure=False): + if previous_failure: + return [], None + return hardcoded_reg(moving_brain, reference_brain, moving_skull, + reference_skull, ants_para, moving_mask, + reference_mask, fixed_image_mask, interp, + reg_with_skull) + + def retry_registration_node(registered, registration_node): """Retry registration if previous attempt failed diff --git a/CPAC/utils/__init__.py b/CPAC/utils/__init__.py index c5c791ec03..b13af927fe 100644 --- a/CPAC/utils/__init__.py +++ b/CPAC/utils/__init__.py @@ -6,8 +6,6 @@ from .extract_data import run from .datatypes import ListFromItem from .configuration import check_pname, Configuration, set_subject -from .strategy import Strategy -from .outputs import Outputs from .utils import ( get_zscore, diff --git a/CPAC/utils/docs.py b/CPAC/utils/docs.py index b1ee23df0b..181df9aa98 100644 --- a/CPAC/utils/docs.py +++ b/CPAC/utils/docs.py @@ -71,4 +71,41 @@ def grab_docstring_dct(fn): return dct +def retry_docstring(orig): + """Decorator to autodocument retries. + + Examples + -------- + >>> @retry_docstring(grab_docstring_dct) + ... def do_nothing(): + ... '''Does this do anything?''' + ... pass + >>> print(do_nothing.__doc__) + Does this do anything? + Retries the following after a failed QC check: + Function to grab a NodeBlock dictionary from a docstring. + + Parameters + ---------- + fn : function + The NodeBlock function with the docstring to be parsed. + + Returns + ------- + dct : dict + A NodeBlock configuration dictionary. 
+ + """ + def retry(obj): + if obj.__doc__ is None: + obj.__doc__ = '' + origdoc = (f'{orig.__module__}.{orig.__name__}' if + orig.__doc__ is None else orig.__doc__) + obj.__doc__ = '\n'.join([ + obj.__doc__, 'Retries the following after a failed QC check:', + origdoc]) + return obj + return retry + + DOCS_URL_PREFIX = _docs_url_prefix() From c83ed622af8177e06e6386a880a2a6c86d500b8f Mon Sep 17 00:00:00 2001 From: Jon Clucas Date: Sat, 22 Oct 2022 03:05:13 +0000 Subject: [PATCH 33/72] :recycle: Rewire guardrail for `anat_mni_ants_register` --- CPAC/registration/guardrails.py | 32 ++++++++- CPAC/registration/registration.py | 115 +++++++++++++++--------------- 2 files changed, 90 insertions(+), 57 deletions(-) diff --git a/CPAC/registration/guardrails.py b/CPAC/registration/guardrails.py index cbcd46ed8c..5276151f98 100644 --- a/CPAC/registration/guardrails.py +++ b/CPAC/registration/guardrails.py @@ -20,7 +20,7 @@ from copy import deepcopy from nipype.interfaces.ants import Registration from nipype.interfaces.fsl import FLIRT -from nipype.interfaces.utility import Function +from nipype.interfaces.utility import Function, Merge, Select from CPAC.pipeline.nipype_pipeline_engine import Node, Workflow # from CPAC.pipeline.nipype_pipeline_engine.utils import connect_from_spec from CPAC.qc import qc_masks, registration_guardrail_thresholds @@ -56,6 +56,36 @@ def __init__(self, *args, metric=None, value=None, threshold=None, super().__init__(msg, *args, **kwargs) +def guardrail_selection(wf: 'Workflow', node1: 'Node', node2: 'Node', + ) -> Node: + """Generate requisite Nodes for choosing a path through the graph + with retries + + Parameters + ---------- + wf : Workflow + + node1, node2 : Node + try guardrail, retry guardrail + + Returns + ------- + select : Node + """ + # pylint: disable=redefined-outer-name,reimported,unused-import + from CPAC.pipeline.nipype_pipeline_engine import Node, Workflow + name = node1.name + choices = Node(Merge(2), run_without_submitting=True, + name=f'{name}_choices') + select = Node(Select(), run_without_submitting=True, + name=f'choose_{name}') + wf.connect([(node1, choices, [('registered', 'in1')]), + (node2, choices, [('registered', 'in2')]), + (choices, select, [('out', 'inlist')]), + (node1, select, [('failed_qc', 'index')])]) + return select + + def registration_guardrail(registered: str, reference: str, retry: bool = False, retry_num: int = 0 ) -> Tuple[str, int]: diff --git a/CPAC/registration/registration.py b/CPAC/registration/registration.py index 993a27c4aa..4c525b9507 100644 --- a/CPAC/registration/registration.py +++ b/CPAC/registration/registration.py @@ -21,10 +21,10 @@ from CPAC.pipeline import nipype_pipeline_engine as pe from nipype.interfaces import afni, ants, c3, fsl, utility as util from nipype.interfaces.afni import utils as afni_utils -from nipype.interfaces.utility import Merge, Select from CPAC.anat_preproc.lesion_preproc import create_lesion_preproc from CPAC.func_preproc.utils import chunk_ts, split_ts_chunks -from CPAC.registration.guardrails import registration_guardrail_node +from CPAC.registration.guardrails import guardrail_selection, \ + registration_guardrail_node from CPAC.registration.utils import seperate_warps_list, \ check_transforms, \ generate_inverse_transform_flags, \ @@ -1174,20 +1174,14 @@ def create_wf_calculate_ants_warp(name='create_wf_calculate_ants_warp', .. 
image::
        :width: 500
     '''
-
+    from CPAC.registration.guardrails import retry_hardcoded_reg
     calc_ants_warp_wf = pe.Workflow(name=name)
-    inputspec = pe.Node(util.IdentityInterface(
-        fields=['moving_brain',
-                'reference_brain',
-                'moving_skull',
-                'reference_skull',
-                'reference_mask',
-                'moving_mask',
-                'fixed_image_mask',
-                'ants_para',
-                'interp']),
-        name='inputspec')
+    warp_inputs = ['moving_brain', 'reference_brain', 'moving_skull',
+                   'reference_skull', 'ants_para', 'moving_mask',
+                   'reference_mask', 'fixed_image_mask', 'interp']
+    inputspec = pe.Node(util.IdentityInterface(fields=warp_inputs),
+                        name='inputspec')
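
Collapsing the duplicated field list into warp_inputs keeps the workflow's inputspec and the Function interface's input_names from drifting apart. A runnable miniature of the idiom using stock nipype (the no-op function below is a stand-in for hardcoded_reg, not CPAC code):

    from nipype.interfaces import utility as util
    from nipype.pipeline import engine as pe

    warp_inputs = ['moving_brain', 'reference_brain', 'moving_skull',
                   'reference_skull', 'ants_para', 'moving_mask',
                   'reference_mask', 'fixed_image_mask', 'interp']


    def _noop_reg(**inputs):
        """Stand-in registration: accept the inputs, emit empty outputs."""
        return [], None


    inputspec = pe.Node(util.IdentityInterface(fields=warp_inputs),
                        name='inputspec')
    calc = pe.Node(util.Function(input_names=[*warp_inputs, 'reg_with_skull'],
                                 output_names=['warp_list', 'warped_image'],
                                 function=_noop_reg),
                   name='calc_ants_warp')
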
calc_ants_warp_wf.connect(inputspec, 'reference_brain',
+                                      guardrail, 'reference')
:recycle: Use guardrails to inform warp selection --- .../longitudinal_workflow.py | 14 ++--- CPAC/pipeline/engine.py | 2 +- CPAC/registration/__init__.py | 15 +---- CPAC/registration/guardrails.py | 1 + CPAC/registration/registration.py | 55 ++++++++++++++++--- 5 files changed, 53 insertions(+), 34 deletions(-) diff --git a/CPAC/longitudinal_pipeline/longitudinal_workflow.py b/CPAC/longitudinal_pipeline/longitudinal_workflow.py index a7ee45c210..6cab874aa3 100644 --- a/CPAC/longitudinal_pipeline/longitudinal_workflow.py +++ b/CPAC/longitudinal_pipeline/longitudinal_workflow.py @@ -42,19 +42,13 @@ build_segmentation_stack from CPAC.pipeline.engine import initiate_rpool, ingress_output_dir -from CPAC.registration import ( +from CPAC.registration.registration import ( + apply_transform, create_fsl_flirt_linear_reg, create_fsl_fnirt_nonlinear_reg, - create_wf_calculate_ants_warp -) - -from CPAC.registration.registration import apply_transform + create_wf_calculate_ants_warp) -from CPAC.utils.datasource import ( - resolve_resolution, - create_anat_datasource, - create_check_for_s3_node -) +from CPAC.utils.datasource import resolve_resolution from CPAC.longitudinal_pipeline.longitudinal_preproc import ( subject_specific_template diff --git a/CPAC/pipeline/engine.py b/CPAC/pipeline/engine.py index f58a4da299..a4a1a19f4f 100644 --- a/CPAC/pipeline/engine.py +++ b/CPAC/pipeline/engine.py @@ -30,7 +30,6 @@ from CPAC.image_utils.statistical_transforms import z_score_standardize, \ fisher_z_score_standardize from CPAC.pipeline.check_outputs import ExpectedOutputs -from CPAC.registration.registration import transform_derivative from CPAC.utils.outputs import Outputs from CPAC.utils.datasource import ( create_anat_datasource, @@ -617,6 +616,7 @@ def get_strats(self, resources, debug=False): def derivative_xfm(self, wf, label, connection, json_info, pipe_idx, pipe_x): + from CPAC.registration.registration import transform_derivative if label in self.xfm: diff --git a/CPAC/registration/__init__.py b/CPAC/registration/__init__.py index 2faacbda44..38bcac9bcf 100644 --- a/CPAC/registration/__init__.py +++ b/CPAC/registration/__init__.py @@ -1,16 +1,3 @@ -from .registration import create_fsl_flirt_linear_reg, \ - create_fsl_fnirt_nonlinear_reg, \ - create_fsl_fnirt_nonlinear_reg_nhp, \ - create_register_func_to_anat, \ - create_register_func_to_anat_use_T2, \ - create_wf_calculate_ants_warp - from .output_func_to_standard import output_func_to_standard -__all__ = ['create_fsl_flirt_linear_reg', - 'create_fsl_fnirt_nonlinear_reg', - 'create_fsl_fnirt_nonlinear_reg_nhp', - 'create_register_func_to_anat', - 'create_register_func_to_anat_use_T2', - 'create_wf_calculate_ants_warp', - 'output_func_to_standard'] +__all__ = ['output_func_to_standard'] diff --git a/CPAC/registration/guardrails.py b/CPAC/registration/guardrails.py index 5276151f98..6549105cf9 100644 --- a/CPAC/registration/guardrails.py +++ b/CPAC/registration/guardrails.py @@ -73,6 +73,7 @@ def guardrail_selection(wf: 'Workflow', node1: 'Node', node2: 'Node', select : Node """ # pylint: disable=redefined-outer-name,reimported,unused-import + # noqa: F401 from CPAC.pipeline.nipype_pipeline_engine import Node, Workflow name = node1.name choices = Node(Merge(2), run_without_submitting=True, diff --git a/CPAC/registration/registration.py b/CPAC/registration/registration.py index 4c525b9507..d0b9a793ae 100644 --- a/CPAC/registration/registration.py +++ b/CPAC/registration/registration.py @@ -1158,7 +1158,8 @@ def 
create_wf_calculate_ants_warp(name='create_wf_calculate_ants_warp', 1. Calculates a nonlinear anatomical-to-template registration. .. exec:: - from CPAC.registration import create_wf_calculate_ants_warp + from CPAC.registration.registration import \ + create_wf_calculate_ants_warp wf = create_wf_calculate_ants_warp() wf.write_graph( graph2use='orig', @@ -1265,14 +1266,23 @@ def create_wf_calculate_ants_warp(name='create_wf_calculate_ants_warp', calculate_ants_warp, 'moving_brain') calc_ants_warp_wf.connect(inputspec, 'reference_brain', calculate_ants_warp, 'reference_brain') + calc_ants_warp_wf.connect(inputspec, 'moving_brain', + retry_calculate_ants_warp, 'moving_brain') + calc_ants_warp_wf.connect(inputspec, 'reference_brain', + retry_calculate_ants_warp, 'reference_brain') if reg_ants_skull == 1: calculate_ants_warp.inputs.reg_with_skull = 1 + retry_calculate_ants_warp.inputs.reg_with_skull = 1 calc_ants_warp_wf.connect(inputspec, 'moving_skull', calculate_ants_warp, 'moving_skull') calc_ants_warp_wf.connect(inputspec, 'reference_skull', calculate_ants_warp, 'reference_skull') + calc_ants_warp_wf.connect(inputspec, 'moving_skull', + retry_calculate_ants_warp, 'moving_skull') + calc_ants_warp_wf.connect(inputspec, 'reference_skull', + retry_calculate_ants_warp, 'reference_skull') for guardrail in guardrails: calc_ants_warp_wf.connect(inputspec, 'reference_skull', guardrail, 'reference') @@ -1281,6 +1291,10 @@ def create_wf_calculate_ants_warp(name='create_wf_calculate_ants_warp', calculate_ants_warp, 'moving_skull') calc_ants_warp_wf.connect(inputspec, 'reference_brain', calculate_ants_warp, 'reference_skull') + calc_ants_warp_wf.connect(inputspec, 'moving_brain', + retry_calculate_ants_warp, 'moving_skull') + calc_ants_warp_wf.connect(inputspec, 'reference_brain', + retry_calculate_ants_warp, 'reference_skull') for guardrail in guardrails: calc_ants_warp_wf.connect(inputspec, 'reference_brain', guardrail, 'reference') @@ -1295,16 +1309,43 @@ def create_wf_calculate_ants_warp(name='create_wf_calculate_ants_warp', calculate_ants_warp, 'ants_para') calc_ants_warp_wf.connect(inputspec, 'interp', calculate_ants_warp, 'interp') + calc_ants_warp_wf.connect(inputspec, 'fixed_image_mask', + retry_calculate_ants_warp, 'fixed_image_mask') + calc_ants_warp_wf.connect(inputspec, 'reference_mask', + retry_calculate_ants_warp, 'reference_mask') + calc_ants_warp_wf.connect(inputspec, 'moving_mask', + retry_calculate_ants_warp, 'moving_mask') + calc_ants_warp_wf.connect(inputspec, 'ants_para', + retry_calculate_ants_warp, 'ants_para') + calc_ants_warp_wf.connect(inputspec, 'interp', + retry_calculate_ants_warp, 'interp') # inter-workflow connections + calc_ants_warp_wf.connect(calculate_ants_warp, 'warped_image', + guardrails[0], 'registered') + calc_ants_warp_wf.connect(retry_calculate_ants_warp, 'warped_image', + guardrails[1], 'registered') + select = guardrail_selection(calc_ants_warp_wf, *guardrails) + warp_list_choices = pe.Node(util.Merge(2), run_without_submitting=True, + name=f'{name}_warplist_choices') + choose_warp_list = pe.Node(util.Select(), run_without_submitting=True, + name=f'choose_{name}') calc_ants_warp_wf.connect(calculate_ants_warp, 'warp_list', + warp_list_choices, 'in1') + calc_ants_warp_wf.connect(retry_calculate_ants_warp, 'warp_list', + warp_list_choices, 'in2') + calc_ants_warp_wf.connect(warp_list_choices, 'out', + choose_warp_list, 'inlist') + calc_ants_warp_wf.connect(guardrails[0], 'failed_qc', + choose_warp_list, 'index') + calc_ants_warp_wf.connect(choose_warp_list, 
'out',
                               select_forward_initial, 'warp_list')
-    calc_ants_warp_wf.connect(calculate_ants_warp, 'warp_list',
+    calc_ants_warp_wf.connect(choose_warp_list, 'out',
                               select_forward_rigid, 'warp_list')
-    calc_ants_warp_wf.connect(calculate_ants_warp, 'warp_list',
+    calc_ants_warp_wf.connect(choose_warp_list, 'out',
                               select_forward_affine, 'warp_list')
-    calc_ants_warp_wf.connect(calculate_ants_warp, 'warp_list',
+    calc_ants_warp_wf.connect(choose_warp_list, 'out',
                               select_forward_warp, 'warp_list')
-    calc_ants_warp_wf.connect(calculate_ants_warp, 'warp_list',
+    calc_ants_warp_wf.connect(choose_warp_list, 'out',
                               select_inverse_warp, 'warp_list')
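
With this rewiring the retry choice is made once, on the whole warp_list, and the existing select_forward_*/select_inverse slicers all consume the chosen list. The Merge(2)/Select indexing in plain Python, with failed_qc as the index (0 = first try passed QC, 1 = take the retry; filenames invented):

    def choose_warp_list(first_try, retry, failed_qc):
        """Merge(2) stacks the two candidates; Select picks one by index."""
        return [first_try, retry][failed_qc]


    warps = choose_warp_list(['init.mat', 'rigid.mat', 'warp.nii.gz'],
                             ['init2.mat', 'rigid2.mat', 'warp2.nii.gz'],
                             failed_qc=1)
    assert warps == ['init2.mat', 'rigid2.mat', 'warp2.nii.gz']
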
+"""Custom registration exceptions""" +class BadRegistrationError(ValueError): + """Exception for when a QC measure for a registration falls below a + specified threshold""" + def __init__(self, *args, metric=None, value=None, threshold=None, + **kwargs): + """ + Parameters + ---------- + metric : str + QC metric + + value : float + calculated QC value + + threshold : float + specified threshold + """ + msg = "Registration failed quality control" + if all(arg is not None for arg in (metric, value, threshold)): + msg += f" ({metric}: {value} < {threshold})" + msg += "." + super().__init__(msg, *args, **kwargs) diff --git a/CPAC/registration/guardrails.py b/CPAC/registration/guardrails.py index 6549105cf9..a64d896741 100644 --- a/CPAC/registration/guardrails.py +++ b/CPAC/registration/guardrails.py @@ -24,6 +24,7 @@ from CPAC.pipeline.nipype_pipeline_engine import Node, Workflow # from CPAC.pipeline.nipype_pipeline_engine.utils import connect_from_spec from CPAC.qc import qc_masks, registration_guardrail_thresholds +from CPAC.registration.exceptions import BadRegistrationError from CPAC.registration.utils import hardcoded_reg from CPAC.utils.docs import retry_docstring @@ -32,30 +33,6 @@ Registration: {'reference': 'reference', 'registered': 'out_file'}} -class BadRegistrationError(ValueError): - """Exception for when a QC measure for a registration falls below a - specified threshold""" - def __init__(self, *args, metric=None, value=None, threshold=None, - **kwargs): - """ - Parameters - ---------- - metric : str - QC metric - - value : float - calculated QC value - - threshold : float - specified threshold - """ - msg = "Registration failed quality control" - if all(arg is not None for arg in (metric, value, threshold)): - msg += f" ({metric}: {value} < {threshold})" - msg += "." - super().__init__(msg, *args, **kwargs) - - def guardrail_selection(wf: 'Workflow', node1: 'Node', node2: 'Node', ) -> Node: """Generate requisite Nodes for choosing a path through the graph diff --git a/CPAC/registration/utils.py b/CPAC/registration/utils.py index 1185f0190b..b625895233 100644 --- a/CPAC/registration/utils.py +++ b/CPAC/registration/utils.py @@ -66,6 +66,8 @@ def hardcoded_reg(moving_brain, reference_brain, moving_skull, reference_skull, ants_para, moving_mask=None, reference_mask=None, fixed_image_mask=None, interp=None, reg_with_skull=0): + import subprocess + from CPAC.registration.exceptions import BadRegistrationError # TODO: expand transforms to cover all in ANTs para regcmd = ["antsRegistration"] @@ -444,10 +446,14 @@ def hardcoded_reg(moving_brain, reference_brain, moving_skull, f.write(' '.join(regcmd)) try: - retcode = subprocess.check_output(regcmd) + subprocess.check_output(regcmd) + except BadRegistrationError as bad_registration: + raise bad_registration except Exception as e: - raise Exception('[!] ANTS registration did not complete successfully.' - '\n\nError details:\n{0}\n{1}\n'.format(e, e.output)) + msg = '[!] ANTS registration did not complete successfully.' 
+        if hasattr(e, 'output'):
+            msg += f'\n\nError details:\n{e}\n{e.output}\n'
+        raise Exception(msg)  # pylint: disable=raise-missing-from
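
The hasattr guard is what lets one handler cover both failure modes: subprocess.CalledProcessError (nonzero exit) carries .output, while an OSError for a missing antsRegistration binary does not. A self-contained sketch of the same flow with an invented command (note the f-prefix on the message, required for {e} to interpolate):

    import subprocess


    def run_registration(regcmd):
        try:
            subprocess.check_output(regcmd)
        except Exception as e:
            msg = '[!] ANTS registration did not complete successfully.'
            if hasattr(e, 'output'):  # CalledProcessError has .output
                msg += f'\n\nError details:\n{e}\n{e.output}\n'
            raise Exception(msg)  # pylint: disable=raise-missing-from


    try:
        run_registration(['false'])  # exits nonzero -> CalledProcessError
    except Exception as err:
        print(err)
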
CPAC.pipeline.random_state.seed import seed_plus_1
+    from CPAC.pipeline.random_state.seed import increment_seed
     if registered.endswith('-failed'):
-        retry_node = registration_node.clone(
-            name=f'{registration_node.name}-retry')
-        if isinstance(retry_node.seed, int):
-            retry_node.seed = seed_plus_1()
+        retry_node = increment_seed(registration_node.clone(
+            name=f'{registration_node.name}-retry'))
         return retry_node
     return registration_node
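
increment_seed (added to CPAC.pipeline.random_state.seed earlier in this series) only bumps nodes that already carry an integer seed, so unseeded runs stay fully random while seeded runs get a deterministic-but-different retry. A sketch of the arithmetic, assuming seed_plus_1 wraps at the module's stated 2147483647 ceiling (the real function reads the global seed rather than taking an argument):

    MAX_SEED = 2147483647  # largest seed accepted, per the module docstring


    def seed_plus_1(seed: int) -> int:
        """Next seed in 1..MAX_SEED, wrapping instead of overflowing."""
        return seed % MAX_SEED + 1


    assert seed_plus_1(41) == 42
    assert seed_plus_1(MAX_SEED) == 1  # wraps back to the smallest valid seed
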
[('registered', 'in1')]),
-                (node2, choices, [('registered', 'in2')]),
+    wf.connect([(node1, choices, [(output_key, 'in1')]),
+                (node2, choices, [(output_key, 'in2')]),
                 (choices, select, [('out', 'inlist')]),
-                (node1, select, [('failed_qc', 'index')])])
+                (guardrail_node, select, [('failed_qc', 'index')])])
     return select
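
The two new parameters let one try/retry pair drive several selections: the warped image is chosen between the guardrails themselves, while, say, a FLIRT out_matrix_file is chosen between the registration nodes, both indexed by the first guardrail's verdict. A dict-based miniature of those semantics (stand-in classes and filenames, not nipype):

    class FakeNode(dict):
        """Just enough of a Node to demonstrate the selection logic."""


    def select_output(node1, node2, output_key='registered',
                      guardrail_node=None):
        guardrail_node = guardrail_node or node1
        choices = [node1[output_key], node2[output_key]]  # Merge(2)
        return choices[guardrail_node['failed_qc']]       # Select(index=...)


    guard1 = FakeNode(registered='try.nii.gz', failed_qc=1)
    guard2 = FakeNode(registered='retry.nii.gz', failed_qc=0)
    flirt1 = FakeNode(out_matrix_file='try.mat')
    flirt2 = FakeNode(out_matrix_file='retry.mat')
    assert select_output(guard1, guard2) == 'retry.nii.gz'
    assert select_output(flirt1, flirt2, 'out_matrix_file',
                         guardrail_node=guard1) == 'retry.mat'
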
     '''
     wf = pe.Workflow(name=wf_name)
guardrail_anat_to_func = registration_guardrail_node('guardrail_'
-                                                         'anat-to-func')
# ${FSLDIR}/bin/convertwarp --relout --rel -r ${T1wFolder}/${T2wRestoreImage} --postmat=${fMRIFolder}/${ScoutName}_gdc2T1w_init.mat -o ${fMRIFolder}/${ScoutName}_gdc2T1w_init_warp @@ -893,20 +913,17 @@ def create_register_func_to_anat_use_T2(config, convert_warp.inputs.out_relwarp = True convert_warp.inputs.relwarp = True - register_func_to_anat_use_T2.connect( - linear_reg_func_to_t1, 'out_matrix_file', convert_warp, 'postmat') + register_func_to_anat_use_T2.connect(linear_reg_func_to_t1_matrix, 'out', + convert_warp, 'postmat') register_func_to_anat_use_T2.connect(inputspec, 'T2_head', convert_warp, 'reference') register_func_to_anat_use_T2.connect( - linear_reg_func_to_t1, 'out_matrix_file', + linear_reg_func_to_t1_matrix, 'out', outputspec, 'func_to_anat_linear_xfm_nobbreg') register_func_to_anat_use_T2.connect(convert_warp, 'out_file', outputspec, 'func_to_anat_linear_warp_nobbreg') - - register_func_to_anat_use_T2.connect(linear_reg_func_to_t1, 'out_file', - guardrail_t1, 'registered') - register_func_to_anat_use_T2.connect(guardrail_t1, 'registered', + register_func_to_anat_use_T2.connect(select_linear_reg_func_to_t1, 'out', outputspec, 'anat_func_nobbreg') return register_func_to_anat_use_T2 @@ -1840,12 +1857,12 @@ def bold_to_T1template_xfm_connector(wf_name, cfg, reg_tool, symmetric=False): name='change_transform_type') wf.connect(fsl_reg_2_itk, 'itk_transform', - change_transform, 'input_affine_file') + change_transform, 'input_affine_file') # combine ALL xfm's into one - makes it easier downstream write_composite_xfm = pe.Node( interface=ants.ApplyTransforms(), - name=f'write_composite_xfm', + name='write_composite_xfm', mem_gb=1.5) write_composite_xfm.inputs.print_out_composite_warp_file = True write_composite_xfm.inputs.output_image = \ @@ -2450,12 +2467,10 @@ def overwrite_transform_anat_to_template(wf, cfg, strat_pool, pipe_num, interface=ants.ApplyTransforms(), name=f'ANTS-ABCD_T1_to_template_{pipe_num}') ants_apply_warp_t1_to_template.inputs.dimension = 3 - ants_apply_warp_t1_to_template.inputs.print_out_composite_warp_file = True - ants_apply_warp_t1_to_template.inputs.output_image = 'ANTs_CombinedWarp.nii.gz' - - guardrail_brain = registration_guardrail_node('guardrail_brain_T1w') - guardrail_head = registration_guardrail_node('guardrail_head_T1w') - guardrail_mask = registration_guardrail_node('guardrail_T1w_mask') + ants_apply_warp_t1_to_template.inputs.print_out_composite_warp_file = \ + True + ants_apply_warp_t1_to_template.inputs.output_image = \ + 'ANTs_CombinedWarp.nii.gz' node, out = strat_pool.get_data(['desc-restore_T1w', 'desc-preproc_T1w', @@ -2465,8 +2480,6 @@ def overwrite_transform_anat_to_template(wf, cfg, strat_pool, pipe_num, node, out = strat_pool.get_data('T1w-template') wf.connect(node, out, ants_apply_warp_t1_to_template, 'reference_image') - for guardrail in (guardrail_brain, guardrail_head, guardrail_mask): - wf.connect(node, out, guardrail, 'reference') node, out = strat_pool.get_data('from-T1w_to-template_mode-image_xfm') wf.connect(node, out, ants_apply_warp_t1_to_template, 'transforms') @@ -2629,16 +2642,13 @@ def overwrite_transform_anat_to_template(wf, cfg, strat_pool, pipe_num, apply_mask, 'in_file') wf.connect(fsl_apply_warp_t1_brain_to_template, 'out_file', apply_mask, 'mask_file') - wf.connect(apply_mask, 'out_file', guardrail_brain, 'registered') - wf.connect(fsl_apply_warp_t1_to_template, 'out_file', - guardrail_head, 'registered') - wf.connect(fsl_apply_warp_t1_brain_mask_to_template, 'out_file', - guardrail_mask, 'registered') 
outputs = { - 'space-template_desc-brain_T1w': (guardrail_brain, 'registered'), - 'space-template_desc-head_T1w': (guardrail_head, 'registered'), - 'space-template_desc-T1w_mask': (guardrail_mask, 'registered'), + 'space-template_desc-brain_T1w': (apply_mask, 'out_file'), + 'space-template_desc-head_T1w': (fsl_apply_warp_t1_to_template, + 'out_file'), + 'space-template_desc-T1w_mask': ( + fsl_apply_warp_t1_brain_mask_to_template, 'out_file'), 'from-T1w_to-template_mode-image_xfm': (merge_xfms, 'merged_file'), 'from-template_to-T1w_mode-image_xfm': (merge_inv_xfms, 'merged_file') @@ -2784,12 +2794,12 @@ def coregistration(wf, cfg, strat_pool, pipe_num, opt=None): subwfname = f'func_to_anat_FLIRT_bbreg{bbreg_status}_{pipe_num}' if strat_pool.check_rpool('T2w') and cfg.anatomical_preproc['run_t2']: # monkey data - func_to_anat = create_register_func_to_anat_use_T2(cfg, subwfname) + func_to_anat = create_register_func_to_anat_use_T2(subwfname) # https://github.com/DCAN-Labs/dcan-macaque-pipeline/blob/master/fMRIVolume/GenericfMRIVolumeProcessingPipeline.sh#L177 # fslmaths "$fMRIFolder"/"$NameOffMRI"_mc -Tmean "$fMRIFolder"/"$ScoutName"_gdc func_mc_mean = pe.Node(interface=afni_utils.TStat(), - name=f'func_motion_corrected_mean_{pipe_num}') + name=f'func_motion_corrected_mean_{pipe_num}') func_mc_mean.inputs.options = '-mean' func_mc_mean.inputs.outputtype = 'NIFTI_GZ' @@ -2815,19 +2825,19 @@ def coregistration(wf, cfg, strat_pool, pipe_num, opt=None): subwfname) func_to_anat.inputs.inputspec.dof = cfg.registration_workflows[ - 'functional_registration']['coregistration']['dof'] + 'functional_registration']['coregistration']['dof'] func_to_anat.inputs.inputspec.interp = cfg.registration_workflows[ - 'functional_registration']['coregistration']['interpolation'] + 'functional_registration']['coregistration']['interpolation'] node, out = strat_pool.get_data('desc-reginput_bold') wf.connect(node, out, func_to_anat, 'inputspec.func') if cfg.registration_workflows['functional_registration'][ - 'coregistration']['reference'] == 'brain': + 'coregistration']['reference'] == 'brain': node, out = strat_pool.get_data('desc-brain_T1w') elif cfg.registration_workflows['functional_registration'][ - 'coregistration']['reference'] == 'restore-brain': + 'coregistration']['reference'] == 'restore-brain': node, out = strat_pool.get_data('desc-restore-brain_T1w') wf.connect(node, out, func_to_anat, 'inputspec.anat') @@ -3185,7 +3195,8 @@ def warp_timeseries_to_T1template(wf, cfg, strat_pool, pipe_num, opt=None): return (wf, outputs) -def warp_timeseries_to_T1template_abcd(wf, cfg, strat_pool, pipe_num, opt=None): +def warp_timeseries_to_T1template_abcd(wf, cfg, strat_pool, pipe_num, opt=None + ): """ {"name": "transform_timeseries_to_T1template_abcd", "config": ["registration_workflows", "functional_registration", @@ -3503,9 +3514,6 @@ def warp_timeseries_to_T1template_dcan_nhp(wf, cfg, strat_pool, pipe_num, opt=No wf.connect(node, out, anat_resample, 'in_file') wf.connect(node, out, anat_resample, 'reference') - guardrail_brain = registration_guardrail_node('guardrail-brain_bold') - guardrail_mask = registration_guardrail_node('guardrail-bold_mask') - # ${FSLDIR}/bin/applywarp --rel --interp=spline -i ${T1wImage} -r ${ResampRefIm} --premat=$FSLDIR/etc/flirtsch/ident.mat -o ${WD}/${T1wImageFile}.${FinalfMRIResolution} applywarp_anat_res = pe.Node(interface=fsl.ApplyWarp(), name=f'anat_func_res_{pipe_num}') @@ -3531,7 +3539,6 @@ def warp_timeseries_to_T1template_dcan_nhp(wf, cfg, strat_pool, pipe_num, opt=No 
wf.connect(node, out, applywarp_anat_mask_res, 'in_file') wf.connect(applywarp_anat_res, 'out_file', applywarp_anat_mask_res, 'ref_file') - wf.connect(applywarp_anat_res, 'out_file', guardrail_mask, 'reference') # ${FSLDIR}/bin/fslmaths ${WD}/${T1wImageFile}.${FinalfMRIResolution} -mas ${WD}/${FreeSurferBrainMaskFile}.${FinalfMRIResolution}.nii.gz ${WD}/${FreeSurferBrainMaskFile}.${FinalfMRIResolution}.nii.gz T1_brain_res = pe.Node(interface=fsl.MultiImageMaths(), @@ -3660,7 +3667,6 @@ def warp_timeseries_to_T1template_dcan_nhp(wf, cfg, strat_pool, pipe_num, opt=No applywarp_func_to_standard, 'field_file') wf.connect(applywarp_anat_res, 'out_file', applywarp_func_to_standard, 'ref_file') - wf.connect(applywarp_anat_res, 'out_file', guardrail_brain, 'reference') # applywarp --rel --interp=nn --in=${WD}/prevols/vol${vnum}_mask.nii.gz --warp=${MotionMatrixFolder}/${MotionMatrixPrefix}${vnum}_all_warp.nii.gz --ref=${WD}/${T1wImageFile}.${FinalfMRIResolution} --out=${WD}/postvols/vol${vnum}_mask.nii.gz applywarp_func_mask_to_standard = pe.MapNode(interface=fsl.ApplyWarp(), @@ -3734,12 +3740,10 @@ def warp_timeseries_to_T1template_dcan_nhp(wf, cfg, strat_pool, pipe_num, opt=No wf.connect(applywarp_anat_mask_res, 'out_file', func_mask_final, 'in_file') wf.connect(find_min_mask, 'out_file', func_mask_final, 'operand_files') - wf.connect(extract_func_brain, 'out_file', guardrail_brain, 'registered') - wf.connect(func_mask_final, 'out_file', guardrail_mask, 'registered') outputs = { - 'space-template_desc-brain_bold': (guardrail_brain, 'registered'), - 'space-template_desc-bold_mask': (guardrail_mask, 'registered') + 'space-template_desc-brain_bold': (extract_func_brain, 'out_file'), + 'space-template_desc-bold_mask': (func_mask_final, 'out_file') } return (wf, outputs) From be69c25186b2c3a7b33422f93d98544573de2e82 Mon Sep 17 00:00:00 2001 From: Jon Clucas Date: Tue, 25 Oct 2022 17:09:48 +0000 Subject: [PATCH 40/72] :recycle: Refactor bbreg guardrails --- CPAC/registration/guardrails.py | 30 +++++ CPAC/registration/registration.py | 213 ++++++++++++------------------ 2 files changed, 116 insertions(+), 127 deletions(-) diff --git a/CPAC/registration/guardrails.py b/CPAC/registration/guardrails.py index b869227c70..ee3dff397e 100644 --- a/CPAC/registration/guardrails.py +++ b/CPAC/registration/guardrails.py @@ -33,6 +33,36 @@ Registration: {'reference': 'reference', 'registered': 'out_file'}} +def connect_retries(wf, nodes, connections): + """Function to generalize making the same connections to try and + retry nodes. + + For each 3-tuple (``conn``) in ``connections``, will do + + .. 
code-block:: Python
+
+        wf.connect(conn[0], conn[1], node, conn[2])
+
+    for each node in nodes
+
+    Parameters
+    ----------
+    wf : Workflow
+
+    nodes : iterable of Nodes
+
+    connections : iterable of 3-tuples of (Node, str or tuple, str)
+
+    Returns
+    -------
+    Workflow
+
+    for node in nodes:
+        for conn in connections:
+            wf.connect(conn[0], conn[1], node, conn[2])
+    return wf
+
+
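
A stub-workflow check of the fan-out, and of the argument order: nipype's flat connect signature is (source, source_port, dest, dest_port), so the node being retried slots in as the destination, which is why the loop body reads wf.connect(conn[0], conn[1], node, conn[2]).

    class StubWorkflow:
        """Records connections instead of building a nipype graph."""
        def __init__(self):
            self.edges = []

        def connect(self, src, src_port, dst, dst_port):
            self.edges.append((src, src_port, dst, dst_port))


    def connect_retries(wf, nodes, connections):
        for node in nodes:
            for conn in connections:
                wf.connect(conn[0], conn[1], node, conn[2])
        return wf


    wf = connect_retries(StubWorkflow(), ['try_node', 'retry_node'],
                         [('inputspec', 'func', 'in_file'),
                          ('inputspec', 'anat', 'reference')])
    assert len(wf.edges) == 4  # every connection fanned out to both nodes
    assert wf.edges[0] == ('inputspec', 'func', 'try_node', 'in_file')
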
[guardrail_retry_bbreg_func_to_anat]
+    register_bbregister_func_to_anat = connect_retries(
+        register_bbregister_func_to_anat, nodes, [
+            (inputspec, 'bbr_schedule', 'schedule'),
+            (wm_bb_mask, ('out_file', bbreg_args), 'args'),
+            (inputspec, 'func', 'in_file'),
+            (inputspec, 'anat', 'reference'),
+            (inputspec, 'linear_reg_matrix', 'in_matrix_file')])
     if phase_diff_distcor:
+        register_bbregister_func_to_anat = connect_retries(
+            register_bbregister_func_to_anat, nodes, [
+                (inputNode_pedir, ('pedir', convert_pedir), 'pedir'),
+                (inputspec, 'fieldmap', 'fieldmap'),
+                (inputspec, 'fieldmapmask', 'fieldmapmask'),
+                (inputNode_echospacing, 'echospacing', 'echospacing')])
+    for i, node in enumerate(nodes):
+        register_bbregister_func_to_anat.connect(inputspec, 'anat',
+                                                 guardrails[i], 'reference')
+        register_bbregister_func_to_anat.connect(node, 'out_file',
+                                                 guardrails[i], 'registered')
+    if retry:
+        # pylint: disable=no-value-for-parameter
+        outfile = guardrail_selection(register_bbregister_func_to_anat,
+                                      *guardrails)
+        matrix = guardrail_selection(register_bbregister_func_to_anat, *nodes,
+                                     'out_matrix_file', guardrails[0])
-        register_bbregister_func_to_anat.connect(
retry_node.inputs.inputspec.bbr_schedule = cfg[ - 'registration_workflows', 'functional_registration', - 'coregistration', 'boundary_based_registration', - 'bbr_schedule'] - retry_node.inputs.inputspec.bbr_wm_mask_args = cfg[ - 'registration_workflows', 'functional_registration', - 'coregistration', 'boundary_based_registration', - 'bbr_wm_mask_args'] - retry_guardrail = registration_guardrail_node( - f'retry_bbreg_guardrail_{pipe_num}') + if fallback: + bbreg_guardrail = registration_guardrail_node( + f'bbreg{bbreg_status}_guardrail_{pipe_num}') node, out = strat_pool.get_data('desc-reginput_bold') wf.connect(node, out, func_to_anat_bbreg, 'inputspec.func') - if opt is True: - wf.connect(node, out, retry_node, 'inputspec.func') if cfg.registration_workflows['functional_registration'][ 'coregistration']['boundary_based_registration'][ 'reference'] == 'whole-head': node, out = strat_pool.get_data('T1w') wf.connect(node, out, func_to_anat_bbreg, 'inputspec.anat') - wf.connect(node, out, bbreg_guardrail, 'reference') - if opt is True: - wf.connect(node, out, retry_node, 'inputspec.anat') - wf.connect(node, out, retry_guardrail, 'reference') + if fallback: + wf.connect(node, out, bbreg_guardrail, 'reference') elif cfg.registration_workflows['functional_registration'][ 'coregistration']['boundary_based_registration'][ 'reference'] == 'brain': node, out = strat_pool.get_data('desc-brain_T1w') wf.connect(node, out, func_to_anat_bbreg, 'inputspec.anat') - wf.connect(node, out, bbreg_guardrail, 'reference') - if opt is True: - wf.connect(node, out, retry_node, 'inputspec.anat') - wf.connect(node, out, retry_guardrail, 'reference') + if fallback: + wf.connect(node, out, bbreg_guardrail, 'reference') wf.connect(func_to_anat, 'outputspec.func_to_anat_linear_xfm_nobbreg', func_to_anat_bbreg, 'inputspec.linear_reg_matrix') - if opt is True: - wf.connect(func_to_anat, - 'outputspec.func_to_anat_linear_xfm_nobbreg', - retry_node, 'inputspec.linear_reg_matrix') if strat_pool.check_rpool('space-bold_label-WM_mask'): node, out = strat_pool.get_data(["space-bold_label-WM_mask"]) @@ -2948,76 +2924,59 @@ def coregistration(wf, cfg, strat_pool, pipe_num, opt=None): "label-WM_mask"]) wf.connect(node, out, func_to_anat_bbreg, 'inputspec.anat_wm_segmentation') - if opt is True: - wf.connect(node, out, retry_node, 'inputspec.anat_wm_segmentation') if diff_complete: node, out = strat_pool.get_data('effectiveEchoSpacing') wf.connect(node, out, func_to_anat_bbreg, 'echospacing_input.echospacing') - if opt is True: - wf.connect(node, out, - retry_node, 'echospacing_input.echospacing') node, out = strat_pool.get_data('diffphase-pedir') wf.connect(node, out, func_to_anat_bbreg, 'pedir_input.pedir') - if opt is True: - wf.connect(node, out, retry_node, 'pedir_input.pedir') node, out = strat_pool.get_data("despiked-fieldmap") wf.connect(node, out, func_to_anat_bbreg, 'inputspec.fieldmap') - if opt is True: - wf.connect(node, out, retry_node, 'inputspec.fieldmap') node, out = strat_pool.get_data("fieldmap-mask") wf.connect(node, out, func_to_anat_bbreg, 'inputspec.fieldmapmask') - if opt is True: - wf.connect(node, out, retry_node, 'inputspec.fieldmapmask') - - wf.connect(func_to_anat_bbreg, 'outputspec.anat_func', - bbreg_guardrail, 'registered') - if opt is True: - wf.connect(func_to_anat_bbreg, 'outputspec.anat_func', - retry_guardrail, 'registered') - - mean_bolds = pe.Node(util.Merge(2), run_without_submitting=True, - name=f'bbreg_mean_bold_choices_{pipe_num}') - xfms = pe.Node(util.Merge(2), 
run_without_submitting=True, - name=f'bbreg_xfm_choices_{pipe_num}') - fallback_mean_bolds = pe.Node(util.Select(), - run_without_submitting=True, - name=f'bbreg_choose_mean_bold_{pipe_num}' - ) - fallback_xfms = pe.Node(util.Select(), run_without_submitting=True, - name=f'bbreg_choose_xfm_{pipe_num}') - if opt is True: - wf.connect([ - (bbreg_guardrail, mean_bolds, [('registered', 'in1')]), - (retry_guardrail, mean_bolds, [('registered', 'in2')]), - (func_to_anat_bbreg, xfms, [ - ('outputspec.func_to_anat_linear_xfm', 'in1')]), - (retry_node, xfms, [ - ('outputspec.func_to_anat_linear_xfm', 'in2')])]) - else: + if fallback: # Fall back to no-BBReg + mean_bolds = pe.Node(util.Merge(2), run_without_submitting=True, + name=f'bbreg_mean_bold_choices_{pipe_num}') + xfms = pe.Node(util.Merge(2), run_without_submitting=True, + name=f'bbreg_xfm_choices_{pipe_num}') + fallback_mean_bolds = pe.Node(util.Select(), + run_without_submitting=True, + name='bbreg_choose_mean_bold_' + f'{pipe_num}') + fallback_xfms = pe.Node(util.Select(), run_without_submitting=True, + name=f'bbreg_choose_xfm_{pipe_num}') wf.connect([ + (func_to_anat_bbreg, bbreg_guardrail, [ + ('outputspec.anat_func', 'registered')]), (bbreg_guardrail, mean_bolds, [('registered', 'in1')]), (func_to_anat, mean_bolds, [('outputspec.anat_func_nobbreg', 'in2')]), (func_to_anat_bbreg, xfms, [ ('outputspec.func_to_anat_linear_xfm', 'in1')]), (func_to_anat, xfms, [ - ('outputspec.func_to_anat_linear_xfm_nobbreg', 'in2')])]) - wf.connect([ - (mean_bolds, fallback_mean_bolds, [('out', 'inlist')]), - (xfms, fallback_xfms, [('out', 'inlist')]), - (bbreg_guardrail, fallback_mean_bolds, [('failed_qc', 'index')]), - (bbreg_guardrail, fallback_xfms, [('failed_qc', 'index')])]) - outputs = { - 'space-T1w_desc-mean_bold': (fallback_mean_bolds, 'out'), - 'from-bold_to-T1w_mode-image_desc-linear_xfm': (fallback_xfms, - 'out')} + ('outputspec.func_to_anat_linear_xfm_nobbreg', 'in2')]), + (mean_bolds, fallback_mean_bolds, [('out', 'inlist')]), + (xfms, fallback_xfms, [('out', 'inlist')]), + (bbreg_guardrail, fallback_mean_bolds, [ + ('failed_qc', 'index')]), + (bbreg_guardrail, fallback_xfms, [('failed_qc', 'index')])]) + outputs = { + 'space-T1w_desc-mean_bold': (fallback_mean_bolds, 'out'), + 'from-bold_to-T1w_mode-image_desc-linear_xfm': (fallback_xfms, + 'out')} + else: + outputs = { + 'space-T1w_desc-mean_bold': (func_to_anat_bbreg, + 'outputspec.anat_func'), + 'from-bold_to-T1w_mode-image_desc-linear_xfm': ( + func_to_anat_bbreg, + 'outputspec.func_to_anat_linear_xfm')} return wf, outputs From ec62b6a419ee2f2696fed6461c06220c22cce915 Mon Sep 17 00:00:00 2001 From: Jon Clucas Date: Tue, 25 Oct 2022 18:28:54 +0000 Subject: [PATCH 41/72] :goal_net: Guardrail `create_fsl_flirt_linear_reg` --- CPAC/registration/guardrails.py | 6 +--- CPAC/registration/registration.py | 55 +++++++++++++------------------ 2 files changed, 24 insertions(+), 37 deletions(-) diff --git a/CPAC/registration/guardrails.py b/CPAC/registration/guardrails.py index ee3dff397e..f891800769 100644 --- a/CPAC/registration/guardrails.py +++ b/CPAC/registration/guardrails.py @@ -159,11 +159,7 @@ def registration_guardrail(registered: str, reference: str, metric=metric, value=value, threshold=threshold) logger.error(str(bad_registration)) if retry_num: - # if we've already retried, raise the error and - # stop execution - from nipype import config - config.update_config({ - 'execution': {'stop_on_first_crash': True}}) + # if we've already retried, raise the error raise bad_registration 
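The control flow this hunk settles on is: compute overlap metrics between the registered image and its reference, record a ``failed_qc`` flag, and raise only once a retry has also failed. A minimal sketch of that logic follows; the ``qc_masks`` and ``registration_guardrail_thresholds`` call signatures are assumptions for illustration, not the exact C-PAC API.

.. code-block:: Python

    # Sketch only: assumes qc_masks returns a dict like {'Dice': [value], ...}
    # and registration_guardrail_thresholds yields (metric, threshold) pairs.
    import logging
    from CPAC.qc import qc_masks, registration_guardrail_thresholds
    from CPAC.registration.exceptions import BadRegistrationError

    logger = logging.getLogger(__name__)

    def guardrail_check(registered, reference, retry_num=0):
        """Return (registered, failed_qc), raising after a failed retry."""
        failed_qc = 0
        metrics = qc_masks(registered, reference)
        for metric, threshold in registration_guardrail_thresholds():
            if threshold is None:
                continue
            value = metrics.get(metric, [None])[0]
            if value is not None and value < threshold:
                failed_qc = 1
                bad_registration = BadRegistrationError(
                    metric=metric, value=value, threshold=threshold)
                logger.error(str(bad_registration))
                if retry_num:
                    # a retry has already failed, so stop and surface the error
                    raise bad_registration
        return registered, failed_qc

On the first attempt the error is only logged and ``failed_qc`` is returned, which is what lets the workflow route around the failure instead of crashing.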
return registered, failed_qc diff --git a/CPAC/registration/registration.py b/CPAC/registration/registration.py index e339874460..c3a2c91c38 100644 --- a/CPAC/registration/registration.py +++ b/CPAC/registration/registration.py @@ -69,10 +69,6 @@ def apply_transform(wf_name, reg_tool, time_series=False, multi_input=False, # time series chunks multi_input = True - # Guardrail: check QC metrics - guardrail = registration_guardrail_node(f'{wf_name}_guardrail') - wf.connect(inputNode, 'reference', guardrail, 'reference') - if reg_tool == 'ants': if multi_input: @@ -162,13 +158,11 @@ def apply_transform(wf_name, reg_tool, time_series=False, multi_input=False, func_concat.inputs.outputtype = 'NIFTI_GZ' wf.connect(apply_warp, 'output_image', func_concat, 'in_files') - wf.connect(func_concat, 'out_file', guardrail, 'registered') - wf.connect(guardrail, 'registered', outputNode, 'output_image') + wf.connect(func_concat, 'out_file', outputNode, 'output_image') else: wf.connect(inputNode, 'input_image', apply_warp, 'input_image') - wf.connect(apply_warp, 'output_image', guardrail, 'registered') - wf.connect(guardrail, 'registered', outputNode, 'output_image') + wf.connect(apply_warp, 'output_image', outputNode, 'output_image') elif reg_tool == 'fsl': @@ -241,15 +235,11 @@ def apply_transform(wf_name, reg_tool, time_series=False, multi_input=False, func_concat.inputs.outputtype = 'NIFTI_GZ' wf.connect(apply_warp, 'out_file', func_concat, 'in_files') - wf.connect(func_concat, 'out_file', guardrail, 'registered') + wf.connect(func_concat, 'out_file', outputNode, 'output_image') else: wf.connect(inputNode, 'input_image', apply_warp, 'in_file') - wf.connect(apply_warp, 'out_file', guardrail, 'registered') - - # Pass output through guardrail before continuing - wf.connect(guardrail, 'registered', outputNode, 'output_image') - wf.connect(guardrail, 'failed_qc', outputNode, 'failed_qc') + wf.connect(apply_warp, 'out_file', outputNode, 'output_image') return wf @@ -339,31 +329,32 @@ def create_fsl_flirt_linear_reg(name='fsl_flirt_linear_reg'): linear_reg = pe.Node(interface=fsl.FLIRT(), name='linear_reg_0') linear_reg.inputs.cost = 'corratio' - - guardrail = registration_guardrail_node(f'{name}_guardrail') + retry_reg = increment_seed(linear_reg.clone(f'retry_{linear_reg.name}')) + nodes = (linear_reg, retry_reg) + guardrails = [] + for i, node in enumerate(nodes): + guardrails[i] = registration_guardrail_node(node.name, i) + linear_register.connect(inputspec, 'reference_brain', + guardrails[i], 'reference') + linear_register.connect(node, 'out_file', guardrails[i], 'registered') inv_flirt_xfm = pe.Node(interface=fsl.utils.ConvertXFM(), name='inv_linear_reg0_xfm') inv_flirt_xfm.inputs.invert_xfm = True - linear_register.connect(inputspec, 'input_brain', - linear_reg, 'in_file') - linear_register.connect(inputspec, 'reference_brain', - linear_reg, 'reference') - linear_register.connect(inputspec, 'reference_brain', - guardrail, 'reference') - linear_register.connect(inputspec, 'interp', - linear_reg, 'interp') - linear_register.connect(linear_reg, 'out_file', - guardrail, 'registered') - linear_register.connect(guardrail, 'registered', - outputspec, 'output_brain') - linear_register.connect(linear_reg, 'out_matrix_file', - inv_flirt_xfm, 'in_file') + linear_register = connect_retries(linear_register, nodes, [ + (inputspec, 'input_brain', 'in_file'), + (inputspec, 'reference_brain', 'reference'), + (inputspec, 'interp', 'interp')]) + # pylint: disable=no-value-for-parameter + registered = 
guardrail_selection(linear_register, *guardrails) + linear_register.connect(registered, 'out', outputspec, 'output_brain') + matrix = guardrail_selection(linear_register, *nodes, 'out_matrix_file', + guardrails[0]) + linear_register.connect(matrix, 'out', inv_flirt_xfm, 'in_file') linear_register.connect(inv_flirt_xfm, 'out_file', outputspec, 'invlinear_xfm') - linear_register.connect(linear_reg, 'out_matrix_file', - outputspec, 'linear_xfm') + linear_register.connect(matrix, 'out', outputspec, 'linear_xfm') return linear_register From 94f0092cfde708bc0a6665d001c35cb7960d58e6 Mon Sep 17 00:00:00 2001 From: Jon Clucas Date: Tue, 25 Oct 2022 18:46:49 +0000 Subject: [PATCH 42/72] :goal_net: Guardrail --- CPAC/registration/guardrails.py | 17 ++++++- CPAC/registration/registration.py | 80 ++++++++++++++----------------- 2 files changed, 51 insertions(+), 46 deletions(-) diff --git a/CPAC/registration/guardrails.py b/CPAC/registration/guardrails.py index f891800769..bb1e78541c 100644 --- a/CPAC/registration/guardrails.py +++ b/CPAC/registration/guardrails.py @@ -22,7 +22,7 @@ from nipype.interfaces.fsl import FLIRT from nipype.interfaces.utility import Function, Merge, Select from CPAC.pipeline.nipype_pipeline_engine import Node, Workflow -# from CPAC.pipeline.nipype_pipeline_engine.utils import connect_from_spec +from CPAC.pipeline.random_state.seed import increment_seed from CPAC.qc import qc_masks, registration_guardrail_thresholds from CPAC.registration.exceptions import BadRegistrationError from CPAC.registration.utils import hardcoded_reg @@ -227,6 +227,21 @@ def registration_guardrail_workflow(registration_node, retry=True): return wf +def retry_clone(node: 'Node') -> 'Node': + """Function to clone a node, name the clone, and increment its + random seed + + Parameters + ---------- + node : Node + + Returns + ------- + Node + """ + return increment_seed(node.clone(f'retry_{node.name}')) + + def retry_registration(wf, registration_node, registered): """Function conditionally retry registration if previous attempt failed diff --git a/CPAC/registration/registration.py b/CPAC/registration/registration.py index c3a2c91c38..13cd7ddfad 100644 --- a/CPAC/registration/registration.py +++ b/CPAC/registration/registration.py @@ -27,7 +27,8 @@ from CPAC.pipeline.random_state.seed import increment_seed from CPAC.registration.guardrails import connect_retries, \ guardrail_selection, \ - registration_guardrail_node + registration_guardrail_node, \ + retry_clone from CPAC.registration.utils import seperate_warps_list, \ check_transforms, \ generate_inverse_transform_flags, \ @@ -329,7 +330,7 @@ def create_fsl_flirt_linear_reg(name='fsl_flirt_linear_reg'): linear_reg = pe.Node(interface=fsl.FLIRT(), name='linear_reg_0') linear_reg.inputs.cost = 'corratio' - retry_reg = increment_seed(linear_reg.clone(f'retry_{linear_reg.name}')) + retry_reg = retry_clone(linear_reg) nodes = (linear_reg, retry_reg) guardrails = [] for i, node in enumerate(nodes): @@ -429,45 +430,37 @@ def create_fsl_fnirt_nonlinear_reg(name='fsl_fnirt_nonlinear_reg'): nonlinear_reg = pe.Node(interface=fsl.FNIRT(), name='nonlinear_reg_1') - nonlinear_reg.inputs.fieldcoeff_file = True nonlinear_reg.inputs.jacobian_file = True - - brain_warp = pe.Node(interface=fsl.ApplyWarp(), - name='brain_warp') - - nonlinear_register.connect(inputspec, 'input_skull', - nonlinear_reg, 'in_file') - - nonlinear_register.connect(inputspec, 'reference_skull', - nonlinear_reg, 'ref_file') - - nonlinear_register.connect(inputspec, 'interp', - brain_warp, 
'interp') - - nonlinear_register.connect(inputspec, 'ref_mask', - nonlinear_reg, 'refmask_file') - - # FNIRT parameters are specified by FSL config file - # ${FSLDIR}/etc/flirtsch/TI_2_MNI152_2mm.cnf (or user-specified) - nonlinear_register.connect(inputspec, 'fnirt_config', - nonlinear_reg, 'config_file') - - nonlinear_register.connect(inputspec, 'linear_aff', - nonlinear_reg, 'affine_file') - - nonlinear_register.connect(nonlinear_reg, 'fieldcoeff_file', - outputspec, 'nonlinear_xfm') - - nonlinear_register.connect(inputspec, 'input_brain', - brain_warp, 'in_file') - - nonlinear_register.connect(nonlinear_reg, 'fieldcoeff_file', - brain_warp, 'field_file') - - nonlinear_register.connect(inputspec, 'reference_brain', - brain_warp, 'ref_file') - + retry_reg = retry_clone(nonlinear_reg) + nodes = [nonlinear_reg, retry_reg] + guardrails = [] + for i, node in enumerate(nodes): + guardrails[i] = registration_guardrail_node(f'{node.name}_guardrail') + nonlinear_register.connect(inputspec, 'reference_skull', + guardrails[i], 'reference') + nonlinear_register.connect(node, 'warped_file', + guardrails[i], 'registered') + + nonlinear_register = connect_retries(nonlinear_register, nodes, [ + (inputspec, 'input_skull', 'in_file'), + (inputspec, 'reference_skull', 'ref_file'), + (inputspec, 'ref_mask', 'refmask_file'), + # FNIRT parameters are specified by FSL config file + # ${FSLDIR}/etc/flirtsch/TI_2_MNI152_2mm.cnf (or user-specified) + (inputspec, 'fnirt_config', 'config_file'), + (inputspec, 'linear_aff', 'affine_file') + ]) + + brain_warp = pe.Node(interface=fsl.ApplyWarp(), name='brain_warp') + nonlinear_register.connect([ + (inputspec, brain_warp, [('interp', 'interp'), + ('input_brain', 'in_file'), + ('reference_brain', 'ref_file')])]) + field_coeff = guardrail_selection(nonlinear_register, *nodes, + 'fieldcoeff_file', guardrails[0]) + nonlinear_register.connect(field_coeff, 'out', outputspec, 'nonlinear_xfm') + nonlinear_register.connect(field_coeff, 'out', brain_warp, 'field_file') nonlinear_register.connect(brain_warp, 'out_file', outputspec, 'output_brain') @@ -809,8 +802,7 @@ def create_register_func_to_anat_use_T2(name='register_func_to_anat_use_T2'): linear_reg_func_to_t2.inputs.searchr_x = [30, 30] linear_reg_func_to_t2.inputs.searchr_y = [30, 30] linear_reg_func_to_t2.inputs.searchr_z = [30, 30] - retry_linear_reg_func_to_t2 = increment_seed(linear_reg_func_to_t2.clone( - f'retry_{linear_reg_func_to_t2.name}')) + retry_linear_reg_func_to_t2 = retry_clone(linear_reg_func_to_t2) guardrails = [] for i, node in enumerate(linear_reg_func_to_t2, @@ -876,8 +868,7 @@ def create_register_func_to_anat_use_T2(name='register_func_to_anat_use_T2'): linear_reg_func_to_t1.inputs.searchr_x = [30, 30] linear_reg_func_to_t1.inputs.searchr_y = [30, 30] linear_reg_func_to_t1.inputs.searchr_z = [30, 30] - retry_linear_reg_func_to_t1 = increment_seed(linear_reg_func_to_t1.clone( - f'retry_{linear_reg_func_to_t1.name}')) + retry_linear_reg_func_to_t1 = retry_clone(linear_reg_func_to_t1) guardrails = [] for i, node in enumerate(linear_reg_func_to_t1, @@ -1010,8 +1001,7 @@ def bbreg_args(bbreg_target): nodes = [bbreg_func_to_anat] guardrails = [guardrail_bbreg_func_to_anat] if retry: - retry_bbreg_func_to_anat = increment_seed(bbreg_func_to_anat.clone( - f'retry_{bbreg_func_to_anat.name}')) + retry_bbreg_func_to_anat = retry_clone(bbreg_func_to_anat) guardrail_retry_bbreg_func_to_anat = registration_guardrail_node( f'{retry_bbreg_func_to_anat.name}_guardrail') nodes += [retry_bbreg_func_to_anat] From 
883a56a031dac2279648fadde93f365e70e65a66 Mon Sep 17 00:00:00 2001 From: Jon Clucas Date: Tue, 25 Oct 2022 18:52:42 +0000 Subject: [PATCH 43/72] :recycle: Make `connect_retries` a `Workflow` method --- .../pipeline/nipype_pipeline_engine/engine.py | 18 +++++++++++ CPAC/registration/guardrails.py | 30 ------------------ CPAC/registration/registration.py | 31 +++++++++---------- 3 files changed, 32 insertions(+), 47 deletions(-) diff --git a/CPAC/pipeline/nipype_pipeline_engine/engine.py b/CPAC/pipeline/nipype_pipeline_engine/engine.py index 0629aed193..21c7a237c8 100644 --- a/CPAC/pipeline/nipype_pipeline_engine/engine.py +++ b/CPAC/pipeline/nipype_pipeline_engine/engine.py @@ -506,6 +506,24 @@ def _configure_exec_nodes(self, graph): TypeError): self._handle_just_in_time_exception(node) + def connect_retries(self, nodes, connections): + """Method to generalize making the same connections to try and + retry nodes. + + For each 3-tuple (``conn``) in ``connections``, will do + ``wf.connect(conn[0], node, conn[1], conn[2])`` for each ``node`` + in ``nodes`` + + Parameters + ---------- + nodes : iterable of Nodes + + connections : iterable of 3-tuples of (Node, str or tuple, str) + """ + for node in nodes: + for conn in connections: + self.connect(conn[0], node, conn[1], conn[2]) + def _get_dot( self, prefix=None, hierarchy=None, colored=False, simple_form=True, level=0 diff --git a/CPAC/registration/guardrails.py b/CPAC/registration/guardrails.py index bb1e78541c..69d7a4bc20 100644 --- a/CPAC/registration/guardrails.py +++ b/CPAC/registration/guardrails.py @@ -33,36 +33,6 @@ Registration: {'reference': 'reference', 'registered': 'out_file'}} -def connect_retries(wf, nodes, connections): - """Function to generalize making the same connections to try and - retry nodes. - - For each 3-tuple (``conn``) in ``connections``, will do - - .. 
code-block:: Python - - wf.connect(conn[0], node, conn[1], conn[2]) - - for each node in nodes - - Parameters - ---------- - wf : Workflow - - nodes : iterable of Nodes - - connections : iterable of 3-tuples of (Node, str or tuple, str) - - Returns - ------- - Workflow - """ - for node in nodes: - for conn in connections: - wf.connect(conn[0], node, conn[1], conn[2]) - return wf - - def guardrail_selection(wf: 'Workflow', node1: 'Node', node2: 'Node', output_key: str = 'registered', guardrail_node: 'Node' = None) -> Node: diff --git a/CPAC/registration/registration.py b/CPAC/registration/registration.py index 13cd7ddfad..b3b3b77d70 100644 --- a/CPAC/registration/registration.py +++ b/CPAC/registration/registration.py @@ -25,8 +25,7 @@ from CPAC.anat_preproc.lesion_preproc import create_lesion_preproc from CPAC.func_preproc.utils import chunk_ts, split_ts_chunks from CPAC.pipeline.random_state.seed import increment_seed -from CPAC.registration.guardrails import connect_retries, \ - guardrail_selection, \ +from CPAC.registration.guardrails import guardrail_selection, \ registration_guardrail_node, \ retry_clone from CPAC.registration.utils import seperate_warps_list, \ @@ -343,7 +342,7 @@ def create_fsl_flirt_linear_reg(name='fsl_flirt_linear_reg'): name='inv_linear_reg0_xfm') inv_flirt_xfm.inputs.invert_xfm = True - linear_register = connect_retries(linear_register, nodes, [ + linear_register.connect_retries(nodes, [ (inputspec, 'input_brain', 'in_file'), (inputspec, 'reference_brain', 'reference'), (inputspec, 'interp', 'interp')]) @@ -442,7 +441,7 @@ def create_fsl_fnirt_nonlinear_reg(name='fsl_fnirt_nonlinear_reg'): nonlinear_register.connect(node, 'warped_file', guardrails[i], 'registered') - nonlinear_register = connect_retries(nonlinear_register, nodes, [ + nonlinear_register.connect_retries(nodes, [ (inputspec, 'input_skull', 'in_file'), (inputspec, 'reference_skull', 'ref_file'), (inputspec, 'ref_mask', 'refmask_file'), @@ -1006,20 +1005,18 @@ def bbreg_args(bbreg_target): f'{retry_bbreg_func_to_anat.name}_guardrail') nodes += [retry_bbreg_func_to_anat] guardrails += [guardrail_retry_bbreg_func_to_anat] - register_bbregister_func_to_anat = connect_retries( - register_bbregister_func_to_anat, nodes, [ - (inputspec, 'bbr_schedule', 'schedule'), - (wm_bb_mask, ('out_file', bbreg_args), 'args'), - (inputspec, 'func', 'in_file'), - (inputspec, 'anat', 'reference'), - (inputspec, 'linear_reg_matrix', 'in_matrix_file')]) + register_bbregister_func_to_anat.connect_retries(nodes, [ + (inputspec, 'bbr_schedule', 'schedule'), + (wm_bb_mask, ('out_file', bbreg_args), 'args'), + (inputspec, 'func', 'in_file'), + (inputspec, 'anat', 'reference'), + (inputspec, 'linear_reg_matrix', 'in_matrix_file')]) if phase_diff_distcor: - register_bbregister_func_to_anat = connect_retries( - register_bbregister_func_to_anat, nodes, [ - (inputNode_pedir, ('pedir', convert_pedir), 'pedir'), - (inputspec, 'fieldmap', 'fieldmap'), - (inputspec, 'fieldmapmask', 'fieldmapmask'), - (inputNode_echospacing, 'echospacing', 'echospacing')]) + register_bbregister_func_to_anat.connect_retries(nodes, [ + (inputNode_pedir, ('pedir', convert_pedir), 'pedir'), + (inputspec, 'fieldmap', 'fieldmap'), + (inputspec, 'fieldmapmask', 'fieldmapmask'), + (inputNode_echospacing, 'echospacing', 'echospacing')]) for i, node in enumerate(nodes): register_bbregister_func_to_anat.connect(inputspec, 'anat', guardrails[i], 'reference') From f2b9115aa2504b454b5d9aa6a646e04de43d7668 Mon Sep 17 00:00:00 2001 From: Jon Clucas Date: Tue, 25 Oct 
2022 19:21:02 +0000 Subject: [PATCH 44/72] :goal_net: Guardrail `create_fsl_fnirt_nonlinear_reg_nhp` --- CPAC/registration/guardrails.py | 20 ++++ CPAC/registration/registration.py | 154 ++++++++++-------------------- 2 files changed, 69 insertions(+), 105 deletions(-) diff --git a/CPAC/registration/guardrails.py b/CPAC/registration/guardrails.py index 69d7a4bc20..68eaa4d883 100644 --- a/CPAC/registration/guardrails.py +++ b/CPAC/registration/guardrails.py @@ -134,6 +134,26 @@ def registration_guardrail(registered: str, reference: str, return registered, failed_qc +def nodes_and_guardrails(*nodes): + """Returns a two tuples of Nodes: (try, retry) and their + respective guardrails + + Parameters + ---------- + nodes : any number of Nodes + + Returns + ------- + nodes : tuple of Nodes + + guardrails : tuple of Nodes + """ + guardrails = [] + for i, node in nodes: + guardrails[i] = registration_guardrail_node(f'guardrail_{node.name}', + i) + return tuple(nodes), tuple(guardrails) + def registration_guardrail_node(name=None, retry_num=0): """Convenience method to get a new registration_guardrail Node diff --git a/CPAC/registration/registration.py b/CPAC/registration/registration.py index b3b3b77d70..541fefe0ac 100644 --- a/CPAC/registration/registration.py +++ b/CPAC/registration/registration.py @@ -16,8 +16,6 @@ # License along with C-PAC. If not, see . """Registration functions""" # pylint: disable=too-many-lines,ungrouped-imports,wrong-import-order -# TODO: replace Tuple with tuple, Union with |, once Python >= 3.9, 3.10 -from sqlite3 import connect from typing import Optional, Tuple, Union from CPAC.pipeline import nipype_pipeline_engine as pe from nipype.interfaces import afni, ants, c3, fsl, utility as util @@ -26,6 +24,7 @@ from CPAC.func_preproc.utils import chunk_ts, split_ts_chunks from CPAC.pipeline.random_state.seed import increment_seed from CPAC.registration.guardrails import guardrail_selection, \ + nodes_and_guardrails, \ registration_guardrail_node, \ retry_clone from CPAC.registration.utils import seperate_warps_list, \ @@ -330,10 +329,8 @@ def create_fsl_flirt_linear_reg(name='fsl_flirt_linear_reg'): linear_reg = pe.Node(interface=fsl.FLIRT(), name='linear_reg_0') linear_reg.inputs.cost = 'corratio' retry_reg = retry_clone(linear_reg) - nodes = (linear_reg, retry_reg) - guardrails = [] + nodes, guardrails = nodes_and_guardrails(linear_reg, retry_reg) for i, node in enumerate(nodes): - guardrails[i] = registration_guardrail_node(node.name, i) linear_register.connect(inputspec, 'reference_brain', guardrails[i], 'reference') linear_register.connect(node, 'out_file', guardrails[i], 'registered') @@ -432,7 +429,7 @@ def create_fsl_fnirt_nonlinear_reg(name='fsl_fnirt_nonlinear_reg'): nonlinear_reg.inputs.fieldcoeff_file = True nonlinear_reg.inputs.jacobian_file = True retry_reg = retry_clone(nonlinear_reg) - nodes = [nonlinear_reg, retry_reg] + nodes, guardrails = nodes_and_guardrails(nonlinear_reg, retry_reg) guardrails = [] for i, node in enumerate(nodes): guardrails[i] = registration_guardrail_node(f'{node.name}_guardrail') @@ -440,7 +437,6 @@ def create_fsl_fnirt_nonlinear_reg(name='fsl_fnirt_nonlinear_reg'): guardrails[i], 'reference') nonlinear_register.connect(node, 'warped_file', guardrails[i], 'registered') - nonlinear_register.connect_retries(nodes, [ (inputspec, 'input_skull', 'in_file'), (inputspec, 'reference_skull', 'ref_file'), @@ -542,99 +538,57 @@ def create_fsl_fnirt_nonlinear_reg_nhp(name='fsl_fnirt_nonlinear_reg_nhp'): nonlinear_reg = 
pe.Node(interface=fsl.FNIRT(), name='nonlinear_reg_1') - nonlinear_reg.inputs.fieldcoeff_file = True nonlinear_reg.inputs.jacobian_file = True nonlinear_reg.inputs.field_file = True + retry_reg = retry_clone(nonlinear_reg) + nodes, guardrails = nodes_and_guardrails(nonlinear_reg, retry_reg) + fieldcoeff_file = guardrail_selection(nonlinear_register, *nodes, + 'fieldcoeff_file', guardrails[0]) + field_file = guardrail_selection(nonlinear_register, *nodes, 'field_file', + guardrails[0]) - nonlinear_register.connect(inputspec, 'input_skull', - nonlinear_reg, 'in_file') - - nonlinear_register.connect(inputspec, 'reference_skull', - nonlinear_reg, 'ref_file') - - nonlinear_register.connect(inputspec, 'ref_mask', - nonlinear_reg, 'refmask_file') - - nonlinear_register.connect(inputspec, 'fnirt_config', - nonlinear_reg, 'config_file') - - nonlinear_register.connect(inputspec, 'linear_aff', - nonlinear_reg, 'affine_file') - - brain_warp = pe.Node(interface=fsl.ApplyWarp(), - name='brain_warp') + brain_warp = pe.Node(interface=fsl.ApplyWarp(), name='brain_warp') brain_warp.inputs.interp = 'nn' brain_warp.inputs.relwarp = True - nonlinear_register.connect(inputspec, 'input_brain', - brain_warp, 'in_file') - - nonlinear_register.connect(nonlinear_reg, 'field_file', - brain_warp, 'field_file') - - nonlinear_register.connect(inputspec, 'reference_skull', - brain_warp, 'ref_file') - - head_warp = pe.Node(interface=fsl.ApplyWarp(), - name='head_warp') + head_warp = pe.Node(interface=fsl.ApplyWarp(), name='head_warp') head_warp.inputs.interp = 'spline' head_warp.inputs.relwarp = True - nonlinear_register.connect(inputspec, 'input_brain', - head_warp, 'in_file') - - nonlinear_register.connect(nonlinear_reg, 'field_file', - head_warp, 'field_file') - - nonlinear_register.connect(inputspec, 'reference_skull', - head_warp, 'ref_file') - - mask_warp = pe.Node(interface=fsl.ApplyWarp(), - name='mask_warp') + mask_warp = pe.Node(interface=fsl.ApplyWarp(), name='mask_warp') mask_warp.inputs.interp = 'nn' mask_warp.inputs.relwarp = True - nonlinear_register.connect(inputspec, 'input_brain', - mask_warp, 'in_file') - - nonlinear_register.connect(nonlinear_reg, 'field_file', - mask_warp, 'field_file') - - nonlinear_register.connect(inputspec, 'reference_skull', - mask_warp, 'ref_file') - - biasfield_warp = pe.Node(interface=fsl.ApplyWarp(), - name='biasfield_warp') + biasfield_warp = pe.Node(interface=fsl.ApplyWarp(), name='biasfield_warp') biasfield_warp.inputs.interp = 'spline' biasfield_warp.inputs.relwarp = True - nonlinear_register.connect(inputspec, 'input_brain', - biasfield_warp, 'in_file') - - nonlinear_register.connect(nonlinear_reg, 'field_file', - biasfield_warp, 'field_file') - - nonlinear_register.connect(inputspec, 'reference_skull', - biasfield_warp, 'ref_file') - - nonlinear_register.connect(nonlinear_reg, 'fieldcoeff_file', - outputspec, 'nonlinear_xfm') - - nonlinear_register.connect(nonlinear_reg, 'field_file', - outputspec, 'nonlinear_warp') - - nonlinear_register.connect(brain_warp, 'out_file', - outputspec, 'output_brain') - - nonlinear_register.connect(head_warp, 'out_file', - outputspec, 'output_head') - - nonlinear_register.connect(mask_warp, 'out_file', - outputspec, 'output_mask') - - nonlinear_register.connect(biasfield_warp, 'out_file', - outputspec, 'output_biasfield') + nonlinear_register.connect_retries(nodes, [ + (inputspec, 'input_skull', 'in_file'), + (inputspec, 'reference_skull', 'ref_file'), + (inputspec, 'ref_mask', 'refmask_file'), + (inputspec, 'fnirt_config', 
'config_file'), + (inputspec, 'linear_aff', 'affine_file')]) + nonlinear_register.connect([ + (inputspec, brain_warp, [('input_brain', 'in_file'), + ('reference_skull', 'ref_file')]), + (field_file, brain_warp, [('out', 'field_file')]), + (inputspec, head_warp, [('input_brain', 'in_file'), + ('reference_skull', 'ref_file')]), + (field_file, head_warp, [('out', 'field_file')]), + (inputspec, mask_warp, [('input_brain', 'in_file'), + ('reference_skull', 'ref_file')]), + (field_file, mask_warp, [('out', 'field_file')]), + (inputspec, biasfield_warp, [('input_brain', 'in_file'), + ('reference_skull', 'ref_file')]), + (field_file, biasfield_warp, [('out', 'field_file')]), + (fieldcoeff_file, outputspec, [('out', 'fieldcoeff_file')]), + (field_file, outputspec, [('out', 'nonlinear_warp')]), + (brain_warp, outputspec, [('out_file', 'output_brain')]), + (head_warp, outputspec, [('out_file', 'output_head')]), + (mask_warp, outputspec, [('out_file', 'output_mask')]), + (biasfield_warp, outputspec, [('out_file', 'output_biasfield')])]) return nonlinear_register @@ -802,16 +756,13 @@ def create_register_func_to_anat_use_T2(name='register_func_to_anat_use_T2'): linear_reg_func_to_t2.inputs.searchr_y = [30, 30] linear_reg_func_to_t2.inputs.searchr_z = [30, 30] retry_linear_reg_func_to_t2 = retry_clone(linear_reg_func_to_t2) - - guardrails = [] - for i, node in enumerate(linear_reg_func_to_t2, - retry_linear_reg_func_to_t2): + nodes, guardrails = nodes_and_guardrails(linear_reg_func_to_t2, + retry_linear_reg_func_to_t2) + for i, node in enumerate(nodes): register_func_to_anat_use_T2.connect(inputspec, 'func', node, 'in_file') register_func_to_anat_use_T2.connect(inputspec, 'T2_head', node, 'reference') - guardrails[i] = registration_guardrail_node(f'guardrail_{node.name}', - i) register_func_to_anat_use_T2.connect(node, 'out_file', guardrails[i], 'registered') register_func_to_anat_use_T2.connect(inputspec, 'T2_head', @@ -868,16 +819,13 @@ def create_register_func_to_anat_use_T2(name='register_func_to_anat_use_T2'): linear_reg_func_to_t1.inputs.searchr_y = [30, 30] linear_reg_func_to_t1.inputs.searchr_z = [30, 30] retry_linear_reg_func_to_t1 = retry_clone(linear_reg_func_to_t1) - - guardrails = [] - for i, node in enumerate(linear_reg_func_to_t1, - retry_linear_reg_func_to_t1): + nodes, guardrails = nodes_and_guardrails(linear_reg_func_to_t1, + retry_linear_reg_func_to_t1) + for i, node in enumerate(nodes): register_func_to_anat_use_T2.connect(func_brain, 'out_file', node, 'in_file') register_func_to_anat_use_T2.connect(inputspec, 'T1_brain', node, 'reference') - guardrails[i] = registration_guardrail_node(f'guardrail_{node.name}', - i) register_func_to_anat_use_T2.connect(node, 'out_file', guardrails[i], 'registered') register_func_to_anat_use_T2.connect(inputspec, 'T1_brain', @@ -995,16 +943,12 @@ def bbreg_args(bbreg_target): bbreg_func_to_anat = pe.Node(interface=fsl.FLIRT(), name='bbreg_func_to_anat') bbreg_func_to_anat.inputs.dof = 6 - guardrail_bbreg_func_to_anat = registration_guardrail_node( - f'{bbreg_func_to_anat.name}_guardrail') - nodes = [bbreg_func_to_anat] - guardrails = [guardrail_bbreg_func_to_anat] + nodes, guardrails = nodes_and_guardrails(bbreg_func_to_anat) if retry: - retry_bbreg_func_to_anat = retry_clone(bbreg_func_to_anat) - guardrail_retry_bbreg_func_to_anat = registration_guardrail_node( - f'{retry_bbreg_func_to_anat.name}_guardrail') - nodes += [retry_bbreg_func_to_anat] - guardrails += [guardrail_retry_bbreg_func_to_anat] + retry_nodes, retry_guardrails = 
nodes_and_guardrails( + retry_clone(bbreg_func_to_anat)) + nodes += retry_nodes + guardrails += retry_guardrails register_bbregister_func_to_anat.connect_retries(nodes, [ (inputspec, 'bbr_schedule', 'schedule'), (wm_bb_mask, ('out_file', bbreg_args), 'args'), From fb90455543a7b522af54e08482f4908dc32d65b1 Mon Sep 17 00:00:00 2001 From: Jon Clucas Date: Tue, 25 Oct 2022 19:58:36 +0000 Subject: [PATCH 45/72] :recycle: Refactor guardrails for `create_wf_calculate_ants_warp` --- .../pipeline/nipype_pipeline_engine/engine.py | 18 ++- CPAC/registration/guardrails.py | 5 +- CPAC/registration/registration.py | 108 +++++------------- 3 files changed, 50 insertions(+), 81 deletions(-) diff --git a/CPAC/pipeline/nipype_pipeline_engine/engine.py b/CPAC/pipeline/nipype_pipeline_engine/engine.py index 21c7a237c8..f2cfc49b45 100644 --- a/CPAC/pipeline/nipype_pipeline_engine/engine.py +++ b/CPAC/pipeline/nipype_pipeline_engine/engine.py @@ -511,7 +511,7 @@ def connect_retries(self, nodes, connections): retry nodes. For each 3-tuple (``conn``) in ``connections``, will do - ``wf.connect(conn[0], node, conn[1], conn[2])`` for each ``node`` + ``wf.connect(conn[0], conn[1], node, conn[2])`` for each ``node`` in ``nodes`` Parameters @@ -520,9 +520,23 @@ def connect_retries(self, nodes, connections): connections : iterable of 3-tuples of (Node, str or tuple, str) """ + wrong_conn_type_msg = (r'connect_retries `connections` argument ' + 'must be an iterable of (Node, str or ' + 'tuple, str) tuples.') + if not isinstance(connections, (list, tuple)): + raise TypeError(f'{wrong_conn_type_msg}: Given {connections}') for node in nodes: + if not isinstance(node, Node): + raise TypeError('connect_retries requires an iterable ' + r'of nodes for the `nodes` parameter: ' + f'Given {node}') for conn in connections: - self.connect(conn[0], node, conn[1], conn[2]) + if not all((isinstance(conn, (list, tuple)), len(conn) == 3, + isinstance(conn[0], Node), + isinstance(conn[1], (tuple, str)), + isinstance(conn[2], str))): + raise TypeError(f'{wrong_conn_type_msg}: Given {conn}') + self.connect(*conn[:2], node, conn[2]) def _get_dot( self, prefix=None, hierarchy=None, colored=False, simple_form=True, diff --git a/CPAC/registration/guardrails.py b/CPAC/registration/guardrails.py index 68eaa4d883..42337f7e6a 100644 --- a/CPAC/registration/guardrails.py +++ b/CPAC/registration/guardrails.py @@ -148,12 +148,13 @@ def nodes_and_guardrails(*nodes): guardrails : tuple of Nodes """ - guardrails = [] - for i, node in nodes: + guardrails = [None] * len(nodes) + for i, node in enumerate(nodes): guardrails[i] = registration_guardrail_node(f'guardrail_{node.name}', i) return tuple(nodes), tuple(guardrails) + def registration_guardrail_node(name=None, retry_num=0): """Convenience method to get a new registration_guardrail Node diff --git a/CPAC/registration/registration.py b/CPAC/registration/registration.py index 541fefe0ac..199ce0518f 100644 --- a/CPAC/registration/registration.py +++ b/CPAC/registration/registration.py @@ -1162,132 +1162,86 @@ def create_wf_calculate_ants_warp(name='create_wf_calculate_ants_warp', 'retry_docstring']), name='retry_calc_ants_warp', mem_gb=2.8, mem_x=(2e-7, 'moving_brain', 'xyz'))) - guardrails = tuple(registration_guardrail_node( - f'{_try}{name}_guardrail', i) for i, _try in enumerate(('', 'retry_'))) - + nodes, guardrails = nodes_and_guardrails(calculate_ants_warp, + retry_calculate_ants_warp) calculate_ants_warp.interface.num_threads = num_threads retry_calculate_ants_warp.interface.num_threads = 
num_threads + select_forward_initial = pe.Node(util.Function( input_names=['warp_list', 'selection'], output_names=['selected_warp'], function=seperate_warps_list), name='select_forward_initial') - select_forward_initial.inputs.selection = "Initial" select_forward_rigid = pe.Node(util.Function( input_names=['warp_list', 'selection'], output_names=['selected_warp'], function=seperate_warps_list), name='select_forward_rigid') - select_forward_rigid.inputs.selection = "Rigid" select_forward_affine = pe.Node(util.Function( input_names=['warp_list', 'selection'], output_names=['selected_warp'], function=seperate_warps_list), name='select_forward_affine') - select_forward_affine.inputs.selection = "Affine" select_forward_warp = pe.Node(util.Function( input_names=['warp_list', 'selection'], output_names=['selected_warp'], function=seperate_warps_list), name='select_forward_warp') - select_forward_warp.inputs.selection = "Warp" select_inverse_warp = pe.Node(util.Function( input_names=['warp_list', 'selection'], output_names=['selected_warp'], function=seperate_warps_list), name='select_inverse_warp') - select_inverse_warp.inputs.selection = "Inverse" - calc_ants_warp_wf.connect(inputspec, 'moving_brain', - calculate_ants_warp, 'moving_brain') - calc_ants_warp_wf.connect(inputspec, 'reference_brain', - calculate_ants_warp, 'reference_brain') - calc_ants_warp_wf.connect(inputspec, 'moving_brain', - retry_calculate_ants_warp, 'moving_brain') - calc_ants_warp_wf.connect(inputspec, 'reference_brain', - retry_calculate_ants_warp, 'reference_brain') + calc_ants_warp_wf.connect_retries(nodes, [ + (inputspec, 'moving_brain', 'moving_brain'), + (inputspec, 'reference_brain', 'reference_brain')]) if reg_ants_skull == 1: calculate_ants_warp.inputs.reg_with_skull = 1 retry_calculate_ants_warp.inputs.reg_with_skull = 1 - - calc_ants_warp_wf.connect(inputspec, 'moving_skull', - calculate_ants_warp, 'moving_skull') - calc_ants_warp_wf.connect(inputspec, 'reference_skull', - calculate_ants_warp, 'reference_skull') - calc_ants_warp_wf.connect(inputspec, 'moving_skull', - retry_calculate_ants_warp, 'moving_skull') - calc_ants_warp_wf.connect(inputspec, 'reference_skull', - retry_calculate_ants_warp, 'reference_skull') - for guardrail in guardrails: - calc_ants_warp_wf.connect(inputspec, 'reference_skull', - guardrail, 'reference') + calc_ants_warp_wf.connect_retries(nodes, [ + (inputspec, 'moving_skull', 'moving_skull'), + (inputspec, 'reference_skull', 'reference_skull')]) + calc_ants_warp_wf.connect_retries(guardrails, [ + (inputspec, 'reference_skull', 'reference')]) else: - calc_ants_warp_wf.connect(inputspec, 'moving_brain', - calculate_ants_warp, 'moving_skull') - calc_ants_warp_wf.connect(inputspec, 'reference_brain', - calculate_ants_warp, 'reference_skull') - calc_ants_warp_wf.connect(inputspec, 'moving_brain', - retry_calculate_ants_warp, 'moving_skull') - calc_ants_warp_wf.connect(inputspec, 'reference_brain', - retry_calculate_ants_warp, 'reference_skull') - for guardrail in guardrails: - calc_ants_warp_wf.connect(inputspec, 'reference_brain', - guardrail, 'reference') - - calc_ants_warp_wf.connect(inputspec, 'fixed_image_mask', - calculate_ants_warp, 'fixed_image_mask') - calc_ants_warp_wf.connect(inputspec, 'reference_mask', - calculate_ants_warp, 'reference_mask') - calc_ants_warp_wf.connect(inputspec, 'moving_mask', - calculate_ants_warp, 'moving_mask') - calc_ants_warp_wf.connect(inputspec, 'ants_para', - calculate_ants_warp, 'ants_para') - calc_ants_warp_wf.connect(inputspec, 'interp', - 
calculate_ants_warp, 'interp') - calc_ants_warp_wf.connect(inputspec, 'fixed_image_mask', - retry_calculate_ants_warp, 'fixed_image_mask') - calc_ants_warp_wf.connect(inputspec, 'reference_mask', - retry_calculate_ants_warp, 'reference_mask') - calc_ants_warp_wf.connect(inputspec, 'moving_mask', - retry_calculate_ants_warp, 'moving_mask') - calc_ants_warp_wf.connect(inputspec, 'ants_para', - retry_calculate_ants_warp, 'ants_para') - calc_ants_warp_wf.connect(inputspec, 'interp', - retry_calculate_ants_warp, 'interp') + calc_ants_warp_wf.connect_retries(nodes, [ + (inputspec, 'moving_brain', 'moving_skull'), + (inputspec, 'reference_brain', 'reference_skull')]) + calc_ants_warp_wf.connect_retries(guardrails, [ + (inputspec, 'reference_brain', 'reference')]) + + calc_ants_warp_wf.connect_retries(nodes, [ + (inputspec, 'fixed_image_mask', 'fixed_image_mask'), + (inputspec, 'reference_mask', 'reference_mask'), + (inputspec, 'moving_mask', 'moving_mask'), + (inputspec, 'ants_para', 'ants_para'), + (inputspec, 'interp', 'interp')]) # inter-workflow connections calc_ants_warp_wf.connect(calculate_ants_warp, 'warped_image', guardrails[0], 'registered') calc_ants_warp_wf.connect(retry_calculate_ants_warp, 'warped_image', guardrails[1], 'registered') + # pylint: disable=no-value-for-parameter select = guardrail_selection(calc_ants_warp_wf, *guardrails) - warp_list_choices = pe.Node(util.Merge(2), run_without_submitting=True, - name=f'{name}_warplist_choices') - choose_warp_list = pe.Node(util.Select(), run_without_submitting=True, - name=f'choose_{name}') - calc_ants_warp_wf.connect(calculate_ants_warp, 'warp_list', - warp_list_choices, 'in1') - calc_ants_warp_wf.connect(retry_calculate_ants_warp, 'warp_list', - warp_list_choices, 'in2') - calc_ants_warp_wf.connect(warp_list_choices, 'out', - choose_warp_list, 'inlist') - calc_ants_warp_wf.connect(guardrails[0], 'failed_qc', - choose_warp_list, 'index') + warp_list = guardrail_selection(calc_ants_warp_wf, *nodes, 'warp_list', + guardrails[0]) calc_ants_warp_wf.connect(guardrails[0], 'failed_qc', retry_calculate_ants_warp, 'previous_failure') - calc_ants_warp_wf.connect(choose_warp_list, 'out', + calc_ants_warp_wf.connect(warp_list, 'out', select_forward_initial, 'warp_list') - calc_ants_warp_wf.connect(choose_warp_list, 'out', + calc_ants_warp_wf.connect(warp_list, 'out', select_forward_rigid, 'warp_list') - calc_ants_warp_wf.connect(choose_warp_list, 'out', + calc_ants_warp_wf.connect(warp_list, 'out', select_forward_affine, 'warp_list') - calc_ants_warp_wf.connect(choose_warp_list, 'out', + calc_ants_warp_wf.connect(warp_list, 'out', select_forward_warp, 'warp_list') - calc_ants_warp_wf.connect(choose_warp_list, 'out', + calc_ants_warp_wf.connect(warp_list, 'out', select_inverse_warp, 'warp_list') # connections to outputspec calc_ants_warp_wf.connect(select_forward_initial, 'selected_warp', From c1673842728b791f6feb0142dc94dc776449457c Mon Sep 17 00:00:00 2001 From: Jon Clucas Date: Tue, 25 Oct 2022 20:22:28 +0000 Subject: [PATCH 46/72] :recycle: Refactor guardrail for `create_register_func_to_anat` --- .../pipeline/nipype_pipeline_engine/engine.py | 7 +- CPAC/registration/guardrails.py | 2 +- CPAC/registration/registration.py | 109 +++++++++--------- CPAC/registration/utils.py | 22 ++-- 4 files changed, 74 insertions(+), 66 deletions(-) diff --git a/CPAC/pipeline/nipype_pipeline_engine/engine.py b/CPAC/pipeline/nipype_pipeline_engine/engine.py index f2cfc49b45..9f18969a39 100644 --- a/CPAC/pipeline/nipype_pipeline_engine/engine.py +++ 
b/CPAC/pipeline/nipype_pipeline_engine/engine.py @@ -50,8 +50,9 @@ import os import re from copy import deepcopy -from logging import getLogger from inspect import Parameter, Signature, signature +from logging import getLogger +from typing import Iterable, Tuple, Union from nibabel import load from nipype import logging from nipype.interfaces.utility import Function @@ -506,7 +507,9 @@ def _configure_exec_nodes(self, graph): TypeError): self._handle_just_in_time_exception(node) - def connect_retries(self, nodes, connections): + def connect_retries(self, nodes: Iterable['Node'], + connections: Iterable[Tuple['Node', Union[str, tuple], + str]]) -> None: """Method to generalize making the same connections to try and retry nodes. diff --git a/CPAC/registration/guardrails.py b/CPAC/registration/guardrails.py index 42337f7e6a..ae677a25a5 100644 --- a/CPAC/registration/guardrails.py +++ b/CPAC/registration/guardrails.py @@ -16,7 +16,7 @@ # License along with C-PAC. If not, see . """Guardrails to protect against bad registrations""" import logging -from typing import Tuple +from typing import Tuple, Union from copy import deepcopy from nipype.interfaces.ants import Registration from nipype.interfaces.fsl import FLIRT diff --git a/CPAC/registration/registration.py b/CPAC/registration/registration.py index 199ce0518f..241b4bab42 100644 --- a/CPAC/registration/registration.py +++ b/CPAC/registration/registration.py @@ -328,8 +328,8 @@ def create_fsl_flirt_linear_reg(name='fsl_flirt_linear_reg'): linear_reg = pe.Node(interface=fsl.FLIRT(), name='linear_reg_0') linear_reg.inputs.cost = 'corratio' - retry_reg = retry_clone(linear_reg) - nodes, guardrails = nodes_and_guardrails(linear_reg, retry_reg) + nodes, guardrails = nodes_and_guardrails(linear_reg, + retry_clone(linear_reg)) for i, node in enumerate(nodes): linear_register.connect(inputspec, 'reference_brain', guardrails[i], 'reference') @@ -428,11 +428,9 @@ def create_fsl_fnirt_nonlinear_reg(name='fsl_fnirt_nonlinear_reg'): name='nonlinear_reg_1') nonlinear_reg.inputs.fieldcoeff_file = True nonlinear_reg.inputs.jacobian_file = True - retry_reg = retry_clone(nonlinear_reg) - nodes, guardrails = nodes_and_guardrails(nonlinear_reg, retry_reg) - guardrails = [] + nodes, guardrails = nodes_and_guardrails(nonlinear_reg, + retry_clone(nonlinear_reg)) for i, node in enumerate(nodes): - guardrails[i] = registration_guardrail_node(f'{node.name}_guardrail') nonlinear_register.connect(inputspec, 'reference_skull', guardrails[i], 'reference') nonlinear_register.connect(node, 'warped_file', @@ -541,8 +539,8 @@ def create_fsl_fnirt_nonlinear_reg_nhp(name='fsl_fnirt_nonlinear_reg_nhp'): nonlinear_reg.inputs.fieldcoeff_file = True nonlinear_reg.inputs.jacobian_file = True nonlinear_reg.inputs.field_file = True - retry_reg = retry_clone(nonlinear_reg) - nodes, guardrails = nodes_and_guardrails(nonlinear_reg, retry_reg) + nodes, guardrails = nodes_and_guardrails(nonlinear_reg, + retry_clone(nonlinear_reg)) fieldcoeff_file = guardrail_selection(nonlinear_register, *nodes, 'fieldcoeff_file', guardrails[0]) field_file = guardrail_selection(nonlinear_register, *nodes, 'field_file', @@ -656,8 +654,6 @@ def create_register_func_to_anat(config, phase_diff_distcor=False, name='outputspec') linear_reg = pe.Node(interface=fsl.FLIRT(), name='linear_func_to_anat') - guardrail = registration_guardrail_node(f'{name}_guardrail') - linear_reg.inputs.interp = config.registration_workflows[ 'functional_registration']['coregistration']['interpolation'] linear_reg.inputs.cost = 
config.registration_workflows[ @@ -668,29 +664,31 @@ def create_register_func_to_anat(config, phase_diff_distcor=False, 'coregistration']['arguments'] is not None: linear_reg.inputs.args = config.registration_workflows[ 'functional_registration']['coregistration']['arguments'] + nodes, guardrails = nodes_and_guardrails(linear_reg, + retry_clone(linear_reg)) if phase_diff_distcor: - register_func_to_anat.connect( - inputNode_pedir, ('pedir', convert_pedir), - linear_reg, 'pedir') - register_func_to_anat.connect(inputspec, 'fieldmap', - linear_reg, 'fieldmap') - register_func_to_anat.connect(inputspec, 'fieldmapmask', - linear_reg, 'fieldmapmask') - register_func_to_anat.connect(inputNode_echospacing, 'echospacing', - linear_reg, 'echospacing') - - register_func_to_anat.connect(inputspec, 'func', linear_reg, 'in_file') - register_func_to_anat.connect(inputspec, 'anat', linear_reg, 'reference') - register_func_to_anat.connect(inputspec, 'anat', guardrail, 'reference') - register_func_to_anat.connect(inputspec, 'dof', linear_reg, 'dof') - register_func_to_anat.connect(inputspec, 'interp', linear_reg, 'interp') - register_func_to_anat.connect(linear_reg, 'out_matrix_file', - outputspec, 'func_to_anat_linear_xfm_' - 'nobbreg') - register_func_to_anat.connect(linear_reg, 'out_file', - guardrail, 'registered') - register_func_to_anat.connect(guardrail, 'registered', + register_func_to_anat.connect_retries(nodes, [ + (inputNode_pedir, ('pedir', convert_pedir), 'pedir'), + (inputspec, 'fieldmap', 'fieldmap'), + (inputspec, 'fieldmapmask', 'fieldmapmask'), + (inputNode_echospacing, 'echospacing', 'echospacing')]) + + register_func_to_anat.connect_retries(nodes, [ + (inputspec, 'func', 'in_file'), + (inputspec, 'anat', 'reference'), + (inputspec, 'dof', 'dof'), + (inputspec, 'interp', 'interp')]) + register_func_to_anat.connect_retries(guardrails, [ + (inputspec, 'anat', 'reference')]) + select_matrix = guardrail_selection(register_func_to_anat, *nodes, + 'out_matrix_file', guardrails[0]) + register_func_to_anat.connect( + select_matrix, 'out', + outputspec, 'func_to_anat_linear_xfm_nobbreg') + # pylint: disable=no-value-for-parameter + select_reg = guardrail_selection(register_func_to_anat, *guardrails) + register_func_to_anat.connect(select_reg, 'out', outputspec, 'anat_func_nobbreg') return register_func_to_anat @@ -1330,8 +1328,10 @@ def FSL_registration_connector(wf_name, cfg, orig="T1w", opt=None, } if opt == 'FSL': - if cfg.registration_workflows['anatomical_registration']['registration']['FSL-FNIRT']['ref_resolution'] == \ - cfg.registration_workflows['anatomical_registration']['resolution_for_anat']: + if cfg.registration_workflows['anatomical_registration'][ + 'registration']['FSL-FNIRT']['ref_resolution'] == \ + cfg.registration_workflows['anatomical_registration'][ + 'resolution_for_anat']: fnirt_reg_anat_mni = create_fsl_fnirt_nonlinear_reg( f'anat_mni_fnirt_register{symm}' ) @@ -1357,8 +1357,10 @@ def FSL_registration_connector(wf_name, cfg, orig="T1w", opt=None, wf.connect(inputNode, 'fnirt_config', fnirt_reg_anat_mni, 'inputspec.fnirt_config') - if cfg.registration_workflows['anatomical_registration']['registration']['FSL-FNIRT']['ref_resolution'] == \ - cfg.registration_workflows['anatomical_registration']['resolution_for_anat']: + if cfg.registration_workflows['anatomical_registration'][ + 'registration']['FSL-FNIRT']['ref_resolution'] == \ + cfg.registration_workflows['anatomical_registration'][ + 'resolution_for_anat']: # NOTE: this is an UPDATE because of the opt block above 
added_outputs = { f'space-{sym}{tmpl}template_desc-brain_{orig}': ( @@ -1385,7 +1387,7 @@ def FSL_registration_connector(wf_name, cfg, orig="T1w", opt=None, } outputs.update(added_outputs) - return (wf, outputs) + return wf, outputs def ANTs_registration_connector(wf_name, cfg, params, orig="T1w", @@ -1456,7 +1458,7 @@ def ANTs_registration_connector(wf_name, cfg, params, orig="T1w", if orig == 'T1w': if cfg.registration_workflows['anatomical_registration'][ - 'registration']['ANTs']['use_lesion_mask']: + 'registration']['ANTs']['use_lesion_mask']: # Create lesion preproc node to apply afni Refit and Resample lesion_preproc = create_lesion_preproc( wf_name=f'lesion_preproc{symm}' @@ -1866,8 +1868,10 @@ def register_FSL_anat_to_template(wf, cfg, strat_pool, pipe_num, opt=None): node, out = connect wf.connect(node, out, fsl, 'inputspec.input_brain') - if cfg.registration_workflows['anatomical_registration']['registration']['FSL-FNIRT']['ref_resolution'] == \ - cfg.registration_workflows['anatomical_registration']['resolution_for_anat']: + if cfg.registration_workflows['anatomical_registration']['registration'][ + 'FSL-FNIRT']['ref_resolution'] == \ + cfg.registration_workflows['anatomical_registration'][ + 'resolution_for_anat']: node, out = strat_pool.get_data('T1w-brain-template') wf.connect(node, out, fsl, 'inputspec.reference_brain') @@ -2551,13 +2555,15 @@ def coregistration_prep_vol(wf, cfg, strat_pool, pipe_num, opt=None): get_func_volume.inputs.set( expr='a', - single_idx=cfg.registration_workflows['functional_registration']['coregistration'][ - 'func_input_prep']['Selected Functional Volume']['func_reg_input_volume'], + single_idx=cfg.registration_workflows['functional_registration'][ + 'coregistration'][ + 'func_input_prep']['Selected Functional Volume'][ + 'func_reg_input_volume'], outputtype='NIFTI_GZ' ) if not cfg.registration_workflows['functional_registration'][ - 'coregistration']['func_input_prep']['reg_with_skull']: + 'coregistration']['func_input_prep']['reg_with_skull']: node, out = strat_pool.get_data("desc-brain_bold") else: # TODO check which file is functional_skull_leaf @@ -2568,9 +2574,7 @@ def coregistration_prep_vol(wf, cfg, strat_pool, pipe_num, opt=None): coreg_input = (get_func_volume, 'out_file') - outputs = { - 'desc-reginput_bold': coreg_input - } + outputs = {'desc-reginput_bold': coreg_input} return (wf, outputs) @@ -2594,10 +2598,9 @@ def coregistration_prep_mean(wf, cfg, strat_pool, pipe_num, opt=None): 'coregistration']['func_input_prep']['Mean Functional'][ 'n4_correct_func']: n4_correct_func = pe.Node( - interface= - ants.N4BiasFieldCorrection(dimension=3, - copy_header=True, - bspline_fitting_distance=200), + interface=ants.N4BiasFieldCorrection(dimension=3, + copy_header=True, + bspline_fitting_distance=200), shrink_factor=2, name=f'func_mean_n4_corrected_{pipe_num}') n4_correct_func.inputs.args = '-r True' @@ -2607,9 +2610,7 @@ def coregistration_prep_mean(wf, cfg, strat_pool, pipe_num, opt=None): coreg_input = (n4_correct_func, 'output_image') - outputs = { - 'desc-reginput_bold': coreg_input - } + outputs = {'desc-reginput_bold': coreg_input} return (wf, outputs) @@ -2628,9 +2629,7 @@ def coregistration_prep_fmriprep(wf, cfg, strat_pool, pipe_num, opt=None): coreg_input = strat_pool.get_data("desc-ref_bold") - outputs = { - 'desc-reginput_bold': coreg_input - } + outputs = {'desc-reginput_bold': coreg_input} return (wf, outputs) diff --git a/CPAC/registration/utils.py b/CPAC/registration/utils.py index b625895233..62c884989d 100644 --- 
a/CPAC/registration/utils.py +++ b/CPAC/registration/utils.py @@ -446,7 +446,8 @@ def hardcoded_reg(moving_brain, reference_brain, moving_skull, f.write(' '.join(regcmd)) try: - subprocess.check_output(regcmd) + # pylint: disable=unused-variable + retcode = subprocess.check_output(regcmd) # noqa: F841 except BadRegistrationError as bad_registration: raise bad_registration except Exception as e: @@ -551,7 +552,8 @@ def run_ants_apply_warp(moving_image, reference, initial=None, rigid=None, # to be converted to ITK format via c3d_affine_tool cmd = ['c3d_affine_tool', '-ref', anatomical_brain, '-src', moving_image, func_to_anat, '-fsl2ras', '-oitk', 'affine.txt'] - retcode = subprocess.check_output(cmd) + # pylint: disable=unused-variable + retcode = subprocess.check_output(cmd) # noqa: F841 func_to_anat = change_itk_transform_type(os.path.join(os.getcwd(), 'affine.txt')) @@ -597,7 +599,8 @@ def run_ants_apply_warp(moving_image, reference, initial=None, rigid=None, else: cmd.append(os.path.abspath(func_to_anat)) - retcode = subprocess.check_output(cmd) + # pylint: disable=unused-variable + retcode = subprocess.check_output(cmd) # noqa: F841 return out_image @@ -618,16 +621,19 @@ def cpac_ants_apply_nonlinear_inverse_warp(cpac_dir, moving_image, reference, # run_ants_apply_warp() -def run_c3d(reference_file, source_file, transform_file): - +def run_c3d(reference_file, source_file, transform_file, + retry=False, previous_failure=False): + # pylint: disable=redefined-outer-name,reimported import os import subprocess itk_transform = os.path.join(os.getcwd(), 'affine.txt') - cmd = ['c3d_affine_tool', '-ref', reference_file, '-src', - source_file, transform_file, '-fsl2ras', '-oitk', itk_transform] - retcode = subprocess.check_output(cmd) + if (not retry) or previous_failure: + cmd = ['c3d_affine_tool', '-ref', reference_file, '-src', + source_file, transform_file, '-fsl2ras', '-oitk', itk_transform] + # pylint: disable=unused-variable + retcode = subprocess.check_output(cmd) # noqa: F841 return itk_transform From fd5e27d32bb25bcd726073c3f9da1d577fcb5c0d Mon Sep 17 00:00:00 2001 From: Jon Clucas Date: Wed, 26 Oct 2022 16:19:51 +0000 Subject: [PATCH 47/72] :goal_net: Guardrail `acpc_alignment` --- CPAC/anat_preproc/anat_preproc.py | 34 +++++++++++++++++++++---------- 1 file changed, 23 insertions(+), 11 deletions(-) diff --git a/CPAC/anat_preproc/anat_preproc.py b/CPAC/anat_preproc/anat_preproc.py index 72e5bd873e..9cbe430e9e 100644 --- a/CPAC/anat_preproc/anat_preproc.py +++ b/CPAC/anat_preproc/anat_preproc.py @@ -32,7 +32,9 @@ wb_command, \ fslmaths_command, \ VolumeRemoveIslands -from CPAC.registration.guardrails import registration_guardrail_workflow +from CPAC.registration.guardrails import guardrail_selection, \ + nodes_and_guardrails, \ + retry_clone from CPAC.utils.interfaces.fsl import Merge as fslMerge from CPAC.utils.interfaces.function.seg_preproc import \ pick_tissue_from_labels_file_interface @@ -59,8 +61,9 @@ def acpc_alignment(config=None, acpc_target='whole-head', mask=False, name='outputspec') if config.anatomical_preproc['acpc_alignment']['FOV_crop'] == 'robustfov': robust_fov = pe.Node(interface=fsl_utils.RobustFOV(), - name='anat_acpc_1_robustfov') - robust_fov.inputs.brainsize = config.anatomical_preproc['acpc_alignment']['brain_size'] + name='anat_acpc_1_robustfov') + robust_fov.inputs.brainsize = config.anatomical_preproc[ + 'acpc_alignment']['brain_size'] robust_fov.inputs.out_transform = 'fov_xfm.mat' fov, in_file = (robust_fov, 'in_file') @@ -71,8 +74,7 @@ def 
acpc_alignment(config=None, acpc_target='whole-head', mask=False, # robustfov doesn't work on some monkey data. prefer using flirt. # ${FSLDIR}/bin/flirt -in "${Input}" -applyxfm -ref "${Input}" -omat "$WD"/roi2full.mat -out "$WD"/robustroi.nii.gz # adopted from DCAN NHP https://github.com/DCAN-Labs/dcan-macaque-pipeline/blob/master/PreFreeSurfer/scripts/ACPCAlignment.sh#L80-L81 - flirt_fov = pe.Node(interface=fsl.FLIRT(), - name='anat_acpc_1_fov') + flirt_fov = pe.Node(interface=fsl.FLIRT(), name='anat_acpc_1_fov') flirt_fov.inputs.args = '-applyxfm' fov, in_file = (flirt_fov, 'in_file') @@ -105,25 +107,35 @@ def acpc_alignment(config=None, acpc_target='whole-head', mask=False, align.inputs.searchr_x = [30, 30] align.inputs.searchr_y = [30, 30] align.inputs.searchr_z = [30, 30] + align_nodes, align_guardrails = nodes_and_guardrails(align, + retry_clone(align)) - preproc.connect(fov, fov_outfile, align, 'in_file') + preproc.connect_retries(align_nodes, [(fov, fov_outfile, 'in_file')]) # align head-to-head to get acpc.mat (for human) if acpc_target == 'whole-head': - preproc.connect(inputnode, 'template_head_for_acpc', align, - 'reference') + preproc.connect_retries( + align_nodes, [(inputnode, 'template_head_for_acpc', 'reference')]) + preproc.connect_retries( + align_guardrails, [(inputnode, 'template_head_for_acpc', + 'reference')]) # align brain-to-brain to get acpc.mat (for monkey) if acpc_target == 'brain': - preproc.connect(inputnode, 'template_brain_for_acpc', align, - 'reference') + preproc.connect_retries( + align_nodes, [(inputnode, 'template_brain_for_acpc', 'reference')]) + preproc.connect_retries( + align_guardrails, [(inputnode, 'template_brain_for_acpc', + 'reference')]) concat_xfm = pe.Node(interface=fsl_utils.ConvertXFM(), name='anat_acpc_4_concatxfm') concat_xfm.inputs.concat_xfm = True preproc.connect(convert_fov_xfm, 'out_file', concat_xfm, 'in_file') - preproc.connect(align, 'out_matrix_file', concat_xfm, 'in_file2') + select_align = guardrail_selection(preproc, *align_nodes, + 'out_matrix_file', align_guardrails[0]) + preproc.connect(select_align, 'out', concat_xfm, 'in_file2') aff_to_rig_imports = ['import os', 'from numpy import *'] aff_to_rig = pe.Node(util.Function(input_names=['in_xfm', 'out_name'], From 162cf6a41d9a26724ad88c6f3159ce9812437862 Mon Sep 17 00:00:00 2001 From: Jon Clucas Date: Wed, 26 Oct 2022 17:30:26 +0000 Subject: [PATCH 48/72] :art: Simplify calls to `nodes_and_guardrails` --- .../pipeline/nipype_pipeline_engine/engine.py | 27 +++++++++++++ CPAC/registration/guardrails.py | 6 ++- CPAC/registration/registration.py | 39 +++++++------------ 3 files changed, 45 insertions(+), 27 deletions(-) diff --git a/CPAC/pipeline/nipype_pipeline_engine/engine.py b/CPAC/pipeline/nipype_pipeline_engine/engine.py index 9f18969a39..c74dd86c45 100644 --- a/CPAC/pipeline/nipype_pipeline_engine/engine.py +++ b/CPAC/pipeline/nipype_pipeline_engine/engine.py @@ -663,6 +663,33 @@ def _handle_just_in_time_exception(self, node): # TODO: handle S3 files node._apply_mem_x(UNDEFINED_SIZE) # noqa: W0212 + def nodes_and_guardrails(self, *nodes, registered, add_clones=True): + """Returns a two tuples of Nodes: (try, retry) and their + respective guardrails + + Parameters + ---------- + nodes : any number of Nodes + + Returns + ------- + nodes : tuple of Nodes + + guardrails : tuple of Nodes + """ + from CPAC.registration.guardrails import registration_guardrail_node, \ + retry_clone + nodes = list(nodes) + if add_clones is True: + retries = [retry_clone(node) for node in nodes] 
+ nodes.extend(retries) + guardrails = [None] * len(nodes) + for i, node in enumerate(nodes): + guardrails[i] = registration_guardrail_node( + f'guardrail_{node.name}', i) + self.connect(node, registered, guardrails[i], 'registered') + return tuple(nodes), tuple(guardrails) + def write_graph( self, dotfilename="graph.dot", diff --git a/CPAC/registration/guardrails.py b/CPAC/registration/guardrails.py index ae677a25a5..cb975bed8f 100644 --- a/CPAC/registration/guardrails.py +++ b/CPAC/registration/guardrails.py @@ -134,7 +134,7 @@ def registration_guardrail(registered: str, reference: str, return registered, failed_qc -def nodes_and_guardrails(*nodes): +def nodes_and_guardrails(*nodes, add_clones=True): """Returns a two tuples of Nodes: (try, retry) and their respective guardrails @@ -148,6 +148,10 @@ def nodes_and_guardrails(*nodes): guardrails : tuple of Nodes """ + nodes = list(nodes) + if add_clones is True: + retries = [retry_clone(node) for node in nodes] + nodes.extend(retries) guardrails = [None] * len(nodes) for i, node in enumerate(nodes): guardrails[i] = registration_guardrail_node(f'guardrail_{node.name}', diff --git a/CPAC/registration/registration.py b/CPAC/registration/registration.py index 241b4bab42..07add09990 100644 --- a/CPAC/registration/registration.py +++ b/CPAC/registration/registration.py @@ -328,8 +328,7 @@ def create_fsl_flirt_linear_reg(name='fsl_flirt_linear_reg'): linear_reg = pe.Node(interface=fsl.FLIRT(), name='linear_reg_0') linear_reg.inputs.cost = 'corratio' - nodes, guardrails = nodes_and_guardrails(linear_reg, - retry_clone(linear_reg)) + nodes, guardrails = nodes_and_guardrails(linear_reg) for i, node in enumerate(nodes): linear_register.connect(inputspec, 'reference_brain', guardrails[i], 'reference') @@ -428,8 +427,7 @@ def create_fsl_fnirt_nonlinear_reg(name='fsl_fnirt_nonlinear_reg'): name='nonlinear_reg_1') nonlinear_reg.inputs.fieldcoeff_file = True nonlinear_reg.inputs.jacobian_file = True - nodes, guardrails = nodes_and_guardrails(nonlinear_reg, - retry_clone(nonlinear_reg)) + nodes, guardrails = nodes_and_guardrails(nonlinear_reg) for i, node in enumerate(nodes): nonlinear_register.connect(inputspec, 'reference_skull', guardrails[i], 'reference') @@ -539,8 +537,7 @@ def create_fsl_fnirt_nonlinear_reg_nhp(name='fsl_fnirt_nonlinear_reg_nhp'): nonlinear_reg.inputs.fieldcoeff_file = True nonlinear_reg.inputs.jacobian_file = True nonlinear_reg.inputs.field_file = True - nodes, guardrails = nodes_and_guardrails(nonlinear_reg, - retry_clone(nonlinear_reg)) + nodes, guardrails = nodes_and_guardrails(nonlinear_reg) fieldcoeff_file = guardrail_selection(nonlinear_register, *nodes, 'fieldcoeff_file', guardrails[0]) field_file = guardrail_selection(nonlinear_register, *nodes, 'field_file', @@ -664,8 +661,7 @@ def create_register_func_to_anat(config, phase_diff_distcor=False, 'coregistration']['arguments'] is not None: linear_reg.inputs.args = config.registration_workflows[ 'functional_registration']['coregistration']['arguments'] - nodes, guardrails = nodes_and_guardrails(linear_reg, - retry_clone(linear_reg)) + nodes, guardrails = nodes_and_guardrails(linear_reg) if phase_diff_distcor: register_func_to_anat.connect_retries(nodes, [ @@ -753,9 +749,7 @@ def create_register_func_to_anat_use_T2(name='register_func_to_anat_use_T2'): linear_reg_func_to_t2.inputs.searchr_x = [30, 30] linear_reg_func_to_t2.inputs.searchr_y = [30, 30] linear_reg_func_to_t2.inputs.searchr_z = [30, 30] - retry_linear_reg_func_to_t2 = retry_clone(linear_reg_func_to_t2) - nodes, 
guardrails = nodes_and_guardrails(linear_reg_func_to_t2, - retry_linear_reg_func_to_t2) + nodes, guardrails = nodes_and_guardrails(linear_reg_func_to_t2) for i, node in enumerate(nodes): register_func_to_anat_use_T2.connect(inputspec, 'func', node, 'in_file') @@ -766,8 +760,7 @@ def create_register_func_to_anat_use_T2(name='register_func_to_anat_use_T2'): register_func_to_anat_use_T2.connect(inputspec, 'T2_head', guardrails[i], 'reference') linear_reg_func_to_t2_matrix = guardrail_selection( - register_func_to_anat_use_T2, linear_reg_func_to_t2, - retry_linear_reg_func_to_t2, 'out_matrix_file', guardrails[0]) + register_func_to_anat_use_T2, *nodes, 'out_matrix_file', guardrails[0]) # ${FSLDIR}/bin/convert_xfm -omat "$fMRIFolder"/T2w2Scout.mat -inverse "$fMRIFolder"/Scout2T2w.mat invt = pe.Node(interface=fsl.ConvertXFM(), name='convert_xfm') @@ -816,9 +809,7 @@ def create_register_func_to_anat_use_T2(name='register_func_to_anat_use_T2'): linear_reg_func_to_t1.inputs.searchr_x = [30, 30] linear_reg_func_to_t1.inputs.searchr_y = [30, 30] linear_reg_func_to_t1.inputs.searchr_z = [30, 30] - retry_linear_reg_func_to_t1 = retry_clone(linear_reg_func_to_t1) - nodes, guardrails = nodes_and_guardrails(linear_reg_func_to_t1, - retry_linear_reg_func_to_t1) + nodes, guardrails = nodes_and_guardrails(linear_reg_func_to_t1) for i, node in enumerate(nodes): register_func_to_anat_use_T2.connect(func_brain, 'out_file', node, 'in_file') @@ -832,8 +823,7 @@ def create_register_func_to_anat_use_T2(name='register_func_to_anat_use_T2'): select_linear_reg_func_to_t1 = guardrail_selection( register_func_to_anat_use_T2, *guardrails) linear_reg_func_to_t1_matrix = guardrail_selection( - register_func_to_anat_use_T2, linear_reg_func_to_t1, - retry_linear_reg_func_to_t1, 'out_matrix_file', guardrails[0]) + register_func_to_anat_use_T2, *nodes, 'out_matrix_file', guardrails[0]) # #taking out warpfield as it is not being made without a fieldmap. 
# ${FSLDIR}/bin/convertwarp --relout --rel -r ${T1wFolder}/${T2wRestoreImage} --postmat=${fMRIFolder}/${ScoutName}_gdc2T1w_init.mat -o ${fMRIFolder}/${ScoutName}_gdc2T1w_init_warp @@ -941,12 +931,8 @@ def bbreg_args(bbreg_target): bbreg_func_to_anat = pe.Node(interface=fsl.FLIRT(), name='bbreg_func_to_anat') bbreg_func_to_anat.inputs.dof = 6 - nodes, guardrails = nodes_and_guardrails(bbreg_func_to_anat) - if retry: - retry_nodes, retry_guardrails = nodes_and_guardrails( - retry_clone(bbreg_func_to_anat)) - nodes += retry_nodes - guardrails += retry_guardrails + nodes, guardrails = nodes_and_guardrails(bbreg_func_to_anat, + add_clones=bool(retry)) register_bbregister_func_to_anat.connect_retries(nodes, [ (inputspec, 'bbr_schedule', 'schedule'), (wm_bb_mask, ('out_file', bbreg_args), 'args'), @@ -1160,10 +1146,11 @@ def create_wf_calculate_ants_warp(name='create_wf_calculate_ants_warp', 'retry_docstring']), name='retry_calc_ants_warp', mem_gb=2.8, mem_x=(2e-7, 'moving_brain', 'xyz'))) - nodes, guardrails = nodes_and_guardrails(calculate_ants_warp, - retry_calculate_ants_warp) calculate_ants_warp.interface.num_threads = num_threads retry_calculate_ants_warp.interface.num_threads = num_threads + nodes, guardrails = nodes_and_guardrails(calculate_ants_warp, + retry_calculate_ants_warp, + add_clones=False) select_forward_initial = pe.Node(util.Function( input_names=['warp_list', 'selection'], From c0e67c811a60c14d33b9713709eca4f58adae4aa Mon Sep 17 00:00:00 2001 From: Jon Clucas Date: Wed, 26 Oct 2022 19:36:26 +0000 Subject: [PATCH 49/72] :recycle: Semi-abstract connecting guardrailed nodes --- CPAC/anat_preproc/anat_preproc.py | 33 ++++++------ CPAC/registration/guardrails.py | 25 --------- CPAC/registration/registration.py | 90 +++++++++++++------------------ 3 files changed, 52 insertions(+), 96 deletions(-) diff --git a/CPAC/anat_preproc/anat_preproc.py b/CPAC/anat_preproc/anat_preproc.py index 9cbe430e9e..8038f2dc8e 100644 --- a/CPAC/anat_preproc/anat_preproc.py +++ b/CPAC/anat_preproc/anat_preproc.py @@ -32,9 +32,7 @@ wb_command, \ fslmaths_command, \ VolumeRemoveIslands -from CPAC.registration.guardrails import guardrail_selection, \ - nodes_and_guardrails, \ - retry_clone +from CPAC.registration.guardrails import guardrail_selection from CPAC.utils.interfaces.fsl import Merge as fslMerge from CPAC.utils.interfaces.function.seg_preproc import \ pick_tissue_from_labels_file_interface @@ -69,7 +67,7 @@ def acpc_alignment(config=None, acpc_target='whole-head', mask=False, fov, in_file = (robust_fov, 'in_file') fov, fov_mtx = (robust_fov, 'out_transform') fov, fov_outfile = (robust_fov, 'out_roi') - + elif config.anatomical_preproc['acpc_alignment']['FOV_crop'] == 'flirt': # robustfov doesn't work on some monkey data. prefer using flirt. 
# ${FSLDIR}/bin/flirt -in "${Input}" -applyxfm -ref "${Input}" -omat "$WD"/roi2full.mat -out "$WD"/robustroi.nii.gz @@ -107,9 +105,8 @@ def acpc_alignment(config=None, acpc_target='whole-head', mask=False, align.inputs.searchr_x = [30, 30] align.inputs.searchr_y = [30, 30] align.inputs.searchr_z = [30, 30] - align_nodes, align_guardrails = nodes_and_guardrails(align, - retry_clone(align)) - + align_nodes, align_guardrails = preproc.nodes_and_guardrails( + align, registered=fov_outfile) preproc.connect_retries(align_nodes, [(fov, fov_outfile, 'in_file')]) # align head-to-head to get acpc.mat (for human) @@ -188,7 +185,7 @@ def acpc_alignment(config=None, acpc_target='whole-head', mask=False, def T2wToT1wReg(wf_name='T2w_to_T1w_reg'): - + # Adapted from DCAN lab # https://github.com/DCAN-Labs/dcan-macaque-pipeline/blob/master/PreFreeSurfer/scripts/T2wToT1wReg.sh @@ -201,16 +198,17 @@ def T2wToT1wReg(wf_name='T2w_to_T1w_reg'): name='inputspec') outputnode = pe.Node(util.IdentityInterface(fields=['T2w_to_T1w']), - name='outputspec') + name='outputspec') # ${FSLDIR}/bin/epi_reg --epi="$T2wImageBrain" --t1="$T1wImage" --t1brain="$WD"/"$T1wImageBrainFile" --out="$WD"/T2w2T1w - T2w2T1w_reg = pe.Node(interface=fsl.EpiReg(), - name='T2w2T1w_reg') + T2w2T1w_reg = pe.Node(interface=fsl.EpiReg(), name='T2w2T1w_reg') T2w2T1w_reg.inputs.out_base = 'T2w2T1w' - - preproc.connect(inputnode, 'T2w_brain', T2w2T1w_reg ,'epi') - preproc.connect(inputnode, 'T1w', T2w2T1w_reg ,'t1_head') - preproc.connect(inputnode, 'T1w_brain', T2w2T1w_reg ,'t1_brain') + reg_nodes, reg_guardrails = preproc.nodes_and_guardrails( + T2w2T1w_reg, registered='out_file') + preproc.connect_retries(reg_nodes, [(inputnode, 'T2w_brain', 'epi'), + (inputnode, 'T1w', 't1_head'), + (inputnode, 'T1w_brain', 't1_brain')]) + preproc.connect_retries(reg_guardrails, [(inputnode, 'T1w', 'reference')]) # ${FSLDIR}/bin/applywarp --rel --interp=spline --in="$T2wImage" --ref="$T1wImage" --premat="$WD"/T2w2T1w.mat --out="$WD"/T2w2T1w T2w2T1w = pe.Node(interface=fsl.ApplyWarp(), @@ -223,9 +221,8 @@ def T2wToT1wReg(wf_name='T2w_to_T1w_reg'): preproc.connect(T2w2T1w_reg, 'epi2str_mat', T2w2T1w, 'premat') # ${FSLDIR}/bin/fslmaths "$WD"/T2w2T1w -add 1 "$WD"/T2w2T1w -odt float - T2w2T1w_final = pe.Node(interface=fsl.ImageMaths(), - name='T2w2T1w_final') - T2w2T1w_final.inputs.op_string = "-add 1" + T2w2T1w_final = pe.Node(interface=fsl.ImageMaths(), name='T2w2T1w_final') + T2w2T1w_final.inputs.op_string = "-add 1" preproc.connect(T2w2T1w, 'out_file', T2w2T1w_final, 'in_file') preproc.connect(T2w2T1w_final, 'out_file', outputnode, 'T2w_to_T1w') diff --git a/CPAC/registration/guardrails.py b/CPAC/registration/guardrails.py index cb975bed8f..5839a2c8d9 100644 --- a/CPAC/registration/guardrails.py +++ b/CPAC/registration/guardrails.py @@ -134,31 +134,6 @@ def registration_guardrail(registered: str, reference: str, return registered, failed_qc -def nodes_and_guardrails(*nodes, add_clones=True): - """Returns a two tuples of Nodes: (try, retry) and their - respective guardrails - - Parameters - ---------- - nodes : any number of Nodes - - Returns - ------- - nodes : tuple of Nodes - - guardrails : tuple of Nodes - """ - nodes = list(nodes) - if add_clones is True: - retries = [retry_clone(node) for node in nodes] - nodes.extend(retries) - guardrails = [None] * len(nodes) - for i, node in enumerate(nodes): - guardrails[i] = registration_guardrail_node(f'guardrail_{node.name}', - i) - return tuple(nodes), tuple(guardrails) - - def 
registration_guardrail_node(name=None, retry_num=0): """Convenience method to get a new registration_guardrail Node diff --git a/CPAC/registration/registration.py b/CPAC/registration/registration.py index 07add09990..42394a82ae 100644 --- a/CPAC/registration/registration.py +++ b/CPAC/registration/registration.py @@ -24,9 +24,7 @@ from CPAC.func_preproc.utils import chunk_ts, split_ts_chunks from CPAC.pipeline.random_state.seed import increment_seed from CPAC.registration.guardrails import guardrail_selection, \ - nodes_and_guardrails, \ - registration_guardrail_node, \ - retry_clone + registration_guardrail_node from CPAC.registration.utils import seperate_warps_list, \ check_transforms, \ generate_inverse_transform_flags, \ @@ -328,11 +326,10 @@ def create_fsl_flirt_linear_reg(name='fsl_flirt_linear_reg'): linear_reg = pe.Node(interface=fsl.FLIRT(), name='linear_reg_0') linear_reg.inputs.cost = 'corratio' - nodes, guardrails = nodes_and_guardrails(linear_reg) - for i, node in enumerate(nodes): - linear_register.connect(inputspec, 'reference_brain', - guardrails[i], 'reference') - linear_register.connect(node, 'out_file', guardrails[i], 'registered') + nodes, guardrails = linear_register.nodes_and_guardrails( + linear_reg, registered='out_file') + linear_register.connect_retries( + guardrails, [(inputspec, 'reference_brain', 'reference')]) inv_flirt_xfm = pe.Node(interface=fsl.utils.ConvertXFM(), name='inv_linear_reg0_xfm') @@ -427,12 +424,10 @@ def create_fsl_fnirt_nonlinear_reg(name='fsl_fnirt_nonlinear_reg'): name='nonlinear_reg_1') nonlinear_reg.inputs.fieldcoeff_file = True nonlinear_reg.inputs.jacobian_file = True - nodes, guardrails = nodes_and_guardrails(nonlinear_reg) - for i, node in enumerate(nodes): - nonlinear_register.connect(inputspec, 'reference_skull', - guardrails[i], 'reference') - nonlinear_register.connect(node, 'warped_file', - guardrails[i], 'registered') + nodes, guardrails = nonlinear_register.nodes_and_guardrails( + nonlinear_reg, registered='warped_file') + nonlinear_register.connect_retries( + guardrails, [(inputspec, 'reference_skull', 'reference')]) nonlinear_register.connect_retries(nodes, [ (inputspec, 'input_skull', 'in_file'), (inputspec, 'reference_skull', 'ref_file'), @@ -440,8 +435,7 @@ def create_fsl_fnirt_nonlinear_reg(name='fsl_fnirt_nonlinear_reg'): # FNIRT parameters are specified by FSL config file # ${FSLDIR}/etc/flirtsch/TI_2_MNI152_2mm.cnf (or user-specified) (inputspec, 'fnirt_config', 'config_file'), - (inputspec, 'linear_aff', 'affine_file') - ]) + (inputspec, 'linear_aff', 'affine_file')]) brain_warp = pe.Node(interface=fsl.ApplyWarp(), name='brain_warp') nonlinear_register.connect([ @@ -537,7 +531,10 @@ def create_fsl_fnirt_nonlinear_reg_nhp(name='fsl_fnirt_nonlinear_reg_nhp'): nonlinear_reg.inputs.fieldcoeff_file = True nonlinear_reg.inputs.jacobian_file = True nonlinear_reg.inputs.field_file = True - nodes, guardrails = nodes_and_guardrails(nonlinear_reg) + nodes, guardrails = nonlinear_register.nodes_and_guardrails( + nonlinear_reg, registered='warped_file') + nonlinear_register.connect_retries( + guardrails, [(inputspec, 'reference_skull', 'reference')]) fieldcoeff_file = guardrail_selection(nonlinear_register, *nodes, 'fieldcoeff_file', guardrails[0]) field_file = guardrail_selection(nonlinear_register, *nodes, 'field_file', @@ -661,7 +658,8 @@ def create_register_func_to_anat(config, phase_diff_distcor=False, 'coregistration']['arguments'] is not None: linear_reg.inputs.args = config.registration_workflows[ 
'functional_registration']['coregistration']['arguments'] - nodes, guardrails = nodes_and_guardrails(linear_reg) + nodes, guardrails = register_func_to_anat.nodes_and_guardrails( + linear_reg, registered='out_file') if phase_diff_distcor: register_func_to_anat.connect_retries(nodes, [ @@ -749,16 +747,12 @@ def create_register_func_to_anat_use_T2(name='register_func_to_anat_use_T2'): linear_reg_func_to_t2.inputs.searchr_x = [30, 30] linear_reg_func_to_t2.inputs.searchr_y = [30, 30] linear_reg_func_to_t2.inputs.searchr_z = [30, 30] - nodes, guardrails = nodes_and_guardrails(linear_reg_func_to_t2) - for i, node in enumerate(nodes): - register_func_to_anat_use_T2.connect(inputspec, 'func', - node, 'in_file') - register_func_to_anat_use_T2.connect(inputspec, 'T2_head', - node, 'reference') - register_func_to_anat_use_T2.connect(node, 'out_file', - guardrails[i], 'registered') - register_func_to_anat_use_T2.connect(inputspec, 'T2_head', - guardrails[i], 'reference') + nodes, guardrails = register_func_to_anat_use_T2.nodes_and_guardrails( + linear_reg_func_to_t2, registered='out_file') + register_func_to_anat_use_T2.connect_retries(nodes, [ + (inputspec, 'func', 'in_file'), (inputspec, 'T2_head', 'reference')]) + register_func_to_anat_use_T2.connect_retries(guardrails, [ + (inputspec, 'T2_head', 'reference')]) linear_reg_func_to_t2_matrix = guardrail_selection( register_func_to_anat_use_T2, *nodes, 'out_matrix_file', guardrails[0]) @@ -809,16 +803,13 @@ def create_register_func_to_anat_use_T2(name='register_func_to_anat_use_T2'): linear_reg_func_to_t1.inputs.searchr_x = [30, 30] linear_reg_func_to_t1.inputs.searchr_y = [30, 30] linear_reg_func_to_t1.inputs.searchr_z = [30, 30] - nodes, guardrails = nodes_and_guardrails(linear_reg_func_to_t1) - for i, node in enumerate(nodes): - register_func_to_anat_use_T2.connect(func_brain, 'out_file', - node, 'in_file') - register_func_to_anat_use_T2.connect(inputspec, 'T1_brain', - node, 'reference') - register_func_to_anat_use_T2.connect(node, 'out_file', - guardrails[i], 'registered') - register_func_to_anat_use_T2.connect(inputspec, 'T1_brain', - guardrails[i], 'reference') + nodes, guardrails = register_func_to_anat_use_T2.nodes_and_guardrails( + linear_reg_func_to_t1, registered='out_file') + register_func_to_anat_use_T2.connect_retries(nodes, [ + (func_brain, 'out_file', 'in_file'), + (inputspec, 'T1_brain', 'reference')]) + register_func_to_anat_use_T2.connect_retries(guardrails, [ + (inputspec, 'T1_brain', 'reference')]) # pylint: disable=no-value-for-parameter select_linear_reg_func_to_t1 = guardrail_selection( register_func_to_anat_use_T2, *guardrails) @@ -931,8 +922,8 @@ def bbreg_args(bbreg_target): bbreg_func_to_anat = pe.Node(interface=fsl.FLIRT(), name='bbreg_func_to_anat') bbreg_func_to_anat.inputs.dof = 6 - nodes, guardrails = nodes_and_guardrails(bbreg_func_to_anat, - add_clones=bool(retry)) + nodes, guardrails = register_bbregister_func_to_anat.nodes_and_guardrails( + bbreg_func_to_anat, registered='out_file', add_clones=bool(retry)) register_bbregister_func_to_anat.connect_retries(nodes, [ (inputspec, 'bbr_schedule', 'schedule'), (wm_bb_mask, ('out_file', bbreg_args), 'args'), @@ -945,11 +936,8 @@ def bbreg_args(bbreg_target): (inputspec, 'fieldmap', 'fieldmap'), (inputspec, 'fieldmapmask', 'fieldmapmask'), (inputNode_echospacing, 'echospacing', 'echospacing')]) - for i, node in enumerate(nodes): - register_bbregister_func_to_anat.connect(inputspec, 'anat', - guardrails[i], 'reference') - register_bbregister_func_to_anat.connect(node, 
'out_file', - guardrails[i], 'registered') + register_bbregister_func_to_anat.connect_retries(guardrails, [ + (inputspec, 'anat', 'reference')]) if retry: # pylint: disable=no-value-for-parameter outfile = guardrail_selection(register_bbregister_func_to_anat, @@ -1148,9 +1136,9 @@ def create_wf_calculate_ants_warp(name='create_wf_calculate_ants_warp', mem_x=(2e-7, 'moving_brain', 'xyz'))) calculate_ants_warp.interface.num_threads = num_threads retry_calculate_ants_warp.interface.num_threads = num_threads - nodes, guardrails = nodes_and_guardrails(calculate_ants_warp, - retry_calculate_ants_warp, - add_clones=False) + nodes, guardrails = calc_ants_warp_wf.nodes_and_guardrails( + calculate_ants_warp, retry_calculate_ants_warp, + registered='warped_image', add_clones=False) select_forward_initial = pe.Node(util.Function( input_names=['warp_list', 'selection'], @@ -1208,10 +1196,6 @@ def create_wf_calculate_ants_warp(name='create_wf_calculate_ants_warp', (inputspec, 'ants_para', 'ants_para'), (inputspec, 'interp', 'interp')]) # inter-workflow connections - calc_ants_warp_wf.connect(calculate_ants_warp, 'warped_image', - guardrails[0], 'registered') - calc_ants_warp_wf.connect(retry_calculate_ants_warp, 'warped_image', - guardrails[1], 'registered') # pylint: disable=no-value-for-parameter select = guardrail_selection(calc_ants_warp_wf, *guardrails) warp_list = guardrail_selection(calc_ants_warp_wf, *nodes, 'warp_list', @@ -2747,7 +2731,7 @@ def coregistration(wf, cfg, strat_pool, pipe_num, opt=None): 'bbr_wm_mask_args'] if fallback: bbreg_guardrail = registration_guardrail_node( - f'bbreg{bbreg_status}_guardrail_{pipe_num}') + f'bbreg{bbreg_status}_guardrail_{pipe_num}', 1) node, out = strat_pool.get_data('desc-reginput_bold') wf.connect(node, out, func_to_anat_bbreg, 'inputspec.func') From a398b1835980fec95d296adddf261dea755e93a6 Mon Sep 17 00:00:00 2001 From: Jon Clucas Date: Wed, 26 Oct 2022 21:35:07 +0000 Subject: [PATCH 50/72] :goal_net: Guardrail `unet_brain_connector` --- CPAC/anat_preproc/anat_preproc.py | 100 ++++++++++++++++-------------- 1 file changed, 54 insertions(+), 46 deletions(-) diff --git a/CPAC/anat_preproc/anat_preproc.py b/CPAC/anat_preproc/anat_preproc.py index 8038f2dc8e..cd6ff903e0 100644 --- a/CPAC/anat_preproc/anat_preproc.py +++ b/CPAC/anat_preproc/anat_preproc.py @@ -817,41 +817,49 @@ def unet_brain_connector(wf, cfg, strat_pool, pipe_num, opt): f'brain_{pipe_num}') native_brain_to_template_brain.inputs.dof = 6 native_brain_to_template_brain.inputs.interp = 'sinc' - wf.connect(unet_masked_brain, 'out_file', - native_brain_to_template_brain, 'in_file') - guardrail_native_brain_to_template_brain = registration_guardrail_workflow( - native_brain_to_template_brain) + brain_nodes, brain_guardrails = wf.nodes_and_guardrails( + native_brain_to_template_brain, registered='out_file') node, out = strat_pool.get_data('T1w-brain-template') - wf.connect(node, out, native_brain_to_template_brain, 'reference') + wf.connect_retries(brain_nodes, [ + (unet_masked_brain, 'out_file', 'in_file'), + (node, out, 'reference')]) + wf.connect_retries(brain_guardrails, [(node, out, 'reference')]) # flirt -in head.nii.gz -ref NMT_0.5mm.nii.gz -o head_rot2atl -applyxfm -init brain_rot2atl.mat native_head_to_template_head = pe.Node(interface=fsl.FLIRT(), name=f'native_head_to_template_' f'head_{pipe_num}') native_head_to_template_head.inputs.apply_xfm = True - guardrail_native_head_to_template_head = registration_guardrail_workflow( - native_head_to_template_head) + + head_nodes, 
head_guardrails = wf.nodes_and_guardrails( + native_head_to_template_head, registered='out_file') + select_head = guardrail_selection(wf, *head_guardrails) if strat_pool.check_rpool('desc-preproc_T1w') or \ strat_pool.check_rpool('desc-reorient_T1w') or \ strat_pool.check_rpool('T1w'): - node, out = strat_pool.get_data(['desc-preproc_T1w', 'desc-reorient_T1w','T1w']) - wf.connect(node, out, native_head_to_template_head, 'in_file') - + node, out = strat_pool.get_data( + ['desc-preproc_T1w', 'desc-reorient_T1w','T1w']) + wf.connect_retries(head_nodes, [(node, out, 'in_file')]) + elif strat_pool.check_rpool('desc-preproc_T2w') or \ strat_pool.check_rpool('desc-reorient_T2w') or \ strat_pool.check_rpool('T2w'): node, out = strat_pool.get_data([ 'desc-preproc_T2w', 'desc-reorient_T2w', 'T2w']) - wf.connect(node, out, native_head_to_template_head, 'in_file') + wf.connect_retries(head_nodes, [(node, out, 'in_file')]) - wf.connect(guardrail_native_brain_to_template_brain, - 'outputspec.out_matrix_file', + select_template_brain_matrix = guardrail_selection(wf, *brain_nodes, + 'out_matrix_file', + brain_guardrails[0]) + + wf.connect(select_template_brain_matrix, 'out', native_head_to_template_head, 'in_matrix_file') node, out = strat_pool.get_data('T1w-template') - wf.connect(node, out, native_head_to_template_head, 'reference') + wf.connect_retries(head_nodes, [(node, out, 'reference')]) + wf.connect_retries(head_guardrails, [(node, out, 'reference')]) # fslmaths NMT_SS_0.5mm.nii.gz -bin templateMask.nii.gz template_brain_mask = pe.Node(interface=fsl.maths.MathsCommand(), @@ -875,67 +883,67 @@ def unet_brain_connector(wf, cfg, strat_pool, pipe_num, opt): ants_template_head_to_template.inputs.smoothing_sigmas = [[0.6, 0.2, 0.0]] ants_template_head_to_template.inputs.shrink_factors = [[4, 2, 1]] ants_template_head_to_template.inputs.convergence_threshold = [1.e-8] - guardrail_ants_template_head_to_template = registration_guardrail_workflow( - ants_template_head_to_template, retry=True) - wf.connect(guardrail_native_head_to_template_head, 'outputspec.out_file', - ants_template_head_to_template, 'fixed_image') + athtt_nodes, athtt_guardrails = wf.nodes_and_guardrails( + ants_template_head_to_template, registered='warped_image') + select_ants_template_head_to_template = guardrail_selection( + wf, *athtt_nodes, 'forward_transforms', athtt_guardrails[0]) node, out = strat_pool.get_data('T1w-brain-template') - wf.connect(node, out, ants_template_head_to_template, 'moving_image') + wf.connect_retries(athtt_nodes, [ + (select_head, 'out', 'fixed_image'), + (node, out, 'moving_image')]) + wf.connect_retries(athtt_guardrails, [(select_head, 'out', 'reference')]) - # antsApplyTransforms -d 3 -i templateMask.nii.gz -t atl2T1rotWarp.nii.gz atl2T1rotAffine.txt -r brain_rot2atl.nii.gz -o brain_rot2atl_mask.nii.gz + node, out = strat_pool.get_data('T1w-brain-template') + wf.connect_retries(athtt_nodes, [(node, out, 'moving_image')]) + + # antsApplyTransforms -d 3 -i templateMask.nii.gz -t atl2T1rotWarp.nii.gz atl2T1rotAffine.txt -r brain_rot2atl.nii.gz -o brain_rot2atl_mask.nii.gz template_head_transform_to_template = pe.Node( interface=ants.ApplyTransforms(), name=f'template_head_transform_to_template_{pipe_num}') template_head_transform_to_template.inputs.dimension = 3 - guardrail_template_head_transform_to_template = ( - registration_guardrail_workflow(template_head_transform_to_template)) wf.connect(template_brain_mask, 'out_file', template_head_transform_to_template, 'input_image') - 
wf.connect(guardrail_native_brain_to_template_brain, 'outputspec.out_file', + wf.connect(native_brain_to_template_brain, 'out_file', template_head_transform_to_template, 'reference_image') - wf.connect(guardrail_ants_template_head_to_template, - 'outputspec.forward_transforms', + wf.connect(select_ants_template_head_to_template, 'out', template_head_transform_to_template, 'transforms') - # convert_xfm -omat brain_rot2native.mat -inverse brain_rot2atl.mat  + # convert_xfm -omat brain_rot2native.mat -inverse brain_rot2atl.mat invt = pe.Node(interface=fsl.ConvertXFM(), name='convert_xfm') invt.inputs.invert_xfm = True - wf.connect(guardrail_native_brain_to_template_brain, - 'outputspec.out_matrix_file', + wf.connect(native_brain_to_template_brain, 'out_matrix_file', invt, 'in_file') # flirt -in brain_rot2atl_mask.nii.gz -ref brain.nii.gz -o brain_mask.nii.gz -applyxfm -init brain_rot2native.mat template_brain_to_native_brain = pe.Node(interface=fsl.FLIRT(), - name=f'template_brain_to_native_' + name='template_brain_to_native_' f'brain_{pipe_num}') template_brain_to_native_brain.inputs.apply_xfm = True - guardrail_template_brain_to_native_brain = registration_guardrail_workflow( - template_brain_to_native_brain) - wf.connect(guardrail_template_head_transform_to_template, - 'outputspec.output_image', - template_brain_to_native_brain, 'in_file') - wf.connect(unet_masked_brain, 'out_file', template_brain_to_native_brain, - 'reference') - wf.connect(invt, 'out_file', template_brain_to_native_brain, - 'in_matrix_file') + tbtnb_nodes, tbtnb_guardrails = wf.nodes_and_guardrails( + template_brain_to_native_brain, registered='out_file') + wf.connect_retries(tbtnb_nodes, [ + (template_head_transform_to_template, 'output_image', 'in_file'), + (unet_masked_brain, 'out_file', 'reference'), + (invt, 'out_file', 'in_matrix_file')]) + wf.connect_retries(tbtnb_guardrails, [ + (unet_masked_brain, 'out_file', 'reference')]) + select_template_brain_to_native_brain = guardrail_selection( + wf, *tbtnb_guardrails) # fslmaths brain_mask.nii.gz -thr .5 -bin brain_mask_thr.nii.gz - refined_mask = pe.Node(interface=fsl.Threshold(), name=f'refined_mask' - f'_{pipe_num}') + refined_mask = pe.Node(interface=fsl.Threshold(), + name=f'refined_mask_{pipe_num}') refined_mask.inputs.thresh = 0.5 refined_mask.inputs.args = '-bin' - wf.connect(guardrail_template_brain_to_native_brain, - 'outputspec.out_file', + wf.connect(select_template_brain_to_native_brain, 'out', refined_mask, 'in_file') - outputs = { - 'space-T1w_desc-brain_mask': (refined_mask, 'out_file') - } + outputs = {'space-T1w_desc-brain_mask': (refined_mask, 'out_file')} - return (wf, outputs) + return wf, outputs def freesurfer_brain_connector(wf, cfg, strat_pool, pipe_num, opt): From 056afa28d083650dc1d84c10e8ece376abe18231 Mon Sep 17 00:00:00 2001 From: Jon Clucas Date: Thu, 27 Oct 2022 13:59:57 +0000 Subject: [PATCH 51/72] :memo: Fix registration docstring workflow graphs [skip ci] --- CPAC/registration/exceptions.py | 2 ++ CPAC/registration/registration.py | 45 ++++++++++++++++++++++--------- 2 files changed, 35 insertions(+), 12 deletions(-) diff --git a/CPAC/registration/exceptions.py b/CPAC/registration/exceptions.py index f7caac0d0e..d962ddfa30 100644 --- a/CPAC/registration/exceptions.py +++ b/CPAC/registration/exceptions.py @@ -15,6 +15,8 @@ # You should have received a copy of the GNU Lesser General Public # License along with C-PAC. If not, see . 
"""Custom registration exceptions""" + + class BadRegistrationError(ValueError): """Exception for when a QC measure for a registration falls below a specified threshold""" diff --git a/CPAC/registration/registration.py b/CPAC/registration/registration.py index 42394a82ae..1813489bfb 100644 --- a/CPAC/registration/registration.py +++ b/CPAC/registration/registration.py @@ -394,14 +394,24 @@ def create_fsl_fnirt_nonlinear_reg(name='fsl_fnirt_nonlinear_reg'): transformation (affine only) from the space of the reference file to the input file. + .. exec:: + from CPAC.registration.registration import \ + create_fsl_fnirt_nonlinear_reg + wf = create_fsl_fnirt_nonlinear_reg() + wf.write_graph( + graph2use='orig', + dotfilename='./images/generated/nonlinear_register.dot' + ) + Workflow Graph: - .. image:: ../images/nonlinear_register.dot.png - :width: 500 + .. image:: ../../images/generated/nonlinear_register.png + :width: 500 Detailed Workflow Graph: - .. image:: ../images/nonlinear_register_detailed.dot.png - :width: 500 + + .. image:: ../../images/generated/nonlinear_register_detailed.png + :width: 500 """ nonlinear_register = pe.Workflow(name=name) @@ -496,14 +506,24 @@ def create_fsl_fnirt_nonlinear_reg_nhp(name='fsl_fnirt_nonlinear_reg_nhp'): transformation (affine only) from the space of the reference file to the input file. + .. exec:: + from CPAC.registration.registration import \ + create_fsl_fnirt_nonlinear_reg_nhp + wf = create_fsl_fnirt_nonlinear_reg_nhp() + wf.write_graph( + graph2use='orig', + dotfilename='./images/generated/nonlinear_register_nhp.dot' + ) + Workflow Graph: - .. image:: ../images/nonlinear_register.dot.png - :width: 500 + .. image:: ../../images/generated/nonlinear_register_nhp.png + :width: 500 Detailed Workflow Graph: - .. image:: ../images/nonlinear_register_detailed.dot.png - :width: 500 + + .. image:: ../../images/generated/nonlinear_register_nhp_detailed.png + :width: 500 """ nonlinear_register = pe.Workflow(name=name) @@ -1078,13 +1098,14 @@ def create_wf_calculate_ants_warp(name='create_wf_calculate_ants_warp', ) Workflow Graph: - .. image:: - :width: 500 + + .. image:: ../../images/generated/calculate_ants_warp.png + :width: 500 Detailed Workflow Graph: - .. image:: - :width: 500 + .. image:: ../../images/generated/calculate_ants_warp_detailed.png + :width: 500 ''' from CPAC.registration.guardrails import retry_hardcoded_reg calc_ants_warp_wf = pe.Workflow(name=name) From dda6eb0b79cfb6f989d9f1b426d91e692de114a9 Mon Sep 17 00:00:00 2001 From: Jon Clucas Date: Thu, 27 Oct 2022 17:13:06 -0400 Subject: [PATCH 52/72] :page_facing_up: Move license declaration from docstring to comments [skip ci] --- CPAC/registration/registration.py | 55 +++++++++++++++---------------- 1 file changed, 27 insertions(+), 28 deletions(-) diff --git a/CPAC/registration/registration.py b/CPAC/registration/registration.py index 1813489bfb..8df41a6b20 100644 --- a/CPAC/registration/registration.py +++ b/CPAC/registration/registration.py @@ -3585,34 +3585,6 @@ def single_step_resample_timeseries_to_T1template(wf, cfg, strat_pool, Copyright (c) 2015-2018, the CRN developers team. All rights reserved. - Redistribution and use in source and binary forms, with or without - modification, are permitted provided that the following conditions - are met: - - * Redistributions of source code must retain the above copyright - notice, this list of conditions and the following disclaimer. 
- - * Redistributions in binary form must reproduce the above copyright - notice, this list of conditions and the following disclaimer in the - documentation and/or other materials provided with the distribution. - - * Neither the name of fmriprep nor the names of its contributors - may be used to endorse or promote products derived from this - software without specific prior written permission. - - THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS - "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT - LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS - FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE - COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, - INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES - (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR - SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) - HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, - STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) - ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED - OF THE POSSIBILITY OF SUCH DAMAGE. - Ref: https://github.com/nipreps/fmriprep/blob/84a6005b/fmriprep/workflows/bold/resampling.py#L159-L419 Node Block: @@ -3637,6 +3609,33 @@ def single_step_resample_timeseries_to_T1template(wf, cfg, strat_pool, "space-template_desc-brain_bold", "space-template_desc-bold_mask"]} """ # noqa: 501 + # Redistribution and use in source and binary forms, with or without + # modification, are permitted provided that the following conditions + # are met: + + # * Redistributions of source code must retain the above copyright + # notice, this list of conditions and the following disclaimer. + + # * Redistributions in binary form must reproduce the above copyright + # notice, this list of conditions and the following disclaimer in the + # documentation and/or other materials provided with the distribution. + + # * Neither the name of fmriprep nor the names of its contributors + # may be used to endorse or promote products derived from this + # software without specific prior written permission. + + # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT + # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS + # FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE + # COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, + # INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES + # (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR + # SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) + # HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, + # STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED + # OF THE POSSIBILITY OF SUCH DAMAGE. 
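(For orientation: the bbr2itk node defined just below is another instance of the nipype util.Function wrapping pattern used throughout registration.py. A minimal stand-alone sketch of that pattern follows; the helper body, its return value, and the node name are placeholders standing in for whatever conversion helper the real node wraps, and the pe import assumes C-PAC's pipeline-engine wrapper is installed.)

from nipype.interfaces import utility as util
from CPAC.pipeline import nipype_pipeline_engine as pe


def _fsl_to_itk_stub(reference_file, source_file, transform_file):
    """Placeholder helper; the real node wraps a c3d_affine_tool-based
    conversion, not this function."""
    return transform_file


# wrap the plain-Python helper as a workflow node with named inputs/outputs
example_xfm_node = pe.Node(util.Function(input_names=['reference_file',
                                                      'source_file',
                                                      'transform_file'],
                                         output_names=['itk_transform'],
                                         function=_fsl_to_itk_stub),
                           name='example_fsl_to_itk')
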
bbr2itk = pe.Node(util.Function(input_names=['reference_file', 'source_file', 'transform_file'], From 65b7bf2fa53cd4cb967c03856cbece6638bbc064 Mon Sep 17 00:00:00 2001 From: Jon Clucas Date: Fri, 28 Oct 2022 14:15:16 +0000 Subject: [PATCH 53/72] :rewind: Remove prototype guardrails from `freesurfer*connector` functions --- CPAC/anat_preproc/anat_preproc.py | 55 +++++++++++++------------------ 1 file changed, 23 insertions(+), 32 deletions(-) diff --git a/CPAC/anat_preproc/anat_preproc.py b/CPAC/anat_preproc/anat_preproc.py index cd6ff903e0..2393d6d5ed 100644 --- a/CPAC/anat_preproc/anat_preproc.py +++ b/CPAC/anat_preproc/anat_preproc.py @@ -1030,12 +1030,12 @@ def freesurfer_abcd_brain_connector(wf, cfg, strat_pool, pipe_num, opt): wf.connect(wb_command_fill_holes, 'out_file', binary_filled_mask, 'in_file') - brain_mask_to_t1_restore = pe.Node(interface=fsl.ApplyWarp(), - name=f'brain_mask_to_t1_restore_{pipe_num}') + brain_mask_to_t1_restore = pe.Node( + interface=fsl.ApplyWarp(), name=f'brain_mask_to_t1_restore_{pipe_num}') brain_mask_to_t1_restore.inputs.interp = 'nn' - brain_mask_to_t1_restore.inputs.premat = cfg.registration_workflows['anatomical_registration']['registration']['FSL-FNIRT']['identity_matrix'] - guardrail_brain_mask_to_t1_restore = registration_guardrail_workflow( - brain_mask_to_t1_restore) + brain_mask_to_t1_restore.inputs.premat = cfg.registration_workflows[ + 'anatomical_registration' + ]['registration']['FSL-FNIRT']['identity_matrix'] wf.connect(binary_filled_mask, 'out_file', brain_mask_to_t1_restore, 'in_file') @@ -1044,9 +1044,7 @@ def freesurfer_abcd_brain_connector(wf, cfg, strat_pool, pipe_num, opt): wf.connect(node, out, brain_mask_to_t1_restore, 'ref_file') outputs = { - 'space-T1w_desc-brain_mask': (guardrail_brain_mask_to_t1_restore, - 'outputspec.out_file') - } + 'space-T1w_desc-brain_mask': (brain_mask_to_t1_restore, 'out_file')} return (wf, outputs) @@ -1116,23 +1114,20 @@ def freesurfer_fsl_brain_connector(wf, cfg, strat_pool, pipe_num, opt): convert_head_to_template.inputs.searchr_x = [-90, 90] convert_head_to_template.inputs.searchr_y = [-90, 90] convert_head_to_template.inputs.searchr_z = [-90, 90] - guardrail_convert_head_to_template = registration_guardrail_workflow( - convert_head_to_template) wf.connect(reorient_fs_T1, 'out_file', - convert_head_to_template, 'in_file') + convert_head_to_template, 'in_file') node, out = strat_pool.get_data('T1w-ACPC-template') wf.connect(node, out, convert_head_to_template, 'reference') # convert_xfm -omat tmp_standard2head_fs.mat -inverse tmp_head_fs2standard.mat convert_xfm = pe.Node(interface=fsl_utils.ConvertXFM(), - name=f'convert_xfm_{node_id}') + name=f'convert_xfm_{node_id}') convert_xfm.inputs.invert_xfm = True - wf.connect(guardrail_convert_head_to_template, - 'outputspec.out_matrix_file', - convert_xfm, 'in_file') + wf.connect(convert_head_to_template, 'out_matrix_file', + convert_xfm, 'in_file') # bet tmp_head_fs2standard.nii.gz tmp.nii.gz -f ${bet_thr_tight} -m skullstrip = pe.Node(interface=fsl.BET(), @@ -1141,12 +1136,11 @@ def freesurfer_fsl_brain_connector(wf, cfg, strat_pool, pipe_num, opt): skullstrip.inputs.mask=True if opt == 'FreeSurfer-BET-Tight': - skullstrip.inputs.frac = 0.3 + skullstrip.inputs.frac=0.3 elif opt == 'FreeSurfer-BET-Loose': - skullstrip.inputs.frac = 0.1 + skullstrip.inputs.frac=0.1 - wf.connect(guardrail_convert_head_to_template, 'outputspec.out_file', - skullstrip, 'in_file') + wf.connect(convert_head_to_template, 'out_file', skullstrip, 'in_file') # fslmaths 
tmp_mask.nii.gz -mas ${CCSDIR}/templates/MNI152_T1_1mm_first_brain_mask.nii.gz tmp_mask.nii.gz apply_mask = pe.Node(interface=fsl.maths.ApplyMask(), @@ -1166,17 +1160,15 @@ def freesurfer_fsl_brain_connector(wf, cfg, strat_pool, pipe_num, opt): convert_template_mask_to_native.inputs.apply_xfm = True convert_template_mask_to_native.inputs.padding_size = 0 convert_template_mask_to_native.inputs.interp = 'nearestneighbour' - guardrail_convert_template_mask_to_native = ( - registration_guardrail_workflow(convert_template_mask_to_native)) wf.connect(apply_mask, 'out_file', - convert_template_mask_to_native, 'in_file') + convert_template_mask_to_native, 'in_file') wf.connect(convert_xfm, 'out_file', - convert_template_mask_to_native, 'in_matrix_file') + convert_template_mask_to_native, 'in_matrix_file') wf.connect(reorient_fs_T1, 'out_file', - convert_template_mask_to_native, 'reference') + convert_template_mask_to_native, 'reference') # fslmaths brain_fs_mask.nii.gz -add brain_fsl_mask_tight.nii.gz -bin brain_mask_tight.nii.gz # BinaryMaths doesn't use -bin! @@ -1189,11 +1181,10 @@ def freesurfer_fsl_brain_connector(wf, cfg, strat_pool, pipe_num, opt): combine_mask.inputs.operation = 'mul' wf.connect(binarize_fs_brain, 'out_file', - combine_mask, 'in_file') + combine_mask, 'in_file') - wf.connect(guardrail_convert_template_mask_to_native, - 'outputspec.out_file', - combine_mask, 'operand_file') + wf.connect(convert_template_mask_to_native, 'out_file', + combine_mask, 'operand_file') binarize_combined_mask = pe.Node(interface=fsl.maths.MathsCommand(), name=f'binarize_combined_mask_{node_id}') @@ -1220,12 +1211,12 @@ def freesurfer_fsl_brain_connector(wf, cfg, strat_pool, pipe_num, opt): if opt == 'FreeSurfer-BET-Tight': outputs = { - 'space-T1w_desc-tight_brain_mask': ( - fs_fsl_brain_mask_to_native, 'transformed_file')} + 'space-T1w_desc-tight_brain_mask': (fs_fsl_brain_mask_to_native, + 'transformed_file')} elif opt == 'FreeSurfer-BET-Loose': outputs = { - 'space-T1w_desc-loose_brain_mask': ( - fs_fsl_brain_mask_to_native, 'outputspec.transformed_file')} + 'space-T1w_desc-loose_brain_mask': (fs_fsl_brain_mask_to_native, + 'transformed_file')} return (wf, outputs) From 6bd09c3c0dbbbfbd50df6429e60a0f2ed1472895 Mon Sep 17 00:00:00 2001 From: Jon Clucas Date: Fri, 28 Oct 2022 14:27:04 +0000 Subject: [PATCH 54/72] :goal_net: Guardrail `freesurfer_fsl_brain_connector` --- CPAC/anat_preproc/anat_preproc.py | 62 ++++++++++++++++--------------- 1 file changed, 33 insertions(+), 29 deletions(-) diff --git a/CPAC/anat_preproc/anat_preproc.py b/CPAC/anat_preproc/anat_preproc.py index 2393d6d5ed..d1eaa8dfbd 100644 --- a/CPAC/anat_preproc/anat_preproc.py +++ b/CPAC/anat_preproc/anat_preproc.py @@ -1114,20 +1114,26 @@ def freesurfer_fsl_brain_connector(wf, cfg, strat_pool, pipe_num, opt): convert_head_to_template.inputs.searchr_x = [-90, 90] convert_head_to_template.inputs.searchr_y = [-90, 90] convert_head_to_template.inputs.searchr_z = [-90, 90] - - wf.connect(reorient_fs_T1, 'out_file', - convert_head_to_template, 'in_file') + (head_to_template_nodes, + head_to_template_guardrails) = wf.nodes_and_guardrails( + convert_head_to_template, registered='out_file') node, out = strat_pool.get_data('T1w-ACPC-template') - wf.connect(node, out, convert_head_to_template, 'reference') + wf.connect_retries(head_to_template_nodes, [ + (reorient_fs_T1, 'out_file', 'in_file'), + (node, out, 'reference')]) + wf.connect_retries(head_to_template_guardrails, [(node, out, 'reference')]) + head_to_template = 
guardrail_selection(wf, *head_to_template_guardrails) + head_to_template_matrix = guardrail_selection( + wf, *head_to_template_nodes, 'out_matrix_file', + head_to_template_guardrails[0]) # convert_xfm -omat tmp_standard2head_fs.mat -inverse tmp_head_fs2standard.mat convert_xfm = pe.Node(interface=fsl_utils.ConvertXFM(), - name=f'convert_xfm_{node_id}') + name=f'convert_xfm_{node_id}') convert_xfm.inputs.invert_xfm = True - wf.connect(convert_head_to_template, 'out_matrix_file', - convert_xfm, 'in_file') + wf.connect(head_to_template_matrix, 'out', convert_xfm, 'in_file') # bet tmp_head_fs2standard.nii.gz tmp.nii.gz -f ${bet_thr_tight} -m skullstrip = pe.Node(interface=fsl.BET(), @@ -1140,14 +1146,13 @@ def freesurfer_fsl_brain_connector(wf, cfg, strat_pool, pipe_num, opt): elif opt == 'FreeSurfer-BET-Loose': skullstrip.inputs.frac=0.1 - wf.connect(convert_head_to_template, 'out_file', skullstrip, 'in_file') + wf.connect(head_to_template, 'out', skullstrip, 'in_file') # fslmaths tmp_mask.nii.gz -mas ${CCSDIR}/templates/MNI152_T1_1mm_first_brain_mask.nii.gz tmp_mask.nii.gz apply_mask = pe.Node(interface=fsl.maths.ApplyMask(), name=f'apply_mask_{node_id}') - wf.connect(skullstrip, 'out_file', - apply_mask, 'in_file') + wf.connect(skullstrip, 'out_file', apply_mask, 'in_file') node, out = strat_pool.get_data('T1w-brain-template-mask-ccs') wf.connect(node, out, apply_mask, 'mask_file') @@ -1160,15 +1165,18 @@ def freesurfer_fsl_brain_connector(wf, cfg, strat_pool, pipe_num, opt): convert_template_mask_to_native.inputs.apply_xfm = True convert_template_mask_to_native.inputs.padding_size = 0 convert_template_mask_to_native.inputs.interp = 'nearestneighbour' - - wf.connect(apply_mask, 'out_file', - convert_template_mask_to_native, 'in_file') - - wf.connect(convert_xfm, 'out_file', - convert_template_mask_to_native, 'in_matrix_file') - - wf.connect(reorient_fs_T1, 'out_file', - convert_template_mask_to_native, 'reference') + (templatemask_to_native_nodes, + templatemask_to_native_guardrails) = wf.nodes_and_guardrails( + convert_template_mask_to_native, registered='out_file') + + wf.connect_retries(templatemask_to_native_nodes, [ + (apply_mask, 'out_file', 'in_file'), + (convert_xfm, 'out_file', 'in_matrix_file'), + (reorient_fs_T1, 'out_file', 'reference')]) + wf.connect_retries(templatemask_to_native_guardrails, [ + (reorient_fs_T1, 'out_file', 'reference')]) + template_mask_to_native = guardrail_selection( + wf, *templatemask_to_native_guardrails) # fslmaths brain_fs_mask.nii.gz -add brain_fsl_mask_tight.nii.gz -bin brain_mask_tight.nii.gz # BinaryMaths doesn't use -bin! 
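(The hunks above and below apply one recurring pattern: build a try/retry pair of registration nodes with matching guardrails, fan the same inputs to both attempts, and select whichever attempt passes QC. Below is a minimal self-contained sketch of that pattern, assuming the guardrail helpers this series adds — Workflow.nodes_and_guardrails, Workflow.connect_retries, and CPAC.registration.guardrails.guardrail_selection — are available; the workflow name, node names, and IdentityInterface fields are illustrative only, not taken from the patch.)

from nipype.interfaces import fsl
from nipype.interfaces import utility as util
from CPAC.pipeline import nipype_pipeline_engine as pe
from CPAC.registration.guardrails import guardrail_selection

wf = pe.Workflow(name='guardrail_pattern_example')
inputnode = pe.Node(util.IdentityInterface(fields=['moving', 'reference']),
                    name='inputspec')
flirt = pe.Node(interface=fsl.FLIRT(), name='example_flirt')

# one (try, retry) pair of registration nodes plus one guardrail per attempt
nodes, guardrails = wf.nodes_and_guardrails(flirt, registered='out_file')

# fan the same inputs out to both attempts ...
wf.connect_retries(nodes, [(inputnode, 'moving', 'in_file'),
                           (inputnode, 'reference', 'reference')])
# ... and give every guardrail the reference image it needs for QC
wf.connect_retries(guardrails, [(inputnode, 'reference', 'reference')])

# select whichever attempt passed QC: the registered image itself ...
registered = guardrail_selection(wf, *guardrails)
# ... or a secondary output of the underlying registration nodes
matrix = guardrail_selection(wf, *nodes, 'out_matrix_file', guardrails[0])
# downstream nodes connect from the selection nodes via their 'out' field,
# e.g. wf.connect(matrix, 'out', some_node, 'in_matrix_file')
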
@@ -1180,18 +1188,14 @@ def freesurfer_fsl_brain_connector(wf, cfg, strat_pool, pipe_num, opt): elif opt == 'FreeSurfer-BET-Loose': combine_mask.inputs.operation = 'mul' - wf.connect(binarize_fs_brain, 'out_file', - combine_mask, 'in_file') - - wf.connect(convert_template_mask_to_native, 'out_file', - combine_mask, 'operand_file') + wf.connect(binarize_fs_brain, 'out_file', combine_mask, 'in_file') + wf.connect(template_mask_to_native, 'out', combine_mask, 'operand_file') binarize_combined_mask = pe.Node(interface=fsl.maths.MathsCommand(), name=f'binarize_combined_mask_{node_id}') binarize_combined_mask.inputs.args = '-bin' - wf.connect(combine_mask, 'out_file', - binarize_combined_mask, 'in_file') + wf.connect(combine_mask, 'out_file', binarize_combined_mask, 'in_file') # CCS brain mask is in FS space, transfer it back to native T1 space fs_fsl_brain_mask_to_native = pe.Node( @@ -1200,8 +1204,8 @@ def freesurfer_fsl_brain_connector(wf, cfg, strat_pool, pipe_num, opt): fs_fsl_brain_mask_to_native.inputs.reg_header = True fs_fsl_brain_mask_to_native.inputs.interp = 'nearest' - wf.connect(binarize_combined_mask, 'out_file', - fs_fsl_brain_mask_to_native, 'source_file') + wf.connect(binarize_combined_mask, 'out_file', + fs_fsl_brain_mask_to_native, 'source_file') node, out = strat_pool.get_data('raw-average') wf.connect(node, out, fs_fsl_brain_mask_to_native, 'target_file') @@ -1218,7 +1222,7 @@ def freesurfer_fsl_brain_connector(wf, cfg, strat_pool, pipe_num, opt): 'space-T1w_desc-loose_brain_mask': (fs_fsl_brain_mask_to_native, 'transformed_file')} - return (wf, outputs) + return wf, outputs def mask_T2(wf_name='mask_T2'): From 243fdd9e1225171653e6700af6edef4a5ef9fb7b Mon Sep 17 00:00:00 2001 From: Jon Clucas Date: Fri, 28 Oct 2022 14:52:31 +0000 Subject: [PATCH 55/72] :goal_net: Guardrail `mask_T2` --- CPAC/anat_preproc/anat_preproc.py | 38 +++++++++++++++++++------------ 1 file changed, 24 insertions(+), 14 deletions(-) diff --git a/CPAC/anat_preproc/anat_preproc.py b/CPAC/anat_preproc/anat_preproc.py index d1eaa8dfbd..fb4c3eb478 100644 --- a/CPAC/anat_preproc/anat_preproc.py +++ b/CPAC/anat_preproc/anat_preproc.py @@ -1253,25 +1253,36 @@ def mask_T2(wf_name='mask_T2'): # t1w2t2w_rigid = 'flirt -dof 6 -cost mutualinfo -in {t1w} -ref {t2w} ' \ # '-omat {t1w2t2w}'.format(**kwargs) - t1w2t2w_rigid = pe.Node(interface=fsl.FLIRT(), - name='t1w2t2w_rigid') - + t1w2t2w_rigid = pe.Node(interface=fsl.FLIRT(), name='t1w2t2w_rigid') t1w2t2w_rigid.inputs.dof = 6 t1w2t2w_rigid.inputs.cost = 'mutualinfo' - preproc.connect(inputnode, 'T1w', t1w2t2w_rigid, 'in_file') - preproc.connect(inputnode, 'T2w', t1w2t2w_rigid, 'reference') + rigid_nodes, rigid_guardrails = preproc.nodes_and_guardrails( + t1w2t2w_rigid, registered='out_file') + preproc.connect_retries(rigid_nodes, [ + (inputnode, 'T1w', 'in_file'), + (inputnode, 'T2w', 'reference')]) + preproc.connect_retries(rigid_guardrails, [ + (inputnode, 'T2w', 'reference')]) + rigid_matrix = guardrail_selection(preproc, *rigid_nodes, + 'out_matrix_file', rigid_guardrails[0]) # t1w2t2w_mask = 'flirt -in {t1w_mask_edit} -interp nearestneighbour -ref {' \ # 't2w} -o {t2w_brain_mask} -applyxfm -init {' \ # 't1w2t2w}'.format(**kwargs) - t1w2t2w_mask = pe.Node(interface=fsl.FLIRT(), - name='t1w2t2w_mask') + t1w2t2w_mask = pe.Node(interface=fsl.FLIRT(), name='t1w2t2w_mask') t1w2t2w_mask.inputs.apply_xfm = True t1w2t2w_mask.inputs.interp = 'nearestneighbour' - - preproc.connect(inputnode, 'T1w_mask', t1w2t2w_mask, 'in_file') - preproc.connect(inputnode, 'T2w', 
t1w2t2w_mask, 'reference') - preproc.connect(t1w2t2w_rigid, 'out_matrix_file', t1w2t2w_mask, 'in_matrix_file') + mask_nodes, mask_guardrails = preproc.nodes_and_guardrails( + t1w2t2w_mask, registered='out_file') + + preproc.connect_retries(mask_nodes, [ + (inputnode, 'T1w_mask', 'in_file'), + (inputnode, 'T2w', 'reference'), + (rigid_matrix, 'out', 'in_matrix_file')]) + preproc.connect_retries(mask_guardrails, [ + (inputnode, 'T2w', 'reference')]) + # pylint: disable=no-value-for-parameter + select_mask = guardrail_selection(preproc, *mask_guardrails) # mask_t2w = 'fslmaths {t2w} -mas {t2w_brain_mask} ' \ # '{t2w_brain}'.format(**kwargs) @@ -1280,11 +1291,10 @@ def mask_T2(wf_name='mask_T2'): mask_t2w.inputs.op_string = "-mas %s " preproc.connect(inputnode, 'T2w', mask_t2w, 'in_file') - preproc.connect(t1w2t2w_mask, 'out_file', mask_t2w, 'operand_files') - + preproc.connect(select_mask, 'out', mask_t2w, 'operand_files') preproc.connect(mask_t1w, 'out_file', outputnode, 'T1w_brain') preproc.connect(mask_t2w, 'out_file', outputnode, 'T2w_brain') - preproc.connect(t1w2t2w_mask, 'out_file', outputnode, 'T2w_mask') + preproc.connect(select_mask, 'out', outputnode, 'T2w_mask') return preproc From 1cbf4640b535c3bec970b43db12beef7d920dfd3 Mon Sep 17 00:00:00 2001 From: Jon Clucas Date: Fri, 28 Oct 2022 15:08:43 +0000 Subject: [PATCH 56/72] :goal_net: Guardrail `fnirt_based_brain_extraction` --- CPAC/anat_preproc/anat_preproc.py | 116 +++++++++++++----------------- 1 file changed, 49 insertions(+), 67 deletions(-) diff --git a/CPAC/anat_preproc/anat_preproc.py b/CPAC/anat_preproc/anat_preproc.py index fb4c3eb478..8d18b52df8 100644 --- a/CPAC/anat_preproc/anat_preproc.py +++ b/CPAC/anat_preproc/anat_preproc.py @@ -1506,11 +1506,12 @@ def acpc_align_brain_with_mask(wf, cfg, strat_pool, pipe_num, opt=None): outputs = { 'desc-preproc_T1w': (acpc_align, 'outputspec.acpc_aligned_head'), 'desc-acpcbrain_T1w': (acpc_align, 'outputspec.acpc_aligned_brain'), - 'space-T1w_desc-brain_mask': (acpc_align, 'outputspec.acpc_brain_mask'), - 'space-T1w_desc-prebrain_mask': (strat_pool.get_data('space-T1w_desc-brain_mask')) - } + 'space-T1w_desc-brain_mask': (acpc_align, + 'outputspec.acpc_brain_mask'), + 'space-T1w_desc-prebrain_mask': ( + strat_pool.get_data('space-T1w_desc-brain_mask'))} - return (wf, outputs) + return wf, outputs def registration_T2w_to_T1w(wf, cfg, strat_pool, pipe_num, opt=None): @@ -1543,11 +1544,9 @@ def registration_T2w_to_T1w(wf, cfg, strat_pool, pipe_num, opt=None): node, out = strat_pool.get_data(['desc-acpcbrain_T2w']) wf.connect(node, out, T2_to_T1_reg, 'inputspec.T2w_brain') - outputs = { - 'desc-preproc_T2w': (T2_to_T1_reg, 'outputspec.T2w_to_T1w') - } + outputs = {'desc-preproc_T2w': (T2_to_T1_reg, 'outputspec.T2w_to_T1w')} - return (wf, outputs) + return wf, outputs def non_local_means(wf, cfg, strat_pool, pipe_num, opt=None): @@ -2799,13 +2798,13 @@ def fnirt_based_brain_extraction(config=None, wf_name='fnirt_based_brain_extract preproc = pe.Workflow(name=wf_name) - inputnode = pe.Node(util.IdentityInterface(fields=['anat_data', - 'template-ref-mask-res-2', - 'template_skull_for_anat', - 'template_skull_for_anat_2mm', - 'template_brain_mask_for_anat']), - name='inputspec') - + inputnode = pe.Node(util.IdentityInterface( + fields=['anat_data', + 'template-ref-mask-res-2', + 'template_skull_for_anat', + 'template_skull_for_anat_2mm', + 'template_brain_mask_for_anat']), name='inputspec') + outputnode = pe.Node(util.IdentityInterface(fields=['anat_brain', 'anat_brain_mask']), 
name='outputspec') @@ -2818,12 +2817,17 @@ def fnirt_based_brain_extraction(config=None, wf_name='fnirt_based_brain_extract linear_reg.inputs.dof = 12 linear_reg.inputs.interp = 'spline' linear_reg.inputs.no_search = True - - preproc.connect(inputnode, 'anat_data', - linear_reg, 'in_file') - - preproc.connect(inputnode, 'template_skull_for_anat_2mm', - linear_reg, 'reference') + lreg_nodes, lreg_guardrails = preproc.nodes_and_guardrails( + linear_reg, registered='out_file') + + preproc.connect_retries(lreg_nodes, [ + (inputnode, 'anat_data', 'in_file'), + (inputnode, 'template_skull_for_anat_2mm', 'reference')]) + preproc.connect_retries(lreg_guardrails, [ + (inputnode, 'template_skull_for_anat_2mm', 'reference')]) + linear_reg_matrix = guardrail_selection(preproc, *lreg_nodes, + 'out_matrix_file', + lreg_guardrails[0]) # non-linear registration to 2mm reference # fnirt --in="$Input" --ref="$Reference2mm" --aff="$WD"/roughlin.mat --refmask="$Reference2mmMask" \ @@ -2831,9 +2835,7 @@ def fnirt_based_brain_extraction(config=None, wf_name='fnirt_based_brain_extract # --refout="$WD"/IntensityModulatedT1.nii.gz --iout="$WD"/"$BaseName"_to_MNI_nonlin.nii.gz \ # --logout="$WD"/NonlinearReg.txt --intout="$WD"/NonlinearIntensities.nii.gz \ # --cout="$WD"/NonlinearReg.nii.gz --config="$FNIRTConfig" - non_linear_reg = pe.Node(interface=fsl.FNIRT(), - name='non_linear_reg') - + non_linear_reg = pe.Node(interface=fsl.FNIRT(), name='non_linear_reg') non_linear_reg.inputs.field_file = True # --fout non_linear_reg.inputs.jacobian_file = True # --jout non_linear_reg.inputs.modulatedref_file = True # --refout @@ -2843,36 +2845,30 @@ def fnirt_based_brain_extraction(config=None, wf_name='fnirt_based_brain_extract non_linear_reg.inputs.fieldcoeff_file = True # --cout non_linear_reg.inputs.config_file = config.registration_workflows[ 'anatomical_registration']['registration']['FSL-FNIRT']['fnirt_config'] - - preproc.connect(inputnode, 'anat_data', - non_linear_reg, 'in_file') - - preproc.connect(inputnode, 'template_skull_for_anat_2mm', - non_linear_reg, 'ref_file') - - preproc.connect(linear_reg, 'out_matrix_file', - non_linear_reg, 'affine_file') - - preproc.connect(inputnode, 'template-ref-mask-res-2', - non_linear_reg, 'refmask_file') + nlreg_nodes, nlreg_guardrails = preproc.nodes_and_guardrails( + non_linear_reg, registered='warped_file') + + preproc.connect_retries(nlreg_nodes, [ + (inputnode, 'anat_data', 'in_file'), + (inputnode, 'template_skull_for_anat_2mm', 'ref_file'), + (linear_reg_matrix, 'out', 'affine_file'), + (inputnode, 'template-ref-mask-res-2', 'refmask_file')]) + preproc.connect_retries(nlreg_guardrails, [ + (inputnode, 'template_skull_for_anat_2mm', 'reference')]) + field_file = guardrail_selection(preproc, *nlreg_nodes, 'field_file', + nlreg_guardrails[0]) # Overwrite the image output from FNIRT with a spline interpolated highres version # creating spline interpolated hires version # applywarp --rel --interp=spline --in="$Input" --ref="$Reference" -w "$WD"/str2standard.nii.gz --out="$WD"/"$BaseName"_to_MNI_nonlin.nii.gz - apply_warp = pe.Node(interface=fsl.ApplyWarp(), - name='apply_warp') - + apply_warp = pe.Node(interface=fsl.ApplyWarp(), name='apply_warp') apply_warp.inputs.interp = 'spline' apply_warp.inputs.relwarp = True - preproc.connect(inputnode, 'anat_data', - apply_warp, 'in_file') - + preproc.connect(inputnode, 'anat_data', apply_warp, 'in_file') preproc.connect(inputnode, 'template_skull_for_anat', apply_warp, 'ref_file') - - preproc.connect(non_linear_reg, 'field_file', - 
apply_warp, 'field_file')
+    preproc.connect(field_file, 'out', apply_warp, 'field_file')
 
     # Invert warp and transform dilated brain mask back into native space, and use it to mask input image
     # Input and reference spaces are the same, using 2mm reference to save time
@@ -2882,44 +2878,30 @@ def fnirt_based_brain_extraction(config=None, wf_name='fnirt_based_brain_extract
 
     preproc.connect(inputnode, 'template_skull_for_anat_2mm',
                     inverse_warp, 'reference')
-
-    preproc.connect(non_linear_reg, 'field_file',
-                    inverse_warp, 'warp')
+    preproc.connect(field_file, 'out', inverse_warp, 'warp')
 
     # Apply inverse warp
     # applywarp --rel --interp=nn --in="$ReferenceMask" --ref="$Input" -w "$WD"/standard2str.nii.gz -o "$OutputBrainMask"
-    apply_inv_warp = pe.Node(interface=fsl.ApplyWarp(),
-                             name='apply_inv_warp')
+    apply_inv_warp = pe.Node(interface=fsl.ApplyWarp(), name='apply_inv_warp')
     apply_inv_warp.inputs.interp = 'nn'
     apply_inv_warp.inputs.relwarp = True
 
     preproc.connect(inputnode, 'template_brain_mask_for_anat',
                     apply_inv_warp, 'in_file')
+    preproc.connect(inputnode, 'anat_data', apply_inv_warp, 'ref_file')
+    preproc.connect(inverse_warp, 'inverse_warp', apply_inv_warp, 'field_file')
+    preproc.connect(apply_inv_warp, 'out_file', outputnode, 'anat_brain_mask')
 
-    preproc.connect(inputnode, 'anat_data',
-                    apply_inv_warp, 'ref_file')
-
-    preproc.connect(inverse_warp, 'inverse_warp',
-                    apply_inv_warp, 'field_file')
-
-    preproc.connect(apply_inv_warp, 'out_file',
-                    outputnode, 'anat_brain_mask')
-
     # Apply mask to create brain
     # fslmaths "$Input" -mas "$OutputBrainMask" "$OutputBrainExtractedImage"
     apply_mask = pe.Node(interface=fsl.MultiImageMaths(),
                          name='apply_mask')
     apply_mask.inputs.op_string = '-mas %s'
 
-    preproc.connect(inputnode, 'anat_data',
-                    apply_mask, 'in_file')
+    preproc.connect(inputnode, 'anat_data', apply_mask, 'in_file')
+    preproc.connect(apply_inv_warp, 'out_file', apply_mask, 'operand_files')
+    preproc.connect(apply_mask, 'out_file', outputnode, 'anat_brain')
 
-    preproc.connect(apply_inv_warp, 'out_file',
-                    apply_mask, 'operand_files')
-
-    preproc.connect(apply_mask, 'out_file',
-                    outputnode, 'anat_brain')
-
     return preproc
 

From 611066f4dd6a1aecc5f46eac4af5ff9abd8251ad Mon Sep 17 00:00:00 2001
From: Jon Clucas 
Date: Fri, 28 Oct 2022 15:20:09 +0000
Subject: [PATCH 57/72] :goal_net: Guardrail `freesurfer_abcd_preproc`

---
 CPAC/anat_preproc/anat_preproc.py | 117 +++++++++++++++---------------
 1 file changed, 59 insertions(+), 58 deletions(-)

diff --git a/CPAC/anat_preproc/anat_preproc.py b/CPAC/anat_preproc/anat_preproc.py
index 8d18b52df8..2c9c34bba7 100644
--- a/CPAC/anat_preproc/anat_preproc.py
+++ b/CPAC/anat_preproc/anat_preproc.py
@@ -2905,8 +2905,8 @@ def fnirt_based_brain_extraction(config=None, wf_name='fnirt_based_brain_extract
     return preproc
 
 
-def fast_bias_field_correction(config=None, wf_name='fast_bias_field_correction'):
-
+def fast_bias_field_correction(config=None,
+                               wf_name='fast_bias_field_correction'):
     ### ABCD Harmonization - FAST bias field correction ###
     # Ref: https://github.com/DCAN-Labs/DCAN-HCP/blob/master/PreFreeSurfer/PreFreeSurferPipeline.sh#L688-L694
 
@@ -2931,10 +2931,8 @@ def fast_bias_field_correction(config=None, wf_name='fast_bias_field_correction'
 
     preproc.connect(inputnode, 'anat_brain',
                     fast_bias_field_correction, 'in_files')
-
     preproc.connect(fast_bias_field_correction, 'restored_image',
                     outputnode, 'anat_brain_restore')
-
     preproc.connect(fast_bias_field_correction, 'bias_field',
                     outputnode, 'bias_field')
 
@@ -2944,7 +2942,7 @@ def 
fast_bias_field_correction(config=None, wf_name='fast_bias_field_correction' # fslmaths ${T1wFolder}/T1w_acpc_brain_mask.nii.gz -mul -1 -add 1 ${T1wFolder}/T1w_acpc_inverse_brain_mask.nii.gz inverse_brain_mask = pe.Node(interface=fsl.ImageMaths(), - name='inverse_brain_mask') + name='inverse_brain_mask') inverse_brain_mask.inputs.op_string = '-mul -1 -add 1' preproc.connect(inputnode, 'anat_brain_mask', @@ -2955,25 +2953,19 @@ def fast_bias_field_correction(config=None, wf_name='fast_bias_field_correction' name='apply_mask') apply_mask.inputs.op_string = '-mul %s' - preproc.connect(inputnode, 'anat_data', - apply_mask, 'in_file') - + preproc.connect(inputnode, 'anat_data', apply_mask, 'in_file') preproc.connect(inverse_brain_mask, 'out_file', apply_mask, 'operand_files') # fslmaths ${T1wFolder}/T1w_fast_restore.nii.gz -add ${T1wFolder}/T1w_acpc_dc_skull.nii.gz ${T1wFolder}/${T1wImage}_acpc_dc_restore anat_restore = pe.Node(interface=fsl.MultiImageMaths(), - name='get_anat_restore') + name='get_anat_restore') anat_restore.inputs.op_string = '-add %s' preproc.connect(fast_bias_field_correction, 'restored_image', anat_restore, 'in_file') - - preproc.connect(apply_mask, 'out_file', - anat_restore, 'operand_files') - - preproc.connect(anat_restore, 'out_file', - outputnode, 'anat_restore') + preproc.connect(apply_mask, 'out_file', anat_restore, 'operand_files') + preproc.connect(anat_restore, 'out_file', outputnode, 'anat_restore') return preproc @@ -3014,34 +3006,40 @@ def freesurfer_abcd_preproc(wf, cfg, strat_pool, pipe_num, opt=None): ''' # fnirt-based brain extraction - brain_extraction = fnirt_based_brain_extraction(config=cfg, - wf_name=f'fnirt_based_brain_extraction_{pipe_num}') + fn_brain_extraction = fnirt_based_brain_extraction( + config=cfg, wf_name=f'fnirt_based_brain_extraction_{pipe_num}') node, out = strat_pool.get_data('desc-preproc_T1w') - wf.connect(node, out, brain_extraction, 'inputspec.anat_data') + wf.connect(node, out, fn_brain_extraction, 'inputspec.anat_data') node, out = strat_pool.get_data('template-ref-mask-res-2') - wf.connect(node, out, brain_extraction, 'inputspec.template-ref-mask-res-2') + wf.connect(node, out, + fn_brain_extraction, 'inputspec.template-ref-mask-res-2') node, out = strat_pool.get_data('T1w-template') - wf.connect(node, out, brain_extraction, 'inputspec.template_skull_for_anat') + wf.connect(node, out, + fn_brain_extraction, 'inputspec.template_skull_for_anat') node, out = strat_pool.get_data('T1w-template-res-2') - wf.connect(node, out, brain_extraction, 'inputspec.template_skull_for_anat_2mm') + wf.connect(node, out, + fn_brain_extraction, 'inputspec.template_skull_for_anat_2mm') node, out = strat_pool.get_data('T1w-brain-template-mask') - wf.connect(node, out, brain_extraction, 'inputspec.template_brain_mask_for_anat') + wf.connect(node, out, + fn_brain_extraction, 'inputspec.template_brain_mask_for_anat') # fast bias field correction - fast_correction = fast_bias_field_correction(config=cfg, - wf_name=f'fast_bias_field_correction_{pipe_num}') + fast_correction = fast_bias_field_correction( + config=cfg, wf_name=f'fast_bias_field_correction_{pipe_num}') node, out = strat_pool.get_data('desc-preproc_T1w') wf.connect(node, out, fast_correction, 'inputspec.anat_data') - wf.connect(brain_extraction, 'outputspec.anat_brain', fast_correction, 'inputspec.anat_brain') + wf.connect(fn_brain_extraction, 'outputspec.anat_brain', + fast_correction, 'inputspec.anat_brain') - wf.connect(brain_extraction, 'outputspec.anat_brain_mask', fast_correction, 
'inputspec.anat_brain_mask') + wf.connect(fn_brain_extraction, 'outputspec.anat_brain_mask', + fast_correction, 'inputspec.anat_brain_mask') ### ABCD Harmonization ### # Ref: https://github.com/DCAN-Labs/DCAN-HCP/blob/master/FreeSurfer/FreeSurferPipeline.sh#L140-L144 @@ -3051,40 +3049,44 @@ def freesurfer_abcd_preproc(wf, cfg, strat_pool, pipe_num, opt=None): name=f'resample_anat_head_1mm_{pipe_num}') resample_head_1mm.inputs.interp = 'spline' resample_head_1mm.inputs.apply_isoxfm = 1 + head_1mm_nodes, head_1mm_guardrails = wf.nodes_and_guardrails( + resample_head_1mm, registered='out_file') node, out = strat_pool.get_data('desc-preproc_T1w') - wf.connect(node, out, resample_head_1mm, 'in_file') - - wf.connect(node, out, resample_head_1mm, 'reference') - + wf.connect_retries(head_1mm_nodes, [(node, out, 'in_file'), + (node, out, 'reference')]) + wf.connect_retries(head_1mm_guardrails, [(node, out, 'reference')]) + head_1mm = guardrail_selection(wf, *head_1mm_guardrails) # applywarp --rel --interp=spline -i "$T1wImage" -r "$T1wImageFile"_1mm.nii.gz --premat=$FSLDIR/etc/flirtsch/ident.mat -o "$T1wImageFile"_1mm.nii.gz - applywarp_head_to_head_1mm = pe.Node(interface=fsl.ApplyWarp(), - name=f'applywarp_head_to_head_1mm_{pipe_num}') + applywarp_head_to_head_1mm = pe.Node( + interface=fsl.ApplyWarp(), + name=f'applywarp_head_to_head_1mm_{pipe_num}') applywarp_head_to_head_1mm.inputs.relwarp = True applywarp_head_to_head_1mm.inputs.interp = 'spline' - applywarp_head_to_head_1mm.inputs.premat = cfg.registration_workflows['anatomical_registration']['registration']['FSL-FNIRT']['identity_matrix'] + applywarp_head_to_head_1mm.inputs.premat = cfg.registration_workflows[ + 'anatomical_registration' + ]['registration']['FSL-FNIRT']['identity_matrix'] wf.connect(node, out, applywarp_head_to_head_1mm, 'in_file') - - wf.connect(resample_head_1mm, 'out_file', - applywarp_head_to_head_1mm, 'ref_file') + wf.connect(head_1mm, 'out', applywarp_head_to_head_1mm, 'ref_file') # applywarp --rel --interp=nn -i "$T1wImageBrain" -r "$T1wImageFile"_1mm.nii.gz --premat=$FSLDIR/etc/flirtsch/ident.mat -o "$T1wImageBrainFile"_1mm.nii.gz - applywarp_brain_to_head_1mm = pe.Node(interface=fsl.ApplyWarp(), - name=f'applywarp_brain_to_head_1mm_{pipe_num}') + applywarp_brain_to_head_1mm = pe.Node( + interface=fsl.ApplyWarp(), + name=f'applywarp_brain_to_head_1mm_{pipe_num}') applywarp_brain_to_head_1mm.inputs.relwarp = True applywarp_brain_to_head_1mm.inputs.interp = 'nn' - applywarp_brain_to_head_1mm.inputs.premat = cfg.registration_workflows['anatomical_registration']['registration']['FSL-FNIRT']['identity_matrix'] + applywarp_brain_to_head_1mm.inputs.premat = cfg.registration_workflows[ + 'anatomical_registration' + ]['registration']['FSL-FNIRT']['identity_matrix'] wf.connect(fast_correction, 'outputspec.anat_brain_restore', - applywarp_brain_to_head_1mm, 'in_file') - - wf.connect(resample_head_1mm, 'out_file', - applywarp_brain_to_head_1mm, 'ref_file') + applywarp_brain_to_head_1mm, 'in_file') + wf.connect(head_1mm, 'out', applywarp_brain_to_head_1mm, 'ref_file') # fslstats $T1wImageBrain -M average_brain = pe.Node(interface=fsl.ImageStats(), - name=f'average_brain_{pipe_num}') + name=f'average_brain_{pipe_num}') average_brain.inputs.op_string = '-M' average_brain.inputs.output_type = 'NIFTI_GZ' @@ -3092,17 +3094,16 @@ def freesurfer_abcd_preproc(wf, cfg, strat_pool, pipe_num, opt=None): average_brain, 'in_file') # fslmaths "$T1wImageFile"_1mm.nii.gz -div $Mean -mul 150 -abs "$T1wImageFile"_1mm.nii.gz - normalize_head = 
pe.Node(util.Function(input_names=['in_file', 'number', 'out_file_suffix'], + normalize_head = pe.Node(util.Function(input_names=['in_file', 'number', + 'out_file_suffix'], output_names=['out_file'], function=fslmaths_command), name=f'normalize_head_{pipe_num}') normalize_head.inputs.out_file_suffix = '_norm' - wf.connect(applywarp_head_to_head_1mm, 'out_file', + wf.connect(applywarp_head_to_head_1mm, 'out_file', normalize_head, 'in_file') - - wf.connect(average_brain, 'out_stat', - normalize_head, 'number') + wf.connect(average_brain, 'out_stat', normalize_head, 'number') ### recon-all -all step ### reconall = pe.Node(interface=freesurfer.ReconAll(), @@ -3110,10 +3111,9 @@ def freesurfer_abcd_preproc(wf, cfg, strat_pool, pipe_num, opt=None): mem_gb=2.7) sub_dir = cfg.pipeline_setup['working_directory']['path'] - freesurfer_subject_dir = os.path.join(sub_dir, - 'cpac_'+cfg['subject_id'], - f'anat_preproc_freesurfer_{pipe_num}', - 'anat_freesurfer') + freesurfer_subject_dir = os.path.join( + sub_dir, 'cpac_'+cfg['subject_id'], + f'anat_preproc_freesurfer_{pipe_num}', 'anat_freesurfer') # create the directory for FreeSurfer node if not os.path.exists(freesurfer_subject_dir): @@ -3121,10 +3121,10 @@ def freesurfer_abcd_preproc(wf, cfg, strat_pool, pipe_num, opt=None): reconall.inputs.directive = 'all' reconall.inputs.subjects_dir = freesurfer_subject_dir - reconall.inputs.openmp = cfg.pipeline_setup['system_config']['num_OMP_threads'] + reconall.inputs.openmp = cfg.pipeline_setup['system_config'][ + 'num_OMP_threads'] - wf.connect(normalize_head, 'out_file', - reconall, 'T1_files') + wf.connect(normalize_head, 'out_file', reconall, 'T1_files') wf, hemisphere_outputs = freesurfer_hemispheres(wf, reconall, pipe_num) @@ -3138,10 +3138,11 @@ def freesurfer_abcd_preproc(wf, cfg, strat_pool, pipe_num, opt=None): **hemisphere_outputs } - return (wf, outputs) + return wf, outputs -def correct_restore_brain_intensity_abcd(wf, cfg, strat_pool, pipe_num, opt=None): +def correct_restore_brain_intensity_abcd(wf, cfg, strat_pool, pipe_num, + opt=None): ''' {"name": "correct_restore_brain_intensity_abcd", "config": ["anatomical_preproc", "brain_extraction"], From 00c3116f636f7504eba1ad8f97adce4c45d5c09d Mon Sep 17 00:00:00 2001 From: Jon Clucas Date: Fri, 28 Oct 2022 15:24:12 +0000 Subject: [PATCH 58/72] :rewind: Remove prototype guardrails from `correct_restore_brain_intensity_abcd` --- CPAC/anat_preproc/anat_preproc.py | 50 ++++++++++++++----------------- 1 file changed, 22 insertions(+), 28 deletions(-) diff --git a/CPAC/anat_preproc/anat_preproc.py b/CPAC/anat_preproc/anat_preproc.py index 2c9c34bba7..4cec796781 100644 --- a/CPAC/anat_preproc/anat_preproc.py +++ b/CPAC/anat_preproc/anat_preproc.py @@ -3149,7 +3149,7 @@ def correct_restore_brain_intensity_abcd(wf, cfg, strat_pool, pipe_num, "switch": "None", "option_key": "using", "option_val": "FreeSurfer-ABCD", - "inputs": [(["desc-preproc_T1w", "desc-reorient_T1w", "T1w"], + "inputs": [("desc-preproc_T1w", "desc-n4_T1w", "desc-restore-brain_T1w", "space-T1w_desc-brain_mask", @@ -3175,23 +3175,23 @@ def correct_restore_brain_intensity_abcd(wf, cfg, strat_pool, pipe_num, merge_t1_acpc.inputs.dimension = 't' - wf.connect(merge_t1_acpc_to_list, 'out', - merge_t1_acpc, 'in_files') + wf.connect(merge_t1_acpc_to_list, 'out', merge_t1_acpc, 'in_files') # fslmaths ${T1wFolder}/xfms/${T1wImage}_dc -mul 0 ${T1wFolder}/xfms/${T1wImage}_dc - multiply_t1_acpc_by_zero = pe.Node(interface=fsl.ImageMaths(), - name=f'multiply_t1_acpc_by_zero_{pipe_num}') - + 
multiply_t1_acpc_by_zero = pe.Node( + interface=fsl.ImageMaths(), + name=f'multiply_t1_acpc_by_zero_{pipe_num}') multiply_t1_acpc_by_zero.inputs.op_string = '-mul 0' - wf.connect(merge_t1_acpc, 'merged_file', - multiply_t1_acpc_by_zero, 'in_file') + wf.connect(merge_t1_acpc, 'merged_file', + multiply_t1_acpc_by_zero, 'in_file') # Ref: https://github.com/DCAN-Labs/DCAN-HCP/blob/master/PostFreeSurfer/PostFreeSurferPipeline.sh#L157 # convertwarp --relout --rel --ref="$T1wFolder"/"$T1wImageBrainMask" --premat="$T1wFolder"/xfms/"$InitialT1wTransform" \ # --warp1="$T1wFolder"/xfms/"$dcT1wTransform" --out="$T1wFolder"/xfms/"$OutputOrigT1wToT1w" - convertwarp_orig_t1_to_t1 = pe.Node(interface=fsl.ConvertWarp(), - name=f'convertwarp_orig_t1_to_t1_{pipe_num}') + convertwarp_orig_t1_to_t1 = pe.Node( + interface=fsl.ConvertWarp(), + name=f'convertwarp_orig_t1_to_t1_{pipe_num}') convertwarp_orig_t1_to_t1.inputs.out_relwarp = True convertwarp_orig_t1_to_t1.inputs.relwarp = True @@ -3199,19 +3199,19 @@ def correct_restore_brain_intensity_abcd(wf, cfg, strat_pool, pipe_num, node, out = strat_pool.get_data('space-T1w_desc-brain_mask') wf.connect(node, out, convertwarp_orig_t1_to_t1, 'reference') - node, out = strat_pool.get_data('from-T1w_to-ACPC_mode-image_desc-aff2rig_xfm') + node, out = strat_pool.get_data( + 'from-T1w_to-ACPC_mode-image_desc-aff2rig_xfm') wf.connect(node, out, convertwarp_orig_t1_to_t1, 'premat') wf.connect(multiply_t1_acpc_by_zero, 'out_file', - convertwarp_orig_t1_to_t1, 'warp1') + convertwarp_orig_t1_to_t1, 'warp1') # Ref: https://github.com/DCAN-Labs/DCAN-HCP/blob/master/PostFreeSurfer/scripts/CreateMyelinMaps.sh#L72-L73 # applywarp --rel --interp=spline -i "$BiasField" -r "$T1wImageBrain" -w "$AtlasTransform" -o "$BiasFieldOutput" applywarp_biasfield = pe.Node(interface=fsl.ApplyWarp(), name=f'applywarp_biasfield_{pipe_num}') + applywarp_biasfield.inputs.relwarp = True applywarp_biasfield.inputs.interp = 'spline' - guardrail_applywarp_biasfield = registration_guardrail_workflow( - applywarp_biasfield) node, out = strat_pool.get_data('desc-fast_biasfield') wf.connect(node, out, applywarp_biasfield, 'in_file') @@ -3227,20 +3227,19 @@ def correct_restore_brain_intensity_abcd(wf, cfg, strat_pool, pipe_num, name=f'threshold_biasfield_{pipe_num}') threshold_biasfield.inputs.op_string = '-thr 0.1' - wf.connect(guardrail_applywarp_biasfield, 'outputspec.out_file', - threshold_biasfield, 'in_file') + wf.connect(applywarp_biasfield, 'out_file', threshold_biasfield, 'in_file') # Ref: https://github.com/DCAN-Labs/DCAN-HCP/blob/master/PostFreeSurfer/scripts/CreateMyelinMaps.sh#L67-L70 # applywarp --rel --interp=spline -i "$OrginalT1wImage" -r "$T1wImageBrain" -w "$OutputOrigT1wToT1w" -o "$OutputT1wImage" applywarp_t1 = pe.Node(interface=fsl.ApplyWarp(), name=f'applywarp_t1_{pipe_num}') + applywarp_t1.inputs.relwarp = True applywarp_t1.inputs.interp = 'spline' - guardrail_applywarp_t1 = registration_guardrail_workflow(applywarp_t1) node, out = strat_pool.get_data('desc-n4_T1w') wf.connect(node, out, applywarp_t1, 'in_file') - + node, out = strat_pool.get_data('space-T1w_desc-brain_mask') wf.connect(node, out, applywarp_t1, 'ref_file') @@ -3248,12 +3247,10 @@ def correct_restore_brain_intensity_abcd(wf, cfg, strat_pool, pipe_num, applywarp_t1, 'field_file') # fslmaths "$OutputT1wImage" -abs "$OutputT1wImage" -odt float - abs_t1 = pe.Node(interface=fsl.ImageMaths(), - name=f'abs_t1_{pipe_num}') + abs_t1 = pe.Node(interface=fsl.ImageMaths(), name=f'abs_t1_{pipe_num}') abs_t1.inputs.op_string = 
'-abs' - wf.connect(guardrail_applywarp_t1, 'outputspec.out_file', - abs_t1, 'in_file') + wf.connect(applywarp_t1, 'out_file', abs_t1, 'in_file') # fslmaths "$OutputT1wImage" -div "$BiasField" "$OutputT1wImageRestore" div_t1_by_biasfield = pe.Node(interface=fsl.ImageMaths(), @@ -3270,14 +3267,11 @@ def correct_restore_brain_intensity_abcd(wf, cfg, strat_pool, pipe_num, apply_mask = pe.Node(interface=fsl.maths.ApplyMask(), name=f'get_restored_corrected_brain_{pipe_num}') - wf.connect(div_t1_by_biasfield, 'out_file', - apply_mask, 'in_file') + wf.connect(div_t1_by_biasfield, 'out_file', apply_mask, 'in_file') node, out = strat_pool.get_data('space-T1w_desc-brain_mask') wf.connect(node, out, apply_mask, 'mask_file') - outputs = { - 'desc-restore-brain_T1w': (apply_mask, 'out_file') - } + outputs = {'desc-restore-brain_T1w': (apply_mask, 'out_file')} - return (wf, outputs) + return wf, outputs From 20e87e48a1daf18d5c0be9e5645ffeb72e9dc102 Mon Sep 17 00:00:00 2001 From: Jon Clucas Date: Fri, 28 Oct 2022 16:14:07 +0000 Subject: [PATCH 59/72] :goal_net: Guardrail `init_brain_extraction_wf` --- CPAC/anat_preproc/ants.py | 131 ++++++++++++++++++++------------------ 1 file changed, 69 insertions(+), 62 deletions(-) diff --git a/CPAC/anat_preproc/ants.py b/CPAC/anat_preproc/ants.py index 368d4b847d..41d5b6f60e 100644 --- a/CPAC/anat_preproc/ants.py +++ b/CPAC/anat_preproc/ants.py @@ -17,23 +17,21 @@ from packaging.version import parse as parseversion, Version # nipype +# pylint: disable=wrong-import-order import CPAC.pipeline.nipype_pipeline_engine as pe from nipype.interfaces import utility as niu from nipype.interfaces.fsl.maths import ApplyMask -from nipype.interfaces.ants import N4BiasFieldCorrection, Atropos, MultiplyImages - -from ..utils.misc import get_template_specs +from nipype.interfaces.ants import (N4BiasFieldCorrection, Atropos, + MultiplyImages) +from CPAC.registration.guardrails import guardrail_selection, retry_clone # niworkflows -from ..utils.interfaces.ants import ( - ImageMath, - ResampleImageBySpacing, - AI, - ThresholdImage, -) +from CPAC.utils.interfaces.ants import (ImageMath, + ResampleImageBySpacing, + AI, + ThresholdImage) from CPAC.utils.interfaces.fixes import ( FixHeaderRegistration as Registration, - FixHeaderApplyTransforms as ApplyTransforms, -) + FixHeaderApplyTransforms as ApplyTransforms) from CPAC.utils.interfaces.utils import CopyXForm @@ -114,8 +112,9 @@ def init_brain_extraction_wf(tpl_target_path, Estimated peak memory consumption of the most hungry nodes in the workflow bids_suffix : str - Sequence type of the first input image. For a list of acceptable values - see https://bids-specification.readthedocs.io/en/latest/\ + Sequence type of the first input image. For a list of + acceptable values see + https://bids-specification.readthedocs.io/en/latest/\ 04-modality-specific-files/01-magnetic-resonance-imaging-data.html#anatomy-imaging-data atropos_refine : bool Enables or disables the whole ATROPOS sub-workflow @@ -146,12 +145,14 @@ def init_brain_extraction_wf(tpl_target_path, computation to a specific region. **Outputs** out_file - Skull-stripped and :abbr:`INU (intensity non-uniformity)`-corrected ``in_files`` + Skull-stripped and :abbr:`INU (intensity non-uniformity) + `-corrected ``in_files`` out_mask Calculated brain mask bias_corrected - The ``in_files`` input images, after :abbr:`INU (intensity non-uniformity)` - correction, before skull-stripping. 
+ The ``in_files`` input images, after + :abbr:`INU (intensity non-uniformity)` correction, before + skull-stripping. bias_image The :abbr:`INU (intensity non-uniformity)` field estimated for each input in ``in_files`` @@ -187,7 +188,8 @@ def init_brain_extraction_wf(tpl_target_path, mem_gb=1.3, mem_x=(3811976743057169 / 302231454903657293676544, 'hdr_file')) - trunc = pe.MapNode(ImageMath(operation='TruncateImageIntensity', op2='0.01 0.999 256'), + trunc = pe.MapNode(ImageMath(operation='TruncateImageIntensity', + op2='0.01 0.999 256'), name='truncate_images', iterfield=['op1']) inu_n4 = pe.MapNode( N4BiasFieldCorrection( @@ -226,6 +228,7 @@ def init_brain_extraction_wf(tpl_target_path, _ants_version = Registration().version if _ants_version and parseversion(_ants_version) >= Version('2.3.0'): init_aff.inputs.search_grid = (40, (0, 40, 40)) + init_aff_nodes = (init_aff, retry_clone(init_aff)) # Set up spatial normalization settings_file = 'antsBrainExtraction_%s.json' if use_laplacian \ @@ -241,10 +244,12 @@ def init_brain_extraction_wf(tpl_target_path, if _ants_version and parseversion(_ants_version) >= Version('2.2.0'): fixed_mask_trait += 's' + norm_nodes, norm_guardrails = wf.nodes_and_guardrails( + norm, registered='warped_image') + map_brainmask = pe.Node( ApplyTransforms(interpolation='Gaussian', float=True), - name='map_brainmask' - ) + name='map_brainmask') map_brainmask.inputs.input_image = str(tpl_mask_path) thr_brainmask = pe.Node(ThresholdImage( @@ -267,24 +272,34 @@ def init_brain_extraction_wf(tpl_target_path, n_procs=omp_nthreads, name='inu_n4_final', iterfield=['input_image']) # Apply mask - apply_mask = pe.MapNode(ApplyMask(), iterfield=['in_file'], name='apply_mask') - + apply_mask = pe.MapNode(ApplyMask(), iterfield=['in_file'], + name='apply_mask') + + wf.connect_retries(init_aff_nodes, [ + (inputnode, 'in_mask', 'fixed_image_mask'), + (res_tmpl, 'output_image', 'fixed_image'), + (res_target, 'output_image', 'moving_image')]) + for i, node in enumerate(norm_nodes): + wf.connect(init_aff_nodes[i], node, [ + ('output_transform', 'initial_moving_transform')]) + wf.connect_retries(norm_nodes, [ + (inputnode, norm, 'in_mask', fixed_mask_trait)]) + norm_rtransforms = guardrail_selection(wf, *norm_nodes, + 'reverse_transforms', + norm_guardrails[0]) + norm_rinvert_flags = guardrail_selection(wf, *norm_nodes, + 'reverse_invert_flags', + norm_guardrails[0]) wf.connect([ (inputnode, trunc, [('in_files', 'op1')]), (inputnode, copy_xform, [(('in_files', _pop), 'hdr_file')]), (inputnode, inu_n4_final, [('in_files', 'input_image')]), - (inputnode, init_aff, [('in_mask', 'fixed_image_mask')]), - (inputnode, norm, [('in_mask', fixed_mask_trait)]), (inputnode, map_brainmask, [(('in_files', _pop), 'reference_image')]), (trunc, inu_n4, [('output_image', 'input_image')]), - (inu_n4, res_target, [ - (('output_image', _pop), 'input_image')]), - (res_tmpl, init_aff, [('output_image', 'fixed_image')]), - (res_target, init_aff, [('output_image', 'moving_image')]), - (init_aff, norm, [('output_transform', 'initial_moving_transform')]), - (norm, map_brainmask, [ - ('reverse_transforms', 'transforms'), - ('reverse_invert_flags', 'invert_transform_flags')]), + (inu_n4, res_target, [(('output_image', _pop), 'input_image')]), + (norm_rtransforms, map_brainmask, [('out', 'transforms')]), + (norm_rinvert_flags, map_brainmask, [ + ('out', 'invert_transform_flags')]), (map_brainmask, thr_brainmask, [('output_image', 'input_image')]), (thr_brainmask, dil_brainmask, [('output_image', 'op1')]), 
(dil_brainmask, get_brainmask, [('output_image', 'op1')]), @@ -294,12 +309,10 @@ def init_brain_extraction_wf(tpl_target_path, (apply_mask, copy_xform, [('out_file', 'out_file')]), (inu_n4_final, copy_xform, [('output_image', 'bias_corrected'), ('bias_image', 'bias_image')]), - (copy_xform, outputnode, [ - ('out_file', 'out_file'), - ('out_mask', 'out_mask'), - ('bias_corrected', 'bias_corrected'), - ('bias_image', 'bias_image')]), - ]) + (copy_xform, outputnode, [('out_file', 'out_file'), + ('out_mask', 'out_mask'), + ('bias_corrected', 'bias_corrected'), + ('bias_image', 'bias_image')])]) if use_laplacian: lap_tmpl = pe.Node(ImageMath(operation='Laplacian', op2='1.5 1'), @@ -311,29 +324,28 @@ def init_brain_extraction_wf(tpl_target_path, mrg_tmpl.inputs.in1 = tpl_target_path mrg_target = pe.Node(niu.Merge(2), name='mrg_target') wf.connect([ - (inu_n4, lap_target, [ - (('output_image', _pop), 'op1')]), + (inu_n4, lap_target, [(('output_image', _pop), 'op1')]), (lap_tmpl, mrg_tmpl, [('output_image', 'in2')]), (inu_n4, mrg_target, [('output_image', 'in1')]), - (lap_target, mrg_target, [('output_image', 'in2')]), - (mrg_tmpl, norm, [('out', 'fixed_image')]), - (mrg_target, norm, [('out', 'moving_image')]), - ]) + (lap_target, mrg_target, [('output_image', 'in2')])]) + wf.connect_retries(norm_nodes, [(mrg_tmpl, 'out', 'fixed_image'), + (mrg_target, 'out', 'moving_image')]) + wf.connect_retries(norm_guardrails, [(mrg_tmpl, 'out', 'reference')]) else: - norm.inputs.fixed_image = tpl_target_path - wf.connect([ - (inu_n4, norm, [ - (('output_image', _pop), 'moving_image')]), - ]) + for i, node in enumerate(norm_nodes): + node.inputs.fixed_image = tpl_target_path + norm_guardrails[i].inputs.reference = tpl_target_path + wf.connect_retries(norm_nodes, [ + (inu_n4, ('output_image', _pop), 'moving_image')]) if atropos_refine: - atropos_model = atropos_model or list(ATROPOS_MODELS[bids_suffix].values()) + atropos_model = atropos_model or list( + ATROPOS_MODELS[bids_suffix].values()) atropos_wf = init_atropos_wf( use_random_seed=atropos_use_random_seed, omp_nthreads=omp_nthreads, mem_gb=mem_gb, - in_segmentation_model=atropos_model, - ) + in_segmentation_model=atropos_model) sel_wm = pe.Node(niu.Select(index=atropos_model[-1] - 1), name='sel_wm', run_without_submitting=True, @@ -343,24 +355,19 @@ def init_brain_extraction_wf(tpl_target_path, wf.disconnect([ (get_brainmask, apply_mask, [('output_image', 'mask_file')]), - (copy_xform, outputnode, [('out_mask', 'out_mask')]), - ]) + (copy_xform, outputnode, [('out_mask', 'out_mask')])]) wf.connect([ - (inu_n4, atropos_wf, [ - ('output_image', 'inputnode.in_files')]), + (inu_n4, atropos_wf, [('output_image', 'inputnode.in_files')]), (thr_brainmask, atropos_wf, [ ('output_image', 'inputnode.in_mask')]), (get_brainmask, atropos_wf, [ ('output_image', 'inputnode.in_mask_dilated')]), (atropos_wf, sel_wm, [('outputnode.out_tpms', 'inlist')]), (sel_wm, inu_n4_final, [('out', 'weight_image')]), - (atropos_wf, apply_mask, [ - ('outputnode.out_mask', 'mask_file')]), - (atropos_wf, outputnode, [ - ('outputnode.out_mask', 'out_mask'), - ('outputnode.out_segm', 'out_segm'), - ('outputnode.out_tpms', 'out_tpms')]), - ]) + (atropos_wf, apply_mask, [('outputnode.out_mask', 'mask_file')]), + (atropos_wf, outputnode, [('outputnode.out_mask', 'out_mask'), + ('outputnode.out_segm', 'out_segm'), + ('outputnode.out_tpms', 'out_tpms')])]) return wf From eed56b3cebbf351bfbf5dd83485d8bf2d938cbac Mon Sep 17 00:00:00 2001 From: Jon Clucas Date: Fri, 28 Oct 2022 17:08:00 +0000 
Subject: [PATCH 60/72] :wrench: Add `fail_fast` configuration option to expose Nipype's `stop_on_first_crash` Co-authored-by: Greg Kiar --- CHANGELOG.md | 1 + CPAC/pipeline/cpac_pipeline.py | 6 ++--- CPAC/pipeline/schema.py | 26 +++++++++++++++++++ .../configs/pipeline_config_blank.yml | 3 ++- .../configs/pipeline_config_default.yml | 2 ++ .../configs/pipeline_config_rbc-options.yml | 2 ++ CPAC/utils/utils.py | 2 ++ dev/docker_data/run.py | 6 +++++ 8 files changed, 44 insertions(+), 4 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 4d58af9559..e1eb4d6977 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -22,6 +22,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 - Added the ability to use TotalReadoutTime of epi field maps in the calculation of FSL topup distortion correction. - Added ability to set minimum quality measure thresholds to all registration steps - Difference method (``-``) for ``CPAC.utils.configuration.Configuration`` instances +- Added ``fail_fast`` configuration setting and CLI flag ### Changed - Added a level of depth to `working` directories to match `log` and `output` directory structure diff --git a/CPAC/pipeline/cpac_pipeline.py b/CPAC/pipeline/cpac_pipeline.py index 432e9ab0d4..993473c1d6 100644 --- a/CPAC/pipeline/cpac_pipeline.py +++ b/CPAC/pipeline/cpac_pipeline.py @@ -278,9 +278,9 @@ def run_workflow(sub_dict, c, run, pipeline_timing_info=None, p_name=None, }, 'execution': { 'crashfile_format': 'txt', - 'resource_monitor_frequency': 0.2 - } - }) + 'resource_monitor_frequency': 0.2, + 'stop_on_first_crash': c['pipeline_setup', 'system_config', + 'fail_fast']}}) config.enable_resource_monitor() logging.update_logging(config) diff --git a/CPAC/pipeline/schema.py b/CPAC/pipeline/schema.py index 969c0b02f2..e19ffaa094 100644 --- a/CPAC/pipeline/schema.py +++ b/CPAC/pipeline/schema.py @@ -25,6 +25,7 @@ from CPAC import docs_prefix from CPAC.pipeline.random_state.seed import MAX_SEED from CPAC.utils.datatypes import ListFromItem +from CPAC.utils.utils import YAML_BOOLS # 1 or more digits, optional decimal, 'e', optional '-', 1 or more digits scientific_notation_str_regex = r'^([0-9]+(\.[0-9]*)*(e)-{0,1}[0-9]+)*$' @@ -37,6 +38,30 @@ r'(x[0-9]+(\.[0-9]*){0,1}[a-z]*)*$' Number = Any(float, int, All(str, Match(scientific_notation_str_regex))) + + +def str_to_bool1_1(x): # pylint: disable=invalid-name + '''Convert strings to Booleans for YAML1.1 syntax + Ref https://yaml.org/type/bool.html + Parameters + ---------- + x : any + Returns + ------- + bool + ''' + if isinstance(x, str): + try: + x = float(x) + if x == 0: + return False + except ValueError: + pass + x = (True if str(x).lower() in YAML_BOOLS[True] else + False if str(x).lower() in YAML_BOOLS[False] else x) + return bool(x) + + forkable = All(Coerce(ListFromItem), [bool], Length(max=2)) valid_options = { 'acpc': { @@ -244,6 +269,7 @@ def sanitize(filename): 'path': Maybe(str), }, 'system_config': { + 'fail_fast': bool, 'FSLDIR': Maybe(str), 'on_grid': { 'run': bool, diff --git a/CPAC/resources/configs/pipeline_config_blank.yml b/CPAC/resources/configs/pipeline_config_blank.yml index 1b47e0681a..6a86f9a83f 100644 --- a/CPAC/resources/configs/pipeline_config_blank.yml +++ b/CPAC/resources/configs/pipeline_config_blank.yml @@ -11,7 +11,7 @@ pipeline_setup: # Name for this pipeline configuration - useful for identification. pipeline_name: cpac-blank-template - + output_directory: # Directory where C-PAC should write out processed data, logs, and crash reports. 
@@ -80,6 +80,7 @@ pipeline_setup:
     path: /output/crash
 
   system_config:
+    fail_fast: Off
 
     # Select Off if you intend to run CPAC on a single machine.
     # If set to On, CPAC will attempt to submit jobs through the job scheduler / resource manager selected below.
diff --git a/CPAC/resources/configs/pipeline_config_default.yml b/CPAC/resources/configs/pipeline_config_default.yml
index 3a4be49e18..dc1f8062e8 100644
--- a/CPAC/resources/configs/pipeline_config_default.yml
+++ b/CPAC/resources/configs/pipeline_config_default.yml
@@ -98,6 +98,8 @@ pipeline_setup:
     path: /outputs/crash
 
   system_config:
+    # Stop workflow execution on first crash?
+    fail_fast: Off
 
     # Random seed used to fix the state of execution.
     # If unset, each process uses its own default.
diff --git a/CPAC/resources/configs/pipeline_config_rbc-options.yml b/CPAC/resources/configs/pipeline_config_rbc-options.yml
index 0385026393..b79f9a8cda 100644
--- a/CPAC/resources/configs/pipeline_config_rbc-options.yml
+++ b/CPAC/resources/configs/pipeline_config_rbc-options.yml
@@ -15,6 +15,8 @@ pipeline_setup:
 
   system_config:
 
+    fail_fast: Off
+
     # Random seed used to fix the state of execution.
     # If unset, each process uses its own default.
     # If set, a `random.log` file will be generated logging the random seed and each node to which that seed was applied.
diff --git a/CPAC/utils/utils.py b/CPAC/utils/utils.py
index 1bd6953a40..fffc272a48 100644
--- a/CPAC/utils/utils.py
+++ b/CPAC/utils/utils.py
@@ -36,6 +36,8 @@
     CONFIGS_DIR, '1.7-1.8-nesting-mappings.yml'), 'r'))
 NESTED_CONFIG_DEPRECATIONS = yaml.safe_load(open(os.path.join(
     CONFIGS_DIR, '1.7-1.8-deprecations.yml'), 'r'))
+YAML_BOOLS = {True: ('on', 't', 'true', 'y', 'yes'),
+              False: ('f', 'false', 'n', 'no', 'off')}
 
 
 def get_last_prov_entry(prov):
diff --git a/dev/docker_data/run.py b/dev/docker_data/run.py
index 0d0d549516..b7379e0bbf 100755
--- a/dev/docker_data/run.py
+++ b/dev/docker_data/run.py
@@ -28,6 +28,7 @@
 from CPAC import license_notice, __version__
 from CPAC.pipeline import AVAILABLE_PIPELINE_CONFIGS
 from CPAC.pipeline.random_state import set_up_random_state
+from CPAC.pipeline.schema import str_to_bool1_1
 from CPAC.utils.bids_utils import create_cpac_data_config, \
                                   load_cpac_data_config, \
                                   load_yaml_config, \
@@ -252,6 +253,8 @@ def run_main():
                         help='Disable file logging, this is useful for '
                              'clusters that have disabled file locking.',
                         default=False)
+    parser.add_argument('--fail_fast', type=str.title,
+                        help='# Stop workflow execution on first crash?')
 
     parser.add_argument('--participant_label',
                         help='The label of the participant that should be '
@@ -612,6 +615,9 @@ def run_main():
                       'Either change the output directory to something '
                       'local or turn off the --save_working_dir flag',
                       category=UserWarning)
+    if args.fail_fast is not None:
+        c['pipeline_setup', 'system_config',
+          'fail_fast'] = str_to_bool1_1(args.fail_fast)
 
     if c['pipeline_setup']['output_directory']['quality_control'][
             'generate_xcpqc_files']:

From 5cc28cfa3124b960d7becd52e6803d3cba83e4fe Mon Sep 17 00:00:00 2001
From: Jon Clucas 
Date: Fri, 28 Oct 2022 18:46:37 +0000
Subject: [PATCH 61/72] :fire: Remove prototype guardrail definitions

---
 CPAC/registration/guardrails.py | 119 +------------------------------
 1 file changed, 3 insertions(+), 116 deletions(-)

diff --git a/CPAC/registration/guardrails.py b/CPAC/registration/guardrails.py
index 5839a2c8d9..e1eb1b8a76 100644
--- a/CPAC/registration/guardrails.py
+++ b/CPAC/registration/guardrails.py
@@ -16,11 +16,9 @@
 # License along with C-PAC. If not, see . 
"""Guardrails to protect against bad registrations""" import logging -from typing import Tuple, Union -from copy import deepcopy -from nipype.interfaces.ants import Registration -from nipype.interfaces.fsl import FLIRT +from typing import Tuple from nipype.interfaces.utility import Function, Merge, Select +# pylint: disable=unused-import from CPAC.pipeline.nipype_pipeline_engine import Node, Workflow from CPAC.pipeline.random_state.seed import increment_seed from CPAC.qc import qc_masks, registration_guardrail_thresholds @@ -28,10 +26,6 @@ from CPAC.registration.utils import hardcoded_reg from CPAC.utils.docs import retry_docstring -_SPEC_KEYS = { - FLIRT: {'reference': 'reference', 'registered': 'out_file'}, - Registration: {'reference': 'reference', 'registered': 'out_file'}} - def guardrail_selection(wf: 'Workflow', node1: 'Node', node2: 'Node', output_key: str = 'registered', @@ -164,39 +158,6 @@ def registration_guardrail_node(name=None, retry_num=0): return node -def registration_guardrail_workflow(registration_node, retry=True): - """A workflow to handle hitting a registration guardrail - - Parameters - ---------- - name : str - - registration_node : Node - - retry : bool, optional - - Returns - ------- - Workflow - """ - name = f'{registration_node.name}_guardrail' - wf = Workflow(name=f'{name}_wf') - outputspec = deepcopy(registration_node.outputs) - guardrail = registration_guardrail_node(name) - outkey = spec_key(registration_node, 'registered') - wf.connect([ - (registration_node, guardrail, [ - (spec_key(registration_node, 'reference'), 'reference')]), - (registration_node, guardrail, [(outkey, 'registered')])]) - if retry: - wf = retry_registration(wf, registration_node, - guardrail.outputs.registered)[0] - else: - wf.connect(guardrail, 'registered', outputspec, outkey) - # connect_from_spec(outputspec, registration_node, outkey) - return wf - - def retry_clone(node: 'Node') -> 'Node': """Function to clone a node, name the clone, and increment its random seed @@ -212,42 +173,7 @@ def retry_clone(node: 'Node') -> 'Node': return increment_seed(node.clone(f'retry_{node.name}')) -def retry_registration(wf, registration_node, registered): - """Function conditionally retry registration if previous attempt failed - - Parameters - ---------- - wf : Workflow - - registration_node : Node - - registered : str - - Returns - ------- - Workflow - - Node - """ - name = f'retry_{registration_node.name}' - retry_node = Node(Function(function=retry_registration_node, - inputs=['registered', 'registration_node'], - outputs=['registered']), name=name) - retry_node.inputs.registration_node = registration_node - inputspec = registration_node.inputs - outputspec = registration_node.outputs - outkey = spec_key(registration_node, 'registered') - guardrail = registration_guardrail_node(f'{name}_guardrail') - # connect_from_spec(inputspec, retry_node) - wf.connect([ - (inputspec, guardrail, [ - (spec_key(retry_node, 'reference'), 'reference')]), - (retry_node, guardrail, [(outkey, 'registered')]), - (guardrail, outputspec, [('registered', outkey)])]) - # connect_from_spec(retry_node, outputspec, registered) - return wf, retry_node - - +# pylint: disable=missing-function-docstring,too-many-arguments @retry_docstring(hardcoded_reg) def retry_hardcoded_reg(moving_brain, reference_brain, moving_skull, reference_skull, ants_para, moving_mask=None, @@ -259,42 +185,3 @@ def retry_hardcoded_reg(moving_brain, reference_brain, moving_skull, reference_skull, ants_para, moving_mask, reference_mask, 
fixed_image_mask, interp, reg_with_skull) - - -def retry_registration_node(registered, registration_node): - """Retry registration if previous attempt failed - - Parameters - ---------- - registered : str - - registration_node : Node - - Returns - ------- - Node - """ - from CPAC.pipeline.random_state.seed import increment_seed - if registered.endswith('-failed'): - retry_node = increment_seed(registration_node.clone( - name=f'{registration_node.name}-retry')) - return retry_node - return registration_node - - -def spec_key(interface, guardrail_key): - """Function to get the canonical key to connect to a guardrail - - Parameters - ---------- - interface : Interface or Node - - guardrail_key : str - - Returns - ------- - str - """ - if isinstance(interface, Node): - interface = interface.interface - return _SPEC_KEYS.get(interface, {}).get(guardrail_key, guardrail_key) From 50261d41f1a9364d58041644d0baa67efb19adad Mon Sep 17 00:00:00 2001 From: Jon Clucas Date: Fri, 28 Oct 2022 19:08:22 +0000 Subject: [PATCH 62/72] :memo: Expand guardrail docstrings --- CPAC/registration/guardrails.py | 23 ++++++++++++++++++++++- 1 file changed, 22 insertions(+), 1 deletion(-) diff --git a/CPAC/registration/guardrails.py b/CPAC/registration/guardrails.py index e1eb1b8a76..1ba3db6d91 100644 --- a/CPAC/registration/guardrails.py +++ b/CPAC/registration/guardrails.py @@ -31,7 +31,25 @@ def guardrail_selection(wf: 'Workflow', node1: 'Node', node2: 'Node', output_key: str = 'registered', guardrail_node: 'Node' = None) -> Node: """Generate requisite Nodes for choosing a path through the graph - with retries + with retries. + + Takes two nodes to choose an output from. These nodes are assumed + to be guardrail nodes if `output_key` and `guardrail_node` are not + specified. + + A ``nipype.interfaces.utility.Merge`` is generated, connecting + ``output_key`` from ``node1`` and ``node2`` in that order. + + A ``nipype.interfaces.utility.Select`` node is generated taking the + output from the generated ``Merge`` and using the ``failed_qc`` + output of ``guardrail_node`` (``node1`` if ``guardrail_node`` is + unspecified). + + All relevant connections are made in the given Workflow. + + The ``Select`` node is returned; its output is keyed ``out`` and + contains the value of the given ``output_key`` (``registered`` if + unspecified). Parameters ---------- @@ -102,6 +120,9 @@ def registration_guardrail(registered: str, reference: str, failed_qc : int metrics met specified thresholds?, used as index for selecting outputs + .. 
seealso:: + + :py:mod:`guardrail_selection` """ logger = logging.getLogger('nipype.workflow') qc_metrics = qc_masks(registered, reference) From d4b7d0247fe21133089efcf833d2a678f99c525c Mon Sep 17 00:00:00 2001 From: Jon Clucas Date: Fri, 28 Oct 2022 19:24:25 +0000 Subject: [PATCH 63/72] :goal_net: Guardrail `distcor_blip_afni_qwarp` --- CPAC/anat_preproc/anat_preproc.py | 6 ++-- CPAC/anat_preproc/ants.py | 6 ++-- .../distortion_correction.py | 35 ++++++++++--------- 3 files changed, 24 insertions(+), 23 deletions(-) diff --git a/CPAC/anat_preproc/anat_preproc.py b/CPAC/anat_preproc/anat_preproc.py index 4cec796781..094599baa5 100644 --- a/CPAC/anat_preproc/anat_preproc.py +++ b/CPAC/anat_preproc/anat_preproc.py @@ -96,11 +96,9 @@ def acpc_alignment(config=None, acpc_target='whole-head', mask=False, name='anat_acpc_2_fov_convertxfm') convert_fov_xfm.inputs.invert_xfm = True - preproc.connect(fov, fov_mtx, - convert_fov_xfm, 'in_file') + preproc.connect(fov, fov_mtx, convert_fov_xfm, 'in_file') - align = pe.Node(interface=fsl.FLIRT(), - name='anat_acpc_3_flirt') + align = pe.Node(interface=fsl.FLIRT(), name='anat_acpc_3_flirt') align.inputs.interp = 'spline' align.inputs.searchr_x = [30, 30] align.inputs.searchr_y = [30, 30] diff --git a/CPAC/anat_preproc/ants.py b/CPAC/anat_preproc/ants.py index 41d5b6f60e..868b981ac2 100644 --- a/CPAC/anat_preproc/ants.py +++ b/CPAC/anat_preproc/ants.py @@ -280,10 +280,10 @@ def init_brain_extraction_wf(tpl_target_path, (res_tmpl, 'output_image', 'fixed_image'), (res_target, 'output_image', 'moving_image')]) for i, node in enumerate(norm_nodes): - wf.connect(init_aff_nodes[i], node, [ - ('output_transform', 'initial_moving_transform')]) + wf.connect(init_aff_nodes[i], 'output_transform', + node, 'initial_moving_transform') wf.connect_retries(norm_nodes, [ - (inputnode, norm, 'in_mask', fixed_mask_trait)]) + (inputnode, 'in_mask', fixed_mask_trait)]) norm_rtransforms = guardrail_selection(wf, *norm_nodes, 'reverse_transforms', norm_guardrails[0]) diff --git a/CPAC/distortion_correction/distortion_correction.py b/CPAC/distortion_correction/distortion_correction.py index d47398438e..b497596c98 100644 --- a/CPAC/distortion_correction/distortion_correction.py +++ b/CPAC/distortion_correction/distortion_correction.py @@ -20,21 +20,19 @@ import subprocess import nibabel as nb - +# pylint: disable=ungrouped-imports,wrong-import-order from CPAC.pipeline import nipype_pipeline_engine as pe -from nipype.interfaces import afni, fsl -import nipype.interfaces.utility as util -import nipype.interfaces.ants as ants -import nipype.interfaces.afni.preprocess as preprocess -import nipype.interfaces.afni.utils as afni_utils +from nipype.interfaces import afni, ants, fsl, utility as util +from nipype.interfaces.afni import preprocess, utils as afni_utils -from CPAC.pipeline.engine import wrap_block +# from CPAC.pipeline.engine import wrap_block +from CPAC.registration.guardrails import guardrail_selection from CPAC.utils import function from CPAC.utils.interfaces.function import Function from CPAC.utils.datasource import match_epi_fmaps -from CPAC.func_preproc.func_preproc import bold_mask_afni, bold_masking +# from CPAC.func_preproc.func_preproc import bold_mask_afni, bold_masking from CPAC.distortion_correction.utils import run_convertwarp, \ phase_encode, \ @@ -398,17 +396,22 @@ def distcor_blip_afni_qwarp(wf, cfg, strat_pool, pipe_num, opt=None): func_edge_detect.inputs.expr = 'a*b' func_edge_detect.inputs.outputtype = 'NIFTI_GZ' - - wf.connect(match_epi_fmaps_node, 
'opposite_pe_epi', func_edge_detect, 'in_file_a') + + wf.connect(match_epi_fmaps_node, 'opposite_pe_epi', + func_edge_detect, 'in_file_a') wf.connect(func_get_brain_mask, 'out_file', func_edge_detect, 'in_file_b') - opp_pe_to_func = pe.Node(interface=fsl.FLIRT(), name='opp_pe_to_func') - opp_pe_to_func.inputs.cost = 'corratio' - - wf.connect(func_edge_detect, 'out_file', opp_pe_to_func, 'in_file') + _opp_pe_to_func = pe.Node(interface=fsl.FLIRT(), name='opp_pe_to_func') + _opp_pe_to_func.inputs.cost = 'corratio' + optf_nodes, optf_guardrails = wf.nodes_and_guardrails( + _opp_pe_to_func, registered='out_file') node, out = strat_pool.get_data('desc-mean_bold') - wf.connect(node, out, opp_pe_to_func, 'reference') + wf.connect_retries(optf_nodes, [(func_edge_detect, 'out_file', 'in_file'), + (node, out, 'reference')]) + wf.connect_retries(optf_guardrails, [ + (node, out, 'reference')]) + opp_pe_to_func = guardrail_selection(wf, *optf_guardrails) prep_qwarp_input_imports = ['import os', 'import subprocess'] prep_qwarp_input = \ @@ -433,7 +436,7 @@ def distcor_blip_afni_qwarp(wf, cfg, strat_pool, pipe_num, opt=None): imports=calculate_blip_warp_imports), name='calc_blip_warp') - wf.connect(opp_pe_to_func, 'out_file', calc_blip_warp, 'opp_pe') + wf.connect(opp_pe_to_func, 'out', calc_blip_warp, 'opp_pe') wf.connect(prep_qwarp_input, 'qwarp_input', calc_blip_warp, 'same_pe') convert_afni_warp_imports = ['import os', 'import nibabel as nb'] From 8f917a7e4b02344d22fcdb9cfc83862bd4f73875 Mon Sep 17 00:00:00 2001 From: Jon Clucas Date: Fri, 28 Oct 2022 19:31:48 +0000 Subject: [PATCH 64/72] :goal_net: Guardrail 'distcor_blip_fsl_topup' --- .../distortion_correction.py | 16 +++++++++++----- 1 file changed, 11 insertions(+), 5 deletions(-) diff --git a/CPAC/distortion_correction/distortion_correction.py b/CPAC/distortion_correction/distortion_correction.py index b497596c98..4b981678e9 100644 --- a/CPAC/distortion_correction/distortion_correction.py +++ b/CPAC/distortion_correction/distortion_correction.py @@ -715,21 +715,27 @@ def distcor_blip_fsl_topup(wf, cfg, strat_pool, pipe_num, opt=None): flirt.inputs.dof = 6 flirt.inputs.interp = 'spline' flirt.inputs.out_matrix_file = 'SBRef2PhaseTwo_gdc.mat' + flirt_nodes, flirt_guardrails = wf.nodes_and_guardrails( + flirt, registered='out_file') - wf.connect(mean_bold.node, mean_bold.out, flirt, 'in_file') - wf.connect(choose_phase, 'out_phase_image', flirt, 'reference') + wf.connect_retries(flirt_nodes, [ + (mean_bold.node, mean_bold.out, 'in_file'), + (choose_phase, 'out_phase_image', 'reference')]) + wf.connect_retries(flirt_guardrails, [ + (choose_phase, 'out_phase_image', 'reference')]) + flirt_matrix = guardrail_selection(wf, *flirt_nodes, 'out_matrix_file', + flirt_guardrails[0]) # fsl_convert_xfm convert_xfm = pe.Node(interface=fsl.ConvertXFM(), name="convert_xfm") convert_xfm.inputs.concat_xfm = True convert_xfm.inputs.out_file = 'SBRef2WarpField.mat' - wf.connect(flirt, 'out_matrix_file', convert_xfm, 'in_file') + wf.connect(flirt_matrix, 'out', convert_xfm, 'in_file') wf.connect(vnum_base, 'out_motion_mat', convert_xfm, 'in_file2') # fsl_convert_warp - convert_warp = pe.Node(interface=fsl.ConvertWarp(), - name="convert_warp") + convert_warp = pe.Node(interface=fsl.ConvertWarp(), name="convert_warp") convert_warp.inputs.relwarp = True convert_warp.inputs.out_relwarp = True convert_warp.inputs.out_file = 'WarpField.nii.gz' From 744411f9289eaa463873ccde1f7e08236fa55818 Mon Sep 17 00:00:00 2001 From: Jon Clucas Date: Fri, 28 Oct 2022 19:42:53 +0000 
Subject: [PATCH 65/72] :goal_net: Guardrail `linear_reg_func_to_anat` --- CPAC/func_preproc/func_preproc.py | 54 ++++++++++++++++--------------- 1 file changed, 28 insertions(+), 26 deletions(-) diff --git a/CPAC/func_preproc/func_preproc.py b/CPAC/func_preproc/func_preproc.py index c8661b7e8c..f03e4fb852 100644 --- a/CPAC/func_preproc/func_preproc.py +++ b/CPAC/func_preproc/func_preproc.py @@ -1,6 +1,8 @@ # pylint: disable=ungrouped-imports,wrong-import-order,wrong-import-position from nipype import logging from nipype.interfaces import afni, ants, fsl, utility as util + +from CPAC.registration.guardrails import guardrail_selection logger = logging.getLogger('nipype.workflow') from CPAC.pipeline import nipype_pipeline_engine as pe from nipype.interfaces.afni import preprocess @@ -104,20 +106,23 @@ def anat_refined_mask(init_bold_mask=True, wf_name='init_bold_mask'): name='func_to_anat_linear_reg') linear_reg_func_to_anat.inputs.cost = 'mutualinfo' linear_reg_func_to_anat.inputs.dof = 6 - - wf.connect(func_tmp_brain, 'out_file', - linear_reg_func_to_anat, 'in_file') - - wf.connect(input_node, 'anat_brain', - linear_reg_func_to_anat, 'reference') + linear_reg_nodes, linear_reg_guardrails = wf.nodes_and_guardrails( + linear_reg_func_to_anat, registered='out_file') + wf.connect_retries(linear_reg_nodes, [ + (func_tmp_brain, 'out_file', 'in_file'), + (input_node, 'anat_brain', 'reference')]) + wf.connect_retries(linear_reg_guardrails, [ + (input_node, 'anat_brain', 'reference')]) + linear_reg_matrix = guardrail_selection(wf, *linear_reg_nodes, + 'out_matrix_file', + linear_reg_guardrails[0]) # 3.2 Inverse func to anat affine inv_func_to_anat_affine = pe.Node(interface=fsl.ConvertXFM(), name='inv_func2anat_affine') inv_func_to_anat_affine.inputs.invert_xfm = True - wf.connect(linear_reg_func_to_anat, 'out_matrix_file', - inv_func_to_anat_affine, 'in_file') + wf.connect(linear_reg_matrix, 'out', inv_func_to_anat_affine, 'in_file') # 4. anat mask to func space # Transform anatomical mask to functional space to get BOLD mask @@ -127,32 +132,29 @@ def anat_refined_mask(init_bold_mask=True, wf_name='init_bold_mask'): reg_anat_mask_to_func.inputs.cost = 'mutualinfo' reg_anat_mask_to_func.inputs.dof = 6 reg_anat_mask_to_func.inputs.interp = 'nearestneighbour' - - wf.connect(input_node, 'anatomical_brain_mask', - reg_anat_mask_to_func, 'in_file') - - wf.connect(func_tmp_brain, 'out_file', - reg_anat_mask_to_func, 'reference') - - wf.connect(inv_func_to_anat_affine, 'out_file', - reg_anat_mask_to_func, 'in_matrix_file') + ramtf_nodes, ramtf_guardrails = wf.nodes_and_guardrails( + reg_anat_mask_to_func, registered='out_file') + wf.connect_retries(ramtf_nodes, [ + (input_node, 'anatomical_brain_mask', 'in_file'), + (func_tmp_brain, 'out_file', 'reference'), + (inv_func_to_anat_affine, 'out_file', 'in_matrix_file')]) + wf.connect_retries(ramtf_guardrails, [ + (func_tmp_brain, 'out_file', 'reference')]) + # pylint: disable=no-value-for-parameter + anat_mask_to_func = guardrail_selection(wf, *ramtf_guardrails) # 5. 
get final func mask: refine func tmp mask with anat_mask_in_func mask func_mask = pe.Node(interface=fsl.MultiImageMaths(), name='func_mask') func_mask.inputs.op_string = "-mul %s" - wf.connect(reg_anat_mask_to_func, 'out_file', - func_mask, 'operand_files') + wf.connect(anat_mask_to_func, 'out', func_mask, 'operand_files') - if init_bold_mask == True: - wf.connect(func_tmp_brain_mask_dil, 'out_file', - func_mask, 'in_file') + if init_bold_mask is True: + wf.connect(func_tmp_brain_mask_dil, 'out_file', func_mask, 'in_file') else: - wf.connect(input_node, 'init_func_brain_mask', - func_mask, 'in_file') + wf.connect(input_node, 'init_func_brain_mask', func_mask, 'in_file') - wf.connect(func_mask, 'out_file', - output_node, 'func_brain_mask') + wf.connect(func_mask, 'out_file', output_node, 'func_brain_mask') return wf From 51051d9544d92a31a9d893e62c170324434a4180 Mon Sep 17 00:00:00 2001 From: Jon Clucas Date: Fri, 28 Oct 2022 20:11:55 +0000 Subject: [PATCH 66/72] :goal_net: Guardrail `anat_refined_mask` --- CPAC/func_preproc/func_preproc.py | 69 +++++++++++++++---------------- 1 file changed, 33 insertions(+), 36 deletions(-) diff --git a/CPAC/func_preproc/func_preproc.py b/CPAC/func_preproc/func_preproc.py index f03e4fb852..286278084e 100644 --- a/CPAC/func_preproc/func_preproc.py +++ b/CPAC/func_preproc/func_preproc.py @@ -158,9 +158,11 @@ def anat_refined_mask(init_bold_mask=True, wf_name='init_bold_mask'): return wf + def anat_based_mask(wf_name='bold_mask'): -# reference DCAN lab BOLD mask -# https://github.com/DCAN-Labs/DCAN-HCP/blob/master/fMRIVolume/scripts/DistortionCorrectionAndEPIToT1wReg_FLIRTBBRAndFreeSurferBBRbased.sh + # pylint: disable=line-too-long + # reference DCAN lab BOLD mask + # https://github.com/DCAN-Labs/DCAN-HCP/blob/master/fMRIVolume/scripts/DistortionCorrectionAndEPIToT1wReg_FLIRTBBRAndFreeSurferBBRbased.sh wf = pe.Workflow(name=wf_name) input_node = pe.Node(util.IdentityInterface(fields=['func', @@ -169,11 +171,11 @@ def anat_based_mask(wf_name='bold_mask'): name='inputspec') output_node = pe.Node(util.IdentityInterface(fields=['func_brain_mask']), - name='outputspec') + name='outputspec') - # 0. Take single volume of func + # 0. Take single volume of func func_single_volume = pe.Node(interface=afni.Calc(), - name='func_single_volume') + name='func_single_volume') func_single_volume.inputs.set( expr='a', @@ -181,59 +183,54 @@ def anat_based_mask(wf_name='bold_mask'): outputtype='NIFTI_GZ' ) - wf.connect(input_node, 'func', - func_single_volume, 'in_file_a') + wf.connect(input_node, 'func', func_single_volume, 'in_file_a') # 1. 
Register func head to anat head to get func2anat matrix linear_reg_func_to_anat = pe.Node(interface=fsl.FLIRT(), - name='func_to_anat_linear_reg') + name='func_to_anat_linear_reg') linear_reg_func_to_anat.inputs.dof = 6 linear_reg_func_to_anat.inputs.interp = 'spline' linear_reg_func_to_anat.inputs.searchr_x = [30, 30] linear_reg_func_to_anat.inputs.searchr_y = [30, 30] linear_reg_func_to_anat.inputs.searchr_z = [30, 30] + func_to_anat_nodes, func_to_anat_guardrails = wf.nodes_and_guardrails( + linear_reg_func_to_anat, registered='out_file') + wf.connect_retries(func_to_anat_nodes, [ + (func_single_volume, 'out_file', 'in_file'), + (input_node, 'anat_head', 'reference')]) + wf.connect_retries(func_to_anat_guardrails, [ + (input_node, 'anat_head', 'reference')]) + func_to_anat_matrix = guardrail_selection(wf, *func_to_anat_nodes, + 'out_matrix_file', + func_to_anat_guardrails[0]) - wf.connect(func_single_volume, 'out_file', - linear_reg_func_to_anat, 'in_file') - - wf.connect(input_node, 'anat_head', - linear_reg_func_to_anat, 'reference') - - # 2. Inverse func to anat affine, to get anat-to-func transform + # 2. Inverse func to anat affine, to get anat-to-func transform inv_func_to_anat_affine = pe.Node(interface=fsl.ConvertXFM(), - name='inv_func2anat_affine') + name='inv_func2anat_affine') inv_func_to_anat_affine.inputs.invert_xfm = True - wf.connect(linear_reg_func_to_anat, 'out_matrix_file', - inv_func_to_anat_affine, 'in_file') + wf.connect(func_to_anat_matrix, 'out', inv_func_to_anat_affine, 'in_file') # 3. get BOLD mask - # 3.1 Apply anat-to-func transform to transfer anatomical brain to functional space + # 3.1 Apply anat-to-func transform to transfer anatomical brain to + # functional space reg_anat_brain_to_func = pe.Node(interface=fsl.ApplyWarp(), - name='reg_anat_brain_to_func') + name='reg_anat_brain_to_func') reg_anat_brain_to_func.inputs.interp = 'nn' reg_anat_brain_to_func.inputs.relwarp = True - wf.connect(input_node, 'anat_brain', - reg_anat_brain_to_func, 'in_file') - - wf.connect(input_node, 'func', - reg_anat_brain_to_func, 'ref_file') - - wf.connect(inv_func_to_anat_affine, 'out_file', - reg_anat_brain_to_func, 'premat') + wf.connect(input_node, 'anat_brain', reg_anat_brain_to_func, 'in_file') + wf.connect(input_node, 'func', reg_anat_brain_to_func, 'ref_file') + wf.connect(inv_func_to_anat_affine, 'out_file', + reg_anat_brain_to_func, 'premat') - # 3.2 Binarize transfered image and fill holes to get BOLD mask. + # 3.2 Binarize transfered image and fill holes to get BOLD mask. 
# Binarize - func_mask_bin = pe.Node(interface=fsl.ImageMaths(), - name='func_mask') + func_mask_bin = pe.Node(interface=fsl.ImageMaths(), name='func_mask') func_mask_bin.inputs.op_string = '-bin' - wf.connect(reg_anat_brain_to_func, 'out_file', - func_mask_bin, 'in_file') - - wf.connect(func_mask_bin, 'out_file', - output_node, 'func_brain_mask') + wf.connect(reg_anat_brain_to_func, 'out_file', func_mask_bin, 'in_file') + wf.connect(func_mask_bin, 'out_file', output_node, 'func_brain_mask') return wf From 3cce8759f4a91bfdfc7a293f33d8dd328455dc41 Mon Sep 17 00:00:00 2001 From: Jon Clucas Date: Fri, 28 Oct 2022 20:31:21 +0000 Subject: [PATCH 67/72] :goal_net: Guardrail `bold_mask_anatomical_based` --- CPAC/func_preproc/func_preproc.py | 21 ++++++++++++--------- 1 file changed, 12 insertions(+), 9 deletions(-) diff --git a/CPAC/func_preproc/func_preproc.py b/CPAC/func_preproc/func_preproc.py index 286278084e..d90952021c 100644 --- a/CPAC/func_preproc/func_preproc.py +++ b/CPAC/func_preproc/func_preproc.py @@ -1914,20 +1914,25 @@ def bold_mask_anatomical_based(wf, cfg, strat_pool, pipe_num, opt=None): linear_reg_func_to_anat.inputs.searchr_y = [30, 30] linear_reg_func_to_anat.inputs.searchr_z = [30, 30] - wf.connect(func_single_volume, 'out_file', - linear_reg_func_to_anat, 'in_file') + func_to_anat_nodes, func_to_anat_guardrails = wf.nodes_and_guardrails( + linear_reg_func_to_anat, registered='out_file') node, out = strat_pool.get_data(["desc-preproc_T1w", "desc-reorient_T1w", "T1w"]) - wf.connect(node, out, linear_reg_func_to_anat, 'reference') + wf.connect_retries(func_to_anat_nodes, [ + (func_single_volume, 'out_file', 'in_file'), + (node, out, 'reference')]) + wf.connect_retries(func_to_anat_guardrails, [(node, out, 'reference')]) + func_to_anat_matrix = guardrail_selection(wf, *func_to_anat_nodes, + 'out_matrix_file', + func_to_anat_guardrails[0]) # 2. Inverse func to anat affine, to get anat-to-func transform inv_func_to_anat_affine = pe.Node(interface=fsl.ConvertXFM(), name='inv_func2anat_affine') inv_func_to_anat_affine.inputs.invert_xfm = True - wf.connect(linear_reg_func_to_anat, 'out_matrix_file', - inv_func_to_anat_affine, 'in_file') + wf.connect(func_to_anat_matrix, 'out', inv_func_to_anat_affine, 'in_file') # 3. 
     # 3.1 Apply anat-to-func transform to transfer anatomical brain to functional space
@@ -1939,16 +1944,14 @@ def bold_mask_anatomical_based(wf, cfg, strat_pool, pipe_num, opt=None):
     node, out = strat_pool.get_data("desc-brain_T1w")
     wf.connect(node, out, reg_anat_brain_to_func, 'in_file')
 
-    node, out = strat_pool.get_data(["desc-preproc_bold",
-                                     "bold"])
+    node, out = strat_pool.get_data(["desc-preproc_bold", "bold"])
     wf.connect(node, out, reg_anat_brain_to_func, 'ref_file')
 
     wf.connect(inv_func_to_anat_affine, 'out_file',
               reg_anat_brain_to_func, 'premat')
 
     # 3.2 Binarize transfered image
-    func_mask_bin = pe.Node(interface=fsl.ImageMaths(),
-                            name='func_mask_bin')
+    func_mask_bin = pe.Node(interface=fsl.ImageMaths(), name='func_mask_bin')
     func_mask_bin.inputs.op_string = '-abs -bin'
 
     wf.connect(reg_anat_brain_to_func, 'out_file',

From cde0c7c77058d0c3f6b84b7a4e68cc778bef911a Mon Sep 17 00:00:00 2001
From: Jon Clucas
Date: Fri, 28 Oct 2022 20:38:59 +0000
Subject: [PATCH 68/72] :goal_net: Guardrail `bold_mask_css`

---
 CPAC/func_preproc/func_preproc.py | 58 +++++++++++++++----------------
 1 file changed, 29 insertions(+), 29 deletions(-)

diff --git a/CPAC/func_preproc/func_preproc.py b/CPAC/func_preproc/func_preproc.py
index d90952021c..5d50c0eb68 100644
--- a/CPAC/func_preproc/func_preproc.py
+++ b/CPAC/func_preproc/func_preproc.py
@@ -2062,7 +2062,6 @@ def bold_mask_ccs(wf, cfg, strat_pool, pipe_num, opt=None):
                 "outputs": ["space-bold_desc-brain_mask",
                             "desc-ROIbrain_bold"]}
     '''
-
     # Run 3dAutomask to generate func initial mask
     func_tmp_brain_mask = pe.Node(interface=preprocess.Automask(),
                                   name=f'func_tmp_brain_mask_AFNI_{pipe_num}')
@@ -2080,7 +2079,7 @@ def bold_mask_ccs(wf, cfg, strat_pool, pipe_num, opt=None):
     func_roi.inputs.t_min = 7
     func_roi.inputs.t_size = 1
 
-    node, out = strat_pool.get_data(["desc-motion_bold",
+    node, out = strat_pool.get_data(["desc-motion_bold",
                                      "desc-preproc_bold",
                                      "bold"])
     wf.connect(node, out, func_roi, 'in_file')
@@ -2102,49 +2101,56 @@ def bold_mask_ccs(wf, cfg, strat_pool, pipe_num, opt=None):
     reg_func_to_anat.inputs.cost = 'corratio'
     reg_func_to_anat.inputs.dof = 6
 
-    wf.connect(func_tmp_brain, 'out_file',
-               reg_func_to_anat, 'in_file')
+    func_to_anat_nodes, func_to_anat_guardrails = wf.nodes_and_guardrails(
+        reg_func_to_anat, registered='out_file')
 
     node, out = strat_pool.get_data("desc-brain_T1w")
-    wf.connect(node, out, reg_func_to_anat, 'reference')
+    wf.connect_retries(func_to_anat_nodes, [
+        (func_tmp_brain, 'out_file', 'in_file'),
+        (node, out, 'reference')])
+    wf.connect_retries(func_to_anat_guardrails, [(node, out, 'reference')])
+    func_to_anat_matrix = guardrail_selection(wf, *func_to_anat_nodes,
+                                              'out_matrix_file',
+                                              func_to_anat_guardrails[0])
 
     # Inverse func2anat matrix
     inv_func_to_anat_affine = pe.Node(interface=fsl.ConvertXFM(),
                                       name=f'inv_func2anat_affine_{pipe_num}')
     inv_func_to_anat_affine.inputs.invert_xfm = True
 
-    wf.connect(reg_func_to_anat, 'out_matrix_file',
-               inv_func_to_anat_affine, 'in_file')
+    wf.connect(func_to_anat_matrix, 'out', inv_func_to_anat_affine, 'in_file')
 
     # Transform anat brain to func space
     reg_anat_brain_to_func = pe.Node(interface=fsl.FLIRT(),
                                      name=f'reg_anat_brain_to_func_{pipe_num}')
     reg_anat_brain_to_func.inputs.apply_xfm = True
     reg_anat_brain_to_func.inputs.interp = 'trilinear'
+    (anat_brain_to_func_nodes,
+     anat_brain_to_func_guardrails) = wf.nodes_and_guardrails(
+         reg_anat_brain_to_func, registered='out_file')
 
     node, out = strat_pool.get_data("desc-brain_T1w")
-    wf.connect(node, out, reg_anat_brain_to_func, 'in_file')
-
-    wf.connect(func_roi, 'roi_file',
-               reg_anat_brain_to_func, 'reference')
-
-    wf.connect(inv_func_to_anat_affine, 'out_file',
-               reg_anat_brain_to_func, 'in_matrix_file')
+    wf.connect_retries(anat_brain_to_func_nodes, [
+        (node, out, 'in_file'),
+        (func_roi, 'roi_file', 'reference'),
+        (inv_func_to_anat_affine, 'out_file', 'in_matrix_file')])
+    wf.connect_retries(anat_brain_to_func_guardrails, [
+        (func_roi, 'roi_file', 'reference')])
+    anat_brain_to_func = guardrail_selection(wf,
+                                             *anat_brain_to_func_guardrails)
 
     # Binarize and dilate anat brain in func space
     bin_anat_brain_in_func = pe.Node(interface=fsl.ImageMaths(),
                                      name=f'bin_anat_brain_in_func_{pipe_num}')
     bin_anat_brain_in_func.inputs.op_string = '-bin -dilM'
-    wf.connect(reg_anat_brain_to_func, 'out_file',
-               bin_anat_brain_in_func, 'in_file')
+    wf.connect(anat_brain_to_func, 'out', bin_anat_brain_in_func, 'in_file')
 
     # Binarize detectable func signals
-    bin_func = pe.Node(interface=fsl.ImageMaths(),
-                       name=f'bin_func_{pipe_num}')
+    bin_func = pe.Node(interface=fsl.ImageMaths(), name=f'bin_func_{pipe_num}')
     bin_func.inputs.op_string = '-Tstd -bin'
 
-    node, out = strat_pool.get_data(["desc-motion_bold",
+    node, out = strat_pool.get_data(["desc-motion_bold",
                                      "desc-preproc_bold",
                                      "bold"])
     wf.connect(node, out, bin_func, 'in_file')
 
@@ -2164,22 +2170,16 @@ def bold_mask_ccs(wf, cfg, strat_pool, pipe_num, opt=None):
     intersect_mask.inputs.op_string = '-mul %s -mul %s'
     intersect_mask.inputs.output_datatype = 'char'
 
-    wf.connect(bin_func, 'out_file',
-               intersect_mask, 'in_file')
-
-    wf.connect(merge_func_mask, 'out',
-               intersect_mask, 'operand_files')
+    wf.connect(bin_func, 'out_file', intersect_mask, 'in_file')
+    wf.connect(merge_func_mask, 'out', intersect_mask, 'operand_files')
 
     # this is the func input for coreg in ccs
     # TODO evaluate if it's necessary to use this brain
     example_func_brain = pe.Node(interface=fsl.maths.ApplyMask(),
                                  name=f'get_example_func_brain_{pipe_num}')
 
-    wf.connect(func_roi, 'roi_file',
-               example_func_brain, 'in_file')
-
-    wf.connect(intersect_mask, 'out_file',
-               example_func_brain, 'mask_file')
+    wf.connect(func_roi, 'roi_file', example_func_brain, 'in_file')
+    wf.connect(intersect_mask, 'out_file', example_func_brain, 'mask_file')
 
     outputs = {
         'space-bold_desc-brain_mask': (intersect_mask, 'out_file'),

From e0e21386beee7afad9c76a596cd60d1f1447aaac Mon Sep 17 00:00:00 2001
From: Jon Clucas
Date: Fri, 28 Oct 2022 20:46:35 +0000
Subject: [PATCH 69/72] :goal_net: Guardrail `dual_regression`

---
 CPAC/sca/sca.py | 21 +++++++++++++--------
 1 file changed, 13 insertions(+), 8 deletions(-)

diff --git a/CPAC/sca/sca.py b/CPAC/sca/sca.py
index 64f6fb37db..cafddf3539 100644
--- a/CPAC/sca/sca.py
+++ b/CPAC/sca/sca.py
@@ -6,6 +6,7 @@
 import nipype.interfaces.io as nio
 import nipype.interfaces.utility as util
 
+from CPAC.registration.guardrails import guardrail_selection
 from CPAC.sca.utils import *
 from CPAC.utils.utils import extract_one_d
 from CPAC.utils.datasource import resample_func_roi, \
@@ -505,6 +506,9 @@ def dual_regression(wf, cfg, strat_pool, pipe_num, opt=None):
             'func_registration_to_template']['FNIRT_pipelines'][
             'identity_matrix']
     )
+    (spatial_to_native_nodes,
+     spatial_to_native_guardrails) = wf.nodes_and_guardrails(
+         resample_spatial_map_to_native_space_for_dr, registered='out_file')
 
     spatial_map_dataflow_for_dr = create_spatial_map_dataflow(
         cfg.seed_based_correlation_analysis['sca_atlases']['DualReg'],
@@ -528,18 +532,19 @@ def dual_regression(wf, cfg, strat_pool, pipe_num, opt=None):
                                      "space-template_desc-motion_bold",
                                      "space-template_desc-preproc_bold",
                                      "space-template_bold"])
-    wf.connect(node, out,
-               resample_spatial_map_to_native_space_for_dr, 'reference')
 
     wf.connect(node, out,
                spatial_map_timeseries_for_dr, 'inputspec.subject_rest')
-
-    wf.connect(spatial_map_dataflow_for_dr, 'select_spatial_map.out_file',
-               resample_spatial_map_to_native_space_for_dr, 'in_file')
+    wf.connect_retries(spatial_to_native_nodes, [
+        (node, out, 'reference'),
+        (spatial_map_dataflow_for_dr, 'select_spatial_map.out_file', 'in_file')
+    ])
+    wf.connect_retries(spatial_to_native_guardrails, [
+        (node, out, 'reference')])
+    spatial_to_native = guardrail_selection(wf, *spatial_to_native_guardrails)
 
     # connect it to the spatial_map_timeseries
-    wf.connect(resample_spatial_map_to_native_space_for_dr, 'out_file',
-               spatial_map_timeseries_for_dr, 'inputspec.spatial_map'
-               )
+    wf.connect(spatial_to_native, 'out',
+               spatial_map_timeseries_for_dr, 'inputspec.spatial_map')
 
     dr_temp_reg = create_temporal_reg(f'temporal_regression_{pipe_num}')
     dr_temp_reg.inputs.inputspec.normalize = \

From 97fd7fb04e07fca43f0baa6d70a15dec11551918 Mon Sep 17 00:00:00 2001
From: Jon Clucas
Date: Fri, 28 Oct 2022 20:53:44 +0000
Subject: [PATCH 70/72] :goal_net: Guardrail `spatial_regression`

---
 CPAC/seg_preproc/seg_preproc.py        | 22 +++++++++++++---------
 CPAC/timeseries/timeseries_analysis.py | 16 ++++++++++++----
 2 files changed, 25 insertions(+), 13 deletions(-)

diff --git a/CPAC/seg_preproc/seg_preproc.py b/CPAC/seg_preproc/seg_preproc.py
index e157c52e23..10c4c23b3c 100644
--- a/CPAC/seg_preproc/seg_preproc.py
+++ b/CPAC/seg_preproc/seg_preproc.py
@@ -3,6 +3,7 @@
 from CPAC.anat_preproc.utils import freesurfer_hemispheres, mri_convert
 from CPAC.pipeline import nipype_pipeline_engine as pe
+from CPAC.registration.guardrails import guardrail_selection
 from CPAC.registration.registration import apply_transform
 from CPAC.registration.utils import (
     check_transforms,
@@ -300,16 +301,19 @@ def tissue_mask_template_to_t1(wf_name, use_ants):
     tissueprior_mni_to_t1.inputs.apply_xfm = True
     tissueprior_mni_to_t1.inputs.interp = 'nearestneighbour'
 
-    # mni to t1
-    preproc.connect(inputNode, 'tissue_mask_template',
-                    tissueprior_mni_to_t1, 'in_file')
-    preproc.connect(inputNode, 'brain', tissueprior_mni_to_t1,
-                    'reference')
-    preproc.connect(inputNode, 'standard2highres_mat',
-                    tissueprior_mni_to_t1, 'in_matrix_file')
+    mni_to_t1_nodes, mni_to_t1_guardrails = preproc.nodes_and_guardrails(
+        tissueprior_mni_to_t1, registered='out_file')
 
-    preproc.connect(tissueprior_mni_to_t1, 'out_file',
-                    outputNode, 'segment_mask_temp2t1')
+    # mni to t1
+    preproc.connect_retries(mni_to_t1_nodes, [
+        (inputNode, 'tissue_mask_template', 'in_file'),
+        (inputNode, 'brain', 'reference'),
+        (inputNode, 'standard2highres_mat', 'in_matrix_file')])
+    preproc.connect_retries(mni_to_t1_guardrails, [
+        (inputNode, 'brain', 'reference')])
+    # pylint: disable=no-value-for-parameter
+    mni_to_t1 = guardrail_selection(preproc, *mni_to_t1_guardrails)
+    preproc.connect(mni_to_t1, 'out', outputNode, 'segment_mask_temp2t1')
 
     return preproc
 
diff --git a/CPAC/timeseries/timeseries_analysis.py b/CPAC/timeseries/timeseries_analysis.py
index 415ea6fe09..187a0282aa 100644
--- a/CPAC/timeseries/timeseries_analysis.py
+++ b/CPAC/timeseries/timeseries_analysis.py
@@ -6,6 +6,7 @@
     create_connectome_nilearn, \
     get_connectome_method
 from CPAC.pipeline import nipype_pipeline_engine as pe
+from CPAC.registration.guardrails import guardrail_selection
 from CPAC.utils.datasource import create_roi_mask_dataflow, \
                                   create_spatial_map_dataflow, \
                                   resample_func_roi
@@ -1001,6 +1002,9 @@ def spatial_regression(wf, cfg, strat_pool, pipe_num, opt=None):
         in_matrix_file=cfg.registration_workflows['functional_registration'][
             'func_registration_to_template']['FNIRT_pipelines'][
             'identity_matrix'])
+    (spatial_to_native_nodes,
+     spatial_to_native_guardrails) = wf.nodes_and_guardrails(
+         resample_spatial_map_to_native_space, registered='out_file')
 
     spatial_map_dataflow = create_spatial_map_dataflow(
         cfg.timeseries_extraction['tse_atlases']['SpatialReg'],
@@ -1022,14 +1026,18 @@ def spatial_regression(wf, cfg, strat_pool, pipe_num, opt=None):
     # resample the input functional file and functional mask
     # to spatial map
-    wf.connect(node, out, resample_spatial_map_to_native_space, 'reference')
-    wf.connect(spatial_map_dataflow, 'select_spatial_map.out_file',
-               resample_spatial_map_to_native_space, 'in_file')
+    wf.connect_retries(spatial_to_native_nodes, [
+        (node, out, 'reference'),
+        (spatial_map_dataflow, 'select_spatial_map.out_file', 'in_file')])
+    wf.connect_retries(spatial_to_native_guardrails, [
+        (node, out, 'reference')])
+    # pylint: disable=no-value-for-parameter
+    spatial_to_native = guardrail_selection(wf, *spatial_to_native_guardrails)
 
     wf.connect(node, out, spatial_map_timeseries, 'inputspec.subject_rest')
 
     # connect it to the spatial_map_timeseries
-    wf.connect(resample_spatial_map_to_native_space, 'out_file',
+    wf.connect(spatial_to_native, 'out',
                spatial_map_timeseries, 'inputspec.spatial_map')
 
     node, out = strat_pool.get_data('space-template_desc-bold_mask')

From 307979e7e09586307afb1da04db7af4e1f5f1df8 Mon Sep 17 00:00:00 2001
From: Jon Clucas
Date: Fri, 28 Oct 2022 21:11:58 +0000
Subject: [PATCH 71/72] :white_check_mark: Update random seed test

---
 CHANGELOG.md                       |  1 +
 CPAC/pipeline/random_state/seed.py | 14 ++++++++------
 2 files changed, 9 insertions(+), 6 deletions(-)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index e1eb4d6977..283c54383c 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -35,6 +35,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
 - Added crash messages from during and before graph building to logs
 - Added data-config-specific hash string to C-PAC-generated config files
 - Updated `rbc-options` preconfig to use `fmriprep-options` preprocessing
+- Changed `random.log` to `random.tsv` and updated logic to log random seed when not specified
 
 ### Fixed
 - Fixed [bug](https://github.com/FCP-INDI/C-PAC/issues/1795) that was causing `cpac run` to fail when passing a manual random seed via `--random_seed`.
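
The two CHANGELOG entries above describe the behaviour exercised by the `seed.py` diff that follows: a seed must be a positive integer no greater than 2147483647 (`MAX_SEED`), the literal string "random", or None, and the seed that is actually used is now recorded in `random.tsv` even when the user never specifies one. A minimal sketch of that contract, using hypothetical helper names (`choose_seed`, `record_seed_tsv`) rather than C-PAC's actual functions, which live in CPAC/pipeline/random_state/seed.py and are patched below:

    # Sketch only: MAX_SEED and the validation rule come from the patch below;
    # choose_seed and record_seed_tsv are illustrative names, not C-PAC's API.
    import csv
    import random

    MAX_SEED = 2147483647


    def choose_seed(seed=None):
        """Return a usable seed given an int, the string 'random', or None."""
        if seed is None or seed == 'random':
            return random.randint(1, MAX_SEED)
        try:
            seed = int(seed)
            assert 0 < seed <= MAX_SEED
        except (ValueError, TypeError, AssertionError) as error:
            raise ValueError('Valid random seeds are positive integers up to '
                             f'{MAX_SEED}, "random", or None, '
                             f'not {seed}') from error
        return seed


    def record_seed_tsv(path, node_name, seed):
        """Append one node's seed to a tab-separated log (cf. random.tsv)."""
        with open(path, 'a', newline='') as tsv:
            csv.writer(tsv, delimiter='\t').writerow([node_name, seed])

Because None and "random" both resolve to a concrete integer in this scheme, the chosen value can always be logged, which is what the updated doctest below (`1 <= set_up_random_state(None) <= MAX_SEED`) asserts.
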
diff --git a/CPAC/pipeline/random_state/seed.py b/CPAC/pipeline/random_state/seed.py
index dceb9e29d9..7373f938db 100644
--- a/CPAC/pipeline/random_state/seed.py
+++ b/CPAC/pipeline/random_state/seed.py
@@ -210,8 +210,8 @@ def set_up_random_state(seed):
     >>> set_up_random_state(0)
     Traceback (most recent call last):
     ValueError: Valid random seeds are positive integers up to 2147483647, "random", or None, not 0
-    >>> set_up_random_state(None)
-
+    >>> 1 <= set_up_random_state(None) <= MAX_SEED
+    True
     '''  # noqa: E501  # pylint: disable=line-too-long
     if seed is not None:
         if seed == 'random':
@@ -220,10 +220,12 @@ def set_up_random_state(seed):
         try:
             seed = int(seed)
             assert 0 < seed <= MAX_SEED
-        except(ValueError, TypeError, AssertionError):
-            raise ValueError('Valid random seeds are positive integers up to '
-                             f'2147483647, "random", or None, not {seed}')
-
+        except (ValueError, TypeError, AssertionError) as error:
+            raise ValueError(
+                'Valid random seeds are positive integers up '
+                f'to {MAX_SEED}, "random", or None, not {seed}'
+            ) from error
+
     _seed['seed'] = seed
     return random_seed()

From 02887af0bb1f66b09b092a2490024c9936673035 Mon Sep 17 00:00:00 2001
From: Jon Clucas
Date: Fri, 28 Oct 2022 21:25:58 +0000
Subject: [PATCH 72/72] fixup! :wrench: Add `fail_fast` configuration option
 to expose Nipype's `stop_on_first_crash`

---
 CPAC/resources/configs/pipeline_config_rbc-options.yml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/CPAC/resources/configs/pipeline_config_rbc-options.yml b/CPAC/resources/configs/pipeline_config_rbc-options.yml
index b79f9a8cda..b20f0c617a 100644
--- a/CPAC/resources/configs/pipeline_config_rbc-options.yml
+++ b/CPAC/resources/configs/pipeline_config_rbc-options.yml
@@ -15,7 +15,7 @@ pipeline_setup:
   system_config:
 
-    fail_fast: Off
+    fail_fast: On
 
     # Random seed used to fix the state of execution.
    # If unset, each process uses its own default.
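
Taken together, the `:goal_net:` patches above apply one recurring rewrite to each FSL registration step: instead of wiring a single FLIRT/ConvertXFM/ApplyWarp chain straight into the workflow, the registration node is expanded into retry copies plus guardrails, the same inputs are fanned out to every copy, and downstream connections take whichever attempt passes its guardrail. A condensed sketch of that pattern follows. It assumes the `nodes_and_guardrails`/`connect_retries` workflow methods and the `guardrail_selection` helper introduced earlier in this series, so it only runs against that development branch of C-PAC; the `moving` and `reference` nodes are stand-ins, not real pipeline nodes.

    # Sketch of the guardrail pattern used throughout these patches; assumes
    # the C-PAC branch providing nodes_and_guardrails() and connect_retries().
    import nipype.interfaces.utility as util
    from nipype.interfaces import fsl

    from CPAC.pipeline import nipype_pipeline_engine as pe
    from CPAC.registration.guardrails import guardrail_selection

    wf = pe.Workflow(name='guardrailed_registration')
    moving = pe.Node(util.IdentityInterface(fields=['out_file']),
                     name='moving')       # stand-in, e.g. func_single_volume
    reference = pe.Node(util.IdentityInterface(fields=['out_file']),
                        name='reference')  # stand-in, e.g. anat_head

    flirt = pe.Node(interface=fsl.FLIRT(), name='func_to_anat_linear_reg')

    # One retry copy per attempt, plus a guardrail that checks each result
    nodes, guardrails = wf.nodes_and_guardrails(flirt, registered='out_file')

    # Fan the same inputs out to every retry node ...
    wf.connect_retries(nodes, [
        (moving, 'out_file', 'in_file'),
        (reference, 'out_file', 'reference')])
    # ... and give each guardrail the reference it checks against
    wf.connect_retries(guardrails, [(reference, 'out_file', 'reference')])

    # Keep whichever attempt passed: a matrix from the nodes themselves ...
    matrix = guardrail_selection(wf, *nodes, 'out_matrix_file', guardrails[0])
    # ... or the registered image straight from the guardrails
    registered = guardrail_selection(wf, *guardrails)

Downstream consumers then connect to the selection node's `out` field (for example, the inverse-transform ConvertXFM nodes above now read `wf.connect(func_to_anat_matrix, 'out', inv_func_to_anat_affine, 'in_file')`), which is exactly the substitution each diff in this series makes for the old `out_matrix_file` and `out_file` connections.
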