Skip to content
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
24 commits
Select commit Hold shift + click to select a range
eca7a54
:fire: DRY ROI ingress function
shnizzedy Feb 7, 2023
80d19e9
fixup! :fire: DRY ROI ingress function
shnizzedy Feb 8, 2023
8349365
:recycle: Create `gather_atlases` NodeBlock
shnizzedy Feb 9, 2023
5cbe702
:construction: WIP :recycle: Connect `gather_atlases` NodeBlock
shnizzedy Feb 9, 2023
03b5591
:construction: WIP :recycle: Connect TSE Node Blocks to `gather_atlases`
shnizzedy Feb 9, 2023
43b5c17
:art: Resequence function definitions
shnizzedy Feb 9, 2023
54b5c94
:construction: WIP :recycle: Connect SCA to `gather_atlases`
shnizzedy Feb 9, 2023
3585816
:recycle: Connect `gather_atlases` in `pipeline_blocks`
shnizzedy Feb 9, 2023
465ea6c
:pencil2: Remove extra `and`
shnizzedy Feb 10, 2023
cf1327a
fixup! :recycle: Create `gather_atlases` NodeBlock
shnizzedy Feb 10, 2023
3007315
:bug: Dedupe `align_template_mask_to_template_data`
shnizzedy Feb 10, 2023
a8dba26
:art: Match quotes
shnizzedy Feb 10, 2023
7351508
:alembic: Print debugging statement
shnizzedy Feb 10, 2023
bef2f7f
:recycle: Skip ROI analysis nodes with no atlas assigned
shnizzedy Feb 10, 2023
53edfe5
:recycle: Fork ROI analyses on atlas
shnizzedy Feb 13, 2023
be1d31d
:pencil2: Fix typo in NodeBlock docstring
shnizzedy Feb 13, 2023
5e0bd8d
:necktie: Update engine to allow lists of 2-tuples
shnizzedy Feb 13, 2023
675f723
:white_check_mark: Add doctest for `insert_in_dict_of_lists`
shnizzedy Feb 13, 2023
7f4cde9
:recycle: Resample, don't realign
shnizzedy Feb 13, 2023
b447e90
:recycle: Insert lists of 2-tuples in resource pool from NodeBlocks
shnizzedy Feb 15, 2023
f19db22
:recycle: Resample atlases nearest-neighbor
shnizzedy Feb 16, 2023
cf697bf
:twisted_rightwards_arrows: Merge develop into atlas_ingress
shnizzedy Feb 16, 2023
80481d5
:recycle: Account for not-yet-in-output-res preproc_bold
shnizzedy Feb 16, 2023
d830d45
:necktie: Drop optional mask from AFNI connectivity matrices
shnizzedy Feb 20, 2023
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -48,6 +48,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
- Updated some output filenaming conventions for human-readability and to move closer to BIDS-derivatives compliance
- Changed motion filter from single dictionary to list of dictionaries
- Changed CI logic to allow non-release tags
- Refactored ROI resampling to be more efficient (resample to output resolution before analysis, and resample each atlas just once if used for multiple analyses)

### Upgraded dependencies
- `nibabel` 2.3.3 → 3.0.1
Expand Down
22 changes: 12 additions & 10 deletions CPAC/anat_preproc/anat_preproc.py
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,7 @@
fslmaths_command, \
VolumeRemoveIslands
from CPAC.pipeline.engine import flatten_list
from CPAC.utils.docs import docstring_parameter
from CPAC.utils.docs import docstring_parameter, list_items_unbracketed
from CPAC.utils.interfaces.fsl import Merge as fslMerge
from CPAC.utils.interfaces.function.seg_preproc import \
pick_tissue_from_labels_file_interface
Expand Down Expand Up @@ -1411,9 +1411,10 @@ def acpc_align_brain_with_mask(wf, cfg, strat_pool, pipe_num, opt=None):
outputs = {
'desc-preproc_T1w': (acpc_align, 'outputspec.acpc_aligned_head'),
'desc-acpcbrain_T1w': (acpc_align, 'outputspec.acpc_aligned_brain'),
'space-T1w_desc-brain_mask': (acpc_align, 'outputspec.acpc_brain_mask'),
'space-T1w_desc-prebrain_mask': (strat_pool.get_data('space-T1w_desc-brain_mask'))
}
'space-T1w_desc-brain_mask': (acpc_align,
'outputspec.acpc_brain_mask'),
'space-T1w_desc-prebrain_mask': strat_pool.get_data(
'space-T1w_desc-brain_mask')}

return (wf, outputs)

Expand Down Expand Up @@ -2478,9 +2479,11 @@ def brain_extraction_T2(wf, cfg, strat_pool, pipe_num, opt=None):
["space-T2w_desc-brain_mask", "space-T2w_desc-acpcbrain_mask"])],
"outputs": ["desc-brain_T2w"]}
'''
if cfg.anatomical_preproc['acpc_alignment']['run'] and cfg.anatomical_preproc['acpc_alignment']['acpc_target'] == 'brain':
if (cfg.anatomical_preproc['acpc_alignment']['run']
and cfg.anatomical_preproc['acpc_alignment']['acpc_target'] == 'brain'
):
outputs = {
'desc-brain_T2w': (strat_pool.get_data(["desc-acpcbrain_T2w"]))
'desc-brain_T2w': strat_pool.get_data(["desc-acpcbrain_T2w"])
}
else:
anat_skullstrip_orig_vol = pe.Node(interface=afni.Calc(),
Expand Down Expand Up @@ -2679,9 +2682,8 @@ def freesurfer_postproc(wf, cfg, strat_pool, pipe_num, opt=None):

# we're grabbing the postproc outputs and appending them to
# the reconall outputs
@docstring_parameter(postproc_outputs=str(flatten_list(
freesurfer_postproc, 'outputs')
).lstrip('[').replace("'", '"'))
@docstring_parameter(postproc_outputs=list_items_unbracketed(
flatten_list(freesurfer_postproc, 'outputs')))
def freesurfer_reconall(wf, cfg, strat_pool, pipe_num, opt=None):
'''
{{"name": "freesurfer_reconall",
Expand All @@ -2696,7 +2698,7 @@ def freesurfer_reconall(wf, cfg, strat_pool, pipe_num, opt=None):
"brainmask",
"wmparc",
"T1",
{postproc_outputs}}}
{postproc_outputs}]}}
'''

reconall = pe.Node(interface=freesurfer.ReconAll(),
Expand Down
4 changes: 2 additions & 2 deletions CPAC/nuisance/nuisance.py
Original file line number Diff line number Diff line change
Expand Up @@ -2492,8 +2492,8 @@ def nuisance_regression(wf, cfg, strat_pool, pipe_num, opt, space, res=None):
# sometimes mm dimensions match but the voxel dimensions don't
# so here we align the mask to the resampled data before applying
match_grid = pe.Node(afni.Resample(),
name='align_template_mask_to_template_data_'
f'{name_suff}')
name='align_template_mask_to_template_'
f'data_{name_suff}')
match_grid.inputs.outputtype = 'NIFTI_GZ'
match_grid.inputs.resample_mode = 'Cu'
node, out = strat_pool.get_data('FSL-AFNI-brain-mask')
Expand Down
5 changes: 3 additions & 2 deletions CPAC/pipeline/cpac_pipeline.py
Original file line number Diff line number Diff line change
Expand Up @@ -1134,10 +1134,10 @@ def connect_pipeline(wf, cfg, rpool, pipeline_blocks):

def build_workflow(subject_id, sub_dict, cfg, pipeline_name=None,
num_ants_cores=1):
from CPAC.utils.datasource import gather_extraction_maps
from CPAC.utils.datasource import gather_atlases, gather_extraction_maps

# Workflow setup
wf = initialize_nipype_wf(cfg, sub_dict)
wf = initialize_nipype_wf(cfg, sub_dict, name=pipeline_name)

# Extract credentials path if it exists
try:
Expand Down Expand Up @@ -1396,6 +1396,7 @@ def build_workflow(subject_id, sub_dict, cfg, pipeline_name=None,
pipeline_blocks += [surface_postproc]

# Extractions and Derivatives
pipeline_blocks += [gather_atlases]
tse_atlases, sca_atlases = gather_extraction_maps(cfg)
cfg.timeseries_extraction['tse_atlases'] = tse_atlases
cfg.seed_based_correlation_analysis['sca_atlases'] = sca_atlases
Expand Down
76 changes: 56 additions & 20 deletions CPAC/pipeline/engine.py
Original file line number Diff line number Diff line change
Expand Up @@ -1507,26 +1507,18 @@ def connect_block(self, wf, cfg, rpool):
if raw_label not in new_json_info['CpacVariant']:
new_json_info['CpacVariant'][raw_label] = []
new_json_info['CpacVariant'][raw_label].append(node_name)

rpool.set_data(label,
connection[0],
connection[1],
new_json_info,
pipe_idx, node_name, fork)

wf, post_labels = rpool.post_process(
wf, label, connection, new_json_info, pipe_idx,
pipe_x, outs)

if rpool.func_reg:
for postlabel in post_labels:
connection = (postlabel[1], postlabel[2])
wf = rpool.derivative_xfm(wf, postlabel[0],
connection,
new_json_info,
pipe_idx,
pipe_x)

if isinstance(connection, list):
for _connection in connection:
wf = load_into_rpool(
wf, rpool, label, _connection,
new_json_info, pipe_idx, node_name,
fork, pipe_x, outs)
else:
wf = load_into_rpool(
wf, rpool, label, connection,
new_json_info, pipe_idx, node_name, fork,
pipe_x, outs)
return wf


Expand Down Expand Up @@ -1564,6 +1556,50 @@ def flatten_list(node_block_function: Union[FunctionType, list, Tuple],
return flat_list


def load_into_rpool(wf, rpool, label, connection, new_json_info, pipe_idx,
                    node_name, fork, pipe_x, outs):
    """
    Loads a single resource into a ResourcePool

    Registers ``connection`` (a node, output-name pair) under ``label``,
    runs the pool's post-processing, and — when functional registration
    is enabled — applies the derivative transform to every
    post-processed label.

    Parameters
    ----------
    wf : pe.Workflow

    rpool : ResourcePool

    label : str

    connection : 2-tuple

    new_json_info : dict

    pipe_idx : dict

    node_name : str

    fork : bool

    pipe_x : int

    outs : dict

    Returns
    -------
    wf : pe.Workflow
    """
    node, out = connection
    rpool.set_data(label, node, out, new_json_info, pipe_idx, node_name,
                   fork)
    wf, post_labels = rpool.post_process(wf, label, connection,
                                         new_json_info, pipe_idx, pipe_x,
                                         outs)
    if rpool.func_reg:
        # each post-processed label gets its own derivative transform
        for post_label in post_labels:
            wf = rpool.derivative_xfm(wf, post_label[0],
                                      (post_label[1], post_label[2]),
                                      new_json_info, pipe_idx, pipe_x)
    return wf


def wrap_block(node_blocks, interface, wf, cfg, strat_pool, pipe_num, opt):
"""Wrap a list of node block functions to make them easier to use within
other node blocks.
Expand Down Expand Up @@ -1612,7 +1648,7 @@ def wrap_block(node_blocks, interface, wf, cfg, strat_pool, pipe_num, opt):
for in_resource, val in interface.items():
if isinstance(val, tuple):
strat_pool.set_data(in_resource, val[0], val[1], {}, "", "",
fork=True)#
fork=True)
if 'sub_num' not in strat_pool.get_pool_info():
strat_pool.set_pool_info({'sub_num': 0})
sub_num = strat_pool.get_pool_info()['sub_num']
Expand Down
100 changes: 37 additions & 63 deletions CPAC/sca/sca.py
Original file line number Diff line number Diff line change
Expand Up @@ -23,9 +23,8 @@
import nipype.interfaces.utility as util

from CPAC.sca.utils import *
from CPAC.utils.utils import extract_one_d
from CPAC.utils.datasource import resample_func_roi, \
create_roi_mask_dataflow, create_spatial_map_dataflow
# from CPAC.utils.utils import extract_one_d
from CPAC.utils.datasource import resample_func_roi, roi_input_node

from CPAC.timeseries.timeseries_analysis import get_roi_timeseries, \
get_spatial_map_timeseries, resample_function
Expand Down Expand Up @@ -405,12 +404,14 @@ def SCA_AVG(wf, cfg, strat_pool, pipe_num, opt=None):
"switch": ["run"],
"option_key": "None",
"option_val": "None",
"inputs": ["space-template_desc-preproc_bold"],
"inputs": [("atlas-sca-Avg", "atlas-sca-Avg_name"),
"space-template_desc-preproc_bold"],
"outputs": ["desc-MeanSCA_timeseries",
"space-template_desc-MeanSCA_correlations",
"atlas_name"]}
'''

if strat_pool.check_rpool('atlas-sca-Avg') is False:
return wf, {}
# same workflow, except to run TSE and send it to the resource
# pool so that it will not get sent to SCA
resample_functional_roi_for_sca = pe.Node(
Expand All @@ -429,31 +430,21 @@ def SCA_AVG(wf, cfg, strat_pool, pipe_num, opt=None):
cfg.registration_workflows['functional_registration'][
'func_registration_to_template']['FNIRT_pipelines']['identity_matrix']

roi_dataflow_for_sca = create_roi_mask_dataflow(
cfg.seed_based_correlation_analysis['sca_atlases']['Avg'],
f'roi_dataflow_for_sca_{pipe_num}'
)

roi_dataflow_for_sca.inputs.inputspec.set(
creds_path=cfg.pipeline_setup['input_creds_path'],
dl_dir=cfg.pipeline_setup['working_directory']['path']
)

roi_timeseries_for_sca = get_roi_timeseries(
f'roi_timeseries_for_sca_{pipe_num}')

node, out = strat_pool.get_data("space-template_desc-preproc_bold")
# resample the input functional file to roi
wf.connect(node, out,
resample_functional_roi_for_sca, 'in_func')
wf.connect(roi_dataflow_for_sca, 'outputspec.out_file',
resample_functional_roi_for_sca, 'in_roi')
wf.connect(*strat_pool.get_data("space-template_desc-preproc_bold"),
resample_functional_roi_for_sca, 'in_func')
fork_atlases = roi_input_node(wf, strat_pool, 'atlas-sca-Avg', pipe_num)
wf.connect(fork_atlases, 'atlas_file',
resample_functional_roi_for_sca, 'in_roi')

# connect it to the roi_timeseries
wf.connect(resample_functional_roi_for_sca, 'out_roi',
roi_timeseries_for_sca, 'input_roi.roi')
roi_timeseries_for_sca, 'input_roi.roi')
wf.connect(resample_functional_roi_for_sca, 'out_func',
roi_timeseries_for_sca, 'inputspec.rest')
roi_timeseries_for_sca, 'inputspec.rest')

sca_roi = create_sca(f'sca_roi_{pipe_num}')

Expand All @@ -471,7 +462,7 @@ def SCA_AVG(wf, cfg, strat_pool, pipe_num, opt=None):
# extract_one_d)),
'space-template_desc-MeanSCA_correlations':
(sca_roi, 'outputspec.correlation_stack'),
'atlas_name': (roi_dataflow_for_sca, 'outputspec.out_name')
'atlas_name': (fork_atlases, 'atlas_name')
}

return (wf, outputs)
Expand All @@ -486,13 +477,15 @@ def dual_regression(wf, cfg, strat_pool, pipe_num, opt=None):
"switch": ["run"],
"option_key": "None",
"option_val": "None",
"inputs": ["space-template_desc-preproc_bold",
"space-template_desc-bold_mask"],
"inputs": [("atlas-sca-DualReg", "atlas-sca-DualReg_name"),
("space-template_desc-preproc_bold",
"space-template_desc-bold_mask")],
"outputs": ["space-template_desc-DualReg_correlations",
"desc-DualReg_statmap",
"atlas_name"]}
'''

if strat_pool.check_rpool('atlas-sca-DualReg') is False:
return wf, {}
resample_spatial_map_to_native_space_for_dr = pe.Node(
interface=fsl.FLIRT(),
name=f'resample_spatial_map_to_native_space_for_DR_{pipe_num}'
Expand All @@ -506,16 +499,6 @@ def dual_regression(wf, cfg, strat_pool, pipe_num, opt=None):
'identity_matrix']
)

spatial_map_dataflow_for_dr = create_spatial_map_dataflow(
cfg.seed_based_correlation_analysis['sca_atlases']['DualReg'],
f'spatial_map_dataflow_for_DR_{pipe_num}'
)

spatial_map_dataflow_for_dr.inputs.inputspec.set(
creds_path=cfg.pipeline_setup['input_creds_path'],
dl_dir=cfg.pipeline_setup['working_directory']['path']
)

spatial_map_timeseries_for_dr = get_spatial_map_timeseries(
f'spatial_map_timeseries_for_DR_{pipe_num}'
)
Expand All @@ -528,8 +511,9 @@ def dual_regression(wf, cfg, strat_pool, pipe_num, opt=None):
resample_spatial_map_to_native_space_for_dr, 'reference')
wf.connect(node, out,
spatial_map_timeseries_for_dr, 'inputspec.subject_rest')

wf.connect(spatial_map_dataflow_for_dr, 'select_spatial_map.out_file',
fork_atlases = roi_input_node(wf, strat_pool, 'atlas-sca-DualReg',
pipe_num)
wf.connect(fork_atlases, 'atlas_file',
resample_spatial_map_to_native_space_for_dr, 'in_file')

# connect it to the spatial_map_timeseries
Expand All @@ -556,11 +540,9 @@ def dual_regression(wf, cfg, strat_pool, pipe_num, opt=None):
(dr_temp_reg, 'outputspec.temp_reg_map'),
'desc-DualReg_statmap':
(dr_temp_reg, 'outputspec.temp_reg_map_z'),
'atlas_name':
(spatial_map_dataflow_for_dr, 'select_spatial_map.out_name')
}
'atlas_name': (fork_atlases, 'atlas_name')}

return (wf, outputs)
return wf, outputs


def multiple_regression(wf, cfg, strat_pool, pipe_num, opt=None):
Expand All @@ -572,13 +554,15 @@ def multiple_regression(wf, cfg, strat_pool, pipe_num, opt=None):
"switch": ["run"],
"option_key": "None",
"option_val": "None",
"inputs": ["space-template_desc-preproc_bold",
"space-template_desc-bold_mask"],
"inputs": [("atlas-sca-MultReg", "atlas-sca-MultReg_name"),
("space-template_desc-preproc_bold",
"space-template_desc-bold_mask")],
"outputs": ["space-template_desc-MultReg_correlations",
"desc-MultReg_statmap",
"atlas_name"]}
'''

if strat_pool.check_rpool('atlas-sca-MultReg') is False:
return wf, {}
# same workflow, except to run TSE and send it to the resource
# pool so that it will not get sent to SCA
resample_functional_roi_for_multreg = pe.Node(
Expand All @@ -591,25 +575,16 @@ def multiple_regression(wf, cfg, strat_pool, pipe_num, opt=None):
cfg.registration_workflows['functional_registration'][
'func_registration_to_template']['FNIRT_pipelines']['identity_matrix']

roi_dataflow_for_multreg = create_roi_mask_dataflow(
cfg.seed_based_correlation_analysis['sca_atlases']['MultReg'],
f'roi_dataflow_for_mult_reg_{pipe_num}')

roi_dataflow_for_multreg.inputs.inputspec.set(
creds_path=cfg.pipeline_setup['input_creds_path'],
dl_dir=cfg.pipeline_setup['working_directory']['path']
)

roi_timeseries_for_multreg = get_roi_timeseries(
f'roi_timeseries_for_mult_reg_{pipe_num}')

node, out = strat_pool.get_data("space-template_desc-preproc_bold")
# resample the input functional file to roi
wf.connect(node, out, resample_functional_roi_for_multreg, 'in_func')
wf.connect(roi_dataflow_for_multreg,
'outputspec.out_file',
resample_functional_roi_for_multreg,
'in_roi')
wf.connect(*strat_pool.get_data("space-template_desc-preproc_bold"),
resample_functional_roi_for_multreg, 'in_func')
fork_atlases = roi_input_node(wf, strat_pool, 'atlas-sca-MultReg',
pipe_num)
wf.connect(fork_atlases, 'atlas_file',
resample_functional_roi_for_multreg, 'in_roi')

# connect it to the roi_timeseries
wf.connect(resample_functional_roi_for_multreg,
Expand Down Expand Up @@ -647,7 +622,6 @@ def multiple_regression(wf, cfg, strat_pool, pipe_num, opt=None):
(sc_temp_reg, 'outputspec.temp_reg_map'),
'desc-MultReg_statmap':
(sc_temp_reg, 'outputspec.temp_reg_map_z'),
'atlas_name': (roi_dataflow_for_multreg, 'outputspec.out_name')
}
'atlas_name': (fork_atlases, 'atlas_name')}

return (wf, outputs)
return wf, outputs
Loading