
Commit 8042446

🔀 Merge v1.8.3 to main (#1658)
2 parents a33dfe8 + dd31ca5

File tree: 92 files changed, +2283 -782 lines


CHANGELOG.md

Lines changed: 32 additions & 1 deletion
@@ -5,6 +5,36 @@ All notable changes to this project will be documented in this file.
 The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
 and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
 
+## [1.8.3] - 2022-02-11
+
+### Added
+- Added XCP-style quality control file
+- Added RBC-options pipeline preconfiguration
+- Added `engine.log` (when verbose debugging is on)
+- Added ability to fix random seed for
+  - `antsAI`
+  - `antsRegistration`
+  - `Atropos` (fixed but not specified)
+  - `fslmaths`
+  - `mri_vol2vol`
+  - `recon-all`
+- Added ability to use a lateral ventricles mask in place of a cerebrospinal fluid mask when segmentation is Off; added specifically for the rodent pipeline, but works on any dataset when segmentation is off
+
+### Changed
+- In a given pipeline configuration, segmentation probability maps and binary tissue masks are warped to template space, and those warped masks are included in the output directory
+  - if `registration_workflows['functional_registration']['EPI_registration']['run segmentation']` is `On` and `segmentation['tissue_segmentation']['Template_Based']['template_for_segmentation']` includes `EPI_Template`
+
+    and/or
+  - if `registration_workflows['anatomical_registration']['run']` is `On` and `segmentation['tissue_segmentation']['Template_Based']['template_for_segmentation']` includes `T1_Template`
+- Renamed connectivity matrices from `*_connectome.tsv` to `*_correlations.tsv`
+- Moved some ephemeral logging statements into `pypeline.log`
+
+### Fixed
+- Fixed [bug](https://github.com/FCP-INDI/C-PAC/issues/1638) in which working connectivity matrix filepaths were generated incorrectly, preventing matrices from being generated, depending on container bindings
+- Fixed broken links in README
+- Fixed [bug](https://github.com/FCP-INDI/C-PAC/issues/1575) in which anatomical-only configurations required functional data directories
+- Fixed [bug](https://github.com/FCP-INDI/C-PAC/issues/1532) in which nuisance regressors would crash when segmentation is off and no CSF mask is provided
+
 ## [1.8.2] - 2021-12-02
 
 ### Added
@@ -41,5 +71,6 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
 
 See [Version 1.8.1 Beta](https://fcp-indi.github.io/docs/user/release_notes/v1.8.1) for release notes for v1.8.1 and [Release Notes](https://fcp-indi.github.io/docs/user/release_notes) for all release notes back to v0.1.1.
 
-[unreleased]: https://github.com/FCP-INDI/C-PAC/compare/v1.8.1...develop
+[1.8.3]: https://github.com/FCP-INDI/C-PAC/releases/tag/v1.8.3
+[1.8.2]: https://github.com/FCP-INDI/C-PAC/releases/tag/v1.8.2
 [1.8.1]: https://github.com/FCP-INDI/C-PAC/releases/tag/v1.8.1
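The "fix random seed" entry above ties into the `pipeline_setup['system_config']['random_seed']` setting that appears in the code diffs below. The following is an illustrative sketch only, not C-PAC's actual seeding API: it shows the general pattern of turning a user-fixed seed into per-tool determinism flags, with `tool_seed_flags` and `ants_seed` as hypothetical names.

```python
# Illustrative sketch -- not C-PAC's API. It mirrors the pattern visible in
# the anat_preproc diff below, where Atropos' own random seeding is enabled
# only when no pipeline-wide seed has been fixed.
def tool_seed_flags(cfg_random_seed):
    """Return per-tool flags for a user-fixed random seed (or None)."""
    seed_is_fixed = cfg_random_seed is not None
    return {
        # Atropos: "fixed but not specified", per the changelog entry above
        'atropos_use_random_seed': not seed_is_fixed,
        # hypothetical flag for the ANTs tools named in the changelog
        'ants_seed': cfg_random_seed if seed_is_fixed else None,
    }


print(tool_seed_flags(77))    # {'atropos_use_random_seed': False, 'ants_seed': 77}
print(tool_seed_flags(None))  # {'atropos_use_random_seed': True, 'ants_seed': None}
```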

CPAC/anat_preproc/anat_preproc.py

Lines changed: 14 additions & 17 deletions
@@ -687,22 +687,19 @@ def fsl_brain_connector(wf, cfg, strat_pool, pipe_num, opt):
 
 def niworkflows_ants_brain_connector(wf, cfg, strat_pool, pipe_num, opt):
     # Skull-stripping using niworkflows-ants
-    anat_skullstrip_ants = init_brain_extraction_wf(tpl_target_path=
-                                                    cfg.anatomical_preproc[
-                                                        'brain_extraction'][
-                                                        'niworkflows-ants'][
-                                                        'template_path'],
-                                                    tpl_mask_path=
-                                                    cfg.anatomical_preproc[
-                                                        'brain_extraction'][
-                                                        'niworkflows-ants'][
-                                                        'mask_path'],
-                                                    tpl_regmask_path=
-                                                    cfg.anatomical_preproc[
-                                                        'brain_extraction'][
-                                                        'niworkflows-ants'][
-                                                        'regmask_path'],
-                                                    name='anat_skullstrip_ants')
+    anat_skullstrip_ants = init_brain_extraction_wf(
+        tpl_target_path=cfg.anatomical_preproc['brain_extraction'][
+            'niworkflows-ants'][
+            'template_path'],
+        tpl_mask_path=cfg.anatomical_preproc['brain_extraction'][
+            'niworkflows-ants'][
+            'mask_path'],
+        tpl_regmask_path=cfg.anatomical_preproc['brain_extraction'][
+            'niworkflows-ants'][
+            'regmask_path'],
+        name='anat_skullstrip_ants',
+        atropos_use_random_seed=cfg.pipeline_setup['system_config'][
+            'random_seed'] is None)
 
     if strat_pool.check_rpool('desc-preproc_T1w') or \
        strat_pool.check_rpool('desc-reorient_T1w') or \
@@ -3191,7 +3188,7 @@ def correct_restore_brain_intensity_abcd(wf, cfg, strat_pool, pipe_num, opt=None):
     wf.connect(node, out, merge_t1_acpc_to_list, 'in3')
 
     merge_t1_acpc = pe.Node(interface=fslMerge(),
-                            name='merge_t1_acpc')
+                            name=f'merge_t1_acpc_{pipe_num}')
 
     merge_t1_acpc.inputs.dimension = 't'
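Two things change here: `init_brain_extraction_wf` now receives an `atropos_use_random_seed` flag derived from the pipeline config, and the `merge_t1_acpc` node name gains a `pipe_num` suffix. Below is a minimal sketch of the node-naming pattern only, under the assumption that the same connector can be instantiated once per pipeline variant inside one workflow; it uses nipype's own engine and an `IdentityInterface` as a stand-in for the `fslMerge` interface in the real code.

```python
# Minimal sketch (assumed usage pattern): suffixing node names with pipe_num
# keeps nipype node names unique when the same connector is built for several
# pipeline variants in one workflow graph.
from nipype.interfaces.utility import IdentityInterface
from nipype.pipeline import engine as pe


def make_merge_t1_acpc(pipe_num):
    # IdentityInterface stands in for fslMerge; only the naming matters here
    return pe.Node(interface=IdentityInterface(fields=['in_files']),
                   name=f'merge_t1_acpc_{pipe_num}')


# distinct names, so both nodes can coexist in the same workflow
nodes = [make_merge_t1_acpc(n) for n in (0, 1)]
print([node.name for node in nodes])  # ['merge_t1_acpc_0', 'merge_t1_acpc_1']
```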

CPAC/anat_preproc/ants.py

Lines changed: 4 additions & 1 deletion
@@ -12,7 +12,7 @@
 
 # general purpose
 from collections import OrderedDict
-from multiprocessing import cpu_count
+from logging import getLogger
 from pkg_resources import resource_filename as pkgr_fn
 from packaging.version import parse as parseversion, Version
 
@@ -440,6 +440,9 @@ def init_atropos_wf(name='atropos_wf',
                             use_random_seed=use_random_seed),
         name='01_atropos', n_procs=omp_nthreads, mem_gb=mem_gb)
 
+    if not use_random_seed:
+        getLogger('random').info('%s # (Atropos constant)', atropos.name)
+
     # massage outputs
     pad_segm = pe.Node(ImageMath(operation='PadImage', op2='%d' % padding),
                        name='02_pad_segm')
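The new block records which Atropos node runs with a constant seed by writing to a logger named `random`. A hedged sketch of how such a logger could be wired to a file handler so those entries land in a dedicated log; the `random_seed.log` filename is an assumption for illustration, not C-PAC's actual logging setup.

```python
# Hedged sketch: attach a FileHandler to the 'random' logger so records like
# "<node name> # (Atropos constant)" (added in init_atropos_wf above) are
# captured in a file. The filename below is assumed, not taken from C-PAC.
import logging

random_logger = logging.getLogger('random')
random_logger.setLevel(logging.INFO)
random_logger.addHandler(logging.FileHandler('random_seed.log'))

# the same call pattern used in the diff above
random_logger.info('%s # (Atropos constant)', '01_atropos')
```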

CPAC/connectome/connectivity_matrix.py

Lines changed: 11 additions & 22 deletions
@@ -1,6 +1,7 @@
 #!/usr/bin/env python
 # -*- coding: utf-8 -*-
 """Functions for creating connectome connectivity matrices."""
+import os
 from warnings import warn
 import numpy as np
 from nilearn.connectome import ConnectivityMeasure
@@ -20,13 +21,11 @@
 }
 
 
-def connectome_name(timeseries, atlas_name, tool, method):
+def connectome_name(atlas_name, tool, method):
     """Helper function to create connectome file filename
 
     Parameters
     ----------
-    timeseries : str
-        path to input timeseries
 
     atlas_name : str
         atlas name
@@ -41,19 +40,9 @@ def connectome_name(timeseries, atlas_name, tool, method):
     -------
     str
     """
-    method = ''.join(word.capitalize() for word in [tool, method])
-    new_filename_parts = [part for part in timeseries.split('_')[:-1][::-1] if
-                          not part.startswith('space-')]
-    atlas_index = len(new_filename_parts) - 1
-    if any(filename_part.startswith('desc-') for filename_part in
-           new_filename_parts):
-        for i, filename_part in enumerate(new_filename_parts):
-            if filename_part.startswith('desc-'):
-                new_filename_parts[-i] = f'desc-{method}'
-                atlas_index = -(i - 1)
-                break
-    new_filename_parts.insert(atlas_index, f'atlas-{atlas_name}')
-    return '_'.join([*new_filename_parts[::-1], 'connectome.tsv'])
+    return os.path.join(os.getcwd(), '_'.join([
+        f'atlas-{atlas_name}', f'desc-{tool}{method}', 'connectome.tsv'
+    ]))
 
 
 def get_connectome_method(method, tool):
@@ -111,7 +100,7 @@ def compute_connectome_nilearn(in_rois, in_file, method, atlas_name):
     numpy.ndarray or NotImplemented
     """
     tool = 'Nilearn'
-    output = connectome_name(in_file, atlas_name, tool, method)
+    output = connectome_name(atlas_name, tool, method)
     method = get_connectome_method(method, tool)
     if method is NotImplemented:
         return NotImplemented
@@ -156,21 +145,21 @@ def create_connectome_afni(name, method, pipe_num):
                                     name='netcorrStripHeader'
                                          f'{method}_{pipe_num}')
 
-    name_output_node = pe.Node(Function(input_names=['timeseries',
-                                                     'atlas_name',
+    name_output_node = pe.Node(Function(input_names=['atlas_name',
                                                      'tool',
                                                      'method'],
                                         output_names=['filename'],
+                                        imports=['import os'],
                                         function=connectome_name),
-                               name=f'connectomeName{method}_{pipe_num}')
+                               name=f'connectomeName{method}_{pipe_num}',
+                               as_module=True)
    name_output_node.inputs.tool = 'Afni'

    wf.connect([
        (inputspec, timeseries_correlation, [('in_rois', 'in_rois'),
                                             ('in_file', 'in_file'),
                                             ('mask', 'mask')]),
-       (inputspec, name_output_node, [('in_file', 'timeseries'),
-                                      ('atlas_name', 'atlas_name'),
+       (inputspec, name_output_node, [('atlas_name', 'atlas_name'),
                                       ('method', 'method')]),
        (timeseries_correlation, strip_header_node, [
            ('out_corr_matrix', 'in_file')]),
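The refactored `connectome_name` no longer parses the input timeseries filename; it builds a path in the node's working directory from the atlas, tool, and method alone, which is what fixes the container-binding-dependent filepath bug referenced in the changelog. A usage sketch with hypothetical argument values, restated standalone for illustration:

```python
# Standalone restatement of the new helper for illustration; the argument
# values are hypothetical. In C-PAC this runs inside a nipype Function node,
# so os.getcwd() resolves to that node's working directory.
import os


def connectome_name(atlas_name, tool, method):
    return os.path.join(os.getcwd(), '_'.join([
        f'atlas-{atlas_name}', f'desc-{tool}{method}', 'connectome.tsv'
    ]))


print(connectome_name('AAL', 'Afni', 'Pearson'))
# e.g. /path/to/node/workdir/atlas-AAL_desc-AfniPearson_connectome.tsv
```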

CPAC/cwas/tests/test_cwas.py

Lines changed: 3 additions & 2 deletions
@@ -78,7 +78,8 @@ def run_cwas(self):
 
         # Read in list of subject functionals
         subjects_list = [
-            l.strip().strip('"') for l in open(sfile).readlines()  # noqa E741
+            l.strip().strip('"') for  # noqa: E741
+            l in open(sfile).readlines()  # pylint: disable=consider-using-with
         ]
 
         # Read in design/regressor file
@@ -93,7 +94,7 @@ def run_cwas(self):
         c.inputs.inputspec.f_samples = nperms
         c.inputs.inputspec.parallel_nodes = 4
         # c.base_dir = op.join(obase, 'results_fs%i_pn%i' % \
-        #     (c.inputs.inputspec.f_samples, c.inputs.inputspec.parallel_nodes))  # noqa E501
+        #     (c.inputs.inputspec.f_samples, c.inputs.inputspec.parallel_nodes))  # noqa: E501  # pylint: disable=line-too-long
         c.base_dir = op.join(self.base, "results_%s.py" % self.name)
 
         # export MKL_NUM_THREADS=X  # in command line
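The committed change keeps the bare `open(sfile).readlines()` call and silences pylint's `consider-using-with` instead. A hedged alternative sketch, not part of the commit, showing the context-manager form that would make the suppression unnecessary:

```python
# Hedged alternative (not the committed change): a with-statement closes the
# subject-list file deterministically. 'sfile' is the same path variable used
# in the test above.
def read_subjects_list(sfile):
    with open(sfile) as subject_list_file:
        return [line.strip().strip('"') for line in subject_list_file]
```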

CPAC/func_preproc/func_ingress.py

Lines changed: 1 addition & 1 deletion
@@ -1,5 +1,5 @@
 from nipype import logging
-logger = logging.getLogger('workflow')
+logger = logging.getLogger('nipype.workflow')
 
 from CPAC.pipeline import nipype_pipeline_engine as pe
 

CPAC/func_preproc/func_preproc.py

Lines changed: 1 addition & 1 deletion
@@ -1,7 +1,7 @@
 from nipype import logging
 from nipype.interfaces import ants
 
-logger = logging.getLogger('workflow')
+logger = logging.getLogger('nipype.workflow')
 
 from CPAC.pipeline import nipype_pipeline_engine as pe
 import nipype.interfaces.fsl as fsl
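Several modules in this commit switch from `logging.getLogger('workflow')` to `logging.getLogger('nipype.workflow')`. A short sketch of the standard-library reasoning behind the qualified name: `nipype.workflow` is a child of the `nipype` logger, so its records propagate to whatever handlers nipype configures, whereas a bare `workflow` logger sits directly under the root and bypasses them.

```python
# Minimal sketch of the logger hierarchy (standard logging behavior, not
# C-PAC-specific): records sent to 'nipype.workflow' propagate to handlers on
# the 'nipype' logger; a bare 'workflow' logger is a separate top-level logger.
import logging

nipype_logger = logging.getLogger('nipype')
nipype_logger.setLevel(logging.INFO)
nipype_logger.addHandler(logging.StreamHandler())

logging.getLogger('nipype.workflow').info('propagates to the nipype handler')
logging.getLogger('workflow').info('separate top-level logger; not captured here')
```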

CPAC/info.py

Lines changed: 4 additions & 5 deletions
@@ -10,7 +10,7 @@
 # version
 _version_major = 1
 _version_minor = 8
-_version_micro = 2
+_version_micro = 3
 _version_extra = ''
 
 
@@ -87,15 +87,15 @@ def get_cpac_gitversion():
 Website
 -------
 
-CPAC website is located here: http://fcp-indi.github.com/
+CPAC website is located here: https://fcp-indi.github.io/
 
 
 Documentation
 -------------
 
-User documentation can be found here: http://fcp-indi.github.com/docs/user/index.html
+User documentation can be found here: https://fcp-indi.github.io/docs/user/index.html
 
-Developer documention can be found here: http://fcp-indi.github.com/docs/developer/index.html
+Developer documentation can be found here: https://fcp-indi.github.io/docs/developer/index.html
 
 
 Documentation pertaining to this latest release can be found here: https://github.com/FCP-INDI/C-PAC/releases/tag/v1.3.0
@@ -161,6 +161,5 @@ def get_cpac_gitversion():
     "simplejson==3.15.0",
     "traits==4.6.0",
     "PyBASC==0.4.5",
-    "pathlib==1.0.1",
     "voluptuous>=0.12.0",
 ]
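The micro version bumps from 2 to 3, yielding the 1.8.3 release string. A hedged sketch of the conventional composition in an `info.py` of this style; the exact `__version__` formatting in `CPAC/info.py` is assumed rather than shown in this diff.

```python
# Hedged sketch of the usual version-string composition; the exact formatting
# used in CPAC/info.py is assumed, not copied from the diff above.
_version_major = 1
_version_minor = 8
_version_micro = 3
_version_extra = ''

__version__ = f'{_version_major}.{_version_minor}.{_version_micro}{_version_extra}'
assert __version__ == '1.8.3'
```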

CPAC/network_centrality/pipeline.py

Lines changed: 1 addition & 1 deletion
@@ -9,7 +9,7 @@
 from CPAC.network_centrality.utils import merge_lists, check_centrality_params
 from CPAC.pipeline.schema import valid_options
 
-logger = logging.getLogger('workflow')
+logger = logging.getLogger('nipype.workflow')
 
 
 def connect_centrality_workflow(workflow, c, resample_functional_to_template,
