Skip to content

Commit 7d7b022

Browse files
authored
Merge pull request #71 from rmarkello/cammoun_hcp
[REF] Adds fs_LR_32k Cammoun 2012 atlases
2 parents 8532cc1 + 37cf605 commit 7d7b022

File tree

7 files changed

+158
-55
lines changed

7 files changed

+158
-55
lines changed

netneurotools/data/osf.json

Lines changed: 14 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -5,35 +5,42 @@
55
"mb37e",
66
"5ce6bb4423fec40017e82c5e"
77
],
8-
"md5": "c3435f2720da6a74c3d55db54ebdbfff"
8+
"md5": "266c4520af768e766328fb8e6648005d"
99
},
1010
"fsaverage": {
1111
"url": [
1212
"mb37e",
1313
"5ce6c30523fec40017e83439"
1414
],
15-
"md5": "478599b362a88198396fdb15ad999f9e"
15+
"md5": "2a19eb4744c0ce6c243f721bd43ecff0"
1616
},
1717
"fsaverage5": {
1818
"url": [
1919
"mb37e",
2020
"5e189a1c57341903868036dd"
2121
],
22-
"md5": "e96a456f14fd30f26734a7dfbcbb18e0"
22+
"md5": "2afb22e1887d47f1ca81c340fff7692b"
2323
},
2424
"fsaverage6": {
2525
"url": [
2626
"mb37e",
2727
"5e189a1b5734190380804072"
2828
],
29-
"md5": "b220fb658e1abdfdf8bdb55b26f2acf3"
29+
"md5": "1df743bff13316f67bd41d13ec691c97"
3030
},
31-
"volume": {
31+
"MNI152NLin2009aSym": {
3232
"url": [
3333
"mb37e",
34-
"5ce6bb438d6e05001860abca"
34+
"5e2f4bf0e71ef800301880c2"
3535
],
36-
"md5": "088fb64b397557dfa01901f04f4cd9d2"
36+
"md5": "9da30bad22d732aa5f00a6d178d087c4"
37+
},
38+
"fslr32k": {
39+
"url": [
40+
"mb37e",
41+
"5e2f4bf1e71ef80027189c56"
42+
],
43+
"md5": "a5177319d5e0b8825a91d503ded1a59e"
3744
}
3845
},
3946
"atl-pauli2018": [

netneurotools/datasets/__init__.py

Lines changed: 4 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -5,11 +5,13 @@
55
__all__ = [
66
'fetch_cammoun2012', 'fetch_pauli2018', 'fetch_fsaverage', 'fetch_conte69',
77
'fetch_connectome', 'available_connectomes', 'fetch_vazquez_rodriguez2019',
8-
'fetch_mirchi2018', 'make_correlated_xy', 'fetch_schaefer2018'
8+
'fetch_mirchi2018', 'make_correlated_xy', 'fetch_schaefer2018',
9+
'fetch_hcp_standards'
910
]
1011

1112
from .fetchers import (fetch_cammoun2012, fetch_pauli2018, fetch_fsaverage,
1213
fetch_conte69, fetch_connectome, available_connectomes,
13-
fetch_vazquez_rodriguez2019, fetch_schaefer2018)
14+
fetch_vazquez_rodriguez2019, fetch_schaefer2018,
15+
fetch_hcp_standards)
1416
from .generators import (make_correlated_xy)
1517
from .mirchi import (fetch_mirchi2018)

netneurotools/datasets/fetchers.py

Lines changed: 83 additions & 25 deletions
Original file line numberDiff line numberDiff line change
@@ -5,7 +5,6 @@
55

66
import itertools
77
import json
8-
import os
98
import os.path as op
109
import warnings
1110

@@ -17,19 +16,21 @@
1716
from ..utils import check_fs_subjid
1817

1918

20-
def fetch_cammoun2012(version='volume', data_dir=None, url=None, resume=True,
21-
verbose=1):
19+
def fetch_cammoun2012(version='MNI152NLin2009aSym', data_dir=None, url=None,
20+
resume=True, verbose=1):
2221
"""
2322
Downloads files for Cammoun et al., 2012 multiscale parcellation
2423
2524
Parameters
2625
----------
27-
version : {'volume', 'fsaverage', 'fsaverage5', 'fsaverage6', 'gcs'}
28-
Specifies which version of the dataset to download, where 'volume' will
29-
return .nii.gz atlas files defined in MNI152 space, 'fsaverageX' will
30-
return .annot files defined in fsaverageX space (FreeSurfer 6.0.1), and
31-
'gcs' will return FreeSurfer-style .gcs probabilistic atlas files for
32-
generating new, subject-specific parcellations
26+
version : str, optional
27+
Specifies which version of the dataset to download, where
28+
'MNI152NLin2009aSym' will return .nii.gz atlas files defined in MNI152
29+
space, 'fsaverageX' will return .annot files defined in fsaverageX
30+
space (FreeSurfer 6.0.1), 'fslr32k' will return .label.gii files in
31+
fs_LR_32k HCP space, and 'gcs' will return FreeSurfer-style .gcs
32+
probabilistic atlas files for generating new, subject-specific
33+
parcellations. Default: 'MNI152NLin2009aSym'
3334
data_dir : str, optional
3435
Path to use as data directory. If not specified, will check for
3536
environmental variable 'NNT_DATA'; if that is not set, will use
@@ -62,8 +63,23 @@ def fetch_cammoun2012(version='volume', data_dir=None, url=None, resume=True,
6263
License: https://raw.githubusercontent.com/LTS5/cmp/master/COPYRIGHT
6364
"""
6465

66+
if version == 'surface':
67+
warnings.warn('Providing `version="surface"` is deprecated and will '
68+
'be removed in a future release. For consistent '
69+
'behavior please use `version="fsaverage"` instead.',
70+
DeprecationWarning, stacklevel=2)
71+
version = 'fsaverage'
72+
elif version == 'volume':
73+
warnings.warn('Providing `version="volume"` is deprecated and will '
74+
'be removed in a future release. For consistent '
75+
'behavior please use `version="MNI152NLin2009aSym"` '
76+
'instead.',
77+
DeprecationWarning, stacklevel=2)
78+
version = 'MNI152NLin2009aSym'
79+
6580
versions = [
66-
'volume', 'surface', 'gcs', 'fsaverage', 'fsaverage5', 'fsaverage6'
81+
'gcs', 'fsaverage', 'fsaverage5', 'fsaverage6', 'fslr32k',
82+
'MNI152NLin2009aSym'
6783
]
6884
if version not in versions:
6985
raise ValueError('The version of Cammoun et al., 2012 parcellation '
@@ -73,13 +89,6 @@ def fetch_cammoun2012(version='volume', data_dir=None, url=None, resume=True,
7389
dataset_name = 'atl-cammoun2012'
7490
keys = ['scale033', 'scale060', 'scale125', 'scale250', 'scale500']
7591

76-
if version == 'surface':
77-
warnings.warn('Providing `version="surface"` is deprecated and will '
78-
'be removed in a future release. For consistent '
79-
'behavior please use `version="fsaverage"` instead. ',
80-
DeprecationWarning, stacklevel=2)
81-
version = 'fsaverage'
82-
8392
data_dir = _get_data_dir(data_dir=data_dir)
8493
info = _get_dataset_info(dataset_name)[version]
8594
if url is None:
@@ -92,11 +101,17 @@ def fetch_cammoun2012(version='volume', data_dir=None, url=None, resume=True,
92101
}
93102

94103
# filenames differ based on selected version of dataset
95-
if version == 'volume':
104+
if version == 'MNI152NLin2009aSym':
96105
filenames = [
97106
'atl-Cammoun2012_space-MNI152NLin2009aSym_res-{}_deterministic{}'
98107
.format(res[-3:], suff) for res in keys for suff in ['.nii.gz']
99108
] + ['atl-Cammoun2012_space-MNI152NLin2009aSym_info.csv']
109+
elif version == 'fslr32k':
110+
filenames = [
111+
'atl-Cammoun2012_space-fslr32k_res-{}_hemi-{}_deterministic{}'
112+
.format(res[-3:], hemi, suff) for res in keys
113+
for hemi in ['L', 'R'] for suff in ['.label.gii']
114+
]
100115
elif version in ('fsaverage', 'fsaverage5', 'fsaverage6'):
101116
filenames = [
102117
'atl-Cammoun2012_space-{}_res-{}_hemi-{}_deterministic{}'
@@ -111,12 +126,14 @@ def fetch_cammoun2012(version='volume', data_dir=None, url=None, resume=True,
111126
for hemi in ['L', 'R'] for suff in ['.gcs', '.ctab']
112127
]
113128

114-
files = [(os.path.join(dataset_name, f), url, opts) for f in filenames]
129+
files = [
130+
(op.join(dataset_name, version, f), url, opts) for f in filenames
131+
]
115132
data = _fetch_files(data_dir, files=files, resume=resume, verbose=verbose)
116133

117-
if version == 'volume':
134+
if version == 'MNI152NLin2009aSym':
118135
keys += ['info']
119-
elif version in ('fsaverage', 'fsaverage5', 'fsaverage6'):
136+
elif version in ('fslr32k', 'fsaverage', 'fsaverage5', 'fsaverage6'):
120137
data = [data[i:i + 2] for i in range(0, len(data), 2)]
121138
else:
122139
data = [data[::2][i:i + 2] for i in range(0, len(data) // 2, 2)]
@@ -313,7 +330,7 @@ def fetch_fsaverage(version='fsaverage', data_dir=None, url=None, resume=True,
313330

314331
try:
315332
data_dir = check_fs_subjid(version)[1]
316-
data = [os.path.join(data_dir, f) for f in filenames]
333+
data = [op.join(data_dir, f) for f in filenames]
317334
except FileNotFoundError:
318335
data = _fetch_files(data_dir, resume=resume, verbose=verbose,
319336
files=[(op.join(dataset_name, f), url, opts)
@@ -393,7 +410,7 @@ def fetch_connectome(dataset, data_dir=None, url=None, resume=True,
393410
}
394411

395412
filenames = [
396-
os.path.join(dataset, '{}.csv'.format(fn)) for fn in info['keys']
413+
op.join(dataset, '{}.csv'.format(fn)) for fn in info['keys']
397414
] + [op.join(dataset, 'ref.txt')]
398415
data = _fetch_files(data_dir, files=[(f, url, opts) for f in filenames],
399416
resume=resume, verbose=verbose)
@@ -454,7 +471,7 @@ def fetch_vazquez_rodriguez2019(data_dir=None, url=None, resume=True,
454471
}
455472

456473
filenames = [
457-
os.path.join(dataset_name, 'rsquared_gradient.csv')
474+
op.join(dataset_name, 'rsquared_gradient.csv')
458475
]
459476
data = _fetch_files(data_dir, files=[(f, url, opts) for f in filenames],
460477
resume=resume, verbose=verbose)
@@ -534,10 +551,51 @@ def fetch_schaefer2018(version='fsaverage', data_dir=None, url=None,
534551
.format(version, hemi, desc) for desc in keys for hemi in ['L', 'R']
535552
]
536553

537-
files = [(os.path.join(dataset_name, version, f), url, opts)
554+
files = [(op.join(dataset_name, version, f), url, opts)
538555
for f in filenames]
539556
data = _fetch_files(data_dir, files=files, resume=resume, verbose=verbose)
540557

541558
data = [data[i:i + 2] for i in range(0, len(keys) * 2, 2)]
542559

543560
return Bunch(**dict(zip(keys, data)))
561+
562+
563+
def fetch_hcp_standards(data_dir=None, url=None, resume=True, verbose=1):
    """
    Fetches HCP standard mesh atlases for converting between FreeSurfer and HCP

    Parameters
    ----------
    data_dir : str, optional
        Path to use as data directory. If not specified, will check for
        environmental variable 'NNT_DATA'; if that is not set, will use
        `~/nnt-data` instead. Default: None
    url : str, optional
        URL from which to download data. Default: None
    resume : bool, optional
        Whether to attempt to resume partial download, if possible. Default:
        True
    verbose : int, optional
        Modifies verbosity of download, where higher numbers mean more updates.
        Default: 1

    Returns
    -------
    standards : str
        Filepath to standard_mesh_atlases directory
    """
    if url is None:
        url = 'http://brainvis.wustl.edu/workbench/standard_mesh_atlases.zip'
    dataset_name = 'standard_mesh_atlases'
    data_dir = _get_data_dir(data_dir=data_dir)
    opts = {
        'uncompress': True,
        'move': '{}.zip'.format(dataset_name)
    }
    # only these two sphere files are checked for; if either is missing the
    # entire zip archive is (re-)downloaded and unpacked into `dataset_name`
    filenames = [
        'L.sphere.32k_fs_LR.surf.gii', 'R.sphere.32k_fs_LR.surf.gii'
    ]
    files = [(op.join(dataset_name, f), url, opts) for f in filenames]
    _fetch_files(data_dir, files=files, resume=resume, verbose=verbose)

    # callers get the directory, not the individual files
    return op.join(data_dir, dataset_name)

netneurotools/tests/test_datasets.py

Lines changed: 9 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -72,10 +72,11 @@ def test_fetch_fsaverage(tmpdir, version):
7272

7373

7474
@pytest.mark.parametrize('version, expected', [
75-
('volume', [1, 1, 1, 1, 1]),
75+
('MNI152NLin2009aSym', [1, 1, 1, 1, 1]),
7676
('fsaverage', [2, 2, 2, 2, 2]),
7777
('fsaverage5', [2, 2, 2, 2, 2]),
7878
('fsaverage6', [2, 2, 2, 2, 2]),
79+
('fslr32k', [2, 2, 2, 2, 2]),
7980
('gcs', [2, 2, 2, 2, 6])
8081
])
8182
def test_fetch_cammoun2012(tmpdir, version, expected):
@@ -139,9 +140,14 @@ def test_fetch_schaefer2018(tmpdir, version):
139140
for k in keys)
140141

141142

143+
def test_fetch_hcp_standards(tmpdir):
144+
hcp = datasets.fetch_hcp_standards(data_dir=tmpdir, verbose=0)
145+
assert os.path.isdir(hcp)
146+
147+
142148
@pytest.mark.parametrize('dset, expected', [
143-
('atl-cammoun2012', ['volume', 'fsaverage', 'fsaverage5', 'fsaverage6',
144-
'gcs']),
149+
('atl-cammoun2012', ['fsaverage', 'fsaverage5', 'fsaverage6', 'fslr32k',
150+
'MNI152NLin2009aSym', 'gcs']),
145151
('tpl-conte69', ['url', 'md5']),
146152
('atl-pauli2018', ['url', 'md5', 'name']),
147153
('tpl-fsaverage', ['fsaverage' + f for f in ['', '3', '4', '5', '6']]),

netneurotools/tests/test_utils.py

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -33,7 +33,8 @@ def test_add_triu():
3333
])
3434
def test_get_centroids(tmpdir, scale, expected):
3535
# fetch test dataset
36-
cammoun = datasets.fetch_cammoun2012('volume', data_dir=tmpdir, verbose=0)
36+
cammoun = datasets.fetch_cammoun2012('MNI152NLin2009aSym', data_dir=tmpdir,
37+
verbose=0)
3738

3839
ijk = utils.get_centroids(cammoun[scale])
3940
xyz = utils.get_centroids(cammoun[scale], image_space=True)

netneurotools/utils.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -238,7 +238,7 @@ def get_centroids(img, labels=None, image_space=False):
238238
from nilearn._utils import check_niimg_3d
239239

240240
img = check_niimg_3d(img)
241-
data = img.get_data()
241+
data = np.asarray(img.dataobj)
242242

243243
if labels is None:
244244
labels = np.trim_zeros(np.unique(data))

0 commit comments

Comments (0)