Skip to content

Commit 47df638

Browse files
committed
[REF] Adds fs_LR_32k Cammoun 2012 atlases
1 parent 8532cc1 commit 47df638

File tree

4 files changed

+146
-50
lines changed

4 files changed

+146
-50
lines changed

netneurotools/data/osf.json

Lines changed: 14 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -5,35 +5,42 @@
55
"mb37e",
66
"5ce6bb4423fec40017e82c5e"
77
],
8-
"md5": "c3435f2720da6a74c3d55db54ebdbfff"
8+
"md5": "266c4520af768e766328fb8e6648005d"
99
},
1010
"fsaverage": {
1111
"url": [
1212
"mb37e",
1313
"5ce6c30523fec40017e83439"
1414
],
15-
"md5": "478599b362a88198396fdb15ad999f9e"
15+
"md5": "2a19eb4744c0ce6c243f721bd43ecff0"
1616
},
1717
"fsaverage5": {
1818
"url": [
1919
"mb37e",
2020
"5e189a1c57341903868036dd"
2121
],
22-
"md5": "e96a456f14fd30f26734a7dfbcbb18e0"
22+
"md5": "2afb22e1887d47f1ca81c340fff7692b"
2323
},
2424
"fsaverage6": {
2525
"url": [
2626
"mb37e",
2727
"5e189a1b5734190380804072"
2828
],
29-
"md5": "b220fb658e1abdfdf8bdb55b26f2acf3"
29+
"md5": "1df743bff13316f67bd41d13ec691c97"
3030
},
31-
"volume": {
31+
"MNI152NLin2009aSym": {
3232
"url": [
3333
"mb37e",
34-
"5ce6bb438d6e05001860abca"
34+
"5e2f4bf0e71ef800301880c2"
3535
],
36-
"md5": "088fb64b397557dfa01901f04f4cd9d2"
36+
"md5": "9da30bad22d732aa5f00a6d178d087c4"
37+
},
38+
"fslr32k": {
39+
"url": [
40+
"mb37e",
41+
"5e2f4bf1e71ef80027189c56"
42+
],
43+
"md5": "a5177319d5e0b8825a91d503ded1a59e"
3744
}
3845
},
3946
"atl-pauli2018": [

netneurotools/datasets/__init__.py

Lines changed: 4 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -5,11 +5,13 @@
55
__all__ = [
66
'fetch_cammoun2012', 'fetch_pauli2018', 'fetch_fsaverage', 'fetch_conte69',
77
'fetch_connectome', 'available_connectomes', 'fetch_vazquez_rodriguez2019',
8-
'fetch_mirchi2018', 'make_correlated_xy', 'fetch_schaefer2018'
8+
'fetch_mirchi2018', 'make_correlated_xy', 'fetch_schaefer2018',
9+
'fetch_hcp_standards'
910
]
1011

1112
from .fetchers import (fetch_cammoun2012, fetch_pauli2018, fetch_fsaverage,
1213
fetch_conte69, fetch_connectome, available_connectomes,
13-
fetch_vazquez_rodriguez2019, fetch_schaefer2018)
14+
fetch_vazquez_rodriguez2019, fetch_schaefer2018,
15+
fetch_hcp_standards)
1416
from .generators import (make_correlated_xy)
1517
from .mirchi import (fetch_mirchi2018)

netneurotools/datasets/fetchers.py

Lines changed: 83 additions & 25 deletions
Original file line numberDiff line numberDiff line change
@@ -5,7 +5,6 @@
55

66
import itertools
77
import json
8-
import os
98
import os.path as op
109
import warnings
1110

@@ -17,19 +16,21 @@
1716
from ..utils import check_fs_subjid
1817

1918

20-
def fetch_cammoun2012(version='volume', data_dir=None, url=None, resume=True,
21-
verbose=1):
19+
def fetch_cammoun2012(version='MNI152NLin2009aSym', data_dir=None, url=None,
20+
resume=True, verbose=1):
2221
"""
2322
Downloads files for Cammoun et al., 2012 multiscale parcellation
2423
2524
Parameters
2625
----------
27-
version : {'volume', 'fsaverage', 'fsaverage5', 'fsaverage6', 'gcs'}
28-
Specifies which version of the dataset to download, where 'volume' will
29-
return .nii.gz atlas files defined in MNI152 space, 'fsaverageX' will
30-
return .annot files defined in fsaverageX space (FreeSurfer 6.0.1), and
31-
'gcs' will return FreeSurfer-style .gcs probabilistic atlas files for
32-
generating new, subject-specific parcellations
26+
version : str, optional
27+
Specifies which version of the dataset to download, where
28+
'MNI152NLin2009aSym' will return .nii.gz atlas files defined in MNI152
29+
space, 'fsaverageX' will return .annot files defined in fsaverageX
30+
space (FreeSurfer 6.0.1), 'fslr32k' will return .label.gii files in
31+
fs_LR_32k HCP space, and 'gcs' will return FreeSurfer-style .gcs
32+
probabilistic atlas files for generating new, subject-specific
33+
parcellations. Default: 'MNI152NLin2009aSym'
3334
data_dir : str, optional
3435
Path to use as data directory. If not specified, will check for
3536
environmental variable 'NNT_DATA'; if that is not set, will use
@@ -62,8 +63,23 @@ def fetch_cammoun2012(version='volume', data_dir=None, url=None, resume=True,
6263
License: https://raw.githubusercontent.com/LTS5/cmp/master/COPYRIGHT
6364
"""
6465

66+
if version == 'surface':
67+
warnings.warn('Providing `version="surface"` is deprecated and will '
68+
'be removed in a future release. For consistent '
69+
'behavior please use `version="fsaverage"` instead.',
70+
DeprecationWarning, stacklevel=2)
71+
version = 'fsaverage'
72+
elif version == 'volume':
73+
warnings.warn('Providing `version="volume"` is deprecated and will '
74+
'be removed in a future release. For consistent '
75+
'behavior please use `version="MNI152NLin2009aSym"` '
76+
'instead.',
77+
DeprecationWarning, stacklevel=2)
78+
version = 'MNI152NLin2009aSym'
79+
6580
versions = [
66-
'volume', 'surface', 'gcs', 'fsaverage', 'fsaverage5', 'fsaverage6'
81+
'gcs', 'fsaverage', 'fsaverage5', 'fsaverage6', 'fslr32k',
82+
'MNI152NLin2009aSym'
6783
]
6884
if version not in versions:
6985
raise ValueError('The version of Cammoun et al., 2012 parcellation '
@@ -73,13 +89,6 @@ def fetch_cammoun2012(version='volume', data_dir=None, url=None, resume=True,
7389
dataset_name = 'atl-cammoun2012'
7490
keys = ['scale033', 'scale060', 'scale125', 'scale250', 'scale500']
7591

76-
if version == 'surface':
77-
warnings.warn('Providing `version="surface"` is deprecated and will '
78-
'be removed in a future release. For consistent '
79-
'behavior please use `version="fsaverage"` instead. ',
80-
DeprecationWarning, stacklevel=2)
81-
version = 'fsaverage'
82-
8392
data_dir = _get_data_dir(data_dir=data_dir)
8493
info = _get_dataset_info(dataset_name)[version]
8594
if url is None:
@@ -92,11 +101,17 @@ def fetch_cammoun2012(version='volume', data_dir=None, url=None, resume=True,
92101
}
93102

94103
# filenames differ based on selected version of dataset
95-
if version == 'volume':
104+
if version == 'MNI152NLin2009aSym':
96105
filenames = [
97106
'atl-Cammoun2012_space-MNI152NLin2009aSym_res-{}_deterministic{}'
98107
.format(res[-3:], suff) for res in keys for suff in ['.nii.gz']
99108
] + ['atl-Cammoun2012_space-MNI152NLin2009aSym_info.csv']
109+
elif version == 'fslr32k':
110+
filenames = [
111+
'atl-Cammoun2012_space-fslr32k_res-{}_hemi-{}_deterministic{}'
112+
.format(res[-3:], hemi, suff) for res in keys
113+
for hemi in ['L', 'R'] for suff in ['.label.gii']
114+
]
100115
elif version in ('fsaverage', 'fsaverage5', 'fsaverage6'):
101116
filenames = [
102117
'atl-Cammoun2012_space-{}_res-{}_hemi-{}_deterministic{}'
@@ -111,12 +126,14 @@ def fetch_cammoun2012(version='volume', data_dir=None, url=None, resume=True,
111126
for hemi in ['L', 'R'] for suff in ['.gcs', '.ctab']
112127
]
113128

114-
files = [(os.path.join(dataset_name, f), url, opts) for f in filenames]
129+
files = [
130+
(op.join(dataset_name, version, f), url, opts) for f in filenames
131+
]
115132
data = _fetch_files(data_dir, files=files, resume=resume, verbose=verbose)
116133

117-
if version == 'volume':
134+
if version == 'MNI152NLin2009aSym':
118135
keys += ['info']
119-
elif version in ('fsaverage', 'fsaverage5', 'fsaverage6'):
136+
elif version in ('fslr32k', 'fsaverage', 'fsaverage5', 'fsaverage6'):
120137
data = [data[i:i + 2] for i in range(0, len(data), 2)]
121138
else:
122139
data = [data[::2][i:i + 2] for i in range(0, len(data) // 2, 2)]
@@ -313,7 +330,7 @@ def fetch_fsaverage(version='fsaverage', data_dir=None, url=None, resume=True,
313330

314331
try:
315332
data_dir = check_fs_subjid(version)[1]
316-
data = [os.path.join(data_dir, f) for f in filenames]
333+
data = [op.join(data_dir, f) for f in filenames]
317334
except FileNotFoundError:
318335
data = _fetch_files(data_dir, resume=resume, verbose=verbose,
319336
files=[(op.join(dataset_name, f), url, opts)
@@ -393,7 +410,7 @@ def fetch_connectome(dataset, data_dir=None, url=None, resume=True,
393410
}
394411

395412
filenames = [
396-
os.path.join(dataset, '{}.csv'.format(fn)) for fn in info['keys']
413+
op.join(dataset, '{}.csv'.format(fn)) for fn in info['keys']
397414
] + [op.join(dataset, 'ref.txt')]
398415
data = _fetch_files(data_dir, files=[(f, url, opts) for f in filenames],
399416
resume=resume, verbose=verbose)
@@ -454,7 +471,7 @@ def fetch_vazquez_rodriguez2019(data_dir=None, url=None, resume=True,
454471
}
455472

456473
filenames = [
457-
os.path.join(dataset_name, 'rsquared_gradient.csv')
474+
op.join(dataset_name, 'rsquared_gradient.csv')
458475
]
459476
data = _fetch_files(data_dir, files=[(f, url, opts) for f in filenames],
460477
resume=resume, verbose=verbose)
@@ -534,10 +551,51 @@ def fetch_schaefer2018(version='fsaverage', data_dir=None, url=None,
534551
.format(version, hemi, desc) for desc in keys for hemi in ['L', 'R']
535552
]
536553

537-
files = [(os.path.join(dataset_name, version, f), url, opts)
554+
files = [(op.join(dataset_name, version, f), url, opts)
538555
for f in filenames]
539556
data = _fetch_files(data_dir, files=files, resume=resume, verbose=verbose)
540557

541558
data = [data[i:i + 2] for i in range(0, len(keys) * 2, 2)]
542559

543560
return Bunch(**dict(zip(keys, data)))
561+
562+
563+
def fetch_hcp_standards(data_dir=None, url=None, resume=True, verbose=1):
564+
"""
565+
Fetches HCP standard mesh atlases for converting between FreeSurfer and HCP
566+
567+
Parameters
568+
----------
569+
data_dir : str, optional
570+
Path to use as data directory. If not specified, will check for
571+
environmental variable 'NNT_DATA'; if that is not set, will use
572+
`~/nnt-data` instead. Default: None
573+
url : str, optional
574+
URL from which to download data. Default: None
575+
resume : bool, optional
576+
Whether to attempt to resume partial download, if possible. Default:
577+
True
578+
verbose : int, optional
579+
Modifies verbosity of download, where higher numbers mean more updates.
580+
Default: 1
581+
582+
Returns
583+
-------
584+
standards : str
585+
Filepath to standard_mesh_atlases directory
586+
"""
587+
if url is None:
588+
url = 'http://brainvis.wustl.edu/workbench/standard_mesh_atlases.zip'
589+
dataset_name = 'standard_mesh_atlases'
590+
data_dir = _get_data_dir(data_dir=data_dir)
591+
opts = {
592+
'uncompress': True,
593+
'move': '{}.zip'.format(dataset_name)
594+
}
595+
filenames = [
596+
'L.sphere.32k_fs_LR.surf.gii', 'R.sphere.32k_fs_LR.surf.gii'
597+
]
598+
files = [(op.join(dataset_name, f), url, opts) for f in filenames]
599+
_fetch_files(data_dir, files=files, resume=resume, verbose=verbose)
600+
601+
return op.join(data_dir, dataset_name)

resources/generate_atl-cammoun2012_surface.py

Lines changed: 45 additions & 16 deletions
Original file line numberDiff line numberDiff line change
@@ -24,8 +24,21 @@
2424
from netneurotools.utils import check_fs_subjid, run
2525

2626

27-
cmd = 'mri_surf2surf --srcsubject fsaverage --trgsubject {trgsubject} ' \
28-
'--sval-annot {annot} --tval {tval} --hemi {hemi} --seed 1234'
27+
ANNOT = 'atl-Cammoun2012_space-fsaverage_res-{}_hemi-{}_deterministic.annot'
28+
SURFCMD = 'mri_surf2surf --srcsubject fsaverage --trgsubject {trgsubject} ' \
29+
'--sval-annot {annot} --tval {tval} --hemi {hemi} --seed 1234'
30+
GIICMD = 'mris_convert --annot {annot} {white} {gii}'
31+
HCPCMD = 'wb_command -label-resample {annot} ' \
32+
'{path}/resample_fsaverage/' \
33+
'fsaverage{vers}_std_sphere.{hemi}.{ires}k_fsavg_{hemi}.surf.gii ' \
34+
'{path}/resample_fsaverage/' \
35+
'fs_LR-deformed_to-fsaverage.{hemi}.sphere.{ores}k_fs_LR.surf.gii ' \
36+
'ADAP_BARY_AREA {out} -area-metrics ' \
37+
'{path}/resample_fsaverage/' \
38+
'fsaverage{vers}.{hemi}.midthickness_va_avg.{ires}k_fsavg_{hemi}.' \
39+
'shape.gii ' \
40+
'{path}/resample_fsaverage/' \
41+
'fs_LR.{hemi}.midthickness_va_avg.{ores}k_fs_LR.shape.gii'
2942

3043

3144
def combine_cammoun_500(lhannot, rhannot, subject_id, annot=None,
@@ -136,10 +149,6 @@ def combine_cammoun_500(lhannot, rhannot, subject_id, annot=None,
136149
return created
137150

138151

139-
FSUBJ = 'fsaverage'
140-
ANNOT = 'atl-Cammoun2012_space-{}_res-{}_hemi-{}_deterministic.annot'
141-
ANNOT = ANNOT.format(FSUBJ, '{}', '{}')
142-
143152
if __name__ == '__main__':
144153
#####
145154
# get the GCS files and apply them onto the fsaverage surface
@@ -148,18 +157,18 @@ def combine_cammoun_500(lhannot, rhannot, subject_id, annot=None,
148157
for fn in gcsfiles:
149158
hemi = re.search('hemi-([RL])', fn).group(1)
150159
scale = re.search('res-(.*)_hemi-', fn).group(1)
151-
out = op.join(op.dirname(fn), ANNOT.format(scale, hemi))
152-
freesurfer.apply_prob_atlas(FSUBJ, fn, hemi.lower() + 'h',
160+
dirname = op.join(op.dirname(op.dirname(fn)), 'fsaverage')
161+
out = op.join(dirname, ANNOT.format(scale, hemi))
162+
freesurfer.apply_prob_atlas('fsaverage', fn, hemi.lower() + 'h',
153163
ctab=fn.replace('.gcs', '.ctab'),
154164
annot=out)
155165

156166
#####
157167
# get scale 500 parcellation files and combine
158-
dirname = op.dirname(fn)
159168
lh = sorted(glob.glob(op.join(dirname, ANNOT.format('500*', 'L'))))
160169
rh = sorted(glob.glob(op.join(dirname, ANNOT.format('500*', 'R'))))
161170
annot500 = op.join(dirname, ANNOT.format('500', '{}'))
162-
parc500 = combine_cammoun_500(lh, rh, FSUBJ, annot=annot500)
171+
parc500 = combine_cammoun_500(lh, rh, 'fsaverage', annot=annot500)
163172
for fn in lh + rh:
164173
os.remove(fn)
165174

@@ -191,21 +200,41 @@ def combine_cammoun_500(lhannot, rhannot, subject_id, annot=None,
191200

192201
#####
193202
# this should work now!
194-
annotations = datasets.fetch_cammoun2012('surface')
203+
annotations = datasets.fetch_cammoun2012('fsaverage')
195204

196205
# map (via surf2surf) fsaverage to fsaverage5/6 so we can provide those
197206
for trg in ['fsaverage5', 'fsaverage6']:
198207
for scale, (lh, rh) in annotations.items():
199208
for annot, hemi in [(lh, 'lh'), (rh, 'rh')]:
200209
tval = annot.replace('space-fsaverage', 'space-{}'.format(trg))
201-
210+
tval = tval.replace('/fsaverage/', '/{}/'.format(trg))
202211
msg = f'Generating annotation file: {tval}'
203212
print(msg, end='\r', flush=True)
204213

205-
run(cmd.format(trgsubject=trg,
206-
annot=annot,
207-
tval=tval,
208-
hemi=hemi),
214+
run(SURFCMD.format(trgsubject=trg,
215+
annot=annot,
216+
tval=tval,
217+
hemi=hemi),
209218
quiet=True)
210219

211220
print(' ' * len(msg) + '\b' * len(msg), end='', flush=True)
221+
222+
hcp = datasets.fetch_hcp_standards()
223+
fsaverage = datasets.fetch_fsaverage()
224+
for scale, (lh, rh) in annotations.items():
225+
for annot, hemi in [(lh, 'lh'), (rh, 'rh')]:
226+
outdir = op.join(op.dirname(op.dirname(annot)), 'fslr32k')
227+
gii = annot.replace('.annot', '.label.gii')
228+
white = fsaverage['white'][0 if hemi == 'lh' else 1]
229+
fname = op.basename(gii).replace('fsaverage', 'fslr32k')
230+
msg = f'Generating fslr32k file: {fname}'
231+
print(msg, end='\r', flush=True)
232+
run(GIICMD.format(annot=annot, white=white, gii=gii),
233+
quiet=True)
234+
run(HCPCMD.format(annot=gii, path=hcp, vers='',
235+
hemi=hemi[0].capitalize(),
236+
ires='164', ores='32',
237+
out=op.join(outdir, fname)),
238+
quiet=True)
239+
240+
print(' ' * len(msg) + '\b' * len(msg), end='', flush=True)

0 commit comments

Comments
 (0)