Skip to content

Commit c8e09c0

Browse files
authored
Merge pull request #81 from rmarkello/von_economo
[ENH] Adds von Economo-Koskinas atlas fetcher
2 parents 55386eb + dec0540 commit c8e09c0

File tree

6 files changed

+75
-3
lines changed

6 files changed

+75
-3
lines changed

docs/api.rst

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -153,6 +153,7 @@ Functions to download atlases and templates
153153
fetch_pauli2018
154154
fetch_schaefer2018
155155
fetch_hcp_standards
156+
fetch_voneconomo
156157

157158
Functions to download real-world datasets
158159

netneurotools/data/osf.json

Lines changed: 7 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -351,5 +351,12 @@
351351
],
352352
"md5": "d8378f33107ed5d98c27e8070ebb5aa2"
353353
}
354+
},
355+
"atl-voneconomo_koskinas": {
356+
"url": [
357+
"mb37e",
358+
"5ed80005fabc45000d639900"
359+
],
360+
"md5": "28fb2bb04fbb2a1d65428364b48cf6ac"
354361
}
355362
}

netneurotools/datasets/__init__.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -6,12 +6,12 @@
66
'fetch_cammoun2012', 'fetch_pauli2018', 'fetch_fsaverage', 'fetch_conte69',
77
'fetch_connectome', 'available_connectomes', 'fetch_vazquez_rodriguez2019',
88
'fetch_mirchi2018', 'make_correlated_xy', 'fetch_schaefer2018',
9-
'fetch_hcp_standards'
9+
'fetch_hcp_standards', 'fetch_voneconomo'
1010
]
1111

1212
from .fetchers import (fetch_cammoun2012, fetch_pauli2018, fetch_fsaverage,
1313
fetch_conte69, fetch_connectome, available_connectomes,
1414
fetch_vazquez_rodriguez2019, fetch_schaefer2018,
15-
fetch_hcp_standards)
15+
fetch_hcp_standards, fetch_voneconomo)
1616
from .generators import (make_correlated_xy)
1717
from .mirchi import (fetch_mirchi2018)

netneurotools/datasets/fetchers.py

Lines changed: 59 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -608,3 +608,62 @@ def fetch_hcp_standards(data_dir=None, url=None, resume=True, verbose=1):
608608
_fetch_files(data_dir, files=files, resume=resume, verbose=verbose)
609609

610610
return op.join(data_dir, dataset_name)
611+
612+
613+
def fetch_voneconomo(data_dir=None, url=None, resume=True, verbose=1):
    """
    Fetches von Economo-Koskinas probabilistic FreeSurfer atlas

    Parameters
    ----------
    data_dir : str, optional
        Path to use as data directory. If not specified, will check for
        environmental variable 'NNT_DATA'; if that is not set, will use
        `~/nnt-data` instead. Default: None
    url : str, optional
        URL from which to download data. Default: None
    resume : bool, optional
        Whether to attempt to resume partial download, if possible. Default:
        True
    verbose : int, optional
        Modifies verbosity of download, where higher numbers mean more updates.
        Default: 1

    Returns
    -------
    filenames : :class:`sklearn.utils.Bunch`
        Dictionary-like object with keys ['gcs', 'ctab'], where corresponding
        values are length-two tuples of the (left, right) hemisphere
        probabilistic atlas (.gcs) and color table (.ctab) files

    References
    ----------
    Scholtens, L. H., de Reus, M. A., de Lange, S. C., Schmidt, R., & van den
    Heuvel, M. P. (2018). An MRI von Economo-Koskinas atlas. NeuroImage, 170,
    249-256.

    Notes
    -----
    License: CC-BY-NC-SA 4.0
    """

    dataset_name = 'atl-voneconomo_koskinas'
    # output keys double as the file suffixes expected in the archive
    keys = ['gcs', 'ctab']

    data_dir = _get_data_dir(data_dir=data_dir)
    info = _get_dataset_info(dataset_name)
    if url is None:
        url = info['url']
    opts = {
        'uncompress': True,
        'md5sum': info['md5'],
        'move': '{}.tar.gz'.format(dataset_name)
    }
    # files come back ordered [L.gcs, L.ctab, R.gcs, R.ctab] so that the
    # stride-2 slices below pair left/right hemispheres per file type
    filenames = [
        'atl-vonEconomoKoskinas_hemi-{}_probabilistic.{}'.format(hemi, suff)
        for hemi in ['L', 'R'] for suff in keys
    ]
    files = [(op.join(dataset_name, f), url, opts) for f in filenames]
    data = _fetch_files(data_dir, files=files, resume=resume, verbose=verbose)
    # data[0::2] -> (lh, rh) .gcs files; data[1::2] -> (lh, rh) .ctab files
    data = [ANNOT(*data[n::2]) for n in range(len(keys))]

    return Bunch(**dict(zip(keys, data)))

netneurotools/freesurfer.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -109,7 +109,7 @@ def _decode_list(vals):
109109
""" List decoder
110110
"""
111111

112-
return [l.decode() if hasattr(l, 'decode') else l for l in vals]
112+
return [val.decode() if hasattr(val, 'decode') else val for val in vals]
113113

114114

115115
def find_parcel_centroids(*, lhannot, rhannot, method='surface',

netneurotools/tests/test_datasets.py

Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -148,6 +148,11 @@ def test_fetch_hcp_standards(tmpdir):
148148
assert os.path.isdir(hcp)
149149

150150

151+
def test_fetch_voneconomo(tmpdir):
    # smoke test: fetcher returns a Bunch exposing 'gcs' and 'ctab' entries,
    # each holding one file per hemisphere
    bunch = datasets.fetch_voneconomo(data_dir=tmpdir, verbose=0)
    for key in ['gcs', 'ctab']:
        assert hasattr(bunch, key)
        assert len(bunch[key]) == 2
154+
155+
151156
@pytest.mark.parametrize('dset, expected', [
152157
('atl-cammoun2012', ['fsaverage', 'fsaverage5', 'fsaverage6', 'fslr32k',
153158
'MNI152NLin2009aSym', 'gcs']),

0 commit comments

Comments
 (0)