|
3 | 3 | Functions for fetching datasets from the internet |
4 | 4 | """ |
5 | 5 |
|
| 6 | +from collections import namedtuple |
6 | 7 | import itertools |
7 | 8 | import json |
8 | 9 | import os.path as op |
|
15 | 16 | from .utils import _get_data_dir, _get_dataset_info |
16 | 17 | from ..utils import check_fs_subjid |
17 | 18 |
|
| 19 | +ANNOT = namedtuple('Surface', ('lh', 'rh')) |
| 20 | + |
18 | 21 |
|
19 | 22 | def fetch_cammoun2012(version='MNI152NLin2009aSym', data_dir=None, url=None, |
20 | 23 | resume=True, verbose=1): |
@@ -134,7 +137,7 @@ def fetch_cammoun2012(version='MNI152NLin2009aSym', data_dir=None, url=None, |
134 | 137 | if version == 'MNI152NLin2009aSym': |
135 | 138 | keys += ['info'] |
136 | 139 | elif version in ('fslr32k', 'fsaverage', 'fsaverage5', 'fsaverage6'): |
137 | | - data = [data[i:i + 2] for i in range(0, len(data), 2)] |
| 140 | + data = [ANNOT(*data[i:i + 2]) for i in range(0, len(data), 2)] |
138 | 141 | else: |
139 | 142 | data = [data[::2][i:i + 2] for i in range(0, len(data) // 2, 2)] |
140 | 143 | # deal with the fact that last scale is split into three files :sigh: |
@@ -209,7 +212,7 @@ def fetch_conte69(data_dir=None, url=None, resume=True, verbose=1): |
209 | 212 | data[-1] = json.load(src) |
210 | 213 |
|
211 | 214 | # bundle hemispheres together |
212 | | - data = [data[:-1][i:i + 2] for i in range(0, 6, 2)] + [data[-1]] |
| 215 | + data = [ANNOT(*data[:-1][i:i + 2]) for i in range(0, 6, 2)] + [data[-1]] |
213 | 216 |
|
214 | 217 | return Bunch(**dict(zip(keys + ['info'], data))) |
215 | 218 |
|
@@ -336,7 +339,7 @@ def fetch_fsaverage(version='fsaverage', data_dir=None, url=None, resume=True, |
336 | 339 | files=[(op.join(dataset_name, f), url, opts) |
337 | 340 | for f in filenames]) |
338 | 341 |
|
339 | | - data = [data[i:i + 2] for i in range(0, len(keys) * 2, 2)] |
| 342 | + data = [ANNOT(*data[i:i + 2]) for i in range(0, len(keys) * 2, 2)] |
340 | 343 |
|
341 | 344 | return Bunch(**dict(zip(keys, data))) |
342 | 345 |
|
@@ -561,7 +564,7 @@ def fetch_schaefer2018(version='fsaverage', data_dir=None, url=None, |
561 | 564 | data = _fetch_files(data_dir, files=files, resume=resume, verbose=verbose) |
562 | 565 |
|
563 | 566 | if suffix == 'annot': |
564 | | - data = [data[i:i + 2] for i in range(0, len(keys) * 2, 2)] |
| 567 | + data = [ANNOT(*data[i:i + 2]) for i in range(0, len(keys) * 2, 2)] |
565 | 568 |
|
566 | 569 | return Bunch(**dict(zip(keys, data))) |
567 | 570 |
|
|
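With this change each left/right hemisphere pair returned by the fetchers is a `Surface` namedtuple rather than a plain list, so the two files can be addressed by field name while positional access and unpacking keep working. A minimal standalone sketch of the new behaviour, using only the `ANNOT` helper defined above (the `/path/to/lh.pial` and `/path/to/rh.pial` strings are placeholder file names, not real dataset paths):

```python
from collections import namedtuple

# mirrors the helper introduced in the diff above
ANNOT = namedtuple('Surface', ('lh', 'rh'))

# what the fetchers now do with each two-element slice of downloaded files
pair = ANNOT(*['/path/to/lh.pial', '/path/to/rh.pial'])

# hemispheres are addressable by name ...
print(pair.lh, pair.rh)

# ... and the pair is still a tuple, so existing unpacking code keeps working
lh, rh = pair
assert (lh, rh) == pair
```

Downstream, something like `fetch_fsaverage()['pial'].lh` should therefore work wherever `fetch_fsaverage()['pial'][0]` did before (assuming `'pial'` is among the keys in the returned `Bunch`).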