Commit 266de4a

ref+tst: some refactoring, initial testing
1 parent f47b637 commit 266de4a

4 files changed: +128 -35 lines changed

heudiconv/convert.py

Lines changed: 23 additions & 22 deletions
@@ -242,7 +242,7 @@ def convert(items, converter, scaninfo_suffix, custom_callable, with_prov,
     if not isinstance(outtypes, (list, tuple)):
         outtypes = (outtypes,)

-    prefix_dirname = op.dirname(prefix + '.ext')
+    prefix_dirname = op.dirname(prefix)
     outname_bids = prefix + '.json'
     bids_outfiles = []
     lgr.info('Converting %s (%d DICOMs) -> %s . '
@@ -442,8 +442,7 @@ def save_converted_files(res, item_dicoms, bids, outtype, prefix, outname_bids,
     """
     from nipype.interfaces.base import isdefined

-    prefix_dirname = op.dirname(prefix + '.ext')
-    prefix_basename = op.basename(prefix)
+    prefix_dirname, prefix_basename = op.split(prefix)

     bids_outfiles = []
     res_files = res.outputs.converted_files
@@ -475,8 +474,8 @@ def save_converted_files(res, item_dicoms, bids, outtype, prefix, outname_bids,
         # Also copy BIDS files although they might need to
         # be merged/postprocessed later
         bids_files = sorted(res.outputs.bids
-                        if len(res.outputs.bids) == len(res_files)
-                        else [None] * len(res_files))
+                            if len(res.outputs.bids) == len(res_files)
+                            else [None] * len(res_files))

         ### Do we have a multi-echo series? ###
         # Some Siemens sequences (e.g. CMRR's MB-EPI) set the label 'TE1',
@@ -488,23 +487,24 @@ def save_converted_files(res, item_dicoms, bids, outtype, prefix, outname_bids,
         # series. To do that, the most straightforward way is to read the
         # echo times for all bids_files and see if they are all the same or not.

-        # Get the echo times while not breaking non-BIDS compliance
-        echo_times = []
+        # Check for echotime information
+        echo_times = set()
+
         for bids_file in bids_files:
             if bids_file:
-                echo_times.append(load_json(bids_file).get('EchoTime'))
+                # check for varying EchoTimes
+                echot = load_json(bids_file).get('EchoTime', None)
+                if echot is not None:
+                    echo_times.add(echot)

         # To see if the echo times are the same, convert it to a set and see if
-        # only one remains:
-        multiecho = False
-        if echo_times:
-            multiecho = len(set(echo_times)) == 1
+        # only one remains:
+        is_multiecho = len(echo_times) >= 1 if echo_times else False

         ### Loop through the bids_files, set the output name and save files
-
         for fl, suffix, bids_file in zip(res_files, suffixes, bids_files):
-            # load the json file info:
-            # TODO: time performance
+
+            # TODO: monitor conversion duration
             if bids_file:
                 fileinfo = load_json(bids_file)
@@ -515,7 +515,7 @@ def save_converted_files(res, item_dicoms, bids, outtype, prefix, outname_bids,
             # _sbref sequences reconstructing magnitude and phase generate
             # two NIfTI files IN THE SAME SERIES, so we cannot just add
             # the suffix, if we want to be bids compliant:
-            if (bids_file and (this_prefix_basename.endswith('_sbref'))):
+            if bids_file and this_prefix_basename.endswith('_sbref'):
                 # Check to see if it is magnitude or phase reconstruction:
                 if 'M' in fileinfo.get('ImageType'):
                     mag_or_phase = 'magnitude'
@@ -525,7 +525,7 @@ def save_converted_files(res, item_dicoms, bids, outtype, prefix, outname_bids,
                     mag_or_phase = suffix

                 # Insert reconstruction label
-                if not (("_rec-%s" % mag_or_phase) in this_prefix_basename):
+                if not ("_rec-%s" % mag_or_phase) in this_prefix_basename:

                     # If "_rec-" is specified, prepend the 'mag_or_phase' value.
                     if ('_rec-' in this_prefix_basename):
@@ -548,23 +548,23 @@ def save_converted_files(res, item_dicoms, bids, outtype, prefix, outname_bids,
             # (Note: it can be _sbref and multiecho, so don't use "elif"):
             # For multi-echo sequences, we have to specify the echo number in
             # the file name:
-            if bids and multiecho:
+            if bids_file and is_multiecho:
                 # Get the EchoNumber from json file info. If not present, it's echo-1
                 echo_number = fileinfo.get('EchoNumber', 1)


-                supported_multiecho = ['_bold', '_sbref', '_T1w'] # epi?
+                supported_multiecho = ['_bold', '_epi', '_sbref', '_T1w']
                 # Now, decide where to insert it.
                 # Insert it **before** the following string(s), whichever appears first.
-                for imgtype in ['_bold', '_sbref', '_T1w']:
+                for imgtype in supported_multiecho:
                     if (imgtype in this_prefix_basename):
                         this_prefix_basename = this_prefix_basename.replace(
                             imgtype, "_echo-%d%s" % (echo_number, imgtype)
                         )
                         break

             # For Scout runs with multiple NIfTI images per run:
-            if (bids and ('scout' in this_prefix_basename.lower())):
+            if bids and 'scout' in this_prefix_basename.lower():
                 # in some cases (more than one slice slab), there are several
                 # NIfTI images in the scout run, so distinguish them with "_acq-"
                 spt = this_prefix_basename.split('_acq-Scout', 1)
@@ -573,7 +573,7 @@ def save_converted_files(res, item_dicoms, bids, outtype, prefix, outname_bids,
             # Fallback option:
             # If we have failed to modify this_prefix_basename, because it didn't fall
            # into any of the options above, just add the suffix at the end:
-            if ( this_prefix_basename == prefix_basename ):
+            if this_prefix_basename == prefix_basename:
                 this_prefix_basename += suffix

             # Finally, form the outname by stitching the directory and outtype:
@@ -586,6 +586,7 @@ def save_converted_files(res, item_dicoms, bids, outtype, prefix, outname_bids,
                 outname_bids_file = "%s.json" % (outname)
                 safe_copyfile(bids_file, outname_bids_file, overwrite)
                 bids_outfiles.append(outname_bids_file)
+
     # res_files is not a list
     else:
         outname = "{}.{}".format(prefix, outtype)
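Editor's note: the hunks above change how a multi-echo series is detected (distinct EchoTime values collected into a set) and how the echo label is spliced into the BIDS basename. Below is a minimal standalone sketch of that logic for readers skimming the diff; the sidecars list, the example EchoTime values, and the insert_echo_label helper are hypothetical illustrations, not heudiconv code.

sidecars = [  # hypothetical sidecar metadata for one multi-echo series
    {'EchoNumber': 1, 'EchoTime': 0.0132},
    {'EchoNumber': 2, 'EchoTime': 0.0317},
    {'EchoNumber': 3, 'EchoTime': 0.0502},
]

# Collect the distinct echo times, mirroring the new echo_times set above
echo_times = {s.get('EchoTime') for s in sidecars if s.get('EchoTime') is not None}
is_multiecho = len(echo_times) >= 1 if echo_times else False

supported_multiecho = ['_bold', '_epi', '_sbref', '_T1w']

def insert_echo_label(basename, echo_number):
    # Insert "_echo-N" before the first supported suffix found in the basename
    for imgtype in supported_multiecho:
        if imgtype in basename:
            return basename.replace(imgtype, "_echo-%d%s" % (echo_number, imgtype))
    return basename

if is_multiecho:
    for s in sidecars:
        print(insert_echo_label('sub-01_task-test_run-1_bold',
                                s.get('EchoNumber', 1)))
# sub-01_task-test_run-1_echo-1_bold
# sub-01_task-test_run-1_echo-2_bold
# sub-01_task-test_run-1_echo-3_bold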

heudiconv/heuristics/bids-ME.py

Lines changed: 24 additions & 0 deletions
@@ -0,0 +1,24 @@
+import os
+
+def create_key(template, outtype=('nii.gz',), annotation_classes=None):
+    if template is None or not template:
+        raise ValueError('Template must be a valid format string')
+    return template, outtype, annotation_classes
+
+def infotodict(seqinfo):
+    """Heuristic evaluator for determining which runs belong where
+
+    allowed template fields - follow python string module:
+
+    item: index within category
+    subject: participant id
+    seqitem: run number during scanning
+    subindex: sub index within group
+    """
+    bold = create_key('sub-{subject}/func/sub-{subject}_task-test_run-{item}_bold')
+
+    info = {bold: []}
+    for s in seqinfo:
+        if '_ME_' in s.series_description:
+            info[bold].append(s.series_id)
+    return info
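Editor's note: a quick way to see what this heuristic selects, assuming create_key and infotodict above are in scope (loaded as a module). The MockSeqInfo namedtuple only mocks the two seqinfo attributes the heuristic actually reads, and the series values are made up.

from collections import namedtuple

MockSeqInfo = namedtuple('MockSeqInfo', ['series_id', 'series_description'])

seqinfo = [
    MockSeqInfo('2-anat-T1w', 'anat-T1w'),
    MockSeqInfo('5-func_task_ME_run1', 'func_task_ME_run1'),  # contains '_ME_'
    MockSeqInfo('6-func_task_run1', 'func_task_run1'),
]

info = infotodict(seqinfo)
# {('sub-{subject}/func/sub-{subject}_task-test_run-{item}_bold', ('nii.gz',), None):
#  ['5-func_task_ME_run1']}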

heudiconv/tests/test_regression.py

Lines changed: 43 additions & 5 deletions
@@ -1,6 +1,7 @@
 """Testing conversion with conversion saved on datalad"""
 import json
 from glob import glob
+import os.path as op

 import pytest

@@ -11,8 +12,8 @@
 except ImportError:
     have_datalad = False

-import heudiconv
 from heudiconv.cli.run import main as runner
+from heudiconv.utils import load_json
 # testing utilities
 from .utils import fetch_data, gen_heudiconv_args

@@ -24,12 +25,19 @@
 def test_conversion(tmpdir, subject, heuristic, anon_cmd):
     tmpdir.chdir()
     try:
-        datadir = fetch_data(tmpdir.strpath, subject)
+        datadir = fetch_data(tmpdir.strpath,
+                             "dbic/QA", # path from datalad database root
+                             getpath=op.join('sourcedata', subject))
     except IncompleteResultsError as exc:
         pytest.skip("Failed to fetch test data: %s" % str(exc))
     outdir = tmpdir.mkdir('out').strpath

-    args = gen_heudiconv_args(datadir, outdir, subject, heuristic, anon_cmd)
+    args = gen_heudiconv_args(datadir,
+                              outdir,
+                              subject,
+                              heuristic,
+                              anon_cmd,
+                              template=op.join('sourcedata/{subject}/*/*/*.tgz'))
     runner(args) # run conversion

     # verify functionals were converted
@@ -38,8 +46,38 @@ def test_conversion(tmpdir, subject, heuristic, anon_cmd):

     # compare some json metadata
     json_ = '{}/task-rest_acq-24mm64sl1000tr32te600dyn_bold.json'.format
-    orig, conv = (json.load(open(json_(datadir))),
-                  json.load(open(json_(outdir))))
+    orig, conv = (load_json(json_(datadir)),
+                  load_json(json_(outdir)))
     keys = ['EchoTime', 'MagneticFieldStrength', 'Manufacturer', 'SliceTiming']
     for key in keys:
         assert orig[key] == conv[key]
+
+@pytest.mark.skipif(not have_datalad, reason="no datalad")
+def test_multiecho(tmpdir, subject='MEEPI', heuristic='bids-ME.py'):
+    tmpdir.chdir()
+    try:
+        datadir = fetch_data(tmpdir.strpath, "dicoms/velasco/MEEPI")
+    except IncompleteResultsError as exc:
+        pytest.skip("Failed to fetch test data: %s" % str(exc))
+
+    outdir = tmpdir.mkdir('out').strpath
+    args = gen_heudiconv_args(datadir, outdir, subject, heuristic)
+    runner(args) # run conversion
+
+    # check if we have echo functionals
+    echoes = glob(op.join('out', 'sub-' + subject, 'func', '*echo*nii.gz'))
+    assert len(echoes) == 3
+
+    # check EchoTime of each functional
+    # ET1 < ET2 < ET3
+    prev_echo = 0
+    for echo in sorted(echoes):
+        _json = echo.replace('.nii.gz', '.json')
+        assert _json
+        echotime = load_json(_json).get('EchoTime', None)
+        assert echotime > prev_echo
+        prev_echo = echotime
+
+    events = glob(op.join('out', 'sub-' + subject, 'func', '*events.tsv'))
+    for event in events:
+        assert 'echo-' not in event
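Editor's note: the heart of test_multiecho is the monotonic-EchoTime check. The sketch below runs that check in isolation against a hypothetical filename-to-metadata mapping, so no datalad, dcm2niix, or actual conversion is needed; the filenames follow the naming the heuristic plus echo labelling would produce, but the EchoTime values are illustrative only.

fake_sidecars = {  # hypothetical converted sidecars, echo-1..echo-3
    'sub-MEEPI_task-test_run-1_echo-1_bold.json': {'EchoTime': 0.0132},
    'sub-MEEPI_task-test_run-1_echo-2_bold.json': {'EchoTime': 0.0317},
    'sub-MEEPI_task-test_run-1_echo-3_bold.json': {'EchoTime': 0.0502},
}

prev_echo = 0
for name in sorted(fake_sidecars):          # echo-1, echo-2, echo-3
    echotime = fake_sidecars[name].get('EchoTime', None)
    assert echotime > prev_echo             # echo times must strictly increase
    prev_echo = echotime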

heudiconv/tests/utils.py

Lines changed: 38 additions & 8 deletions
@@ -5,14 +5,24 @@
 TESTS_DATA_PATH = op.join(op.dirname(__file__), 'data')


-def gen_heudiconv_args(datadir, outdir, subject, heuristic_file, anon_cmd=None, xargs=None):
+def gen_heudiconv_args(datadir, outdir, subject, heuristic_file,
+                       anon_cmd=None, template=None, xargs=None):
     heuristic = op.realpath(op.join(HEURISTICS_PATH, heuristic_file))
-    args = ["-d", op.join(datadir, 'sourcedata/{subject}/*/*/*.tgz'),
+
+    if template:
+        # use --dicom_dir_template
+        args = ["-d", op.join(datadir, template)]
+    else:
+        args = ["--files", datadir]
+
+    args.extend([
         "-c", "dcm2niix",
         "-o", outdir,
         "-s", subject,
         "-f", heuristic,
-        "--bids",]
+        "--bids",
+        "--minmeta",]
+        )
     if anon_cmd:
         args += ["--anon-cmd", op.join(op.dirname(__file__), anon_cmd), "-a", outdir]
     if xargs:
@@ -21,10 +31,30 @@ def gen_heudiconv_args(datadir, outdir, subject, heuristic_file, anon_cmd=None,
     return args


-def fetch_data(tmpdir, subject):
-    """Fetches some test dicoms using datalad"""
+def fetch_data(tmpdir, dataset, getpath=None):
+    """
+    Utility function to interface with datalad database.
+    Performs datalad `install` and datalad `get` operations.
+
+    Parameters
+    ----------
+    tmpdir : str
+        directory to temporarily store data
+    dataset : str
+        dataset path from `http://datasets-tests.datalad.org`
+    getpath : str [optional]
+        exclusive path to get
+
+    Returns
+    -------
+    targetdir : str
+        directory with installed dataset
+    """
     from datalad import api
-    targetdir = op.join(tmpdir, 'QA')
-    api.install(path=targetdir, source='http://datasets-tests.datalad.org/dbic/QA')
-    api.get('{}/sourcedata/{}'.format(targetdir, subject))
+    targetdir = op.join(tmpdir, op.basename(dataset))
+    api.install(path=targetdir,
+                source='http://datasets-tests.datalad.org/{}'.format(dataset))
+
+    getdir = targetdir + (op.sep + getpath if getpath is not None else '')
+    api.get(getdir)
     return targetdir
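Editor's note: for reference, the two call styles gen_heudiconv_args now supports, assuming it is imported from heudiconv.tests.utils. The directory paths below are hypothetical and the resolved heuristic path is elided.

from heudiconv.tests.utils import gen_heudiconv_args

# No template -> pass the data directory straight through with --files
gen_heudiconv_args('/tmp/MEEPI', '/tmp/out', 'MEEPI', 'bids-ME.py')
# ['--files', '/tmp/MEEPI', '-c', 'dcm2niix', '-o', '/tmp/out',
#  '-s', 'MEEPI', '-f', '<resolved bids-ME.py path>', '--bids', '--minmeta']

# With template -> build a --dicom_dir_template glob rooted at the dataset
gen_heudiconv_args('/tmp/QA', '/tmp/out', 'qa', 'bids-ME.py',
                   template='sourcedata/{subject}/*/*/*.tgz')
# ['-d', '/tmp/QA/sourcedata/{subject}/*/*/*.tgz', '-c', 'dcm2niix', '-o', '/tmp/out',
#  '-s', 'qa', '-f', '<resolved bids-ME.py path>', '--bids', '--minmeta']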
