
Commit e9f5a3e

ENH: improve sorting of more complicated .PAR/.REC files
1 parent: 87375ea

File tree: 3 files changed, +124 -17 lines

  bin/parrec2nii
  nibabel/parrec.py
  nibabel/tests/test_parrec.py


bin/parrec2nii

Lines changed: 9 additions & 1 deletion
@@ -113,6 +113,13 @@ def get_opt_parser():
                 default=False,
                 help=one_line("""Overwrite file if it exists. Default:
                                  False""")))
+    p.add_option(
+        Option("--strict-sort", action="store_true", dest="strict_sort",
+               default=False,
+               help=one_line("""Use additional keys in determining the order
+                    to sort the slices within the .REC file. This may be necessary
+                    for more complicated scans with multiple echos,
+                    cardiac phases, ASL label states, etc.""")))
     return p


@@ -148,7 +155,8 @@ def proc_file(infile, opts):
     infile = fname_ext_ul_case(infile)
     pr_img = pr.load(infile,
                      permit_truncated=opts.permit_truncated,
-                     scaling=scaling)
+                     scaling=scaling,
+                     strict_sort=opts.strict_sort)
     pr_hdr = pr_img.header
     affine = pr_hdr.get_affine(origin=opts.origin)
     slope, intercept = pr_hdr.get_data_scaling(scaling)
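A usage sketch of the new converter flag (the input file name here is hypothetical); without --strict-sort the converter keeps the previous default slice ordering:

    parrec2nii --strict-sort my_multiecho_scan.PAR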

nibabel/parrec.py

Lines changed: 90 additions & 14 deletions
@@ -618,7 +618,8 @@ def __getitem__(self, slicer):
 class PARRECHeader(SpatialHeader):
     """PAR/REC header"""

-    def __init__(self, info, image_defs, permit_truncated=False):
+    def __init__(self, info, image_defs, permit_truncated=False,
+                 strict_sort=False):
         """
         Parameters
         ----------
@@ -631,10 +632,14 @@ def __init__(self, info, image_defs, permit_truncated=False):
         permit_truncated : bool, optional
             If True, a warning is emitted instead of an error when a truncated
             recording is detected.
+        strict_sort : bool, optional, keyword-only
+            If True, a larger number of header fields are used while sorting
+            the REC data array.
         """
         self.general_info = info.copy()
         self.image_defs = image_defs.copy()
         self.permit_truncated = permit_truncated
+        self.strict_sort = strict_sort
         _truncation_checks(info, image_defs, permit_truncated)
         # charge with basic properties to be able to use base class
         # functionality
@@ -660,14 +665,16 @@ def from_header(klass, header=None):
                              'non-PARREC header.')

     @classmethod
-    def from_fileobj(klass, fileobj, permit_truncated=False):
+    def from_fileobj(klass, fileobj, permit_truncated=False,
+                     strict_sort=False):
         info, image_defs = parse_PAR_header(fileobj)
-        return klass(info, image_defs, permit_truncated)
+        return klass(info, image_defs, permit_truncated, strict_sort)

     def copy(self):
         return PARRECHeader(deepcopy(self.general_info),
                             self.image_defs.copy(),
-                            self.permit_truncated)
+                            self.permit_truncated,
+                            self.strict_sort)

     def as_analyze_map(self):
         """Convert PAR parameters to NIFTI1 format"""
@@ -1003,20 +1010,81 @@ def get_sorted_slice_indices(self):
         If the recording is truncated, the returned indices take care of
         discarding any slice indices from incomplete volumes.

+        If `self.strict_sort` is True, a more complicated sorting based on
+        multiple fields from the .PAR file is used.
+
         Returns
         -------
         slice_indices : list
             List for indexing into the last (third) dimension of the REC data
             array, and (equivalently) the only dimension of
             ``self.image_defs``.
         """
-        slice_nos = self.image_defs['slice number']
-        is_full = vol_is_full(slice_nos, self.general_info['max_slices'])
-        keys = (slice_nos, vol_numbers(slice_nos), np.logical_not(is_full))
-        # Figure out how many we need to remove from the end, and trim them
-        # Based on our sorting, they should always be last
+        if not self.strict_sort:
+            slice_nos = self.image_defs['slice number']
+            is_full = vol_is_full(slice_nos, self.general_info['max_slices'])
+            keys = (slice_nos, vol_numbers(slice_nos), np.logical_not(is_full))
+            sort_order = np.lexsort(keys)
+        else:
+            # Sort based on a larger number of keys. This is more complicated
+            # but works for .PAR files that get missorted by the above method.
+            slice_nos = self.image_defs['slice number']
+            dynamics = self.image_defs['dynamic scan number']
+            phases = self.image_defs['cardiac phase number']
+            echos = self.image_defs['echo number']
+
+            # try adding keys only present in a subset of .PAR files
+            try:
+                # only present in PAR v4.2+
+                asl_labels = self.image_defs['label type']
+                asl_keys = (asl_labels, )
+            except (KeyError, ValueError):
+                asl_keys = ()
+            if self.general_info['diffusion'] != 0:
+                try:
+                    # only present in .PAR v4.1+
+                    bvals = self.image_defs['diffusion b value number']
+                    bvecs = self.image_defs['gradient orientation number']
+                except (KeyError, ValueError):
+                    bvals = self.image_defs['diffusion_b_factor']
+                    # use hash to get a single sortable value per direction
+                    bvecs = [hash(tuple(a))
+                             for a in self.image_defs['diffusion'].tolist()]
+                diffusion_keys = (bvecs, bvals)
+            else:
+                diffusion_keys = ()
+
+            # Define the desired sort order (last key is highest precedence)
+            keys = (slice_nos, echos, phases) + \
+                diffusion_keys + asl_keys + (dynamics, )
+
+            # Data sorting is done in two stages:
+            #   - run an initial sort using the keys defined above
+            #   - call vol_is_full to identify potentially missing volumes
+            #   - call vol_numbers to assign unique volume numbers if for some
+            #     reason the keys defined above don't provide a unique sort
+            #     order (e.g. this occurs for the trace volume in DTI)
+            #   - run a final sort using the vol_numbers and is_full keys
+            initial_sort_order = np.lexsort(keys)
+            is_full = vol_is_full(slice_nos[initial_sort_order],
+                                  self.general_info['max_slices'])
+            vol_nos = vol_numbers(slice_nos[initial_sort_order])
+
+            # have to "unsort" is_full and vol_nos to match the other sort keys
+            unsort_indices = np.argsort(initial_sort_order)
+            is_full = is_full[unsort_indices]
+            vol_nos = np.asarray(vol_nos)[unsort_indices]
+
+            # final set of sort keys
+            keys += (vol_nos, np.logical_not(is_full))
+            sort_order = np.lexsort(keys)
+
+        # Figure out how many we need to remove from the end, and trim them.
+        # Based on our sorting, they should always be last.
         n_used = np.prod(self.get_data_shape()[2:])
-        return np.lexsort(keys)[:n_used]
+        return sort_order[:n_used]


 class PARRECImage(SpatialImage):
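A note on the key ordering used above: np.lexsort treats the last key in the sequence as the primary sort key, which is why dynamics comes last and slice_nos first. A minimal, self-contained sketch with toy values (not taken from any .PAR file) of how the initial sort behaves:

    import numpy as np

    # toy keys: 3 slices acquired over 2 dynamics, interleaved on disk
    slice_nos = np.array([1, 1, 2, 2, 3, 3])
    dynamics = np.array([1, 2, 1, 2, 1, 2])

    # last key (dynamics) is the primary key; slice_nos breaks ties
    order = np.lexsort((slice_nos, dynamics))
    print(order)             # [0 2 4 1 3 5]
    print(slice_nos[order])  # [1 2 3 1 2 3]
    print(dynamics[order])   # [1 1 1 2 2 2]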
@@ -1033,7 +1101,7 @@ class PARRECImage(SpatialImage):
     @classmethod
     @kw_only_meth(1)
     def from_file_map(klass, file_map, mmap=True, permit_truncated=False,
-                      scaling='dv'):
+                      scaling='dv', strict_sort=False):
         """ Create PARREC image from file map `file_map`

         Parameters
@@ -1054,11 +1122,15 @@ def from_file_map(klass, file_map, mmap=True, permit_truncated=False,
         scaling : {'dv', 'fp'}, optional, keyword-only
             Scaling method to apply to data (see
             :meth:`PARRECHeader.get_data_scaling`).
+        strict_sort : bool, optional, keyword-only
+            If True, a larger number of header fields are used while sorting
+            the REC data array.
         """
         with file_map['header'].get_prepare_fileobj('rt') as hdr_fobj:
             hdr = klass.header_class.from_fileobj(
                 hdr_fobj,
-                permit_truncated=permit_truncated)
+                permit_truncated=permit_truncated,
+                strict_sort=strict_sort)
         rec_fobj = file_map['image'].get_prepare_fileobj()
         data = klass.ImageArrayProxy(rec_fobj, hdr,
                                      mmap=mmap, scaling=scaling)
@@ -1068,7 +1140,7 @@ def from_file_map(klass, file_map, mmap=True, permit_truncated=False,
     @classmethod
     @kw_only_meth(1)
     def from_filename(klass, filename, mmap=True, permit_truncated=False,
-                      scaling='dv'):
+                      scaling='dv', strict_sort=False):
         """ Create PARREC image from filename `filename`

         Parameters
@@ -1088,12 +1160,16 @@ def from_filename(klass, filename, mmap=True, permit_truncated=False,
         scaling : {'dv', 'fp'}, optional, keyword-only
             Scaling method to apply to data (see
             :meth:`PARRECHeader.get_data_scaling`).
+        strict_sort : bool, optional, keyword-only
+            If True, a larger number of header fields are used while sorting
+            the REC data array.
         """
         file_map = klass.filespec_to_file_map(filename)
         return klass.from_file_map(file_map,
                                    mmap=mmap,
                                    permit_truncated=permit_truncated,
-                                   scaling=scaling)
+                                   scaling=scaling,
+                                   strict_sort=strict_sort)

     load = from_filename
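As a usage sketch of how the new keyword threads through the loaders above (the file name is hypothetical; strict_sort is keyword-only because of the kw_only_meth decorator):

    import nibabel.parrec as pr

    # default (strict_sort=False) keeps the previous sorting behaviour
    img = pr.load('multi_echo_asl.PAR', strict_sort=True)

    # or work with the header alone
    with open('multi_echo_asl.PAR', 'rt') as fobj:
        hdr = pr.PARRECHeader.from_fileobj(fobj, strict_sort=True)
    slice_order = hdr.get_sorted_slice_indices()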

nibabel/tests/test_parrec.py

Lines changed: 25 additions & 2 deletions
@@ -113,7 +113,6 @@
                       [0., 0., 3.3, -64.35],
                       [0., 0., 0., 1.]]),
 }
-
 # Original values for b values in DTI.PAR, still in PSL orientation
 DTI_PAR_BVECS = np.array([[-0.667, -0.667, -0.333],
                           [-0.333, 0.667, -0.667],
@@ -259,6 +258,10 @@ def test_get_sorted_slice_indices():
     hdr = PARRECHeader(HDR_INFO, HDR_DEFS[:-1], permit_truncated=True)
     assert_array_equal(hdr.get_sorted_slice_indices(), range(n_slices - 9))

+    # different result when strict_sort=True
+    hdr = PARRECHeader(HDR_INFO, HDR_DEFS[::-1], strict_sort=True)
+    assert_array_equal(hdr.get_sorted_slice_indices(), range(n_slices)[::-1])
+

 def test_vol_number():
     # Test algorithm for calculating volume number
@@ -340,6 +343,25 @@ def test_diffusion_parameters():
     assert_almost_equal(dti_hdr.get_q_vectors(), bvals[:, None] * bvecs)


+def test_diffusion_parameters_strict_sort():
+    # Check getting diffusion parameters from diffusion example
+    dti_par = pjoin(DATA_PATH, 'DTI.PAR')
+    with open(dti_par, 'rt') as fobj:
+        dti_hdr = PARRECHeader.from_fileobj(fobj, strict_sort=True)
+    assert_equal(dti_hdr.get_data_shape(), (80, 80, 10, 8))
+    assert_equal(dti_hdr.general_info['diffusion'], 1)
+    bvals, bvecs = dti_hdr.get_bvals_bvecs()
+    assert_almost_equal(bvals, np.sort(DTI_PAR_BVALS))
+    # DTI_PAR_BVECS gives bvecs copied from first slice of each vol in DTI.PAR
+    # Permute to match bvec directions to acquisition directions
+    # note that bval sorting occurs prior to bvec sorting
+    assert_almost_equal(bvecs,
+                        DTI_PAR_BVECS[
+                            np.ix_(np.argsort(DTI_PAR_BVALS), [2, 0, 1])])
+    # Check q vectors
+    assert_almost_equal(dti_hdr.get_q_vectors(), bvals[:, None] * bvecs)
+
+
 def test_diffusion_parameters_v4():
     dti_v4_par = pjoin(DATA_PATH, 'DTIv40.PAR')
     with open(dti_v4_par, 'rt') as fobj:
@@ -530,9 +552,10 @@ class FakeHeader(object):
     """ Minimal API of header for PARRECArrayProxy
     """

-    def __init__(self, shape, dtype):
+    def __init__(self, shape, dtype, strict_sort=False):
         self._shape = shape
         self._dtype = np.dtype(dtype)
+        self.strict_sort = strict_sort

     def get_data_shape(self):
         return self._shape
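To exercise the new tests locally, something like the following should work (assuming the nose-based test runner nibabel used at the time):

    nosetests nibabel/tests/test_parrec.py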
