@@ -618,7 +618,8 @@ def __getitem__(self, slicer):
618
618
class PARRECHeader (SpatialHeader ):
619
619
"""PAR/REC header"""
620
620
621
- def __init__ (self , info , image_defs , permit_truncated = False ):
621
+ def __init__ (self , info , image_defs , permit_truncated = False ,
622
+ strict_sort = False ):
622
623
"""
623
624
Parameters
624
625
----------
@@ -631,10 +632,14 @@ def __init__(self, info, image_defs, permit_truncated=False):
631
632
permit_truncated : bool, optional
632
633
If True, a warning is emitted instead of an error when a truncated
633
634
recording is detected.
635
+ strict_sort : bool, optional, keyword-only
636
+ If True, a larger number of header fields are used while sorting
637
+ the REC data array.
634
638
"""
635
639
self .general_info = info .copy ()
636
640
self .image_defs = image_defs .copy ()
637
641
self .permit_truncated = permit_truncated
642
+ self .strict_sort = strict_sort
638
643
_truncation_checks (info , image_defs , permit_truncated )
639
644
# charge with basic properties to be able to use base class
640
645
# functionality
@@ -660,14 +665,16 @@ def from_header(klass, header=None):
660
665
'non-PARREC header.' )
661
666
662
667
@classmethod
def from_fileobj(klass, fileobj, permit_truncated=False,
                 strict_sort=False):
    """Create a PARREC header by parsing the PAR file in `fileobj`.

    Parameters
    ----------
    fileobj : file-like
        Readable text file object containing a .PAR header.
    permit_truncated : bool, optional
        If True, a warning is emitted instead of an error when a
        truncated recording is detected.
    strict_sort : bool, optional
        If True, a larger number of header fields are used while
        sorting the REC data array.

    Returns
    -------
    Instance of `klass` built from the parsed header.
    """
    general_info, image_defs = parse_PAR_header(fileobj)
    return klass(general_info, image_defs, permit_truncated, strict_sort)
666
672
667
673
def copy(self):
    """Return an independent copy of this header.

    The general info mapping is deep-copied and the image definitions
    array is copied, so mutating the returned header does not affect
    this instance. The `permit_truncated` and `strict_sort` settings
    are carried over unchanged.
    """
    duplicated_info = deepcopy(self.general_info)
    duplicated_defs = self.image_defs.copy()
    return PARRECHeader(duplicated_info,
                        duplicated_defs,
                        self.permit_truncated,
                        self.strict_sort)
671
678
672
679
def as_analyze_map (self ):
673
680
"""Convert PAR parameters to NIFTI1 format"""
def get_sorted_slice_indices(self):
    """Return indices to sort (and maybe discard) slices in REC file.

    If the recording is truncated, the returned indices take care of
    discarding any slice indices from incomplete volumes.

    If `self.strict_sort` is True, a more complicated sorting based on
    multiple fields from the .PAR file is used.

    Returns
    -------
    slice_indices : list
        List for indexing into the last (third) dimension of the REC data
        array, and (equivalently) the only dimension of
        ``self.image_defs``.
    """
    if not self.strict_sort:
        slice_nos = self.image_defs['slice number']
        is_full = vol_is_full(slice_nos, self.general_info['max_slices'])
        keys = (slice_nos, vol_numbers(slice_nos), np.logical_not(is_full))
        sort_order = np.lexsort(keys)
    else:
        # Sort based on a larger number of keys. This is more complicated
        # but works for .PAR files that get missorted by the above method.
        slice_nos = self.image_defs['slice number']
        dynamics = self.image_defs['dynamic scan number']
        phases = self.image_defs['cardiac phase number']
        echos = self.image_defs['echo number']

        # Try adding keys only present in a subset of .PAR files.
        # Invalid field names on a structured array raise KeyError;
        # catch only that rather than a bare ``except``.
        try:
            # only present in PAR v4.2+
            asl_labels = self.image_defs['label type']
            asl_keys = (asl_labels, )
        except KeyError:
            asl_keys = ()
        if self.general_info['diffusion'] != 0:
            try:
                # only present for .PAR v4.1+
                bvals = self.image_defs['diffusion b value number']
                bvecs = self.image_defs['gradient orientation number']
            except KeyError:
                bvals = self.image_defs['diffusion_b_factor']
                # use hash to get a single sortable value per direction
                bvecs = [hash(tuple(a))
                         for a in self.image_defs['diffusion'].tolist()]
            diffusion_keys = (bvecs, bvals)
        else:
            diffusion_keys = ()

        # Define the desired sort order (last key is highest precedence).
        keys = (slice_nos, echos, phases) + \
            diffusion_keys + asl_keys + (dynamics, )

        # Data sorting is done in two stages:
        # - run an initial sort using the keys defined above
        # - call vol_is_full to identify potentially missing volumes
        # - call vol_numbers to assign unique volume numbers if for some
        #   reason the keys defined above don't provide a unique sort
        #   order (e.g. this occurs for the Trace volume in DTI)
        # - run a final sort using the vol_numbers and is_full keys
        initial_sort_order = np.lexsort(keys)
        is_full = vol_is_full(slice_nos[initial_sort_order],
                              self.general_info['max_slices'])
        vol_nos = vol_numbers(slice_nos[initial_sort_order])

        # have to "unsort" is_full and vol_nos to match the other sort keys
        unsort_indices = np.argsort(initial_sort_order)
        is_full = is_full[unsort_indices]
        vol_nos = np.asarray(vol_nos)[unsort_indices]

        # final set of sort keys
        keys += (vol_nos, np.logical_not(is_full), )
        sort_order = np.lexsort(keys)

    # Figure out how many we need to remove from the end, and trim them.
    # Based on our sorting, they should always be last.
    n_used = np.prod(self.get_data_shape()[2:])
    return sort_order[:n_used]
1020
1088
1021
1089
1022
1090
class PARRECImage (SpatialImage ):
@@ -1033,7 +1101,7 @@ class PARRECImage(SpatialImage):
1033
1101
@classmethod
1034
1102
@kw_only_meth (1 )
1035
1103
def from_file_map (klass , file_map , mmap = True , permit_truncated = False ,
1036
- scaling = 'dv' ):
1104
+ scaling = 'dv' , strict_sort = False ):
1037
1105
""" Create PARREC image from file map `file_map`
1038
1106
1039
1107
Parameters
@@ -1054,11 +1122,15 @@ def from_file_map(klass, file_map, mmap=True, permit_truncated=False,
1054
1122
scaling : {'dv', 'fp'}, optional, keyword-only
1055
1123
Scaling method to apply to data (see
1056
1124
:meth:`PARRECHeader.get_data_scaling`).
1125
+ strict_sort : bool, optional, keyword-only
1126
+ If True, a larger number of header fields are used while sorting
1127
+ the REC data array.
1057
1128
"""
1058
1129
with file_map ['header' ].get_prepare_fileobj ('rt' ) as hdr_fobj :
1059
1130
hdr = klass .header_class .from_fileobj (
1060
1131
hdr_fobj ,
1061
- permit_truncated = permit_truncated )
1132
+ permit_truncated = permit_truncated ,
1133
+ strict_sort = strict_sort )
1062
1134
rec_fobj = file_map ['image' ].get_prepare_fileobj ()
1063
1135
data = klass .ImageArrayProxy (rec_fobj , hdr ,
1064
1136
mmap = mmap , scaling = scaling )
@@ -1068,7 +1140,7 @@ def from_file_map(klass, file_map, mmap=True, permit_truncated=False,
1068
1140
@classmethod
@kw_only_meth(1)
def from_filename(klass, filename, mmap=True, permit_truncated=False,
                  scaling='dv', strict_sort=False):
    """ Create PARREC image from filename `filename`

    Parameters
    ----------
    filename : str
        Filename of the .PAR file to load.
    mmap : {True, False, 'c', 'r'}, optional, keyword-only
        Memory-mapping behavior; passed through unchanged to
        :meth:`from_file_map`.
    permit_truncated : bool, optional, keyword-only
        If True, a warning is emitted instead of an error when a
        truncated recording is detected.
    scaling : {'dv', 'fp'}, optional, keyword-only
        Scaling method to apply to data (see
        :meth:`PARRECHeader.get_data_scaling`).
    strict_sort : bool, optional, keyword-only
        If True, a larger number of header fields are used while sorting
        the REC data array.
    """
    # Resolve the filename into the header/image file map, then defer
    # all real work to from_file_map.
    file_map = klass.filespec_to_file_map(filename)
    return klass.from_file_map(file_map, mmap=mmap,
                               permit_truncated=permit_truncated,
                               scaling=scaling,
                               strict_sort=strict_sort)
1097
1173
1098
1174
load = from_filename
1099
1175
0 commit comments