Commit dfebe58

RF: Rework PARRECArrayProxy
1 parent 16e3552 commit dfebe58

1 file changed: +33 −16 lines changed


nibabel/parrec.py

Lines changed: 33 additions & 16 deletions
@@ -639,32 +639,49 @@ def get_unscaled(self):
                                   self._slice_indices, self._shape,
                                   mmap=self._mmap)
 
-    def __array__(self):
-        with ImageOpener(self.file_like) as fileobj:
-            return _data_from_rec(fileobj,
-                                  self._rec_shape,
-                                  self._dtype,
-                                  self._slice_indices,
-                                  self._shape,
-                                  scalings=self._slice_scaling,
-                                  mmap=self._mmap)
-
-    def __getitem__(self, slicer):
+    def _get_unscaled(self, slicer):
         indices = self._slice_indices
-        if indices[0] != 0 or np.any(np.diff(indices) != 1):
+        if slicer == ():
+            with ImageOpener(self.file_like) as fileobj:
+                rec_data = array_from_file(self._rec_shape, self._dtype, fileobj, mmap=self._mmap)
+            rec_data = rec_data[..., indices]
+            return rec_data.reshape(self._shape, order='F')
+        elif indices[0] != 0 or np.any(np.diff(indices) != 1):
             # We can't load direct from REC file, use inefficient slicing
-            return np.asanyarray(self)[slicer]
+            return self._get_unscaled(())[slicer]
+
         # Slices all sequential from zero, can use fileslice
         # This gives more efficient volume by volume loading, for example
         with ImageOpener(self.file_like) as fileobj:
-            raw_data = fileslice(fileobj, slicer, self._shape, self._dtype, 0,
-                                 'F')
+            return fileslice(fileobj, slicer, self._shape, self._dtype, 0, 'F')
+
+    def _get_scaled(self, dtype, slicer):
+        raw_data = self._get_unscaled(slicer)
+        if self._slice_scaling is None:
+            if dtype is None or raw_data.dtype >= np.dtype(dtype):
+                return raw_data
+            return np.asanyarray(raw_data, dtype=dtype)
+
         # Broadcast scaling to shape of original data
         slopes, inters = self._slice_scaling
         fake_data = strided_scalar(self._shape)
         _, slopes, inters = np.broadcast_arrays(fake_data, slopes, inters)
+
+        if dtype is None:
+            dtype = max(slopes.dtype, inters.dtype)
+
         # Slice scaling to give output shape
-        return raw_data * slopes[slicer] + inters[slicer]
+        return raw_data * slopes.astype(dtype)[slicer] + inters.astype(dtype)[slicer]
+
+    def get_scaled(self, dtype=None):
+        return self._get_scaled(dtype=dtype, slicer=())
+
+    def __array__(self):
+        return self._get_scaled(dtype=None, slicer=())
+
+    def __getitem__(self, slicer):
+        return self._get_scaled(dtype=None, slicer=slicer)
 
 
 class PARRECHeader(SpatialHeader):
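The rework routes get_scaled(), __array__() and __getitem__() through the shared _get_scaled helper, so whole-array reads, sliced reads, and dtype-controlled reads all follow one code path. A minimal usage sketch under that reading of the diff (the file name below is hypothetical, and the scaled output only differs from the raw REC data when the PAR header defines slice scaling):

import numpy as np
import nibabel as nib

img = nib.load('example.PAR')    # hypothetical PAR/REC pair on disk
proxy = img.dataobj              # PARRECArrayProxy over the REC data

data = proxy.get_scaled()                    # full array; output dtype follows the scale factors
data32 = proxy.get_scaled(dtype=np.float32)  # apply the slice scaling in float32 instead
vol0 = proxy[..., 0]                         # slicing also goes through _get_scaled
arr = np.asanyarray(proxy)                   # __array__ defers to _get_scaled as well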
