diff --git a/nibabel/__init__.py b/nibabel/__init__.py index 2e4f877c5f..3e57643fc1 100644 --- a/nibabel/__init__.py +++ b/nibabel/__init__.py @@ -23,7 +23,7 @@ img2 = nib.load('other_file.nii.gz') img3 = nib.load('spm_file.img') - data = img1.get_data() + data = img1.get_fdata() affine = img1.affine print(img1) diff --git a/nibabel/analyze.py b/nibabel/analyze.py index 8015715590..dc352505c6 100644 --- a/nibabel/analyze.py +++ b/nibabel/analyze.py @@ -1009,7 +1009,7 @@ def to_file_map(self, file_map=None): ''' if file_map is None: file_map = self.file_map - data = self.get_data() + data = np.asanyarray(self.dataobj) self.update_header() hdr = self._header out_dtype = self.get_data_dtype() diff --git a/nibabel/brikhead.py b/nibabel/brikhead.py index 49182ba705..c5847a87a8 100644 --- a/nibabel/brikhead.py +++ b/nibabel/brikhead.py @@ -486,7 +486,7 @@ class AFNIImage(SpatialImage): [ 0. , 0. , 3. , -52.3511], [ 0. , 0. , 0. , 1. ]]) >>> head = load(os.path.join(datadir, 'example4d+orig.HEAD')) - >>> np.array_equal(head.get_data(), brik.get_data()) + >>> np.array_equal(head.get_fdata(), brik.get_fdata()) True """ diff --git a/nibabel/cifti2/tests/test_cifti2io_axes.py b/nibabel/cifti2/tests/test_cifti2io_axes.py index 4089395b78..c237e3c61a 100644 --- a/nibabel/cifti2/tests/test_cifti2io_axes.py +++ b/nibabel/cifti2/tests/test_cifti2io_axes.py @@ -93,8 +93,8 @@ def check_rewrite(arr, axes, extension='.nii'): (fd, name) = tempfile.mkstemp(extension) cifti2.Cifti2Image(arr, header=axes).to_filename(name) img = nib.load(name) - arr2 = img.get_data() - assert (arr == arr2).all() + arr2 = img.get_fdata() + assert np.allclose(arr, arr2) for idx in range(len(img.shape)): assert (axes[idx] == img.header.get_axis(idx)) return img @@ -103,7 +103,7 @@ def check_rewrite(arr, axes, extension='.nii'): @needs_nibabel_data('nitest-cifti2') def test_read_ones(): img = nib.load(os.path.join(test_directory, 'ones.dscalar.nii')) - arr = img.get_data() + arr = img.get_fdata() axes = [img.header.get_axis(dim) for dim in range(2)] assert (arr == 1).all() assert isinstance(axes[0], cifti2_axes.ScalarAxis) @@ -118,7 +118,7 @@ def test_read_ones(): @needs_nibabel_data('nitest-cifti2') def test_read_conte69_dscalar(): img = nib.load(os.path.join(test_directory, 'Conte69.MyelinAndCorrThickness.32k_fs_LR.dscalar.nii')) - arr = img.get_data() + arr = img.get_fdata() axes = [img.header.get_axis(dim) for dim in range(2)] assert isinstance(axes[0], cifti2_axes.ScalarAxis) assert len(axes[0]) == 2 @@ -132,7 +132,7 @@ def test_read_conte69_dscalar(): @needs_nibabel_data('nitest-cifti2') def test_read_conte69_dtseries(): img = nib.load(os.path.join(test_directory, 'Conte69.MyelinAndCorrThickness.32k_fs_LR.dtseries.nii')) - arr = img.get_data() + arr = img.get_fdata() axes = [img.header.get_axis(dim) for dim in range(2)] assert isinstance(axes[0], cifti2_axes.SeriesAxis) assert len(axes[0]) == 2 @@ -147,7 +147,7 @@ def test_read_conte69_dtseries(): @needs_nibabel_data('nitest-cifti2') def test_read_conte69_dlabel(): img = nib.load(os.path.join(test_directory, 'Conte69.parcellations_VGD11b.32k_fs_LR.dlabel.nii')) - arr = img.get_data() + arr = img.get_fdata() axes = [img.header.get_axis(dim) for dim in range(2)] assert isinstance(axes[0], cifti2_axes.LabelAxis) assert len(axes[0]) == 3 @@ -162,7 +162,7 @@ def test_read_conte69_dlabel(): @needs_nibabel_data('nitest-cifti2') def test_read_conte69_ptseries(): img = nib.load(os.path.join(test_directory, 'Conte69.MyelinAndCorrThickness.32k_fs_LR.ptseries.nii')) - arr = img.get_data() 
+ arr = img.get_fdata() axes = [img.header.get_axis(dim) for dim in range(2)] assert isinstance(axes[0], cifti2_axes.SeriesAxis) assert len(axes[0]) == 2 diff --git a/nibabel/cifti2/tests/test_cifti2io_header.py b/nibabel/cifti2/tests/test_cifti2io_header.py index 3e3cd9c77d..b8cbd05a32 100644 --- a/nibabel/cifti2/tests/test_cifti2io_header.py +++ b/nibabel/cifti2/tests/test_cifti2io_header.py @@ -63,11 +63,11 @@ def test_read_and_proxies(): assert_equal(img2.shape, (1, 91282)) # While we cannot reshape arrayproxies, all images are in-memory assert_true(not img2.in_memory) - data = img2.get_data() + data = img2.get_fdata() assert_true(data is not img2.dataobj) # Uncaching has no effect, images are always array images img2.uncache() - assert_true(data is not img2.get_data()) + assert_true(data is not img2.get_fdata()) @needs_nibabel_data('nitest-cifti2') diff --git a/nibabel/cifti2/tests/test_new_cifti2.py b/nibabel/cifti2/tests/test_new_cifti2.py index 2a157ca7fb..15c64e84c3 100644 --- a/nibabel/cifti2/tests/test_new_cifti2.py +++ b/nibabel/cifti2/tests/test_new_cifti2.py @@ -13,7 +13,8 @@ from nibabel.tmpdirs import InTemporaryDirectory from nose.tools import assert_true, assert_equal, assert_raises -from nibabel.testing import clear_and_catch_warnings, error_warnings, suppress_warnings +from nibabel.testing import ( + clear_and_catch_warnings, error_warnings, suppress_warnings, assert_array_equal) affine = [[-1.5, 0, 0, 90], [0, 1.5, 0, -85], @@ -246,7 +247,7 @@ def test_dtseries(): assert_equal(img2.nifti_header.get_intent()[0], 'ConnDenseSeries') assert_true(isinstance(img2, ci.Cifti2Image)) - assert_true((img2.get_data() == data).all()) + assert_array_equal(img2.get_fdata(), data) check_series_map(img2.header.matrix.get_index_map(0)) check_geometry_map(img2.header.matrix.get_index_map(1)) del img2 @@ -268,7 +269,7 @@ def test_dscalar(): img2 = nib.load('test.dscalar.nii') assert_equal(img2.nifti_header.get_intent()[0], 'ConnDenseScalar') assert_true(isinstance(img2, ci.Cifti2Image)) - assert_true((img2.get_data() == data).all()) + assert_array_equal(img2.get_fdata(), data) check_scalar_map(img2.header.matrix.get_index_map(0)) check_geometry_map(img2.header.matrix.get_index_map(1)) del img2 @@ -290,7 +291,7 @@ def test_dlabel(): img2 = nib.load('test.dlabel.nii') assert_equal(img2.nifti_header.get_intent()[0], 'ConnDenseLabel') assert_true(isinstance(img2, ci.Cifti2Image)) - assert_true((img2.get_data() == data).all()) + assert_array_equal(img2.get_fdata(), data) check_label_map(img2.header.matrix.get_index_map(0)) check_geometry_map(img2.header.matrix.get_index_map(1)) del img2 @@ -310,7 +311,7 @@ def test_dconn(): img2 = nib.load('test.dconn.nii') assert_equal(img2.nifti_header.get_intent()[0], 'ConnDense') assert_true(isinstance(img2, ci.Cifti2Image)) - assert_true((img2.get_data() == data).all()) + assert_array_equal(img2.get_fdata(), data) assert_equal(img2.header.matrix.get_index_map(0), img2.header.matrix.get_index_map(1)) check_geometry_map(img2.header.matrix.get_index_map(0)) @@ -333,7 +334,7 @@ def test_ptseries(): img2 = nib.load('test.ptseries.nii') assert_equal(img2.nifti_header.get_intent()[0], 'ConnParcelSries') assert_true(isinstance(img2, ci.Cifti2Image)) - assert_true((img2.get_data() == data).all()) + assert_array_equal(img2.get_fdata(), data) check_series_map(img2.header.matrix.get_index_map(0)) check_parcel_map(img2.header.matrix.get_index_map(1)) del img2 @@ -355,7 +356,7 @@ def test_pscalar(): img2 = nib.load('test.pscalar.nii') 
assert_equal(img2.nifti_header.get_intent()[0], 'ConnParcelScalr') assert_true(isinstance(img2, ci.Cifti2Image)) - assert_true((img2.get_data() == data).all()) + assert_array_equal(img2.get_fdata(), data) check_scalar_map(img2.header.matrix.get_index_map(0)) check_parcel_map(img2.header.matrix.get_index_map(1)) del img2 @@ -377,7 +378,7 @@ def test_pdconn(): img2 = ci.load('test.pdconn.nii') assert_equal(img2.nifti_header.get_intent()[0], 'ConnParcelDense') assert_true(isinstance(img2, ci.Cifti2Image)) - assert_true((img2.get_data() == data).all()) + assert_array_equal(img2.get_fdata(), data) check_geometry_map(img2.header.matrix.get_index_map(0)) check_parcel_map(img2.header.matrix.get_index_map(1)) del img2 @@ -399,7 +400,7 @@ def test_dpconn(): img2 = ci.load('test.dpconn.nii') assert_equal(img2.nifti_header.get_intent()[0], 'ConnDenseParcel') assert_true(isinstance(img2, ci.Cifti2Image)) - assert_true((img2.get_data() == data).all()) + assert_array_equal(img2.get_fdata(), data) check_parcel_map(img2.header.matrix.get_index_map(0)) check_geometry_map(img2.header.matrix.get_index_map(1)) del img2 @@ -420,7 +421,7 @@ def test_plabel(): img2 = ci.load('test.plabel.nii') assert_equal(img.nifti_header.get_intent()[0], 'ConnUnknown') assert_true(isinstance(img2, ci.Cifti2Image)) - assert_true((img2.get_data() == data).all()) + assert_array_equal(img2.get_fdata(), data) check_label_map(img2.header.matrix.get_index_map(0)) check_parcel_map(img2.header.matrix.get_index_map(1)) del img2 @@ -440,7 +441,7 @@ def test_pconn(): img2 = ci.load('test.pconn.nii') assert_equal(img.nifti_header.get_intent()[0], 'ConnParcels') assert_true(isinstance(img2, ci.Cifti2Image)) - assert_true((img2.get_data() == data).all()) + assert_array_equal(img2.get_fdata(), data) assert_equal(img2.header.matrix.get_index_map(0), img2.header.matrix.get_index_map(1)) check_parcel_map(img2.header.matrix.get_index_map(0)) @@ -465,7 +466,7 @@ def test_pconnseries(): img2 = ci.load('test.pconnseries.nii') assert_equal(img.nifti_header.get_intent()[0], 'ConnPPSr') assert_true(isinstance(img2, ci.Cifti2Image)) - assert_true((img2.get_data() == data).all()) + assert_array_equal(img2.get_fdata(), data) assert_equal(img2.header.matrix.get_index_map(0), img2.header.matrix.get_index_map(1)) check_parcel_map(img2.header.matrix.get_index_map(0)) @@ -491,7 +492,7 @@ def test_pconnscalar(): img2 = ci.load('test.pconnscalar.nii') assert_equal(img.nifti_header.get_intent()[0], 'ConnPPSc') assert_true(isinstance(img2, ci.Cifti2Image)) - assert_true((img2.get_data() == data).all()) + assert_array_equal(img2.get_fdata(), data) assert_equal(img2.header.matrix.get_index_map(0), img2.header.matrix.get_index_map(1)) diff --git a/nibabel/cmdline/ls.py b/nibabel/cmdline/ls.py index 68bd6ee8c0..ea2e4032ae 100755 --- a/nibabel/cmdline/ls.py +++ b/nibabel/cmdline/ls.py @@ -125,7 +125,7 @@ def proc_file(f, opts): if opts.stats or opts.counts: # We are doomed to load data try: - d = vol.get_data() + d = np.asarray(vol.dataobj) if not opts.stats_zeros: d = d[np.nonzero(d)] else: diff --git a/nibabel/dataobj_images.py b/nibabel/dataobj_images.py index 9ba97789dc..3c0558f43e 100644 --- a/nibabel/dataobj_images.py +++ b/nibabel/dataobj_images.py @@ -68,8 +68,8 @@ def get_data(self, caching='fill'): We recommend you use the ``get_fdata`` method instead of the ``get_data`` method, because it is easier to predict the return - data type. We will deprecate the ``get_data`` method around April - 2018, and remove it around April 2020. + data type. 
``get_data`` will be deprecated around November 2019 + and removed around November 2021. If you don't care about the predictability of the return data type, and you want the minimum possible data size in memory, you can diff --git a/nibabel/ecat.py b/nibabel/ecat.py index fef2741ef8..f3a7f1736c 100644 --- a/nibabel/ecat.py +++ b/nibabel/ecat.py @@ -788,7 +788,7 @@ def __init__(self, dataobj, affine, header, >>> frame0 = img.get_frame(0) >>> frame0.shape == (10, 10, 3) True - >>> data4d = img.get_data() + >>> data4d = img.get_fdata() >>> data4d.shape == (10, 10, 3, 1) True """ @@ -945,7 +945,7 @@ def to_file_map(self, file_map=None): # It appears to be necessary to load the data before saving even if the # data itself is not used. - self.get_data() + self.get_fdata() hdr = self.header mlist = self._mlist subheaders = self.get_subheaders() diff --git a/nibabel/filebasedimages.py b/nibabel/filebasedimages.py index 64b79550e3..86ce837942 100644 --- a/nibabel/filebasedimages.py +++ b/nibabel/filebasedimages.py @@ -120,7 +120,7 @@ class FileBasedImage(object): You can get the data out again with:: - img.get_data() + img.get_fdata() Less commonly, for some image types that support it, you might want to fetch out the unscaled array via the object containing the data:: diff --git a/nibabel/freesurfer/mghformat.py b/nibabel/freesurfer/mghformat.py index 37bc82cfb3..ddb30cb796 100644 --- a/nibabel/freesurfer/mghformat.py +++ b/nibabel/freesurfer/mghformat.py @@ -594,7 +594,7 @@ def to_file_map(self, file_map=None): ''' if file_map is None: file_map = self.file_map - data = self.get_data() + data = np.asanyarray(self.dataobj) self.update_header() hdr = self.header with file_map['image'].get_prepare_fileobj('wb') as mghf: diff --git a/nibabel/freesurfer/tests/test_mghformat.py b/nibabel/freesurfer/tests/test_mghformat.py index 47e54080c3..289acbcd01 100644 --- a/nibabel/freesurfer/tests/test_mghformat.py +++ b/nibabel/freesurfer/tests/test_mghformat.py @@ -81,7 +81,7 @@ def test_read_mgh(): assert_array_almost_equal(h.get_vox2ras_tkr(), v2rtkr) # data. 
will be different for your own mri_volsynth invocation - v = mgz.get_data() + v = mgz.get_fdata() assert_almost_equal(v[1, 2, 3, 0], -0.3047, 4) assert_almost_equal(v[1, 2, 3, 1], 0.0018, 4) @@ -97,7 +97,7 @@ def test_write_mgh(): # read from the tmp file and see if it checks out mgz = load('tmpsave.mgz') h = mgz.header - dat = mgz.get_data() + dat = mgz.get_fdata() # Delete loaded image to allow file deletion by windows del mgz # header @@ -193,7 +193,7 @@ def test_filename_exts(): save(img, fname) # read from the tmp file and see if it checks out img_back = load(fname) - assert_array_equal(img_back.get_data(), v) + assert_array_equal(img_back.get_fdata(), v) del img_back @@ -288,7 +288,7 @@ def test_mgh_load_fileobj(): fm = MGHImage.make_file_map(mapping=dict(image=bio)) img2 = MGHImage.from_file_map(fm) assert_true(img2.dataobj.file_like is bio) - assert_array_equal(img.get_data(), img2.get_data()) + assert_array_equal(img.get_fdata(), img2.get_fdata()) def test_mgh_affine_default(): diff --git a/nibabel/funcs.py b/nibabel/funcs.py index 240b20f802..178ac8191c 100644 --- a/nibabel/funcs.py +++ b/nibabel/funcs.py @@ -79,8 +79,7 @@ def squeeze_image(img): if slen == len(shape): return klass.from_image(img) shape = shape[:slen] - data = img.get_data() - data = data.reshape(shape) + data = np.asanyarray(img.dataobj).reshape(shape) return klass(data, img.affine, img.header, @@ -144,7 +143,7 @@ def concat_images(images, check_affines=True, axis=None): raise ValueError('Affine for image {0} does not match affine ' 'for first image'.format(i)) # Do not fill cache in image if it is empty - out_data[i] = img.get_data(caching='unchanged') + out_data[i] = np.asanyarray(img.dataobj) if axis is None: out_data = np.rollaxis(out_data, 0, out_data.ndim) @@ -169,7 +168,7 @@ def four_to_three(img): imgs : list list of 3D images ''' - arr = img.get_data() + arr = np.asanyarray(img.dataobj) header = img.header affine = img.affine image_maker = img.__class__ diff --git a/nibabel/loadsave.py b/nibabel/loadsave.py index cd1efbe3d7..421b95ba2f 100644 --- a/nibabel/loadsave.py +++ b/nibabel/loadsave.py @@ -155,7 +155,7 @@ def read_img_data(img, prefer='scaled'): """ Read data from image associated with files If you want unscaled data, please use ``img.dataobj.get_unscaled()`` - instead. If you want scaled data, use ``img.get_data()`` (which will cache + instead. If you want scaled data, use ``img.get_fdata()`` (which will cache the loaded array) or ``np.array(img.dataobj)`` (which won't cache the array). If you want to load the data as for a modified header, save the image with the modified header, and reload. @@ -164,7 +164,7 @@ def read_img_data(img, prefer='scaled'): ---------- img : ``SpatialImage`` Image with valid image file in ``img.file_map``. Unlike the - ``img.get_data()`` method, this function returns the data read + ``img.get_fdata()`` method, this function returns the data read from the image file, as specified by the *current* image header and *current* image files. 
prefer : str, optional diff --git a/nibabel/minc2.py b/nibabel/minc2.py index 37821409c4..b27d43f77f 100644 --- a/nibabel/minc2.py +++ b/nibabel/minc2.py @@ -16,7 +16,7 @@ import nibabel as nib img = nib.load('my_funny.mnc') - data = img.get_data() + data = img.get_fdata() print(data.mean()) print(data.max()) print(data.min()) diff --git a/nibabel/processing.py b/nibabel/processing.py index 449e6b41fc..0c5f921d87 100644 --- a/nibabel/processing.py +++ b/nibabel/processing.py @@ -233,7 +233,7 @@ def resample_to_output(in_img, # looks like when resampled into world coordinates if n_dim < 3: # Expand image to 3D, make voxel sizes match new_shape = in_shape + (1,) * (3 - n_dim) - data = in_img.get_data().reshape(new_shape) # 2D data should be small + data = np.asanyarray(in_img.dataobj).reshape(new_shape) # 2D data should be small in_img = out_class(data, in_img.affine, in_img.header) if voxel_sizes is not None and len(voxel_sizes) == n_dim: # Need to pad out voxel sizes to match new image dimensions diff --git a/nibabel/spaces.py b/nibabel/spaces.py index 393a8a216f..094f43dc77 100644 --- a/nibabel/spaces.py +++ b/nibabel/spaces.py @@ -112,7 +112,7 @@ def slice2volume(index, axis, shape=None): and then use ``whole_aff`` in ``scipy.ndimage.affine_transform``: rzs, trans = to_matvec(whole_aff) - data = img2.get_data() + data = img2.get_fdata() new_slice = scipy.ndimage.affine_transform(data, rzs, trans, slice_shape) Parameters diff --git a/nibabel/spatialimages.py b/nibabel/spatialimages.py index ede0820065..fd2795e96a 100644 --- a/nibabel/spatialimages.py +++ b/nibabel/spatialimages.py @@ -20,7 +20,8 @@ methods: - * .get_data() + * .get_fdata() + * .get_data() (deprecated, use get_fdata() instead) * .get_affine() (deprecated, use affine property instead) * .get_header() (deprecated, use header property instead) * .to_filename(fname) - writes data to filename(s) derived from @@ -69,7 +70,7 @@ You can get the data out again with:: - img.get_data() + img.get_fdata() Less commonly, for some image types that support it, you might want to fetch out the unscaled array via the object containing the data:: @@ -123,12 +124,12 @@ >>> img.to_file_map() >>> # read it back again from the written files >>> img2 = nib.AnalyzeImage.from_file_map(file_map) - >>> np.all(img2.get_data() == data) + >>> np.all(img2.get_fdata(dtype=np.float32) == data) True >>> # write, read it again >>> img2.to_file_map() >>> img3 = nib.AnalyzeImage.from_file_map(file_map) - >>> np.all(img3.get_data() == data) + >>> np.all(img3.get_fdata(dtype=np.float32) == data) True ''' @@ -586,7 +587,7 @@ def __getitem__(self, idx): "Cannot slice image objects; consider using `img.slicer[slice]` " "to generate a sliced image (see documentation for caveats) or " "slicing image array data with `img.dataobj[slice]` or " - "`img.get_data()[slice]`") + "`img.get_fdata()[slice]`") def orthoview(self): """Plot the image using OrthoSlicer3D @@ -630,7 +631,7 @@ def as_reoriented(self, ornt): if np.array_equal(ornt, [[0, 1], [1, 1], [2, 1]]): return self - t_arr = apply_orientation(self.get_data(), ornt) + t_arr = apply_orientation(np.asanyarray(self.dataobj), ornt) new_aff = self.affine.dot(inv_ornt_aff(ornt, self.shape)) return self.__class__(t_arr, new_aff, self.header) diff --git a/nibabel/tests/data/check_parrec_reslice.py b/nibabel/tests/data/check_parrec_reslice.py index cc2a5942b5..c7352c3f89 100644 --- a/nibabel/tests/data/check_parrec_reslice.py +++ b/nibabel/tests/data/check_parrec_reslice.py @@ -39,7 +39,7 @@ def resample_img2img(img_to, 
img_from, order=1, out_class=nib.Nifti1Image): from scipy import ndimage as spnd vox2vox = npl.inv(img_from.affine).dot(img_to.affine) rzs, trans = to_matvec(vox2vox) - data = spnd.affine_transform(img_from.get_data(), + data = spnd.affine_transform(img_from.get_fdata(), rzs, trans, img_to.shape, @@ -57,7 +57,7 @@ def gmean_norm(data): np.set_printoptions(suppress=True, precision=4) normal_fname = "Phantom_EPI_3mm_tra_SENSE_6_1.PAR" normal_img = parrec.load(normal_fname) - normal_data = normal_img.get_data() + normal_data = normal_img.get_fdata() normal_normed = gmean_norm(normal_data) print("RMS of standard image {:<44}: {}".format( @@ -69,7 +69,7 @@ def gmean_norm(data): continue funny_img = parrec.load(parfile) fixed_img = resample_img2img(normal_img, funny_img) - fixed_data = fixed_img.get_data() + fixed_data = fixed_img.get_fdata() difference_data = normal_normed - gmean_norm(fixed_data) print('RMS resliced {:<52} : {}'.format( parfile, diff --git a/nibabel/tests/test_analyze.py b/nibabel/tests/test_analyze.py index 45a4c00d62..6b05df83e3 100644 --- a/nibabel/tests/test_analyze.py +++ b/nibabel/tests/test_analyze.py @@ -717,8 +717,8 @@ def test_default_header(self): def test_data_hdr_cache(self): # test the API for loaded images, such that the data returned - # from img.get_data() is not affected by subsequent changes to - # the header. + # from np.asanyarray(img.dataobj) and img.get_fdata() are not + # affected by subsequent changes to the header. IC = self.image_class # save an image to a file map fm = IC.make_file_map() @@ -739,7 +739,8 @@ def test_data_hdr_cache(self): assert_equal(hdr.get_data_shape(), (3, 2, 2)) hdr.set_data_dtype(np.uint8) assert_equal(hdr.get_data_dtype(), np.dtype(np.uint8)) - assert_array_equal(img2.get_data(), data) + assert_array_equal(img2.get_fdata(), data) + assert_array_equal(np.asanyarray(img2.dataobj), data) # now check read_img_data function - here we do see the changed # header sc_data = read_img_data(img2) @@ -830,7 +831,7 @@ def test_header_updating(self): hdr_back = img.from_file_map(img.file_map).header assert_array_equal(hdr.get_zooms(), (9, 3, 4)) # Modify data in-place?
Update on save - data = img.get_data() + data = img.get_fdata() data.shape = (3, 2, 4) img.to_file_map() img_back = img.from_file_map(img.file_map) @@ -843,7 +844,7 @@ def test_pickle(self): img = img_klass(np.zeros((2, 3, 4)), None) img_str = pickle.dumps(img) img2 = pickle.loads(img_str) - assert_array_equal(img.get_data(), img2.get_data()) + assert_array_equal(img.get_fdata(), img2.get_fdata()) assert_equal(img.header, img2.header) # Save / reload using bytes IO objects for key, value in img.file_map.items(): @@ -852,7 +853,7 @@ def test_pickle(self): img_prox = img.from_file_map(img.file_map) img_str = pickle.dumps(img_prox) img2_prox = pickle.loads(img_str) - assert_array_equal(img.get_data(), img2_prox.get_data()) + assert_array_equal(img.get_fdata(), img2_prox.get_fdata()) def test_no_finite_values(self): # save of data with no finite values to int type raises error if we have diff --git a/nibabel/tests/test_brikhead.py b/nibabel/tests/test_brikhead.py index a99e6c41b6..d09023d248 100644 --- a/nibabel/tests/test_brikhead.py +++ b/nibabel/tests/test_brikhead.py @@ -99,7 +99,7 @@ def test_brikheadfile(self): assert_equal(brik.header.get_zooms(), tp['zooms']) assert_array_equal(brik.affine, tp['affine']) assert_equal(brik.header.get_space(), tp['space']) - data = brik.get_data() + data = brik.get_fdata() assert_equal(data.shape, tp['shape']) assert_array_equal(brik.dataobj.scaling, tp['scaling']) assert_equal(brik.header.get_volume_labels(), tp['labels']) @@ -108,20 +108,20 @@ def test_load(self): # Check highest level load of brikhead works for tp in self.test_files: img = self.module.load(tp['head']) - data = img.get_data() + data = img.get_fdata() assert_equal(data.shape, tp['shape']) # min, max, mean values assert_data_similar(data, tp) # check if file can be converted to nifti ni_img = Nifti1Image.from_image(img) assert_array_equal(ni_img.affine, tp['affine']) - assert_array_equal(ni_img.get_data(), data) + assert_array_equal(ni_img.get_fdata(), data) def test_array_proxy_slicing(self): # Test slicing of array proxy for tp in self.test_files: img = self.module.load(tp['fname']) - arr = img.get_data() + arr = img.get_fdata() prox = img.dataobj assert_true(prox.is_proxy) for sliceobj in slicer_samples(img.shape): diff --git a/nibabel/tests/test_ecat.py b/nibabel/tests/test_ecat.py index 9005d32d4f..a3a40b2904 100644 --- a/nibabel/tests/test_ecat.py +++ b/nibabel/tests/test_ecat.py @@ -193,20 +193,20 @@ def test_save(self): with InTemporaryDirectory(): self.img.to_filename(tmp_file) other = self.image_class.load(tmp_file) - assert_equal(self.img.get_data().all(), other.get_data().all()) + assert_array_equal(self.img.get_fdata(), other.get_fdata()) # Delete object holding reference to temporary file to make Windows # happier. 
del other def test_data(self): - dat = self.img.get_data() + dat = self.img.get_fdata() assert_equal(dat.shape, self.img.shape) frame = self.img.get_frame(0) assert_array_equal(frame, dat[:, :, :, 0]) def test_array_proxy(self): # Get the cached data copy - dat = self.img.get_data() + dat = self.img.get_fdata() # Make a new one to test arrayproxy img = self.image_class.load(self.example_file) data_prox = img.dataobj @@ -218,7 +218,7 @@ def test_array_proxy(self): def test_array_proxy_slicing(self): # Test slicing of array proxy - arr = self.img.get_data() + arr = self.img.get_fdata() prox = self.img.dataobj assert_true(prox.is_proxy) for sliceobj in slicer_samples(self.img.shape): @@ -227,7 +227,7 @@ def test_array_proxy_slicing(self): def test_isolation(self): # Test image isolated from external changes to affine img_klass = self.image_class - arr, aff, hdr, sub_hdr, mlist = (self.img.get_data(), + arr, aff, hdr, sub_hdr, mlist = (self.img.get_fdata(), self.img.affine, self.img.header, self.img.get_subheaders(), @@ -240,7 +240,7 @@ def test_isolation(self): def test_float_affine(self): # Check affines get converted to float img_klass = self.image_class - arr, aff, hdr, sub_hdr, mlist = (self.img.get_data(), + arr, aff, hdr, sub_hdr, mlist = (self.img.get_fdata(), self.img.affine, self.img.header, self.img.get_subheaders(), @@ -256,7 +256,7 @@ def test_data_regression(self): vals = dict(max=248750736458.0, min=1125342630.0, mean=117907565661.46666) - data = self.img.get_data() + data = self.img.get_fdata() assert_equal(data.max(), vals['max']) assert_equal(data.min(), vals['min']) assert_array_almost_equal(data.mean(), vals['mean']) @@ -277,4 +277,4 @@ def test_from_filespec_deprecation(): # Warning for from_filespec img_speced = EcatImage.from_filespec(ecat_file) assert_equal(len(w), 1) - assert_array_equal(img_loaded.get_data(), img_speced.get_data()) + assert_array_equal(img_loaded.get_fdata(), img_speced.get_fdata()) diff --git a/nibabel/tests/test_ecat_data.py b/nibabel/tests/test_ecat_data.py index 471bc6b93c..4b187bf855 100644 --- a/nibabel/tests/test_ecat_data.py +++ b/nibabel/tests/test_ecat_data.py @@ -43,7 +43,7 @@ def test_load(self): assert_equal(img.shape, self.example_params['shape']) assert_equal(img.get_data_dtype(0).type, self.example_params['type']) # Check correspondence of data and recorded shape - data = img.get_data() + data = img.get_fdata() assert_equal(data.shape, self.example_params['shape']) # min, max, mean values from given parameters assert_almost_equal(data.min(), self.example_params['min'], 4) diff --git a/nibabel/tests/test_filebasedimages.py b/nibabel/tests/test_filebasedimages.py index c9d256edbb..a9c5668508 100644 --- a/nibabel/tests/test_filebasedimages.py +++ b/nibabel/tests/test_filebasedimages.py @@ -2,6 +2,7 @@ """ from itertools import product +import warnings import numpy as np @@ -27,6 +28,11 @@ def shape(self): return self.arr.shape def get_data(self): + warnings.warn('Deprecated', DeprecationWarning) + return self.arr + + @property + def dataobj(self): return self.arr def get_fdata(self): diff --git a/nibabel/tests/test_files_interface.py b/nibabel/tests/test_files_interface.py index 0e9ed88eb9..1994741a1a 100644 --- a/nibabel/tests/test_files_interface.py +++ b/nibabel/tests/test_files_interface.py @@ -71,7 +71,7 @@ def test_files_interface(): img.to_file_map() # saves to files img2 = Nifti1Image.from_file_map(img.file_map) # img still has correct data - assert_array_equal(img2.get_data(), img.get_data()) + 
assert_array_equal(img2.get_fdata(), img.get_fdata()) # fileobjs - pair img = Nifti1Pair(arr, aff) img.file_map['image'].fileobj = BytesIO() @@ -81,7 +81,7 @@ def test_files_interface(): img.to_file_map() # saves to files img2 = Nifti1Pair.from_file_map(img.file_map) # img still has correct data - assert_array_equal(img2.get_data(), img.get_data()) + assert_array_equal(img2.get_fdata(), img.get_fdata()) def test_round_trip_spatialimages(): @@ -99,8 +99,8 @@ def test_round_trip_spatialimages(): img.to_file_map() # read it back again from the written files img2 = klass.from_file_map(file_map) - assert_array_equal(img2.get_data(), data) + assert_array_equal(img2.get_fdata(), data) # write, read it again img2.to_file_map() img3 = klass.from_file_map(file_map) - assert_array_equal(img3.get_data(), data) + assert_array_equal(img3.get_fdata(), data) diff --git a/nibabel/tests/test_funcs.py b/nibabel/tests/test_funcs.py index 8a2a7918d8..447555d6d0 100644 --- a/nibabel/tests/test_funcs.py +++ b/nibabel/tests/test_funcs.py @@ -108,7 +108,7 @@ def test_concat(): else: assert_false( expect_error, "Expected a concatenation error, but got none.") - assert_array_equal(all_imgs.get_data(), all_data) + assert_array_equal(all_imgs.get_fdata(), all_data) assert_array_equal(all_imgs.affine, affine) # check that not-matching affines raise error @@ -123,7 +123,7 @@ def test_concat(): else: assert_false( expect_error, "Expected a concatenation error, but got none.") - assert_array_equal(all_imgs.get_data(), all_data) + assert_array_equal(all_imgs.get_fdata(), all_data) assert_array_equal(all_imgs.affine, affine) @@ -140,7 +140,7 @@ def test_closest_canonical(): img = AnalyzeImage(arr, np.diag([-1, 1, 1, 1])) xyz_img = as_closest_canonical(img) assert_false(img is xyz_img) - out_arr = xyz_img.get_data() + out_arr = xyz_img.get_fdata() assert_array_equal(out_arr, np.flipud(arr)) # Now onto the NIFTI cases (where dim_info also has to be updated) @@ -159,7 +159,7 @@ def test_closest_canonical(): xyz_img = as_closest_canonical(img) assert_false(img is xyz_img) assert_true(img.header.get_dim_info() == xyz_img.header.get_dim_info()) - out_arr = xyz_img.get_data() + out_arr = xyz_img.get_fdata() assert_array_equal(out_arr, np.flipud(arr)) # no error for enforce_diag in this case @@ -185,7 +185,7 @@ def test_closest_canonical(): # Check both the original and new objects assert_true(img.header.get_dim_info() == (0, 1, 2)) assert_true(xyz_img.header.get_dim_info() == (0, 2, 1)) - out_arr = xyz_img.get_data() + out_arr = xyz_img.get_fdata() assert_array_equal(out_arr, np.transpose(arr, (0, 2, 1, 3))) # same axis swap but with None dim info (except for slice dim) diff --git a/nibabel/tests/test_image_api.py b/nibabel/tests/test_image_api.py index 8792fe938e..748f9c2472 100644 --- a/nibabel/tests/test_image_api.py +++ b/nibabel/tests/test_image_api.py @@ -14,11 +14,10 @@ * ``img.shape`` (shape of data as read with ``np.array(img.dataobj)`` * ``img.get_fdata()`` (returns floating point data as read with ``np.array(img.dataobj)`` and the cast to float); -* ``img.get_data()`` (returns data as read with ``np.array(img.dataobj)``); -* ``img.uncache()`` (``img.get_data()`` and ``img.get_data`` are allowed to - cache the result of the array creation. If they do, this call empties that - cache. Implement this as a no-op if ``get_fdata()``, ``get_data`` do not - cache. +* ``img.uncache()`` (``img.get_fdata()`` (recommended) and ``img.get_data()`` + (deprecated) are allowed to cache the result of the array creation. 
If they + do, this call empties that cache. Implement this as a no-op if + ``get_fdata()``, ``get_data()`` do not cache.) * ``img[something]`` generates an informative TypeError * ``img.in_memory`` is True for an array image, and for a proxy image that is cached, but False otherwise. @@ -44,7 +43,7 @@ from nose import SkipTest from nose.tools import (assert_true, assert_false, assert_raises, assert_equal) -from numpy.testing import (assert_almost_equal, assert_array_equal) +from numpy.testing import assert_almost_equal, assert_array_equal, assert_warns from ..testing import clear_and_catch_warnings from ..tmpdirs import InTemporaryDirectory @@ -92,7 +91,7 @@ def obj_params(self): ``data_summary`` : dict with data ``min``, ``max``, ``mean``; * ``shape`` : shape of image; * ``affine`` : shape (4, 4) affine array for image; - * ``dtype`` : dtype of data returned from ``get_data()``; + * ``dtype`` : dtype of data returned from ``np.asarray(dataobj)``; * ``is_proxy`` : bool, True if image data is proxied; Notes @@ -131,8 +130,7 @@ def validate_filenames(self, imaker, params): rt_img = bytesio_round_trip(img) assert_array_equal(img.shape, rt_img.shape) assert_almost_equal(img.get_fdata(), rt_img.get_fdata()) - # get_data will be deprecated - assert_almost_equal(img.get_data(), rt_img.get_data()) + assert_almost_equal(np.asanyarray(img.dataobj), np.asanyarray(rt_img.dataobj)) # Give the image a file map klass = type(img) rt_img.file_map = bytesio_filemap(klass) @@ -140,8 +138,7 @@ def validate_filenames(self, imaker, params): rt_img.to_file_map() rt_rt_img = klass.from_file_map(rt_img.file_map) assert_almost_equal(img.get_fdata(), rt_rt_img.get_fdata()) - # get_data will be deprecated - assert_almost_equal(img.get_data(), rt_rt_img.get_data()) + assert_almost_equal(np.asanyarray(img.dataobj), np.asanyarray(rt_img.dataobj)) # get_ / set_ filename fname = 'an_image' + self.standard_extension img.set_filename(fname) @@ -150,12 +147,14 @@ def validate_filenames(self, imaker, params): # to_ / from_ filename fname = 'another_image' + self.standard_extension with InTemporaryDirectory(): - img.to_filename(fname) - rt_img = img.__class__.from_filename(fname) + # Validate that saving or loading a file doesn't use deprecated methods internally + with clear_and_catch_warnings() as w: + warnings.simplefilter('error', DeprecationWarning) + img.to_filename(fname) + rt_img = img.__class__.from_filename(fname) assert_array_equal(img.shape, rt_img.shape) assert_almost_equal(img.get_fdata(), rt_img.get_fdata()) - # get_data will be deprecated - assert_almost_equal(img.get_data(), rt_img.get_data()) + assert_almost_equal(np.asanyarray(img.dataobj), np.asanyarray(rt_img.dataobj)) del rt_img # to allow windows to delete the directory def validate_no_slicing(self, imaker, params): @@ -163,6 +162,13 @@ def validate_no_slicing(self, imaker, params): assert_raises(TypeError, img.__getitem__, 'string') assert_raises(TypeError, img.__getitem__, slice(None)) + def validate_get_data_deprecated(self, imaker, params): + # Check deprecated header API + img = imaker() + with assert_warns(DeprecationWarning): + data = img.get_data() + assert_array_equal(np.asanyarray(img.dataobj), data) + class GetSetDtypeMixin(object): """ Adds dtype tests @@ -517,7 +523,7 @@ def validate_from_bytes(self, imaker, params): img_b = klass.from_bytes(fobj.read()) assert self._header_eq(img_a.header, img_b.header) - assert np.array_equal(img_a.get_data(), img_b.get_data()) + assert np.array_equal(img_a.get_fdata(), img_b.get_fdata()) del img_a del 
img_b @@ -537,7 +543,7 @@ def validate_to_from_bytes(self, imaker, params): assert img_b.to_bytes() == bytes_a assert self._header_eq(img_a.header, img_b.header) - assert np.array_equal(img_a.get_data(), img_b.get_data()) + assert np.array_equal(img_a.get_fdata(), img_b.get_fdata()) del img_a del img_b diff --git a/nibabel/tests/test_image_load_save.py b/nibabel/tests/test_image_load_save.py index 7101b6a31b..6031d4e851 100644 --- a/nibabel/tests/test_image_load_save.py +++ b/nibabel/tests/test_image_load_save.py @@ -29,7 +29,7 @@ from ..spatialimages import SpatialImage from numpy.testing import assert_array_equal, assert_array_almost_equal -from nose.tools import assert_true, assert_equal, assert_raises +from nose.tools import assert_true, assert_equal, assert_not_equal, assert_raises _, have_scipy, _ = optional_package('scipy') # No scipy=>no SPM-format writing DATA_PATH = pjoin(dirname(__file__), 'data') @@ -38,11 +38,7 @@ def round_trip(img): # round trip a nifti single - sio = BytesIO() - img.file_map['image'].fileobj = sio - img.to_file_map() - img2 = Nifti1Image.from_file_map(img.file_map) - return img2 + return Nifti1Image.from_bytes(img.to_bytes()) def test_conversion_spatialimages(): @@ -61,7 +57,7 @@ def test_conversion_spatialimages(): if not w_class.makeable: continue img2 = w_class.from_image(img) - assert_array_equal(img2.get_data(), data) + assert_array_equal(img2.get_fdata(), data) assert_array_equal(img2.affine, affine) @@ -74,13 +70,15 @@ def test_save_load_endian(): assert_equal(img.header.endianness, native_code) img2 = round_trip(img) assert_equal(img2.header.endianness, native_code) - assert_array_equal(img2.get_data(), data) + assert_array_equal(img2.get_fdata(), data) + assert_array_equal(np.asanyarray(img2.dataobj), data) # byte swapped endian image bs_hdr = img.header.as_byteswapped() bs_img = Nifti1Image(data, affine, bs_hdr) assert_equal(bs_img.header.endianness, swapped_code) # of course the data is the same because it's not written to disk - assert_array_equal(bs_img.get_data(), data) + assert_array_equal(bs_img.get_fdata(), data) + assert_array_equal(np.asanyarray(bs_img.dataobj), data) # Check converting to another image cbs_img = AnalyzeImage.from_image(bs_img) # this will make the header native by doing the header conversion @@ -92,17 +90,21 @@ def test_save_load_endian(): assert_equal(cbs_hdr2.endianness, native_code) # Try byteswapped round trip bs_img2 = round_trip(bs_img) - bs_data2 = bs_img2.get_data() + bs_data2 = np.asanyarray(bs_img2.dataobj) + bs_fdata2 = bs_img2.get_fdata() # now the data dtype was swapped endian, so the read data is too assert_equal(bs_data2.dtype.byteorder, swapped_code) assert_equal(bs_img2.header.endianness, swapped_code) assert_array_equal(bs_data2, data) + # but get_fdata uses native endian + assert_not_equal(bs_fdata2.dtype.byteorder, swapped_code) + assert_array_equal(bs_fdata2, data) # Now mix up byteswapped data and non-byteswapped header mixed_img = Nifti1Image(bs_data2, affine) assert_equal(mixed_img.header.endianness, native_code) m_img2 = round_trip(mixed_img) assert_equal(m_img2.header.endianness, native_code) - assert_array_equal(m_img2.get_data(), data) + assert_array_equal(m_img2.get_fdata(), data) def test_save_load(): @@ -119,7 +121,7 @@ def test_save_load(): ni1.save(img, nifn) re_img = nils.load(nifn) assert_true(isinstance(re_img, ni1.Nifti1Image)) - assert_array_equal(re_img.get_data(), data) + assert_array_equal(re_img.get_fdata(), data) assert_array_equal(re_img.affine, affine) # These and subsequent 
del statements are to prevent confusing # windows errors when trying to open files or delete the @@ -129,20 +131,20 @@ def test_save_load(): spm2.save(img, sifn) re_img2 = nils.load(sifn) assert_true(isinstance(re_img2, spm2.Spm2AnalyzeImage)) - assert_array_equal(re_img2.get_data(), data) + assert_array_equal(re_img2.get_fdata(), data) assert_array_equal(re_img2.affine, affine) del re_img2 spm99.save(img, sifn) re_img3 = nils.load(sifn) assert_true(isinstance(re_img3, spm99.Spm99AnalyzeImage)) - assert_array_equal(re_img3.get_data(), data) + assert_array_equal(re_img3.get_fdata(), data) assert_array_equal(re_img3.affine, affine) ni1.save(re_img3, nifn) del re_img3 re_img = nils.load(nifn) assert_true(isinstance(re_img, ni1.Nifti1Image)) - assert_array_equal(re_img.get_data(), data) + assert_array_equal(re_img.get_fdata(), data) assert_array_equal(re_img.affine, affine) del re_img @@ -173,7 +175,7 @@ def test_two_to_one(): # the offset stays at zero (but is 352 on disk) assert_equal(pimg.header['magic'], b'ni1') assert_equal(pimg.header['vox_offset'], 0) - assert_array_equal(pimg.get_data(), data) + assert_array_equal(pimg.get_fdata(), data) # same for from_image, going from single image to pair format ana_img = ana.AnalyzeImage.from_image(img) assert_equal(ana_img.header['vox_offset'], 0) @@ -211,7 +213,7 @@ def test_negative_load_save(): img.to_file_map() str_io.seek(0) re_img = Nifti1Image.from_file_map(img.file_map) - assert_array_almost_equal(re_img.get_data(), data, 4) + assert_array_almost_equal(re_img.get_fdata(), data, 4) def test_filename_save(): @@ -255,7 +257,7 @@ def test_filename_save(): fname = pjoin(pth, 'image' + out_ext) nils.save(img, fname) rt_img = nils.load(fname) - assert_array_almost_equal(rt_img.get_data(), data) + assert_array_almost_equal(rt_img.get_fdata(), data) assert_true(type(rt_img) is loadklass) # delete image to allow file close. 
Otherwise windows # raises an error when trying to delete the directory diff --git a/nibabel/tests/test_loadsave.py b/nibabel/tests/test_loadsave.py index 4c1c703389..491cb07b76 100644 --- a/nibabel/tests/test_loadsave.py +++ b/nibabel/tests/test_loadsave.py @@ -35,7 +35,7 @@ def test_read_img_data(): ): fpath = pjoin(data_path, fname) img = load(fpath) - data = img.get_data() + data = img.get_fdata() data2 = read_img_data(img) assert_array_equal(data, data2) # These examples have null scaling - assert prefer=unscaled is the same @@ -87,7 +87,7 @@ def test_read_img_data_nifti(): # Load - now the scaling and offset correctly applied img_fname = img.file_map['image'].filename img_back = load(img_fname) - data_back = img_back.get_data() + data_back = img_back.get_fdata() assert_array_equal(data_back, read_img_data(img_back)) # This is the same as if we loaded the image and header separately hdr_fname = (img.file_map['header'].filename @@ -131,7 +131,7 @@ def test_read_img_data_nifti(): with open(img_fname, 'ab') as fobj: fobj.write(b'\x00\x00') img_back = load(img_fname) - data_back = img_back.get_data() + data_back = img_back.get_fdata() assert_array_equal(data_back, read_img_data(img_back)) img_back.header.set_data_offset(1026) # Check we pick up new offset diff --git a/nibabel/tests/test_minc1.py b/nibabel/tests/test_minc1.py index 50f4955917..a4d42fdc36 100644 --- a/nibabel/tests/test_minc1.py +++ b/nibabel/tests/test_minc1.py @@ -135,7 +135,7 @@ def test_old_namespace(): mimg = MincImage(arr, aff) # Call to create object created warning assert_equal(warns.pop(0).category, FutureWarning) - assert_array_equal(mimg.get_data(), arr) + assert_array_equal(mimg.get_fdata(), arr) # Another old name from ..minc1 import MincFile, Minc1File assert_false(MincFile is Minc1File) @@ -185,20 +185,20 @@ def test_load(self): # Check highest level load of minc works for tp in self.test_files: img = load(tp['fname']) - data = img.get_data() + data = img.get_fdata() assert_equal(data.shape, tp['shape']) # min, max, mean values from read in SPM2 / minctools assert_data_similar(data, tp) # check if mnc can be converted to nifti ni_img = Nifti1Image.from_image(img) assert_array_equal(ni_img.affine, tp['affine']) - assert_array_equal(ni_img.get_data(), data) + assert_array_equal(ni_img.get_fdata(), data) def test_array_proxy_slicing(self): # Test slicing of array proxy for tp in self.test_files: img = load(tp['fname']) - arr = img.get_data() + arr = img.get_fdata() prox = img.dataobj assert_true(prox.is_proxy) for sliceobj in slicer_samples(img.shape): @@ -220,7 +220,7 @@ def test_compressed(self): fobj.write(content) fobj.close() img = self.module.load(fname) - data = img.get_data() + data = img.get_fdata() assert_data_similar(data, tp) del img diff --git a/nibabel/tests/test_minc2_data.py b/nibabel/tests/test_minc2_data.py index 57146171e9..ebfafa938f 100644 --- a/nibabel/tests/test_minc2_data.py +++ b/nibabel/tests/test_minc2_data.py @@ -64,7 +64,7 @@ def test_load(self): assert_almost_equal(img.affine, self.example_params['affine'], 4) assert_equal(img.get_data_dtype().type, self.example_params['type']) # Check correspondence of data and recorded shape - data = img.get_data() + data = img.get_fdata() assert_equal(data.shape, self.example_params['shape']) # min, max, mean values from read in SPM2 assert_almost_equal(data.min(), self.example_params['min'], 4) @@ -74,7 +74,7 @@ def test_load(self): ni_img = Nifti1Image.from_image(img) assert_almost_equal(ni_img.get_affine(), self.example_params['affine'], 2) - 
assert_array_equal(ni_img.get_data(), data) + assert_array_equal(ni_img.get_fdata(), data) class TestB0(TestEPIFrame): diff --git a/nibabel/tests/test_nifti1.py b/nibabel/tests/test_nifti1.py index 38863e9aa2..0213b615f1 100644 --- a/nibabel/tests/test_nifti1.py +++ b/nibabel/tests/test_nifti1.py @@ -361,7 +361,7 @@ def test_freesurfer_ico7_hack(self): nii = load(os.path.join(nitest_path, 'derivative', 'fsaverage', 'surf', 'lh.orig.avg.area.nii')) assert_equal(mgh.shape, nii.shape) - assert_array_equal(mgh.get_data(), nii.get_data()) + assert_array_equal(mgh.get_fdata(), nii.get_fdata()) assert_array_equal(nii.header._structarr['dim'][1:4], np.array([27307, 1, 6])) # Test writing produces consistent nii files @@ -369,8 +369,8 @@ def test_freesurfer_ico7_hack(self): nii.to_filename('test.nii') nii2 = load('test.nii') assert_equal(nii.shape, nii2.shape) - assert_array_equal(nii.get_data(), nii2.get_data()) - assert_array_equal(nii.get_affine(), nii2.get_affine()) + assert_array_equal(nii.get_fdata(), nii2.get_fdata()) + assert_array_equal(nii.affine, nii2.affine) def test_qform_sform(self): HC = self.header_class @@ -975,16 +975,16 @@ def test_load_save(self): assert_equal(img.shape, shape) img.set_data_dtype(npt) img2 = bytesio_round_trip(img) - assert_array_equal(img2.get_data(), data) + assert_array_equal(img2.get_fdata(), data) with InTemporaryDirectory() as tmpdir: for ext in ('', '.gz', '.bz2'): fname = os.path.join(tmpdir, 'test' + img_ext + ext) img.to_filename(fname) img3 = IC.load(fname) assert_true(isinstance(img3, img.__class__)) - assert_array_equal(img3.get_data(), data) + assert_array_equal(img3.get_fdata(), data) assert_equal(img3.header, img.header) - assert_true(isinstance(img3.get_data(), + assert_true(isinstance(np.asanyarray(img3.dataobj), np.memmap if ext == '' else np.ndarray)) # del to avoid windows errors of form 'The process cannot # access the file because it is being used' @@ -1010,7 +1010,7 @@ def test_load_pixdims(self): assert_array_equal(img_hdr.get_zooms(), [2, 3, 4]) # Save to stringio re_simg = bytesio_round_trip(simg) - assert_array_equal(re_simg.get_data(), arr) + assert_array_equal(re_simg.get_fdata(), arr) # Check qform, sform, pixdims are the same rimg_hdr = re_simg.header assert_array_equal(rimg_hdr.get_qform(), qaff) @@ -1337,7 +1337,7 @@ def test_loadsave_cycle(self): lnim = bytesio_round_trip(wnim) assert_equal(lnim.get_data_dtype(), np.int16) # Scaling applied - assert_array_equal(lnim.get_data(), data * 2. + 8.) + assert_array_equal(lnim.get_fdata(), data * 2. + 8.) 
# slope, inter reset by image creation, but saved in proxy assert_equal(lnim.header.get_slope_inter(), (None, None)) assert_equal((lnim.dataobj.slope, lnim.dataobj.inter), (2, 8)) @@ -1354,11 +1354,11 @@ def test_load(self): with InTemporaryDirectory(): for img in (simg, pimg): save(img, 'test.nii') - assert_array_equal(arr, load('test.nii').get_data()) + assert_array_equal(arr, load('test.nii').get_fdata()) save(simg, 'test.img') - assert_array_equal(arr, load('test.img').get_data()) + assert_array_equal(arr, load('test.img').get_fdata()) save(simg, 'test.hdr') - assert_array_equal(arr, load('test.hdr').get_data()) + assert_array_equal(arr, load('test.hdr').get_fdata()) def test_float_int_min_max(self): # Conversion between float and int @@ -1370,7 +1370,7 @@ def test_float_int_min_max(self): for out_dt in IUINT_TYPES: img = self.single_class(arr, aff) img_back = bytesio_round_trip(img) - arr_back_sc = img_back.get_data() + arr_back_sc = img_back.get_fdata() assert_true(np.allclose(arr, arr_back_sc)) def test_float_int_spread(self): @@ -1384,7 +1384,7 @@ def test_float_int_spread(self): for out_dt in IUINT_TYPES: img = self.single_class(arr_t, aff) img_back = bytesio_round_trip(img) - arr_back_sc = img_back.get_data() + arr_back_sc = img_back.get_fdata() slope, inter = img_back.header.get_slope_inter() # Get estimate for error max_miss = rt_err_estimate(arr_t, arr_back_sc.dtype, slope, @@ -1407,7 +1407,7 @@ def test_rt_bias(self): for out_dt in IUINT_TYPES: img = self.single_class(arr_t, aff) img_back = bytesio_round_trip(img) - arr_back_sc = img_back.get_data() + arr_back_sc = img_back.get_fdata() slope, inter = img_back.header.get_slope_inter() bias = np.mean(arr_t - arr_back_sc) # Get estimate for error @@ -1457,7 +1457,7 @@ def test_large_nifti1(): with InTemporaryDirectory(): img.to_filename('test.nii.gz') del img - data = load('test.nii.gz').get_data() + data = load('test.nii.gz').get_fdata() # Check that the data are all ones assert_equal(image_shape, data.shape) n_ones = np.sum((data == 1.)) diff --git a/nibabel/tests/test_parrec.py b/nibabel/tests/test_parrec.py index 917bc417c6..940d8864e5 100644 --- a/nibabel/tests/test_parrec.py +++ b/nibabel/tests/test_parrec.py @@ -764,13 +764,13 @@ def test_varying_scaling(): scaled_arr[:, :, i] *= slopes[i] scaled_arr[:, :, i] += inters[i] assert_almost_equal(np.reshape(scaled_arr, img.shape, order='F'), - img.get_data(), 9) + img.get_fdata(), 9) # Check fp scaling for i in range(arr.shape[2]): scaled_arr[:, :, i] /= (slopes[i] * sc_slopes[i]) dv_img = PARRECImage.load(VARY_REC, scaling='fp') assert_almost_equal(np.reshape(scaled_arr, img.shape, order='F'), - dv_img.get_data(), 9) + dv_img.get_fdata(), 9) def test_anonymized(): diff --git a/nibabel/tests/test_processing.py b/nibabel/tests/test_processing.py index a09bd4cd85..0e1dbb83c7 100644 --- a/nibabel/tests/test_processing.py +++ b/nibabel/tests/test_processing.py @@ -411,7 +411,7 @@ def test_against_spm_resample(): func = nib.load(pjoin(DATA_DIR, 'functional.nii')) some_rotations = euler2mat(0.1, 0.2, 0.3) extra_affine = from_matvec(some_rotations, [3, 4, 5]) - moved_anat = nib.Nifti1Image(anat.get_data().astype(float), + moved_anat = nib.Nifti1Image(anat.get_fdata(), extra_affine.dot(anat.affine), anat.header) one_func = nib.Nifti1Image(func.dataobj[..., 0], diff --git a/nibabel/tests/test_proxy_api.py b/nibabel/tests/test_proxy_api.py index 58ad5fa5d2..494ab4b556 100644 --- a/nibabel/tests/test_proxy_api.py +++ b/nibabel/tests/test_proxy_api.py @@ -377,7 +377,7 @@ class 
TestEcatAPI(_TestProxyAPI): def obj_params(self): eg_path = pjoin(DATA_PATH, self.eg_fname) img = ecat.load(eg_path) - arr_out = img.get_data() + arr_out = img.get_fdata() def eg_func(): img = ecat.load(eg_path) @@ -398,7 +398,7 @@ class TestPARRECAPI(_TestProxyAPI): def _func_dict(self, rec_name): img = parrec.load(rec_name) - arr_out = img.get_data() + arr_out = img.get_fdata() def eg_func(): img = parrec.load(rec_name) diff --git a/nibabel/tests/test_round_trip.py b/nibabel/tests/test_round_trip.py index d216a03cdd..5c3a12b086 100644 --- a/nibabel/tests/test_round_trip.py +++ b/nibabel/tests/test_round_trip.py @@ -25,7 +25,7 @@ def round_trip(arr, out_dtype): img.to_file_map() back = Nifti1Image.from_file_map(img.file_map) # Recover array and calculated scaling from array proxy object - return back.get_data(), back.dataobj.slope, back.dataobj.inter + return back.get_fdata(), back.dataobj.slope, back.dataobj.inter def check_params(in_arr, in_type, out_type): diff --git a/nibabel/tests/test_scripts.py b/nibabel/tests/test_scripts.py index 99e9c546f0..6d46e57c5c 100644 --- a/nibabel/tests/test_scripts.py +++ b/nibabel/tests/test_scripts.py @@ -216,7 +216,7 @@ def check_conversion(cmd, pr_data, out_fname): img = load(out_fname) # Check orientations always LAS assert_equal(aff2axcodes(img.affine), tuple('LAS')) - data = img.get_data() + data = img.get_fdata() assert_true(np.allclose(data, pr_data)) assert_true(np.allclose(img.header['cal_min'], data.min())) assert_true(np.allclose(img.header['cal_max'], data.max())) @@ -224,21 +224,21 @@ def check_conversion(cmd, pr_data, out_fname): # Check minmax options run_command(cmd + ['--minmax', '1', '2']) img = load(out_fname) - data = img.get_data() + data = img.get_fdata() assert_true(np.allclose(data, pr_data)) assert_true(np.allclose(img.header['cal_min'], 1)) assert_true(np.allclose(img.header['cal_max'], 2)) del img, data # for windows run_command(cmd + ['--minmax', 'parse', '2']) img = load(out_fname) - data = img.get_data() + data = img.get_fdata() assert_true(np.allclose(data, pr_data)) assert_true(np.allclose(img.header['cal_min'], data.min())) assert_true(np.allclose(img.header['cal_max'], 2)) del img, data # for windows run_command(cmd + ['--minmax', '1', 'parse']) img = load(out_fname) - data = img.get_data() + data = img.get_fdata() assert_true(np.allclose(data, pr_data)) assert_true(np.allclose(img.header['cal_min'], 1)) assert_true(np.allclose(img.header['cal_max'], data.max())) @@ -260,7 +260,7 @@ def test_parrec2nii(): assert_equal(img.shape, eg_dict['shape']) assert_dt_equal(img.get_data_dtype(), eg_dict['dtype']) # Check against values from Philips converted nifti image - data = img.get_data() + data = img.get_fdata() assert_data_similar(data, eg_dict) assert_almost_equal(img.header.get_zooms(), eg_dict['zooms']) # Standard save does not save extensions @@ -273,7 +273,7 @@ def test_parrec2nii(): assert_equal(code, 1) # Default scaling is dv pr_img = load(fname) - flipped_data = flip_axis(pr_img.get_data(), 1) + flipped_data = flip_axis(pr_img.get_fdata(), 1) base_cmd = ['parrec2nii', '--overwrite', fname] check_conversion(base_cmd, flipped_data, out_froot) check_conversion(base_cmd + ['--scaling=dv'], @@ -281,7 +281,7 @@ def test_parrec2nii(): out_froot) # fp pr_img = load(fname, scaling='fp') - flipped_data = flip_axis(pr_img.get_data(), 1) + flipped_data = flip_axis(pr_img.get_fdata(), 1) check_conversion(base_cmd + ['--scaling=fp'], flipped_data, out_froot) @@ -356,7 +356,7 @@ def test_parrec2nii_with_data(): bvals_trace = 
np.loadtxt('DTI.bvals') assert_almost_equal(bvals_trace, DTI_PAR_BVALS) img = load('DTI.nii') - data = img.get_data().copy() + data = img.get_fdata() del img # Bvecs in header, transposed from PSL to LPS bvecs_LPS = DTI_PAR_BVECS[:, [2, 0, 1]] @@ -384,7 +384,7 @@ def test_parrec2nii_with_data(): img = load('DTI.nii') bvecs_notrace = np.loadtxt('DTI.bvecs').T bvals_notrace = np.loadtxt('DTI.bvals') - data_notrace = img.get_data().copy() + data_notrace = img.get_fdata() assert_equal(data_notrace.shape[-1], len(bvecs_notrace)) del img # ensure correct volume was removed @@ -399,7 +399,7 @@ def test_parrec2nii_with_data(): # strict-sort: bvals should be in ascending order assert_almost_equal(np.loadtxt('DTI.bvals'), np.sort(DTI_PAR_BVALS)) img = load('DTI.nii') - data_sorted = img.get_data().copy() + data_sorted = img.get_fdata() assert_almost_equal(data[..., np.argsort(DTI_PAR_BVALS)], data_sorted) del img diff --git a/nibabel/tests/test_spatialimages.py b/nibabel/tests/test_spatialimages.py index b0f571023d..54633c9820 100644 --- a/nibabel/tests/test_spatialimages.py +++ b/nibabel/tests/test_spatialimages.py @@ -22,7 +22,7 @@ from unittest import TestCase from nose.tools import (assert_true, assert_false, assert_equal, assert_not_equal, assert_raises) -from numpy.testing import assert_array_equal, assert_array_almost_equal +from numpy.testing import assert_array_equal, assert_array_almost_equal, assert_warns from .test_helpers import bytesio_round_trip from ..testing import (clear_and_catch_warnings, suppress_warnings, @@ -235,7 +235,7 @@ def test_images(self): # See https://github.com/nipy/nibabel/issues/58 arr = np.arange(24, dtype=np.int16).reshape((2, 3, 4)) img = self.image_class(arr, None) - assert_array_equal(img.get_data(), arr) + assert_array_equal(img.get_fdata(), arr) assert_equal(img.affine, None) def test_default_header(self): @@ -252,9 +252,9 @@ def test_data_api(self): img = self.image_class(DataLike(), None) # Shape may be promoted to higher dimension, but may not reorder or # change size - assert_array_equal(img.get_data().flatten(), np.arange(3)) - assert_equal(img.get_shape()[:1], (3,)) - assert_equal(np.prod(img.get_shape()), 3) + assert_array_equal(img.get_fdata().flatten(), np.arange(3)) + assert_equal(img.shape[:1], (3,)) + assert_equal(np.prod(img.shape), 3) def check_dtypes(self, expected, actual): # Some images will want dtypes to be equal including endianness, @@ -389,9 +389,10 @@ def test_get_data(self): "Cannot slice image objects; consider using " "`img.slicer[slice]` to generate a sliced image (see " "documentation for caveats) or slicing image array data " - "with `img.dataobj[slice]` or `img.get_data()[slice]`") + "with `img.dataobj[slice]` or `img.get_fdata()[slice]`") assert_true(in_data is img.dataobj) - out_data = img.get_data() + with assert_warns(DeprecationWarning): + out_data = img.get_data() assert_true(in_data is out_data) # and that uncache has no effect img.uncache() @@ -403,15 +404,19 @@ def test_get_data(self): rt_img = bytesio_round_trip(img) assert_false(in_data is rt_img.dataobj) assert_array_equal(rt_img.dataobj, in_data) - out_data = rt_img.get_data() + with assert_warns(DeprecationWarning): + out_data = rt_img.get_data() assert_array_equal(out_data, in_data) assert_false(rt_img.dataobj is out_data) # cache - assert_true(rt_img.get_data() is out_data) + with assert_warns(DeprecationWarning): + assert_true(rt_img.get_data() is out_data) out_data[:] = 42 rt_img.uncache() - assert_false(rt_img.get_data() is out_data) - 
assert_array_equal(rt_img.get_data(), in_data) + with assert_warns(DeprecationWarning): + assert_false(rt_img.get_data() is out_data) + with assert_warns(DeprecationWarning): + assert_array_equal(rt_img.get_data(), in_data) def test_slicer(self): img_klass = self.image_class @@ -534,10 +539,14 @@ def test_slicer(self): pass else: sliced_data = in_data[sliceobj] - assert_array_equal(sliced_data, sliced_img.get_data()) + with assert_warns(DeprecationWarning): + assert_array_equal(sliced_data, sliced_img.get_data()) + assert_array_equal(sliced_data, sliced_img.get_fdata()) assert_array_equal(sliced_data, sliced_img.dataobj) assert_array_equal(sliced_data, img.dataobj[sliceobj]) - assert_array_equal(sliced_data, img.get_data()[sliceobj]) + with assert_warns(DeprecationWarning): + assert_array_equal(sliced_data, img.get_data()[sliceobj]) + assert_array_equal(sliced_data, img.get_fdata()[sliceobj]) def test_api_deprecations(self): @@ -632,7 +641,7 @@ def test_load_mmap(self): if mmap is not None: kwargs['mmap'] = mmap back_img = func(param1, **kwargs) - back_data = back_img.get_data() + back_data = np.asanyarray(back_img.dataobj) if expected_mode is None: assert_false(isinstance(back_data, np.memmap), 'Should not be a %s' % img_klass.__name__) diff --git a/nibabel/tests/test_spm99analyze.py b/nibabel/tests/test_spm99analyze.py index 137d3b0451..86143f35ab 100644 --- a/nibabel/tests/test_spm99analyze.py +++ b/nibabel/tests/test_spm99analyze.py @@ -187,7 +187,7 @@ def assert_null_scaling(self, arr, slope, inter): img = img_class(arr, np.eye(4), input_hdr) img_hdr = img.header self._set_raw_scaling(input_hdr, slope, inter) - assert_array_equal(img.get_data(), arr) + assert_array_equal(img.get_fdata(), arr) # Scaling has no effect on image as written via header (with rescaling # turned off). 
fm = bytesio_filemap(img) @@ -196,12 +196,12 @@ def assert_null_scaling(self, arr, slope, inter): img_hdr.write_to(hdr_fobj) img_hdr.data_to_fileobj(arr, img_fobj, rescale=False) raw_rt_img = img_class.from_file_map(fm) - assert_array_equal(raw_rt_img.get_data(), arr) + assert_array_equal(raw_rt_img.get_fdata(), arr) # Scaling makes no difference for image round trip fm = bytesio_filemap(img) img.to_file_map(fm) rt_img = img_class.from_file_map(fm) - assert_array_equal(rt_img.get_data(), arr) + assert_array_equal(rt_img.get_fdata(), arr) def test_header_scaling(self): # For images that implement scaling, test effect of scaling @@ -258,20 +258,20 @@ def _check_write_scaling(self, img = img_class(arr, aff) self.assert_scale_me_scaling(img.header) # Array from image unchanged by scaling - assert_array_equal(img.get_data(), arr) + assert_array_equal(img.get_fdata(), arr) # As does round trip img_rt = bytesio_round_trip(img) self.assert_scale_me_scaling(img_rt.header) # Round trip array is not scaled - assert_array_equal(img_rt.get_data(), arr) + assert_array_equal(img_rt.get_fdata(), arr) # Explicit scaling causes scaling after round trip self._set_raw_scaling(img.header, slope, inter) self.assert_scaling_equal(img.header, slope, inter) # Array from image unchanged by scaling - assert_array_equal(img.get_data(), arr) + assert_array_equal(img.get_fdata(), arr) # But the array scaled after round trip img_rt = bytesio_round_trip(img) - assert_array_equal(img_rt.get_data(), + assert_array_equal(img_rt.get_fdata(), apply_read_scaling(arr, effective_slope, effective_inter)) @@ -289,7 +289,7 @@ def _check_write_scaling(self, img.header.set_data_dtype(np.uint8) with np.errstate(invalid='ignore'): img_rt = bytesio_round_trip(img) - assert_array_equal(img_rt.get_data(), + assert_array_equal(img_rt.get_fdata(), apply_read_scaling(np.round(arr), effective_slope, effective_inter)) @@ -299,7 +299,7 @@ def _check_write_scaling(self, with np.errstate(invalid='ignore'): img_rt = bytesio_round_trip(img) exp_unscaled_arr = np.clip(np.round(arr), 0, 255) - assert_array_equal(img_rt.get_data(), + assert_array_equal(img_rt.get_fdata(), apply_read_scaling(exp_unscaled_arr, effective_slope, effective_inter)) @@ -313,7 +313,7 @@ def test_int_int_scaling(self): img.set_data_dtype(np.uint8) self._set_raw_scaling(hdr, 1, 0 if hdr.has_data_intercept else None) img_rt = bytesio_round_trip(img) - assert_array_equal(img_rt.get_data(), np.clip(arr, 0, 255)) + assert_array_equal(img_rt.get_fdata(), np.clip(arr, 0, 255)) def test_no_scaling(self): # Test writing image converting types when not calculating scaling @@ -337,7 +337,7 @@ def test_no_scaling(self): with np.errstate(invalid='ignore'): rt_img = bytesio_round_trip(img) with suppress_warnings(): # invalid mult - back_arr = rt_img.get_data() + back_arr = np.asanyarray(rt_img.dataobj) exp_back = arr.copy() # If converting to floating point type, casting is direct. 
# Otherwise we will need to do float-(u)int casting at some point @@ -392,13 +392,13 @@ def test_nan2zero_range_ok(self): arr[1, 0, 0] = 256 # to push outside uint8 range img = img_class(arr, np.eye(4)) rt_img = bytesio_round_trip(img) - assert_array_equal(rt_img.get_data(), arr) + assert_array_equal(rt_img.get_fdata(), arr) # Uncontroversial so far, but now check that nan2zero works correctly # for int type img.set_data_dtype(np.uint8) with np.errstate(invalid='ignore'): rt_img = bytesio_round_trip(img) - assert_equal(rt_img.get_data()[0, 0, 0], 0) + assert_equal(rt_img.get_fdata()[0, 0, 0], 0) class TestSpm99AnalyzeImage(test_analyze.TestAnalyzeImage, ImageScalingMixin): @@ -450,7 +450,7 @@ def test_mat_read(self): # Test round trip img.to_file_map() r_img = img_klass.from_file_map(fm) - assert_array_equal(r_img.get_data(), arr) + assert_array_equal(r_img.get_fdata(), arr) assert_array_equal(r_img.affine, aff) # mat files are for matlab and have 111 voxel origins. We need to # adjust for that, when loading and saving. Check for signs of that in @@ -478,7 +478,7 @@ def test_mat_read(self): dict(M=np.diag([3, 4, 5, 1]), mat=np.diag([6, 7, 8, 1]))) # Check we are preferring the 'mat' matrix r_img = img_klass.from_file_map(fm) - assert_array_equal(r_img.get_data(), arr) + assert_array_equal(r_img.get_fdata(), arr) assert_array_equal(r_img.affine, np.dot(np.diag([6, 7, 8, 1]), to_111)) # But will use M if present @@ -486,7 +486,7 @@ def test_mat_read(self): mat_fileobj.truncate(0) savemat(mat_fileobj, dict(M=np.diag([3, 4, 5, 1]))) r_img = img_klass.from_file_map(fm) - assert_array_equal(r_img.get_data(), arr) + assert_array_equal(r_img.get_fdata(), arr) assert_array_equal(r_img.affine, np.dot(np.diag([3, 4, 5, 1]), np.dot(flipper, to_111)))