Skip to content

Commit 4668e26

Browse files
committed
fix: resolved conflicts
2 parents a55e92f + 63f9ef2 commit 4668e26

Some content is hidden

Large Commits have some content hidden by default. Use the searchbox below for content that may be hidden.

88 files changed

+2718
-1749
lines changed

.travis.yml

Lines changed: 13 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -4,6 +4,19 @@
44
# for it to be on multiple physical lines, so long as you remember: - There
55
# can't be any leading "-"s - All newlines will be removed, so use ";"s
66
language: python
7+
8+
# Run jobs on container-based infrastructure, can be overridden per job
9+
sudo: false
10+
11+
cache:
12+
directories:
13+
- $HOME/.cache/pip
14+
15+
addons:
16+
apt:
17+
packages:
18+
- libhdf5-serial-dev
19+
720
env:
821
global:
922
- DEPENDS="numpy scipy matplotlib h5py"
@@ -46,9 +59,6 @@ before_install:
4659
pip install coverage;
4760
pip install coveralls;
4861
fi
49-
- if [[ $DEPENDS == *h5py* ]]; then
50-
sudo apt-get install libhdf5-serial-dev;
51-
fi
5262
# command to install dependencies
5363
install:
5464
- python setup.py install

Changelog

Lines changed: 11 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -24,6 +24,17 @@ and Stephan Gerhard (SG).
2424

2525
References like "pr/298" refer to github pull request numbers.
2626

27+
* Upcoming
28+
29+
* Trackvis reader will now allow final streamline to have fewer points than
30+
the number declared in the header, with ``strict=False`` argument to
31+
``read`` function;
32+
* Minor API breakage in trackvis reader. We are now raising a DataError if
33+
there are too few streamlines in the file, instead of a HeaderError. We
34+
are raising a DataError if the track is truncated when ``strict=True``
35+
(the default), rather than a TypeError when trying to create the points
36+
array.
37+
2738
* 2.0.1 (Saturday 27 June 2015)
2839

2940
Contributions from Ben Cipollini, Chris Markiewicz, Alexandre Gramfort,

bin/nib-nifti-dx

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -25,8 +25,8 @@ def main():
2525
(opts, files) = parser.parse_args()
2626

2727
for fname in files:
28-
with nib.volumeutils.BinOpener(fname) as fobj:
29-
hdr = fobj.read(nib.nifti1.header_dtype.itemsize)
28+
with nib.openers.ImageOpener(fname) as fobj:
29+
hdr = fobj.read(nib.nifti1.header_dtype.itemsize)
3030
result = nib.Nifti1Header.diagnose_binaryblock(hdr)
3131
if len(result):
3232
print('Picky header check output for "%s"\n' % fname)

nibabel/__init__.py

Lines changed: 4 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -61,7 +61,7 @@
6161
from .orientations import (io_orientation, orientation_affine,
6262
flip_axis, OrientationError,
6363
apply_orientation, aff2axcodes)
64-
from .imageclasses import class_map, ext_map
64+
from .imageclasses import class_map, ext_map, all_image_classes
6565
from . import trackvis
6666
from . import mriutils
6767

@@ -73,9 +73,11 @@
7373
bench = Tester().bench
7474
del Tester
7575
except ImportError:
76-
def test(*args, **kwargs): raise RuntimeError('Need numpy >= 1.2 for tests')
76+
def test(*args, **kwargs):
77+
raise RuntimeError('Need numpy >= 1.2 for tests')
7778

7879
from .pkg_info import get_pkg_info as _get_pkg_info
7980

81+
8082
def get_info():
8183
return _get_pkg_info(os.path.dirname(__file__))

nibabel/affines.py

Lines changed: 13 additions & 13 deletions
Original file line numberDiff line numberDiff line change
@@ -72,9 +72,9 @@ def apply_affine(aff, pts):
7272
shape = pts.shape
7373
pts = pts.reshape((-1, shape[-1]))
7474
# rzs == rotations, zooms, shears
75-
rzs = aff[:-1,:-1]
76-
trans = aff[:-1,-1]
77-
res = np.dot(pts, rzs.T) + trans[None,:]
75+
rzs = aff[:-1, :-1]
76+
trans = aff[:-1, -1]
77+
res = np.dot(pts, rzs.T) + trans[None, :]
7878
return res.reshape(shape)
7979

8080

@@ -90,8 +90,8 @@ def to_matvec(transform):
9090
NxM transform matrix in homogeneous coordinates representing an affine
9191
transformation from an (N-1)-dimensional space to an (M-1)-dimensional
9292
space. An example is a 4x4 transform representing rotations and
93-
translations in 3 dimensions. A 4x3 matrix can represent a 2-dimensional
94-
plane embedded in 3 dimensional space.
93+
translations in 3 dimensions. A 4x3 matrix can represent a
94+
2-dimensional plane embedded in 3 dimensional space.
9595
9696
Returns
9797
-------
@@ -124,8 +124,8 @@ def to_matvec(transform):
124124
def from_matvec(matrix, vector=None):
125125
""" Combine a matrix and vector into an homogeneous affine
126126
127-
Combine a rotation / scaling / shearing matrix and translation vector into a
128-
transform in homogeneous coordinates.
127+
Combine a rotation / scaling / shearing matrix and translation vector into
128+
a transform in homogeneous coordinates.
129129
130130
Parameters
131131
----------
@@ -163,7 +163,7 @@ def from_matvec(matrix, vector=None):
163163
"""
164164
matrix = np.asarray(matrix)
165165
nin, nout = matrix.shape
166-
t = np.zeros((nin+1,nout+1), matrix.dtype)
166+
t = np.zeros((nin + 1, nout + 1), matrix.dtype)
167167
t[0:nin, 0:nout] = matrix
168168
t[nin, nout] = 1.
169169
if not vector is None:
@@ -175,8 +175,8 @@ def append_diag(aff, steps, starts=()):
175175
""" Add diagonal elements `steps` and translations `starts` to affine
176176
177177
Typical use is in expanding 4x4 affines to larger dimensions. Nipy is the
178-
main consumer because it uses NxM affines, whereas we generally only use 4x4
179-
affines; the routine is here for convenience.
178+
main consumer because it uses NxM affines, whereas we generally only use
179+
4x4 affines; the routine is here for convenience.
180180
181181
Parameters
182182
----------
@@ -219,13 +219,13 @@ def append_diag(aff, steps, starts=()):
219219
aff_plus = np.zeros((old_n_out + n_steps + 1,
220220
old_n_in + n_steps + 1), dtype=aff.dtype)
221221
# Get stuff from old affine
222-
aff_plus[:old_n_out,:old_n_in] = aff[:old_n_out, :old_n_in]
223-
aff_plus[:old_n_out,-1] = aff[:old_n_out,-1]
222+
aff_plus[:old_n_out, :old_n_in] = aff[:old_n_out, :old_n_in]
223+
aff_plus[:old_n_out, -1] = aff[:old_n_out, -1]
224224
# Add new diagonal elements
225225
for i, el in enumerate(steps):
226226
aff_plus[old_n_out+i, old_n_in+i] = el
227227
# Add translations for new affine, plus last 1
228-
aff_plus[old_n_out:,-1] = list(starts) + [1]
228+
aff_plus[old_n_out:, -1] = list(starts) + [1]
229229
return aff_plus
230230

231231

nibabel/analyze.py

Lines changed: 42 additions & 26 deletions
Original file line numberDiff line numberDiff line change
@@ -46,23 +46,23 @@
4646
The inability to store affines means that we have to guess what orientation the
4747
image has. Most Analyze images are stored on disk in (fastest-changing to
4848
slowest-changing) R->L, P->A and I->S order. That is, the first voxel is the
49-
rightmost, most posterior and most inferior voxel location in the image, and the
50-
next voxel is one voxel towards the left of the image.
49+
rightmost, most posterior and most inferior voxel location in the image, and
50+
the next voxel is one voxel towards the left of the image.
5151
5252
Most people refer to this disk storage format as 'radiological', on the basis
5353
that, if you load up the data as an array ``img_arr`` where the first axis is
54-
the fastest changing, then take a slice in the I->S axis - ``img_arr[:,:,10]`` -
55-
then the right part of the brain will be on the left of your displayed slice.
54+
the fastest changing, then take a slice in the I->S axis - ``img_arr[:,:,10]``
55+
- then the right part of the brain will be on the left of your displayed slice.
5656
Radiologists like looking at images where the left of the brain is on the right
5757
side of the image.
5858
5959
Conversely, if the image has the voxels stored with the left voxels first -
6060
L->R, P->A, I->S, then this would be 'neurological' format. Neurologists like
6161
looking at images where the left side of the brain is on the left of the image.
6262
63-
When we are guessing at an affine for Analyze, this translates to the problem of
64-
whether the affine should consider proceeding within the data down an X line as
65-
being from left to right, or right to left.
63+
When we are guessing at an affine for Analyze, this translates to the problem
64+
of whether the affine should consider proceeding within the data down an X line
65+
as being from left to right, or right to left.
6666
6767
By default we assume that the image is stored in R->L format. We encode this
6868
choice in the ``default_x_flip`` flag that can be True or False. True means
@@ -153,18 +153,18 @@
153153
header_dtype = np.dtype(header_key_dtd + image_dimension_dtd +
154154
data_history_dtd)
155155

156-
_dtdefs = ( # code, conversion function, equivalent dtype, aliases
156+
_dtdefs = ( # code, conversion function, equivalent dtype, aliases
157157
(0, 'none', np.void),
158-
(1, 'binary', np.void), # 1 bit per voxel, needs thought
158+
(1, 'binary', np.void), # 1 bit per voxel, needs thought
159159
(2, 'uint8', np.uint8),
160160
(4, 'int16', np.int16),
161161
(8, 'int32', np.int32),
162162
(16, 'float32', np.float32),
163-
(32, 'complex64', np.complex64), # numpy complex format?
163+
(32, 'complex64', np.complex64), # numpy complex format?
164164
(64, 'float64', np.float64),
165-
(128, 'RGB', np.dtype([('R','u1'),
166-
('G', 'u1'),
167-
('B', 'u1')])),
165+
(128, 'RGB', np.dtype([('R', 'u1'),
166+
('G', 'u1'),
167+
('B', 'u1')])),
168168
(255, 'all', np.void))
169169

170170
# Make full code alias bank, including dtype column
@@ -343,7 +343,7 @@ def default_structarr(klass, endianness=None):
343343
hdr_data['dim'] = 1
344344
hdr_data['dim'][0] = 0
345345
hdr_data['pixdim'] = 1
346-
hdr_data['datatype'] = 16 # float32
346+
hdr_data['datatype'] = 16 # float32
347347
hdr_data['bitpix'] = 32
348348
return hdr_data
349349

@@ -858,7 +858,7 @@ def _chk_bitpix(klass, hdr, fix=False):
858858
rep.problem_level = 10
859859
rep.problem_msg = 'bitpix does not match datatype'
860860
if fix:
861-
hdr['bitpix'] = bitpix # inplace modification
861+
hdr['bitpix'] = bitpix # inplace modification
862862
rep.fix_msg = 'setting bitpix to match datatype'
863863
return hdr, rep
864864

@@ -892,13 +892,28 @@ def _chk_pixdims(hdr, fix=False):
892892
rep.fix_msg = ' and '.join(fmsgs)
893893
return hdr, rep
894894

895+
@classmethod
896+
def may_contain_header(klass, binaryblock):
897+
if len(binaryblock) < klass.sizeof_hdr:
898+
return False
899+
900+
hdr_struct = np.ndarray(shape=(), dtype=header_dtype,
901+
buffer=binaryblock[:klass.sizeof_hdr])
902+
bs_hdr_struct = hdr_struct.byteswap()
903+
return 348 in (hdr_struct['sizeof_hdr'], bs_hdr_struct['sizeof_hdr'])
904+
895905

896906
class AnalyzeImage(SpatialImage):
897907
""" Class for basic Analyze format image
898908
"""
899909
header_class = AnalyzeHeader
900-
files_types = (('image','.img'), ('header','.hdr'))
901-
_compressed_exts = ('.gz', '.bz2')
910+
_meta_sniff_len = header_class.sizeof_hdr
911+
files_types = (('image', '.img'), ('header', '.hdr'))
912+
valid_exts = ('.img', '.hdr')
913+
_compressed_suffixes = ('.gz', '.bz2')
914+
915+
makeable = True
916+
rw = True
902917

903918
ImageArrayProxy = ArrayProxy
904919

@@ -931,10 +946,10 @@ def from_file_map(klass, file_map, mmap=True):
931946
mmap : {True, False, 'c', 'r'}, optional, keyword only
932947
`mmap` controls the use of numpy memory mapping for reading image
933948
array data. If False, do not try numpy ``memmap`` for data array.
934-
If one of {'c', 'r'}, try numpy memmap with ``mode=mmap``. A `mmap`
935-
value of True gives the same behavior as ``mmap='c'``. If image
936-
data file cannot be memory-mapped, ignore `mmap` value and read
937-
array from file.
949+
If one of {'c', 'r'}, try numpy memmap with ``mode=mmap``. A
950+
`mmap` value of True gives the same behavior as ``mmap='c'``. If
951+
image data file cannot be memory-mapped, ignore `mmap` value and
952+
read array from file.
938953
939954
Returns
940955
-------
@@ -971,10 +986,10 @@ def from_filename(klass, filename, mmap=True):
971986
mmap : {True, False, 'c', 'r'}, optional, keyword only
972987
`mmap` controls the use of numpy memory mapping for reading image
973988
array data. If False, do not try numpy ``memmap`` for data array.
974-
If one of {'c', 'r'}, try numpy memmap with ``mode=mmap``. A `mmap`
975-
value of True gives the same behavior as ``mmap='c'``. If image
976-
data file cannot be memory-mapped, ignore `mmap` value and read
977-
array from file.
989+
If one of {'c', 'r'}, try numpy memmap with ``mode=mmap``. A
990+
`mmap` value of True gives the same behavior as ``mmap='c'``. If
991+
image data file cannot be memory-mapped, ignore `mmap` value and
992+
read array from file.
978993
979994
Returns
980995
-------
@@ -1030,7 +1045,8 @@ def to_file_map(self, file_map=None):
10301045
arr_writer = ArrayWriter(data, out_dtype, check_scaling=False)
10311046
hdr_fh, img_fh = self._get_fileholders(file_map)
10321047
# Check if hdr and img refer to same file; this can happen with odd
1033-
# analyze images but most often this is because it's a single nifti file
1048+
# analyze images but most often this is because it's a single nifti
1049+
# file
10341050
hdr_img_same = hdr_fh.same_file_as(img_fh)
10351051
hdrf = hdr_fh.get_prepare_fileobj(mode='wb')
10361052
if hdr_img_same:

nibabel/arrayproxy.py

Lines changed: 4 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -27,9 +27,10 @@
2727
"""
2828
import warnings
2929

30-
from .volumeutils import BinOpener, array_from_file, apply_read_scaling
30+
from .volumeutils import array_from_file, apply_read_scaling
3131
from .fileslice import fileslice
3232
from .keywordonly import kw_only_meth
33+
from .openers import ImageOpener
3334

3435

3536
class ArrayProxy(object):
@@ -130,7 +131,7 @@ def get_unscaled(self):
130131
131132
This is an optional part of the proxy API
132133
'''
133-
with BinOpener(self.file_like) as fileobj:
134+
with ImageOpener(self.file_like) as fileobj:
134135
raw_data = array_from_file(self._shape,
135136
self._dtype,
136137
fileobj,
@@ -145,7 +146,7 @@ def __array__(self):
145146
return apply_read_scaling(raw_data, self._slope, self._inter)
146147

147148
def __getitem__(self, slicer):
148-
with BinOpener(self.file_like) as fileobj:
149+
with ImageOpener(self.file_like) as fileobj:
149150
raw_data = fileslice(fileobj,
150151
slicer,
151152
self._shape,

0 commit comments

Comments
 (0)