Skip to content

Commit f48f3c5

Browse files
committed
Merge remote-tracking branch 'upstream/master'
Conflicts: nibabel/nicom/tests/test_dicomwrappers.py
2 parents 0da29a1 + be9b778 commit f48f3c5

File tree

174 files changed

+6795
-3042
lines changed

Some content is hidden

Large Commits have some content hidden by default. Use the searchbox below for content that may be hidden.

174 files changed

+6795
-3042
lines changed

.gitmodules

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -10,3 +10,6 @@
1010
[submodule "nibabel-data/nitest-freesurfer"]
1111
path = nibabel-data/nitest-freesurfer
1212
url = https://bitbucket.org/nipy/nitest-freesurfer.git
13+
[submodule "nibabel-data/parrec_oblique"]
14+
path = nibabel-data/parrec_oblique
15+
url = https://github.com/grlee77/parrec_oblique.git

.mailmap

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -29,3 +29,5 @@ Basile Pinsard <[email protected]> bpinsard <[email protected]>
2929
3030
Ben Cipollini <[email protected]> Ben Cipollini <[email protected]>
3131
Chris Markiewicz <[email protected]> Christopher J. Markiewicz <[email protected]>
32+
Chris Markiewicz <[email protected]> Christopher J. Markiewicz <[email protected]>
33+
Chris Markiewicz <[email protected]> Chris Johnson <[email protected]>

.travis.yml

Lines changed: 51 additions & 23 deletions
Original file line numberDiff line numberDiff line change
@@ -3,6 +3,7 @@
33
# munges each line before executing it to print out the exit status. It's okay
44
# for it to be on multiple physical lines, so long as you remember: - There
55
# can't be any leading "-"s - All newlines will be removed, so use ";"s
6+
67
language: python
78

89
# Run jobs on container-based infrastructure, can be overridden per job
@@ -11,20 +12,19 @@ sudo: false
1112
cache:
1213
directories:
1314
- $HOME/.cache/pip
14-
1515
addons:
1616
apt:
1717
packages:
1818
- libhdf5-serial-dev
19-
19+
# For numpy --pre wheels
20+
- libatlas-base-dev
2021
env:
2122
global:
2223
- DEPENDS="numpy scipy matplotlib h5py"
2324
- PYDICOM=1
2425
- INSTALL_TYPE="setup"
2526
python:
2627
- 2.6
27-
- 3.2
2828
- 3.3
2929
- 3.4
3030
- 3.5
@@ -37,14 +37,26 @@ matrix:
3737
- python: 2.7
3838
env:
3939
- DEPENDS=numpy==1.5.1 PYDICOM=0
40+
# Absolute minimum dependencies plus oldest MPL
41+
- python: 2.7
42+
env:
43+
- DEPENDS=numpy==1.5.1 matplotlib==1.3.1 PYDICOM=0
4044
# Minimum pydicom dependency
4145
- python: 2.7
4246
env:
4347
- DEPENDS="numpy==1.5.1 pydicom==0.9.7"
48+
# test against numpy 1.7
49+
- python: 2.7
50+
env:
51+
- DEPENDS="numpy==1.7.1"
4452
# pydicom 1.0 (currently unreleased)
4553
- python: 2.7
4654
env:
4755
- PYDICOM="v1.0"
56+
# test against pre-release builds
57+
- python: 2.7
58+
env:
59+
- EXTRA_PIP_FLAGS="--pre"
4860
# Documentation doctests
4961
- python: 2.7
5062
env:
@@ -58,14 +70,20 @@ matrix:
5870
- python: 2.7
5971
env:
6072
- INSTALL_TYPE=requirements
73+
- python: 2.7
74+
env:
75+
- STYLE=1
76+
- python: 3.5
77+
env:
78+
- STYLE=1
6179
before_install:
6280
- source tools/travis_tools.sh
6381
- virtualenv --python=python venv
6482
- source venv/bin/activate
6583
- python --version # just to check
66-
- pip install -U pip # upgrade to latest pip to find 3.5 wheels
67-
- retry pip install nose # always
68-
- wheelhouse_pip_install $DEPENDS
84+
- pip install -U pip wheel # upgrade to latest pip find 3.5 wheels; wheel to avoid errors
85+
- retry pip install nose flake8 # always
86+
- wheelhouse_pip_install $EXTRA_PIP_FLAGS $DEPENDS
6987
# pydicom <= 0.9.8 doesn't install on python 3
7088
- if [ "${TRAVIS_PYTHON_VERSION:0:1}" == "2" ]; then
7189
if [ "$PYDICOM" == "1" ]; then
@@ -77,6 +95,7 @@ before_install:
7795
- if [ "${COVERAGE}" == "1" ]; then
7896
pip install coverage;
7997
pip install coveralls;
98+
pip install codecov;
8099
fi
81100
# command to install dependencies
82101
install:
@@ -86,33 +105,42 @@ install:
86105
elif [ "$INSTALL_TYPE" == "sdist" ]; then
87106
python setup_egg.py egg_info # check egg_info while we're here
88107
python setup_egg.py sdist
89-
wheelhouse_pip_install dist/*.tar.gz
108+
wheelhouse_pip_install $EXTRA_PIP_FLAGS dist/*.tar.gz
90109
elif [ "$INSTALL_TYPE" == "wheel" ]; then
91110
pip install wheel
92111
python setup_egg.py bdist_wheel
93-
wheelhouse_pip_install dist/*.whl
112+
wheelhouse_pip_install $EXTRA_PIP_FLAGS dist/*.whl
94113
elif [ "$INSTALL_TYPE" == "requirements" ]; then
95-
wheelhouse_pip_install -r requirements.txt
114+
wheelhouse_pip_install $EXTRA_PIP_FLAGS -r requirements.txt
96115
python setup.py install
97116
fi
98117
# Point to nibabel data directory
99118
- export NIBABEL_DATA_DIR="$PWD/nibabel-data"
100119
# command to run tests, e.g. python setup.py test
101120
script:
102-
# Change into an innocuous directory and find tests from installation
103-
- mkdir for_testing
104-
- cd for_testing
105-
- if [ "${COVERAGE}" == "1" ]; then
106-
cp ../.coveragerc .;
107-
COVER_ARGS="--with-coverage --cover-package nibabel";
108-
fi
109-
- if [ "$DOC_DOC_TEST" == "1" ]; then
110-
pip install sphinx;
111-
cd ../doc;
112-
make html;
113-
make doctest;
121+
- |
122+
if [ "${STYLE}" == "1" ]; then
123+
# Run styles only on core nibabel code.
124+
flake8 nibabel
114125
else
115-
nosetests --with-doctest $COVER_ARGS nibabel;
126+
# Change into an innocuous directory and find tests from installation
127+
mkdir for_testing
128+
cd for_testing
129+
if [ "${COVERAGE}" == "1" ]; then
130+
cp ../.coveragerc .;
131+
COVER_ARGS="--with-coverage --cover-package nibabel";
132+
fi
133+
if [ "$DOC_DOC_TEST" == "1" ]; then
134+
cd ../doc;
135+
pip install -r ../doc-requirements.txt
136+
make html;
137+
make doctest;
138+
else
139+
nosetests --with-doctest $COVER_ARGS nibabel;
140+
fi
116141
fi
117142
after_success:
118-
- if [ "${COVERAGE}" == "1" ]; then coveralls; fi
143+
- if [ "${COVERAGE}" == "1" ]; then coveralls; codecov; fi
144+
145+
notifications:
146+
webhooks: http://nipy.bic.berkeley.edu:54856/travis

Changelog

Lines changed: 15 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -26,6 +26,8 @@ References like "pr/298" refer to github pull request numbers.
2626

2727
* Upcoming
2828

29+
* Read and write support for DICOM tags in NIfTI Extended Header using
30+
pydicom (pr/296);
2931
* Trackvis reader will now allow final streamline to have fewer points than
3032
the number declared in the header, with ``strict=False`` argument to
3133
``read`` function;
@@ -34,6 +36,19 @@ References like "pr/298" refer to github pull request numbers.
3436
are raising a DataError if the track is truncated when ``strict=True``
3537
(the default), rather than a TypeError when trying to create the points
3638
array.
39+
40+
* 2.0.2 (Monday 23 November 2015)
41+
42+
* Fix for integer overflow on large images (pr/325) (MB);
43+
* Fix for Freesurfer nifti files with unusual dimensions (pr/332) (Chris
44+
Markiewicz);
45+
* Fix typos on benchmarks and tests (pr/336, pr/340, pr/347) (Chris
46+
Markiewicz);
47+
* Fix Windows install script (pr/339) (MB);
48+
* Support for Python 3.5 (pr/363) (MB) and numpy 1.10 (pr/358) (Chris
49+
Markiewicz);
50+
* Update pydicom imports to permit version 1.0 (pr/379) (Chris Markiewicz);
51+
* Workaround for Python 3.5.0 gzip regression (pr/383) (Ben Cipollini).
3752
* tripwire.TripWire object now raises subclass of AttributeError when trying
3853
to get an attribute, rather than a direct subclass of Exception. This
3954
prevents Python 3.5 triggering the tripwire when doing inspection prior to

bin/nib-dicomfs

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,4 @@
1-
#!/usr/bin/python
1+
#!python
22
# emacs: -*- mode: python-mode; py-indent-offset: 4; indent-tabs-mode: nil -*-
33
# vi: set ft=python sts=4 ts=4 sw=4 et:
44
### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ##

bin/nib-ls

Lines changed: 23 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,4 @@
1-
#!/usr/bin/python
1+
#!python
22
# emacs: -*- mode: python-mode; py-indent-offset: 4; indent-tabs-mode: nil -*-
33
# vi: set ft=python sts=4 ts=4 sw=4 et:
44
### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ##
@@ -13,7 +13,7 @@ Output a summary table for neuroimaging files (resolution, dimensionality, etc.)
1313
from __future__ import division, print_function, absolute_import
1414

1515
__author__ = 'Yaroslav Halchenko'
16-
__copyright__ = 'Copyright (c) 2011-2012 Yaroslav Halchenko ' \
16+
__copyright__ = 'Copyright (c) 2011-2015 Yaroslav Halchenko ' \
1717
'and NiBabel contributors'
1818
__license__ = 'MIT'
1919

@@ -141,6 +141,10 @@ def get_opt_parser():
141141
dest="verbose", default=0,
142142
help="Make more noise. Could be specified multiple times"),
143143

144+
Option("-H", "--header-fields",
145+
dest="header_fields", default='',
146+
help="Header fields (comma separated) to be printed as well (if present)"),
147+
144148
Option("-s", "--stats",
145149
action="store_true", dest='stats', default=False,
146150
help="Output basic data statistics"),
@@ -180,6 +184,23 @@ def proc_file(f, opts):
180184
else:
181185
row += [ '' ]
182186

187+
if opts.header_fields:
188+
# signals "all fields"
189+
if opts.header_fields == 'all':
190+
# TODO: might vary across file types, thus prior sensing
191+
# would be needed
192+
header_fields = h.keys()
193+
else:
194+
header_fields = opts.header_fields.split(',')
195+
196+
for f in header_fields:
197+
if not f: # skip empty
198+
continue
199+
try:
200+
row += [str(h[f])]
201+
except (KeyError, ValueError):
202+
row += [ 'error' ]
203+
183204
try:
184205
if (hasattr(h, 'get_qform') and hasattr(h, 'get_sform')
185206
and (h.get_qform() != h.get_sform()).any()):

bin/nib-nifti-dx

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,4 @@
1-
#!/usr/bin/env python
1+
#!python
22
# emacs: -*- mode: python-mode; py-indent-offset: 4; indent-tabs-mode: nil -*-
33
# vi: set ft=python sts=4 ts=4 sw=4 et:
44
### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ##

bin/parrec2nii

Lines changed: 43 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,4 @@
1-
#!/usr/bin/env python
1+
#!python
22
"""PAR/REC to NIfTI converter
33
"""
44
from __future__ import division, print_function, absolute_import
@@ -8,6 +8,7 @@ import numpy as np
88
import numpy.linalg as npl
99
import sys
1010
import os
11+
import csv
1112
import nibabel
1213
import nibabel.parrec as pr
1314
from nibabel.parrec import one_line
@@ -66,6 +67,18 @@ def get_opt_parser():
6667
"""The magnetic field strength of the recording, only needed
6768
for --dwell-time. The field strength must be supplied
6869
because it is not encoded in the PAR/REC format.""")))
70+
p.add_option(
71+
Option("-i", "--volume-info", action="store_true", dest="vol_info",
72+
default=False,
73+
help=one_line(
74+
"""Export .PAR volume labels corresponding to the fourth
75+
dimension of the data. The dimension info will be stored in
76+
CSV format with the first row containing dimension labels
77+
and the subsequent rows (one per volume), the corresponding
78+
indices. Only labels that vary along the 4th dimension are
79+
exported (e.g. for a single volume structural scan there
80+
are no dynamic labels and no output file will be created).
81+
""")))
6982
p.add_option(
7083
Option("--origin", action="store", dest="origin", default="scanner",
7184
help=one_line(
@@ -113,6 +126,13 @@ def get_opt_parser():
113126
default=False,
114127
help=one_line("""Overwrite file if it exists. Default:
115128
False""")))
129+
p.add_option(
130+
Option("--strict-sort", action="store_true", dest="strict_sort",
131+
default=False,
132+
help=one_line("""Use additional keys in determining the order
133+
to sort the slices within the .REC file. This may be necessary
134+
for more complicated scans with multiple echos,
135+
cardiac phases, ASL label states, etc.""")))
116136
return p
117137

118138

@@ -148,7 +168,8 @@ def proc_file(infile, opts):
148168
infile = fname_ext_ul_case(infile)
149169
pr_img = pr.load(infile,
150170
permit_truncated=opts.permit_truncated,
151-
scaling=scaling)
171+
scaling=scaling,
172+
strict_sort=opts.strict_sort)
152173
pr_hdr = pr_img.header
153174
affine = pr_hdr.get_affine(origin=opts.origin)
154175
slope, intercept = pr_hdr.get_data_scaling(scaling)
@@ -185,7 +206,7 @@ def proc_file(infile, opts):
185206

186207
bvals, bvecs = pr_hdr.get_bvals_bvecs()
187208
if not opts.keep_trace: # discard Philips DTI trace if present
188-
if bvals is not None:
209+
if bvecs is not None:
189210
bad_mask = np.logical_and(bvals != 0, (bvecs == 0).all(axis=1))
190211
if bad_mask.sum() > 0:
191212
pl = 's' if bad_mask.sum() != 1 else ''
@@ -229,6 +250,14 @@ def proc_file(infile, opts):
229250
if opts.bvs:
230251
if bvals is None and bvecs is None:
231252
verbose('No DTI volumes detected, bvals and bvecs not written')
253+
elif bvecs is None:
254+
verbose('DTI volumes detected, but no diffusion direction info was'
255+
'found. Writing .bvals file only.')
256+
with open(basefilename + '.bvals', 'w') as fid:
257+
# np.savetxt could do this, but it's just a loop anyway
258+
for val in bvals:
259+
fid.write('%s ' % val)
260+
fid.write('\n')
232261
else:
233262
verbose('Writing .bvals and .bvecs files')
234263
# Transform bvecs with reorientation affine
@@ -246,6 +275,17 @@ def proc_file(infile, opts):
246275
fid.write('%s ' % val)
247276
fid.write('\n')
248277

278+
# export data labels varying along the 4th dimensions if requested
279+
if opts.vol_info:
280+
labels = pr_img.header.get_volume_labels()
281+
if len(labels) > 0:
282+
vol_keys = list(labels.keys())
283+
with open(basefilename + '.ordering.csv', 'w') as csvfile:
284+
csvwriter = csv.writer(csvfile, delimiter=',')
285+
csvwriter.writerow(vol_keys)
286+
for vals in zip(*[labels[k] for k in vol_keys]):
287+
csvwriter.writerow(vals)
288+
249289
# write out dwell time if requested
250290
if opts.dwell_time:
251291
try:

dev-requirements.txt

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,3 @@
1+
# Requirements for running tests
2+
-r requirements.txt
3+
nose

doc-requirements.txt

Lines changed: 6 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,6 @@
1+
# Requirements for building docs
2+
-r requirements.txt
3+
sphinx
4+
numpydoc
5+
texext
6+
matplotlib>=1.3

0 commit comments

Comments
 (0)