
Commit ca37d4c

fix: resolved conflict
2 parents f5de829 + a7c6523

24 files changed (+1459, -183 lines)

bin/nib-ls

Lines changed: 22 additions & 1 deletion
@@ -13,7 +13,7 @@ Output a summary table for neuroimaging files (resolution, dimensionality, etc.)
 from __future__ import division, print_function, absolute_import
 
 __author__ = 'Yaroslav Halchenko'
-__copyright__ = 'Copyright (c) 2011-2012 Yaroslav Halchenko ' \
+__copyright__ = 'Copyright (c) 2011-2015 Yaroslav Halchenko ' \
                 'and NiBabel contributors'
 __license__ = 'MIT'
 
@@ -141,6 +141,10 @@ def get_opt_parser():
                dest="verbose", default=0,
                help="Make more noise. Could be specified multiple times"),
 
+        Option("-H", "--header-fields",
+               dest="header_fields", default='',
+               help="Header fields (comma separated) to be printed as well (if present)"),
+
         Option("-s", "--stats",
                action="store_true", dest='stats', default=False,
                help="Output basic data statistics"),
@@ -180,6 +184,23 @@ def proc_file(f, opts):
     else:
         row += [ '' ]
 
+    if opts.header_fields:
+        # signals "all fields"
+        if opts.header_fields == 'all':
+            # TODO: might vary across file types, thus prior sensing
+            # would be needed
+            header_fields = h.keys()
+        else:
+            header_fields = opts.header_fields.split(',')
+
+        for f in header_fields:
+            if not f:  # skip empty
+                continue
+            try:
+                row += [str(h[f])]
+            except (KeyError, ValueError):
+                row += [ 'error' ]
+
     try:
         if (hasattr(h, 'get_qform') and hasattr(h, 'get_sform')
                 and (h.get_qform() != h.get_sform()).any()):
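
The new --header-fields option simply looks the requested fields up in the loaded image header. A minimal sketch of that lookup outside of nib-ls, assuming a NIfTI input and field names chosen purely for illustration:

    import nibabel as nib

    img = nib.load('example.nii.gz')  # hypothetical input file
    h = img.header

    # Mirror the nib-ls logic: split the comma-separated list, skip empty
    # entries, and record 'error' when a field is absent or unreadable.
    requested = 'datatype,bitpix,no_such_field'
    row = []
    for name in requested.split(','):
        if not name:
            continue
        try:
            row.append(str(h[name]))
        except (KeyError, ValueError):
            row.append('error')
    print(row)  # last entry falls back to 'error'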

nibabel/affines.py

Lines changed: 50 additions & 1 deletion
@@ -8,6 +8,13 @@
 from .externals.six.moves import reduce
 
 
+class AffineError(ValueError):
+    """ Errors in calculating or using affines """
+    # Inherits from ValueError to keep compatibility with ValueError previously
+    # raised in append_diag
+    pass
+
+
 def apply_affine(aff, pts):
     """ Apply affine matrix `aff` to points `pts`
 
@@ -213,7 +220,7 @@ def append_diag(aff, steps, starts=()):
     if len(starts) == 0:
         starts = np.zeros(n_steps, dtype=steps.dtype)
     elif len(starts) != n_steps:
-        raise ValueError('Steps should have same length as starts')
+        raise AffineError('Steps should have same length as starts')
     old_n_out, old_n_in = aff.shape[0] - 1, aff.shape[1] - 1
     # make new affine
     aff_plus = np.zeros((old_n_out + n_steps + 1,
@@ -247,3 +254,45 @@ def dot_reduce(*args):
     ... arg[N-2].dot(arg[N-1])))...``
     """
     return reduce(lambda x, y: np.dot(y, x), args[::-1])
+
+
+def voxel_sizes(affine):
+    r""" Return voxel size for each input axis given `affine`
+
+    The `affine` is the mapping between array (voxel) coordinates and mm
+    (world) coordinates.
+
+    The voxel size for the first voxel (array) axis is the distance moved in
+    world coordinates when moving one unit along the first voxel (array) axis.
+    This is the distance between the world coordinate of voxel (0, 0, 0) and
+    the world coordinate of voxel (1, 0, 0).  The world coordinate vector of
+    voxel coordinate vector (0, 0, 0) is given by
+    ``v0 = affine.dot((0, 0, 0, 1))[:3]``.  The world coordinate vector of
+    voxel vector (1, 0, 0) is ``v1_ax1 = affine.dot((1, 0, 0, 1))[:3]``.  The
+    final 1 in the voxel vectors and the ``[:3]`` at the end are because the
+    affine works on homogenous coordinates.  The translations part of the
+    affine is ``trans = affine[:3, 3]``, and the rotations, zooms and shearing
+    part of the affine is ``rzs = affine[:3, :3]``.  Because of the final 1 in
+    the input voxel vector, ``v0 == rzs.dot((0, 0, 0)) + trans``, and
+    ``v1_ax1 == rzs.dot((1, 0, 0)) + trans``, and the difference vector is
+    ``rzs.dot((1, 0, 0)) - rzs.dot((0, 0, 0)) == rzs.dot((1, 0, 0)) ==
+    rzs[:, 0]``.  The distance vectors in world coordinates between (0, 0, 0)
+    and (1, 0, 0), (0, 1, 0), (0, 0, 1) are given by
+    ``rzs.dot(np.eye(3)) = rzs``.  The voxel sizes are the Euclidean lengths
+    of the distance vectors.  So, the voxel sizes are the Euclidean lengths of
+    the columns of the affine (excluding the last row and column of the
+    affine).
+
+    Parameters
+    ----------
+    affine : 2D array-like
+        Affine transformation array.  Usually shape (4, 4), but can be any 2D
+        array.
+
+    Returns
+    -------
+    vox_sizes : 1D array
+        Voxel sizes for each input axis of affine.  Usually 1D array length 3,
+        but in general has length (N-1) where input `affine` is shape (M, N).
+    """
+    top_left = affine[:-1, :-1]
+    return np.sqrt(np.sum(top_left ** 2, axis=0))
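
As the docstring derives, the computation reduces to the Euclidean length of each column of the top-left block of the affine, so translations drop out. A quick numerical check under that reading, with an affine made up for illustration:

    import numpy as np

    def voxel_sizes(affine):
        # Same computation as the new nibabel.affines.voxel_sizes:
        # column norms of the affine, excluding its last row and column.
        affine = np.asarray(affine)
        top_left = affine[:-1, :-1]
        return np.sqrt(np.sum(top_left ** 2, axis=0))

    aff = np.diag([2., 3., 4., 1.])
    aff[:3, 3] = [10., -5., 3.]   # translation does not change voxel sizes
    print(voxel_sizes(aff))       # -> [ 2.  3.  4.]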

nibabel/freesurfer/io.py

Lines changed: 42 additions & 14 deletions
@@ -6,6 +6,7 @@
 
 
 from ..externals.six.moves import xrange
+from ..openers import Opener
 
 
 def _fread3(fobj):
@@ -160,23 +161,50 @@ def read_morph_data(filepath):
     return curv
 
 
-def write_morph_data(filename, values):
-    '''
-    '''
-    with open(filename, 'wb') as f:
+def write_morph_data(file_like, values, fnum=0):
+    """Write Freesurfer morphometry data `values` to file-like `file_like`
 
-        # magic number
-        np.array([255], dtype='>u1').tofile(f)
-        np.array([255], dtype='>u1').tofile(f)
-        np.array([255], dtype='>u1').tofile(f)
+    Equivalent to FreeSurfer's `write_curv.m`_
 
-        # vertices number and two un-used int4
-        np.array([len(values)], dtype='>i4').tofile(f)
-        np.array([0], dtype='>i4').tofile(f)
-        np.array([1], dtype='>i4').tofile(f)
+    See also:
+    http://www.grahamwideman.com/gw/brain/fs/surfacefileformats.htm#CurvNew
 
-        # now the data
-        np.array(values, dtype='>f4').tofile(f)
+    .. _write_curv.m: \
+       https://github.com/neurodebian/freesurfer/blob/debian-sloppy/matlab/write_curv.m
+
+    Parameters
+    ----------
+    file_like : file-like
+        String containing path of file to be written, or file-like object,
+        open in binary write (`'wb'` mode, implementing the `write` method)
+    values : array-like
+        Surface morphometry values
+
+        Shape must be (N,), (N, 1), (1, N) or (N, 1, 1)
+    fnum : int, optional
+        Number of faces in the associated surface
+    """
+    magic_bytes = np.array([255, 255, 255], dtype=np.uint8)
+
+    vector = np.asarray(values)
+    vnum = np.prod(vector.shape)
+    if vector.shape not in ((vnum,), (vnum, 1), (1, vnum), (vnum, 1, 1)):
+        raise ValueError("Invalid shape: argument values must be a vector")
+
+    i4info = np.iinfo('i4')
+    if vnum > i4info.max:
+        raise ValueError("Too many values for morphometry file")
+    if not i4info.min <= fnum <= i4info.max:
+        raise ValueError("Argument fnum must be between {0} and {1}".format(
+            i4info.min, i4info.max))
+
+    with Opener(file_like, 'wb') as fobj:
+        fobj.write(magic_bytes)
+
+        # vertex count, face count (unused), vals per vertex (only 1 supported)
+        fobj.write(np.array([vnum, fnum, 1], dtype='>i4'))
+
+        fobj.write(vector.astype('>f4'))
 
 
 def read_annot(filepath, orig_ids=False):
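
Because the rewritten function opens its target through Opener, it should accept either a path or an already-open binary file object. A round-trip sketch, with the file name and data invented for illustration:

    import numpy as np
    from nibabel.freesurfer.io import read_morph_data, write_morph_data

    # Fake per-vertex curvature values; any (N,), (N, 1), (1, N) or
    # (N, 1, 1) shaped array-like is accepted.
    curv = np.random.uniform(-1, 1, size=1000).astype(np.float32)

    write_morph_data('lh.example.curv', curv, fnum=2000)  # fnum is optional
    back = read_morph_data('lh.example.curv')
    assert np.allclose(curv, back)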

nibabel/freesurfer/tests/test_io.py

Lines changed: 30 additions & 2 deletions
@@ -10,12 +10,13 @@
 
 from nose.tools import assert_true
 import numpy as np
-from numpy.testing import assert_equal, dec
+from numpy.testing import assert_equal, assert_raises, dec
 
 from .. import (read_geometry, read_morph_data, read_annot, read_label,
-                write_geometry, write_annot)
+                write_geometry, write_morph_data, write_annot)
 
 from ...tests.nibabel_data import get_nibabel_data
+from ...fileslice import strided_scalar
 
 
 DATA_SDIR = 'fsaverage'
@@ -92,6 +93,33 @@ def test_morph_data():
     curv = read_morph_data(curv_path)
     assert_true(-1.0 < curv.min() < 0)
     assert_true(0 < curv.max() < 1.0)
+    with InTemporaryDirectory():
+        new_path = 'test'
+        write_morph_data(new_path, curv)
+        curv2 = read_morph_data(new_path)
+        assert_equal(curv2, curv)
+
+
+def test_write_morph_data():
+    """Test write_morph_data edge cases"""
+    values = np.arange(20, dtype='>f4')
+    okay_shapes = [(20,), (20, 1), (20, 1, 1), (1, 20)]
+    bad_shapes = [(10, 2), (1, 1, 20, 1, 1)]
+    big_num = np.iinfo('i4').max + 1
+    with InTemporaryDirectory():
+        for shape in okay_shapes:
+            write_morph_data('test.curv', values.reshape(shape))
+            # Check ordering is preserved, regardless of shape
+            assert_equal(values, read_morph_data('test.curv'))
+        assert_raises(ValueError, write_morph_data, 'test.curv',
+                      np.zeros(shape), big_num)
+        # Windows 32-bit overflows Python int
+        if np.dtype(np.int) != np.dtype(np.int32):
+            assert_raises(ValueError, write_morph_data, 'test.curv',
+                          strided_scalar((big_num,)))
+        for shape in bad_shapes:
+            assert_raises(ValueError, write_morph_data, 'test.curv',
+                          values.reshape(shape))
 
 
 @freesurfer_test
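
The overflow test avoids allocating a multi-gigabyte array by building the oversized input with strided_scalar. A sketch of that trick, assuming only the behavior visible from nibabel.fileslice:

    import numpy as np
    from nibabel.fileslice import strided_scalar

    # strided_scalar returns an array of the requested shape backed by a
    # single scalar (all strides are zero), so this "too many vertices"
    # input costs almost no memory.
    huge = strided_scalar((np.iinfo('i4').max + 1,))
    print(huge.shape, huge.strides)  # (2147483648,) (0,)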
