
Commit 6b308bf

matthew-brett authored and rmarkello committed
RF: some edits / doc extensions to brikhead.py
Building on PR #561.
1 parent 8c7e1eb commit 6b308bf


nibabel/brikhead.py

Lines changed: 87 additions & 18 deletions
@@ -12,9 +12,13 @@
 See https://afni.nimh.nih.gov/pub/dist/doc/program_help/README.attributes.html
 for information on what is required to have a valid BRIK/HEAD dataset.

-Some notes on the AFNI BRIK/HEAD format:
+Unless otherwise noted, descriptions AFNI attributes in the code refer to this
+document.

-* In the AFNI HEAD file, the first two values of the attribute DATASET_RANK
+Notes
+-----
+
+In the AFNI HEAD file, the first two values of the attribute DATASET_RANK
 determine the shape of the data array stored in the corresponding BRIK file.
 The first value, DATASET_RANK[0], must be set to 3 denoting a 3D image. The
 second value, DATASET_RANK[1], determines how many "sub-bricks" (in AFNI
@@ -152,6 +156,15 @@ def _get_datatype(info):
     -------
     dt : np.dtype
         Datatype of BRIK file associated with HEAD
+
+    Notes
+    -----
+    ``BYTEORDER_STRING`` may be absent, signifying platform native byte order,
+    or contain one of "LSB_FIRST" or "MSB_FIRST".
+
+    ``BRICK_TYPES`` gives the storage data type for each sub-brick, with
+    0=uint, 1=int16, 3=float32, 5=complex64 (see ``_dtype_dict``). This should
+    generally be the same value for each sub-brick in the dataset.
     """
     bo = info['BYTEORDER_STRING']
     bt = info['BRICK_TYPES']
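As a rough illustration of the mapping these notes describe, here is a minimal sketch; the helper name and the dict literal below are illustrative assumptions, not nibabel's actual ``_dtype_dict`` or ``_get_datatype``:

    import numpy as np

    # Sketch: map AFNI BRICK_TYPES codes to numpy types per the docstring above
    # (0=uint, 1=int16, 3=float32, 5=complex64); nibabel's real table may differ.
    BRICK_TYPE_TO_NP = {0: np.uint8, 1: np.int16, 3: np.float32, 5: np.complex64}

    def guess_brik_dtype(info):
        """Guess the on-disk dtype from a parsed HEAD ``info`` dict (sketch only)."""
        # BYTEORDER_STRING may be absent, meaning platform-native byte order
        order = {'LSB_FIRST': '<', 'MSB_FIRST': '>'}.get(
            info.get('BYTEORDER_STRING'), '=')
        # BRICK_TYPES holds one code per sub-brick; they should all agree
        return np.dtype(BRICK_TYPE_TO_NP[info['BRICK_TYPES'][0]]).newbyteorder(order)

    print(guess_brik_dtype({'BYTEORDER_STRING': 'LSB_FIRST', 'BRICK_TYPES': [1, 1]}))
    # int16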
@@ -196,17 +209,19 @@ def parse_AFNI_header(fobj):
             return parse_AFNI_header(src)
     # unpack variables in HEAD file
     head = fobj.read().split('\n\n')
-    info = {key: value for key, value in map(_unpack_var, head)}
-    return info
+    return {key: value for key, value in map(_unpack_var, head)}


 class AFNIArrayProxy(ArrayProxy):
-    """
+    """ Proxy object for AFNI image array.
+
     Attributes
     ----------
     scaling : np.ndarray
-        Scaling factor (one factor per volume/subbrick) for data. Default: None
+        Scaling factor (one factor per volume/sub-brick) for data. Default is
+        None
     """
+
     @kw_only_meth(2)
     def __init__(self, file_like, header, mmap=True, keep_file_open=None):
         """
@@ -233,7 +248,7 @@ def __init__(self, file_like, header, mmap=True, keep_file_open=None):
             a new file handle is created every time the image is accessed. If
             ``'auto'``, and the optional ``indexed_gzip`` dependency is
             present, a single file handle is created and persisted. If
-            ``indexed_gzip`` is not available, behaviour is the same as if
+            ``indexed_gzip`` is not available, behavior is the same as if
             ``keep_file_open is False``. If ``file_like`` refers to an open
             file handle, this setting has no effect. The default value
             (``None``) will result in the value of
@@ -319,6 +334,13 @@ def _calc_data_shape(self):
         Returns
         -------
         (x, y, z, t) : tuple of int
+
+        Notes
+        -----
+        ``DATASET_RANK[0]`` gives number of spatial dimensions (and apparently
+        must be 3). ``DATASET_RANK[1]`` gives the number of sub-bricks.
+        ``DATASET_DIMENSIONS`` is length 3, giving the number of voxels in i,
+        j, k.
         """
         dset_rank = self.info['DATASET_RANK']
         shape = tuple(self.info['DATASET_DIMENSIONS'][:dset_rank[0]])
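A worked instance of the shape calculation described in those notes, using made-up attribute values (a sketch, not the full body of ``_calc_data_shape``):

    # Made-up HEAD attributes illustrating the Notes above.
    info = {
        'DATASET_RANK': [3, 2, 0, 0, 0, 0, 0, 0],    # 3 spatial dims, 2 sub-bricks
        'DATASET_DIMENSIONS': [33, 41, 25],           # voxels along i, j, k
    }
    dset_rank = info['DATASET_RANK']
    shape = tuple(info['DATASET_DIMENSIONS'][:dset_rank[0]])   # (33, 41, 25)
    n_vols = dset_rank[1]                                      # sub-bricks -> 4th axis
    print(shape + (n_vols,))
    # (33, 41, 25, 2)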
@@ -335,6 +357,15 @@ def _calc_zooms(self):
         Returns
         -------
         zooms : tuple
+
+        Notes
+        -----
+        Gets zooms from attributes ``DELTA`` and ``TAXIS_FLOATS``.
+
+        ``DELTA`` gives (x,y,z) voxel sizes.
+
+        ``TAXIS_FLOATS`` should be length 5, with first entry giving "Time
+        origin", and second giving "Time step (TR)".
         """
         xyz_step = tuple(np.abs(self.info['DELTA']))
         t_step = self.info.get('TAXIS_FLOATS', (0, 0,))
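And a similar sketch for the zooms, again with invented values; only the two lines visible in the diff are taken from the real method, and the time-axis handling here is approximate:

    # Made-up HEAD attributes illustrating DELTA and TAXIS_FLOATS.
    info = {
        'DELTA': [-3.0, -3.0, 3.0],                   # (x, y, z) voxel sizes
        'TAXIS_FLOATS': [0.0, 2.5, 0.0, 0.0, 0.0],    # [time origin, TR, ...]
    }
    # the real method uses np.abs; plain abs() keeps this sketch dependency-free
    xyz_step = tuple(abs(d) for d in info['DELTA'])
    t_step = info.get('TAXIS_FLOATS', (0, 0,))        # fallback if attribute absent
    print(xyz_step + (t_step[1],))
    # (3.0, 3.0, 3.0, 2.5)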
@@ -344,12 +375,17 @@ def _calc_zooms(self):

     def get_space(self):
         """
-        Returns space of dataset
+        Return label for anatomical space to which this dataset is aligned.

         Returns
         -------
         space : str
             AFNI "space" designation; one of [ORIG, ANAT, TLRC, MNI]
+
+        Notes
+        -----
+        There appears to be documentation for these spaces at
+        https://afni.nimh.nih.gov/pub/dist/atlases/elsedemo/AFNI_atlas_spaces.niml
         """
         listed_space = self.info.get('TEMPLATE_SPACE', 0)
         space = space_codes.space[listed_space]
@@ -369,8 +405,8 @@ def get_affine(self):
                [  0.    ,   0.    ,   3.    , -52.3511],
                [  0.    ,   0.    ,   0.    ,   1.    ]])
         """
-        # AFNI default is RAI-/DICOM order (i.e., RAI are - axis)
-        # need to flip RA sign to align with nibabel RAS+ system
+        # AFNI default is RAI- == LPS+ == DICOM order. We need to flip RA sign
+        # to align with nibabel RAS+ system
         affine = np.asarray(self.info['IJK_TO_DICOM_REAL']).reshape(3, 4)
         affine = np.row_stack((affine * [[-1], [-1], [1]],
                                [0, 0, 0, 1]))
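A quick numeric illustration of that sign flip; the ``IJK_TO_DICOM_REAL`` values below are invented, and ``np.vstack`` stands in for the ``np.row_stack`` call shown in the diff:

    import numpy as np

    # AFNI stores the IJK -> DICOM (LPS+) mapping as 3 rows of 4 values.
    # Negating the first two rows converts LPS+ to nibabel's RAS+ convention.
    ijk_to_dicom_real = [3.0, 0.0, 0.0, 49.35,      # invented example values
                         0.0, 3.0, 0.0, 82.46,
                         0.0, 0.0, 3.0, -52.35]
    affine = np.asarray(ijk_to_dicom_real).reshape(3, 4)
    affine = np.vstack((affine * [[-1], [-1], [1]], [0, 0, 0, 1]))
    print(affine[:, 3])
    # approximately [-49.35 -82.46 -52.35   1.  ]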
@@ -387,6 +423,9 @@ def get_data_scaling(self):
         >>> header.get_data_scaling()
         array([ 3.88336300e-08])
         """
+        # BRICK_FLOAT_FACS has one value per sub-brick, such that the scaled
+        # values for sub-brick array [n] are the values read from disk *
+        # BRICK_FLOAT_FACS[n]
         floatfacs = self.info.get('BRICK_FLOAT_FACS', None)
         if floatfacs is None or not np.any(floatfacs):
             return None
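A small sketch of that per-sub-brick scaling, with invented values (in AFNI a factor of 0 conventionally means the sub-brick is not scaled):

    import numpy as np

    # Invented on-disk int16 data: 2x2x2 voxels, 3 sub-bricks (last axis).
    disk_data = np.arange(2 * 2 * 2 * 3, dtype=np.int16).reshape(2, 2, 2, 3)
    float_facs = np.array([3.9e-08, 0.0, 2.0e-05])     # one factor per sub-brick
    facs = np.where(float_facs == 0, 1.0, float_facs)  # 0 -> leave that sub-brick alone
    scaled = disk_data * facs                           # broadcasts over the last axis
    print(scaled.dtype, scaled.shape)
    # float64 (2, 2, 2, 3)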
@@ -405,7 +444,10 @@ def get_slope_inter(self):
         return None, None

     def get_data_offset(self):
-        """Data offset in BRIK file"""
+        """Data offset in BRIK file
+
+        Offset is always 0.
+        """
         return DATA_OFFSET

     def get_volume_labels(self):
@@ -461,7 +503,7 @@ class AFNIImage(SpatialImage):

     @classmethod
     @kw_only_meth(1)
-    def from_file_map(klass, file_map, mmap=True):
+    def from_file_map(klass, file_map, mmap=True, keep_file_open=None):
         """
         Creates an AFNIImage instance from `file_map`

@@ -477,19 +519,32 @@ def from_file_map(klass, file_map, mmap=True):
             `mmap` value of True gives the same behavior as ``mmap='c'``. If
             image data file cannot be memory-mapped, ignore `mmap` value and
             read array from file.
+        keep_file_open : {None, 'auto', True, False}, optional, keyword only
+            `keep_file_open` controls whether a new file handle is created
+            every time the image is accessed, or a single file handle is
+            created and used for the lifetime of this ``ArrayProxy``. If
+            ``True``, a single file handle is created and used. If ``False``,
+            a new file handle is created every time the image is accessed. If
+            ``'auto'``, and the optional ``indexed_gzip`` dependency is
+            present, a single file handle is created and persisted. If
+            ``indexed_gzip`` is not available, behavior is the same as if
+            ``keep_file_open is False``. If ``file_like`` refers to an open
+            file handle, this setting has no effect. The default value
+            (``None``) will result in the value of
+            ``nibabel.arrayproxy.KEEP_FILE_OPEN_DEFAULT` being used.
         """
         with file_map['header'].get_prepare_fileobj('rt') as hdr_fobj:
             hdr = klass.header_class.from_fileobj(hdr_fobj)
         imgf = file_map['image'].fileobj
-        if imgf is None:
-            imgf = file_map['image'].filename
-        data = klass.ImageArrayProxy(imgf, hdr.copy(), mmap=mmap)
+        imgf = file_map['image'].filename if imgf is None else imgf
+        data = klass.ImageArrayProxy(imgf, hdr.copy(), mmap=mmap,
+                                     keep_file_open=keep_file_open)
         return klass(data, hdr.get_affine(), header=hdr, extra=None,
                      file_map=file_map)

     @classmethod
     @kw_only_meth(1)
-    def from_filename(klass, filename, mmap=True):
+    def from_filename(klass, filename, mmap=True, keep_file_open=None):
         """
         Creates an AFNIImage instance from `filename`

@@ -504,9 +559,23 @@ def from_filename(klass, filename, mmap=True):
             `mmap` value of True gives the same behavior as ``mmap='c'``. If
             image data file cannot be memory-mapped, ignore `mmap` value and
             read array from file.
+        keep_file_open : {None, 'auto', True, False}, optional, keyword only
+            `keep_file_open` controls whether a new file handle is created
+            every time the image is accessed, or a single file handle is
+            created and used for the lifetime of this ``ArrayProxy``. If
+            ``True``, a single file handle is created and used. If ``False``,
+            a new file handle is created every time the image is accessed. If
+            ``'auto'``, and the optional ``indexed_gzip`` dependency is
+            present, a single file handle is created and persisted. If
+            ``indexed_gzip`` is not available, behavior is the same as if
+            ``keep_file_open is False``. If ``file_like`` refers to an open
+            file handle, this setting has no effect. The default value
+            (``None``) will result in the value of
+            ``nibabel.arrayproxy.KEEP_FILE_OPEN_DEFAULT` being used.
         """
         file_map = klass.filespec_to_file_map(filename)
-        return klass.from_file_map(file_map, mmap=mmap)
+        return klass.from_file_map(file_map, mmap=mmap,
+                                   keep_file_open=keep_file_open)

     @classmethod
     def filespec_to_file_map(klass, filespec):
@@ -539,7 +608,7 @@ def filespec_to_file_map(klass, filespec):
             image type.
         """
         file_map = super(AFNIImage, klass).filespec_to_file_map(filespec)
-        # check for AFNI-specific BRIK/HEAD compression idiosyncracies
+        # check for AFNI-specific BRIK/HEAD compression idiosyncrasies
         for key, fholder in file_map.items():
             fname = fholder.filename
             if key == 'header' and not os.path.exists(fname):
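Taken together, the last few hunks thread ``keep_file_open`` from ``AFNIImage.from_filename`` down to the array proxy. A usage sketch, assuming a hypothetical dataset name and a nibabel version that includes this change:

    from nibabel.brikhead import AFNIImage

    # 'scan+orig.HEAD' is a hypothetical BRIK/HEAD pair prefix.
    img = AFNIImage.from_filename('scan+orig.HEAD', mmap=True, keep_file_open='auto')
    data = img.get_fdata()    # the proxy reads and scales the BRIK data on access
    print(img.shape, img.header.get_space())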

0 commit comments