Commit b3afe29

RF: refactor parrec2nii image writing
Use new explicit scaling to save the image using the standard nibabel mechanism.
Parent: 39a37d5
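
In short: instead of hand-writing the NIfTI header and raw voxel data to a (possibly gzipped) file, the script now records the output dtype and explicit slope/intercept on the header and hands the image to nibabel.save(). The sketch below condenses that mechanism; the helper name save_with_explicit_scaling and its arguments are illustrative only (the real script also passes the PAR header as a template to Nifti1Image), and it assumes nibabel's writer honors an explicitly set slope/intercept, which is the "explicit scaling" the commit message refers to.

import nibabel
import nibabel.nifti1 as nifti1


def save_with_explicit_scaling(in_data, affine, slope, intercept,
                               out_dtype, outfilename):
    # Illustrative helper, not part of the script: build the NIfTI image
    # from the voxel array and affine.
    nimg = nifti1.Nifti1Image(in_data, affine)
    nhdr = nimg.header
    nhdr.set_data_dtype(out_dtype)          # on-disk storage dtype
    nhdr.set_slope_inter(slope, intercept)  # explicit scl_slope / scl_inter
    # nibabel.save picks the writer from the file extension, so a
    # '.nii.gz' outfilename gets gzip compression without opening the
    # file by hand.
    nibabel.save(nimg, outfilename)

For example, calling the helper with slope 1.0, intercept 0.0, dtype 'int16' and an 'out.nii.gz' filename would write a gzipped NIfTI with identity scaling stored in the header.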

File tree: 1 file changed, 31 additions, 44 deletions

bin/parrec2nii
@@ -7,14 +7,13 @@ from optparse import OptionParser, Option
 import numpy as np
 import sys
 import os
-import gzip
 import nibabel
 import nibabel.parrec as pr
 from nibabel.parrec import one_line
 from nibabel.mriutils import calculate_dwell_time, MRIError
 import nibabel.nifti1 as nifti1
 from nibabel.filename_parser import splitext_addext
-from nibabel.volumeutils import array_to_file, fname_ext_ul_case
+from nibabel.volumeutils import fname_ext_ul_case
 
 
 def get_opt_parser():
@@ -142,27 +141,44 @@ def proc_file(infile, opts):
                      permit_truncated=opts.permit_truncated,
                      scaling=scaling)
     pr_hdr = pr_img.header
-    raw_data = pr_img.dataobj.get_unscaled()
     affine = pr_hdr.get_affine(origin=opts.origin)
-    nimg = nifti1.Nifti1Image(raw_data, affine, pr_hdr)
+    slope, intercept = pr_hdr.get_data_scaling(scaling)
+    if opts.scaling != 'off':
+        verbose('Using data scaling "%s"' % opts.scaling)
+    # get original scaling, and decide if we scale in-place or not
+    if opts.scaling == 'off':
+        slope = np.array([1.])
+        intercept = np.array([0.])
+        in_data = pr_img.dataobj.get_unscaled()
+        out_dtype = pr_hdr.get_data_dtype()
+    elif not np.any(np.diff(slope)) and not np.any(np.diff(intercept)):
+        # Single scalefactor case
+        slope = slope.ravel()[0]
+        intercept = intercept.ravel()[0]
+        in_data = pr_img.dataobj.get_unscaled()
+        out_dtype = pr_hdr.get_data_dtype()
+    else:
+        # Multi scalefactor case
+        slope = np.array([1.])
+        intercept = np.array([0.])
+        in_data = np.array(pr_img.dataobj)
+        out_dtype = np.float64
+    nimg = nifti1.Nifti1Image(in_data, affine, pr_hdr)
     nhdr = nimg.header
+    nhdr.set_data_dtype(out_dtype)
+    nhdr.set_slope_inter(slope, intercept)
 
     if 'parse' in opts.minmax:
         # need to get the scaled data
         verbose('Loading (and scaling) the data to determine value range')
-        if opts.scaling == 'off':
-            scaled_data = raw_data
-        else:
-            slope, intercept = pr_hdr.get_data_scaling(method=opts.scaling)
-            scaled_data = slope * raw_data + intercept
     if opts.minmax[0] == 'parse':
-        nhdr.structarr['cal_min'] = scaled_data.min()
+        nhdr['cal_min'] = in_data.min() * slope + intercept
     else:
-        nhdr.structarr['cal_min'] = float(opts.minmax[0])
+        nhdr['cal_min'] = float(opts.minmax[0])
     if opts.minmax[1] == 'parse':
-        nhdr.structarr['cal_max'] = scaled_data.max()
+        nhdr['cal_max'] = in_data.max() * slope + intercept
     else:
-        nhdr.structarr['cal_max'] = float(opts.minmax[1])
+        nhdr['cal_max'] = float(opts.minmax[1])
 
     # container for potential NIfTI1 header extensions
     if opts.store_header:
@@ -172,39 +188,10 @@ def proc_file(infile, opts):
             hdr_dump = fobj.read()
             dump_ext = nifti1.Nifti1Extension('comment', hdr_dump)
         exts.append(dump_ext)
-    # finalize the header: set proper data offset, pixdims, ...
-    nimg.update_header()
-
-    # prep a file
-    if opts.compressed:
-        verbose('Using gzip compression')
-        outfile = gzip.open(outfilename, 'wb')
-    else:
-        outfile = open(outfilename, 'wb')
 
-    # get original scaling, and decide if we scale in-place or not
-    if opts.scaling == 'off':
-        slope = np.array([1.])
-        intercept = np.array([0.])
-    else:
-        verbose('Using data scaling "%s"' % opts.scaling)
-        slope, intercept = pr_hdr.get_data_scaling(method=opts.scaling)
     verbose('Writing %s' % outfilename)
-    if not np.any(np.diff(slope)) and not np.any(np.diff(intercept)):
-        # Single scalefactor case
-        nhdr.set_slope_inter(slope.ravel()[0], intercept.ravel()[0])
-        data_obj = raw_data
-    else:
-        # Multi scalefactor case
-        nhdr.set_slope_inter(1, 0)
-        nhdr.set_data_dtype(np.float64)
-        data_obj = pr_img.dataobj
-    nhdr.write_to(outfile)
-    offset = nhdr.get_data_offset()
-    array_to_file(data_obj,
-                  outfile,
-                  out_dtype=nhdr.get_data_dtype(),  # for endianness
-                  offset=offset)
+    nibabel.save(nimg, outfilename)
+
     # write out bvals/bvecs if requested
     if opts.bvs:
         bvals, bvecs = pr_hdr.get_bvals_bvecs()
