Skip to content

Commit f271565

Browse files
committed
fix resolved conflict
2 parents 05a75b9 + 0dad360 commit f271565

36 files changed

+855
-140
lines changed

CHANGES

Lines changed: 6 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,11 @@
1-
Release 0.9.2 (January 31, 2014)
2-
===========
1+
Next Release
2+
============
33

44
* ENH: New ANTs interface: ApplyTransformsToPoints
5+
* FIX: MRTrix tracking algorithms were ignoring mask parameters.
6+
7+
Release 0.9.2 (January 31, 2014)
8+
============
59

610
* FIX: DataFinder was broken due to a typo
711
* FIX: Order of DataFinder outputs was not guaranteed, it's human sorted now

Vagrantfile

Lines changed: 9 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -20,19 +20,19 @@ $script = <<SCRIPT
2020
# qconf -aattr queue slots "2, [neuro=3]" main.q
2121
2222
# install anaconda
23-
wget http://repo.continuum.io/miniconda/Miniconda-3.0.0-Linux-x86_64.sh -O miniconda.sh
23+
wget http://repo.continuum.io/miniconda/Miniconda-latest-Linux-x86_64.sh -O miniconda.sh
2424
chmod +x miniconda.sh
2525
./miniconda.sh -b
2626
echo "export PATH=$HOME/miniconda/bin:\\$PATH" >> .bashrc
2727
2828
# install nipype dependencies
29-
$HOME/anaconda/bin/conda update --yes conda
30-
$HOME/anaconda/bin/conda install --yes pip numpy scipy nose traits networkx
31-
$HOME/anaconda/bin/conda install --yes dateutil ipython-notebook matplotlib
32-
$HOME/anaconda/bin/pip install nibabel --use-mirrors
33-
$HOME/anaconda/bin/pip install https://github.com/RDFLib/rdflib/archive/master.zip
34-
$HOME/anaconda/bin/pip install https://github.com/satra/prov/archive/enh/rdf.zip
35-
$HOME/anaconda/bin/pip install https://github.com/nipy/nipype/archive/master.zip
29+
$HOME/miniconda/bin/conda update --yes conda
30+
$HOME/miniconda/bin/conda install --yes pip numpy scipy nose traits networkx
31+
$HOME/miniconda/bin/conda install --yes dateutil ipython-notebook matplotlib
32+
$HOME/miniconda/bin/pip install nibabel --use-mirrors
33+
$HOME/miniconda/bin/pip install https://github.com/RDFLib/rdflib/archive/master.zip
34+
$HOME/miniconda/bin/pip install https://github.com/satra/prov/archive/enh/rdf.zip
35+
$HOME/miniconda/bin/pip install https://github.com/nipy/nipype/archive/master.zip
3636
SCRIPT
3737

3838
Vagrant.configure(VAGRANTFILE_API_VERSION) do |config|
@@ -42,7 +42,7 @@ Vagrant.configure(VAGRANTFILE_API_VERSION) do |config|
4242
engine_config.vm.box = "gridneuro"
4343
#engine_config.vm.box_url = "http://files.vagrantup.com/precise64.box"
4444
engine_config.vm.box_url = "https://dl.dropboxusercontent.com/u/363467/precise64_neuro.box"
45-
engine_config.vm.network :forwarded_port, guest: 80, host: 8080
45+
#engine_config.vm.network :forwarded_port, guest: 80, host: 8080
4646

4747
#engine_config.vm.network :public_network, :bridge => 'en0: Wi-Fi (AirPort)'
4848
engine_config.vm.network :private_network, ip: "192.168.100.20"

nipype/info.py

Lines changed: 5 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -104,6 +104,8 @@ def get_nipype_gitversion():
104104
NUMPY_MIN_VERSION = '1.3'
105105
SCIPY_MIN_VERSION = '0.7'
106106
TRAITS_MIN_VERSION = '4.0'
107+
DATEUTIL_MIN_VERSION = '1.0'
108+
NOSE_MIN_VERSION = '1.0'
107109

108110
NAME = 'nipype'
109111
MAINTAINER = "nipype developers"
@@ -122,6 +124,7 @@ def get_nipype_gitversion():
122124
MICRO = _version_micro
123125
ISRELEASE = _version_extra == ''
124126
VERSION = __version__
125-
REQUIRES = ["nibabel (>=1.0)", "networkx (>=1.0)", "numpy (>=1.3)",
126-
"scipy (>=0.7)", "traits (>=4.0)"]
127+
REQUIRES = ["nibabel>=1.0", "networkx>=1.0", "numpy>=1.3",
128+
"python-dateutil>1.0", "scipy>=0.7", "traits>=4.0",
129+
"nose>=1.0"]
127130
STATUS = 'stable'

nipype/interfaces/base.py

Lines changed: 13 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -731,6 +731,7 @@ def _get_trait_desc(self, inputs, name, spec):
731731
desc = spec.desc
732732
xor = spec.xor
733733
requires = spec.requires
734+
argstr = spec.argstr
734735

735736
manhelpstr = ['\t%s' % name]
736737

@@ -753,6 +754,17 @@ def _get_trait_desc(self, inputs, name, spec):
753754
initial_indent='\t\t',
754755
subsequent_indent='\t\t')
755756

757+
if argstr:
758+
pos = spec.position
759+
if pos is not None:
760+
manhelpstr += wrap('flag: %s, position: %s' % (argstr, pos), 70,
761+
initial_indent='\t\t',
762+
subsequent_indent='\t\t')
763+
else:
764+
manhelpstr += wrap('flag: %s' % argstr, 70,
765+
initial_indent='\t\t',
766+
subsequent_indent='\t\t')
767+
756768
if xor:
757769
line = '%s' % ', '.join(xor)
758770
manhelpstr += wrap(line, 70,
@@ -785,7 +797,7 @@ def _inputs_help(cls):
785797

786798
opthelpstr = ['', '\t[Optional]']
787799
for name, spec in sorted(inputs.traits(transient=None).items()):
788-
if spec in mandatory_items:
800+
if name in mandatory_items:
789801
continue
790802
opthelpstr += cls._get_trait_desc(inputs, name, spec)
791803

nipype/interfaces/camino/__init__.py

Lines changed: 4 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -5,10 +5,12 @@
55

66
from .connectivity import Conmat
77
from .convert import (Image2Voxel, FSL2Scheme, VtkStreamlines, ProcStreamlines,
8-
TractShredder, DT2NIfTI, NIfTIDT2Camino, AnalyzeHeader)
8+
TractShredder, DT2NIfTI, NIfTIDT2Camino, AnalyzeHeader,
9+
Shredder)
910
from .dti import (DTIFit, ModelFit, DTLUTGen, PicoPDFs, Track, TrackPICo,
1011
TrackBayesDirac, TrackDT, TrackBallStick, TrackBootstrap,
1112
ComputeFractionalAnisotropy, ComputeMeanDiffusivity,
1213
ComputeTensorTrace, ComputeEigensystem, DTMetric)
1314
from .calib import (SFPICOCalibData, SFLUTGen)
14-
from .odf import (QBallMX, LinRecon, SFPeaks)
15+
from .odf import (QBallMX, LinRecon, SFPeaks, MESD)
16+
from .utils import ImageStats

nipype/interfaces/camino/convert.py

Lines changed: 88 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -7,10 +7,12 @@
77
88
"""
99
import os
10+
import glob
1011

1112
from nipype.interfaces.base import (CommandLineInputSpec, CommandLine, traits,
1213
TraitedSpec, File, StdOutCommandLine,
13-
StdOutCommandLineInputSpec, isdefined)
14+
OutputMultiPath, StdOutCommandLineInputSpec,
15+
isdefined)
1416
from nipype.utils.filemanip import split_filename
1517

1618
class Image2VoxelInputSpec(StdOutCommandLineInputSpec):
@@ -226,16 +228,21 @@ class ProcStreamlinesInputSpec(StdOutCommandLineInputSpec):
226228
outputtracts = traits.Bool(argstr='-outputtracts', desc="Output streamlines in raw binary format.")
227229

228230
outputroot = File(exists=False, argstr='-outputroot %s',
229-
desc='root directory for output')
231+
desc='Prepended onto all output file names.')
230232

231233
gzip = traits.Bool(argstr='-gzip', desc="save the output image in gzip format")
232-
outputcp = traits.Bool(argstr='-outputcp', desc="output the connection probability map (Analyze image, float)")
233-
outputsc = traits.Bool(argstr='-outputsc', desc="output the connection probability map (raw streamlines, int)")
234-
outputacm = traits.Bool(argstr='-outputacm', desc="output all tracts in a single connection probability map (Analyze image)")
235-
outputcbs = traits.Bool(argstr='-outputcbs', desc="outputs connectivity-based segmentation maps; requires target outputfile")
234+
outputcp = traits.Bool(argstr='-outputcp', desc="output the connection probability map (Analyze image, float)",
235+
requires=['outputroot','seedfile'])
236+
outputsc = traits.Bool(argstr='-outputsc', desc="output the connection probability map (raw streamlines, int)",
237+
requires=['outputroot','seedfile'])
238+
outputacm = traits.Bool(argstr='-outputacm', desc="output all tracts in a single connection probability map (Analyze image)",
239+
requires=['outputroot','seedfile'])
240+
outputcbs = traits.Bool(argstr='-outputcbs', desc="outputs connectivity-based segmentation maps; requires target outputfile",
241+
requires=['outputroot','targetfile','seedfile'])
236242

237243
class ProcStreamlinesOutputSpec(TraitedSpec):
238244
proc = File(exists=True, desc='Processed Streamlines')
245+
outputroot_files = OutputMultiPath(File(exists=True))
239246

240247
class ProcStreamlines(StdOutCommandLine):
241248
"""
@@ -256,9 +263,33 @@ class ProcStreamlines(StdOutCommandLine):
256263
input_spec=ProcStreamlinesInputSpec
257264
output_spec=ProcStreamlinesOutputSpec
258265

266+
def _format_arg(self, name, spec, value):
267+
if name == 'outputroot':
268+
return spec.argstr % self._get_actual_outputroot(value)
269+
return super(ProcStreamlines, self)._format_arg(name, spec, value)
270+
271+
def _run_interface(self, runtime):
272+
outputroot = self.inputs.outputroot
273+
if isdefined(outputroot):
274+
actual_outputroot = self._get_actual_outputroot(outputroot)
275+
base, filename, ext = split_filename(actual_outputroot)
276+
if not os.path.exists(base):
277+
os.makedirs(base)
278+
new_runtime = super(ProcStreamlines, self)._run_interface(runtime)
279+
self.outputroot_files = glob.glob(os.path.join(os.getcwd(),actual_outputroot+'*'))
280+
return new_runtime
281+
else:
282+
new_runtime = super(ProcStreamlines, self)._run_interface(runtime)
283+
return new_runtime
284+
285+
def _get_actual_outputroot(self, outputroot):
286+
actual_outputroot = os.path.join('procstream_outfiles', outputroot)
287+
return actual_outputroot
288+
259289
def _list_outputs(self):
260290
outputs = self.output_spec().get()
261291
outputs['proc'] = os.path.abspath(self._gen_outfilename())
292+
outputs['outputroot_files'] = self.outputroot_files
262293
return outputs
263294

264295
def _gen_outfilename(self):
@@ -581,3 +612,54 @@ def _list_outputs(self):
581612
def _gen_outfilename(self):
582613
_, name , _ = split_filename(self.inputs.in_file)
583614
return name + ".hdr"
615+
616+
class ShredderInputSpec(StdOutCommandLineInputSpec):
617+
in_file = File(exists=True, argstr='< %s', mandatory=True, position=-2, desc='raw binary data file')
618+
619+
offset = traits.Int(argstr='%d', units='NA',
620+
desc='initial offset of offset bytes', position=1)
621+
622+
chunksize = traits.Int(argstr='%d', units='NA',
623+
desc='reads and outputs a chunk of chunksize bytes', position=2)
624+
625+
space = traits.Int(argstr='%d', units='NA',
626+
desc='skips space bytes', position=3)
627+
628+
class ShredderOutputSpec(TraitedSpec):
629+
shredded = File(exists=True, desc='Shredded binary data file')
630+
631+
class Shredder(StdOutCommandLine):
632+
"""
633+
Extracts periodic chunks from a data stream.
634+
635+
Shredder makes an initial offset of offset bytes. It then reads and outputs
636+
chunksize bytes, skips space bytes, and repeats until there is no more input.
637+
638+
If the chunksize is negative, chunks of size |chunksize| are read and the
639+
byte ordering of each chunk is reversed. The whole chunk will be reversed, so
640+
the chunk must be the same size as the data type, otherwise the order of the
641+
values in the chunk, as well as their endianness, will be reversed.
642+
643+
Examples
644+
--------
645+
646+
>>> import nipype.interfaces.camino as cam
647+
>>> shred = cam.Shredder()
648+
>>> shred.inputs.in_file = 'SubjectA.Bfloat'
649+
>>> shred.inputs.offset = 0
650+
>>> shred.inputs.chunksize = 1
651+
>>> shred.inputs.space = 2
652+
>>> shred.run() # doctest: +SKIP
653+
"""
654+
_cmd = 'shredder'
655+
input_spec=ShredderInputSpec
656+
output_spec=ShredderOutputSpec
657+
658+
def _list_outputs(self):
659+
outputs = self.output_spec().get()
660+
outputs['shredded'] = os.path.abspath(self._gen_outfilename())
661+
return outputs
662+
663+
def _gen_outfilename(self):
664+
_, name , _ = split_filename(self.inputs.in_file)
665+
return name + "_shredded"

nipype/interfaces/camino/odf.py

Lines changed: 120 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -160,6 +160,126 @@ def _gen_outfilename(self):
160160
_, name , _ = split_filename(self.inputs.scheme_file)
161161
return name + '_recondata.Bdouble'
162162

163+
class MESDInputSpec(StdOutCommandLineInputSpec):
164+
in_file = File(exists=True, argstr='-inputfile %s', mandatory=True, position=1,
165+
desc='voxel-order data filename')
166+
inverter = traits.Enum('SPIKE', 'PAS', argstr='-filter %s', position=2, mandatory=True,
167+
desc=('The inversion index specifies the type of inversion to perform on the data.'
168+
'The currently available choices are:'
169+
'Inverter name | Inverter parameters'
170+
'---------------|------------------'
171+
'SPIKE | bd (b-value x diffusivity along the fibre.)'
172+
'PAS | r'))
173+
inverter_param = traits.Float(argstr='%f', units='NA', position=3, mandatory=True,
174+
desc=('Parameter associated with the inverter. Cf. inverter description for'
175+
'more information.'))
176+
fastmesd = traits.Bool(argstr='-fastmesd', requires=['mepointset'],
177+
desc=('Turns off numerical integration checks and fixes the integration point set size at that of'
178+
'the index specified by -basepointset..'))
179+
mepointset = traits.Int(argstr='-mepointset %d', units='NA',
180+
desc=('Use a set of directions other than those in the scheme file for the deconvolution kernel.'
181+
'The number refers to the number of directions on the unit sphere. For example, '
182+
'"-mepointset 54" uses the directions in "camino/PointSets/Elec054.txt".'))
183+
scheme_file = File(exists=True, argstr='-schemefile %s', mandatory=True,
184+
desc='Specifies the scheme file for the diffusion MRI data')
185+
bgmask = File(exists=True, argstr='-bgmask %s', desc='background mask')
186+
inputdatatype = traits.Enum('float', 'char', 'short', 'int', 'long', 'double', argstr='-inputdatatype %s',
187+
desc=('Specifies the data type of the input file: "char", "short", "int", "long",'
188+
'"float" or "double". The input file must have BIG-ENDIAN ordering.'
189+
'By default, the input type is "float".'))
190+
191+
class MESDOutputSpec(TraitedSpec):
192+
mesd_data = File(exists=True, desc='MESD data')
193+
194+
class MESD(StdOutCommandLine):
195+
"""
196+
MESD is a general program for maximum entropy spherical deconvolution.
197+
It also runs PASMRI, which is a special case of spherical deconvolution.
198+
The input data must be in voxel order.
199+
200+
The format of the output in each voxel is:
201+
{ exitcode, ln(A^star(0)), lambda_0, lambda_1, ..., lambda_N }
202+
203+
The exitcode contains the results of three tests. The first test thresholds
204+
the maximum relative error between the numerical integrals computed at con-
205+
vergence and those computed using a larger test point set; if the error is
206+
greater than a threshold the exitcode is increased from zero to one as a
207+
warning; if it is greater than a larger threshold the exitcode is increased to
208+
two to suggest failure. The second test thresholds the predicted error in
209+
numerical integrals computed using the test point set; if the predicted error
210+
is greater than a threshold the exitcode is increased by 10. The third test
211+
thresholds the RMS error between the measurements and their predictions from
212+
the fitted deconvolution; if the errors are greater than a threshold, the exit
213+
code is increased by 100. An exitcode of 112 means that all three tests were
214+
failed and the result is likely to be unreliable. If all is well the exitcode
215+
is zero. Results are often still reliable even if one or two of the tests are
216+
failed.
217+
218+
Other possible exitcodes are:
219+
5 - The optimization failed to converge
220+
-1 - Background
221+
-100 - Something wrong in the MRI data, e.g. negative or zero measurements,
222+
so that the optimization could not run.
223+
224+
The standard MESD implementation is computationally demanding, particularly
225+
as the number of measurements increases (computation is approximately O(N^2),
226+
where N is the number of measurements). There are two ways to obtain significant
227+
computational speed-up:
228+
229+
i) Turn off error checks and use a small point set for computing numerical
230+
integrals in the algorithm by adding the flag -fastmesd. Sakaie CDMRI 2008
231+
shows that using the smallest point set (-basepointset 0) with no
232+
error checks usually has only a minor effect on the output of the algorithm,
233+
but provides a major reduction in computation time. You can increase the point
234+
set size using -basepointset with an argument higher than 0, which may produce
235+
better results in some voxels, but will increase computation time, which
236+
approximately doubles every time the point set index increases by 1.
237+
238+
ii) Reduce the complexity of the maximum entropy encoding using -mepointset <X>.
239+
By default <X> = N, the number of measurements, and is the number of parameters
240+
in the max. ent. representation of the output function, ie the number of
241+
lambda parameters, as described in Jansons and Alexander Inverse Problems 2003.
242+
However, we can represent the function using less components and <X> here
243+
specifies the number of lambda parameters. To obtain speed-up, set <X>
244+
< N; complexity becomes O(<X>^2) rather than O(N^2). Note that <X> must be chosen
245+
so that the camino/PointSets directory contains a point set with that number
246+
of elements. When -mepointset decreases, the numerical integration checks
247+
make less and less of a difference and smaller point sets for numerical
248+
integration (see -basepointset) become adequate. So when <X> is low -fastmesd is
249+
worth using to get even more speed-up.
250+
251+
The choice of <X> is a parameter of the technique. Too low and you lose angular
252+
resolution; too high and you see no computational benefit and may even suffer
253+
from overfitting. Empirically, we have found that <X>=16 often gives good
254+
results and good speed up, but it is worth trying a few values and comparing
255+
performance. The reduced encoding is described in the following ISMRM abstract:
256+
Sweet and Alexander "Reduced Encoding Persistent Angular Structure" 572 ISMRM 2010.
257+
258+
Example
259+
---------
260+
Run MESD on every voxel of the data file SubjectA.Bfloat using the PASMRI kernel.
261+
262+
>>> import nipype.interfaces.camino as cam
263+
>>> mesd = cam.MESD()
264+
>>> mesd.inputs.in_file = 'SubjectA.Bfloat'
265+
>>> mesd.inputs.scheme_file = 'A.scheme'
266+
>>> mesd.inputs.inverter = 'PAS'
267+
>>> mesd.inputs.inverter_param = 1.4
268+
>>> mesd.run() # doctest: +SKIP
269+
"""
270+
_cmd = 'mesd'
271+
input_spec=MESDInputSpec
272+
output_spec=MESDOutputSpec
273+
274+
def _list_outputs(self):
275+
outputs = self.output_spec().get()
276+
outputs['mesd_data'] = os.path.abspath(self._gen_outfilename())
277+
return outputs
278+
279+
def _gen_outfilename(self):
280+
_, name , _ = split_filename(self.inputs.scheme_file)
281+
return name + '_MESD.Bdouble'
282+
163283
class SFPeaksInputSpec(StdOutCommandLineInputSpec):
164284
in_file = File(exists=True, argstr='-inputfile %s', mandatory=True,
165285
desc='Voxel-order data of spherical functions')

0 commit comments

Comments
 (0)