
Commit 915e6e8

Merge branch 'master' of git://github.com/nipy/nipype

2 parents: 4b30e13 + 6b0fbaa

113 files changed: +11320 / -394 lines


.travis.yml

Lines changed: 2 additions & 1 deletion
@@ -34,6 +34,7 @@ before_install:
 echo 'include_dirs = /usr/include:/usr/include/X11' >> $HOME/.numpy-site.cfg;
 fi
 install:
+- sudo apt-get install fusefat
 - conda update --yes conda
 - conda create -n testenv --yes pip python=$TRAVIS_PYTHON_VERSION
 - source activate testenv
@@ -56,7 +57,7 @@ install:
 - pip install -r requirements.txt # finish remaining requirements
 - python setup.py install
 script:
-- python -W once:FSL:UserWarning:nipype `which nosetests` --with-doctest --with-cov --cover-package nipype --cov-config .coveragerc --logging-level=INFO
+- python -W once:FSL:UserWarning:nipype `which nosetests` --with-doctest --with-cov --cover-package nipype --cov-config .coveragerc --logging-level=DEBUG --verbosity=3
 after_success:
 - coveralls --config_file .coveragerc
 deploy:
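
The -W once:FSL:UserWarning:nipype flag in the updated test command installs a warnings filter before nosetests starts. Below is a minimal sketch of roughly the same filter set from Python code, for illustration only; it is not part of this commit.

import warnings

# Roughly what `python -W once:FSL:UserWarning:nipype ...` does: report each
# UserWarning whose message starts with "FSL" and that originates from the
# nipype package only the first time it is raised.
warnings.filterwarnings('once', message='FSL', category=UserWarning, module='nipype')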

CHANGES

Lines changed: 4 additions & 0 deletions
@@ -1,6 +1,7 @@
 Next release
 ============
 
+* ENH: Created interface for BrainSuite Cortical Surface Extraction command line tools (https://github.com/nipy/nipype/pull/1305)
 * FIX: job execution on systems/approaches where locale is undefined (https://github.com/nipy/nipype/pull/1401)
 * FIX: Clean up byte/unicode issues using subprocess (https://github.com/nipy/nipype/pull/1394)
 * FIX: Prevent crash when tvtk is loaded - ETS_TOOLKIT=null (https://github.com/nipy/nipype/pull/973)
@@ -24,6 +25,9 @@ Next release
 * ENH: New interfaces for interacting with AWS S3: S3DataSink and S3DataGrabber (https://github.com/nipy/nipype/pull/1201)
 * ENH: Interfaces for MINC tools (https://github.com/nipy/nipype/pull/1304)
 * FIX: Use realpath to determine hard link source (https://github.com/nipy/nipype/pull/1388)
+* FIX: Correct linking/copying fallback behavior (https://github.com/nipy/nipype/pull/1391)
+* ENH: Nipype workflow and interfaces for FreeSurfer's recon-all (https://github.com/nipy/nipype/pull/1326)
+* FIX: Permit relative path for concatenated_file input to Concatenate() (https://github.com/nipy/nipype/pull/1411)
 
 Release 0.11.0 (September 15, 2015)
 ============

CHANGES.orig

Lines changed: 538 additions & 0 deletions
Large diffs are not rendered by default.

README.rst

Lines changed: 4 additions & 0 deletions
@@ -34,6 +34,10 @@ NIPYPE: Neuroimaging in Python: Pipelines and Interfaces
    :target: https://pypi.python.org/pypi/nipype/
    :alt: License
 
+.. image:: https://img.shields.io/badge/gitter-join%20chat%20%E2%86%92-brightgreen.svg?style=flat
+   :target: http://gitter.im/nipy/nipype
+   :alt: Chat
+
 Current neuroimaging software offer users an incredible opportunity to
 analyze data using a variety of different algorithms. However, this has
 resulted in a heterogeneous collection of specialized applications

Vagrantfile

Lines changed: 10 additions & 9 deletions
@@ -20,19 +20,20 @@ $script = <<SCRIPT
 # qconf -aattr queue slots "2, [neuro=3]" main.q
 
 # install anaconda
-wget http://repo.continuum.io/miniconda/Miniconda-latest-Linux-x86_64.sh -O miniconda.sh
+wget https://repo.continuum.io/miniconda/Miniconda3-latest-Linux-x86_64.sh -O miniconda.sh
 chmod +x miniconda.sh
 ./miniconda.sh -b
-echo "export PATH=$HOME/miniconda/bin:\\$PATH" >> .bashrc
+echo "export PATH=$HOME/miniconda3/bin:\\$PATH" >> .bashrc
 
 # install nipype dependencies
-$HOME/miniconda/bin/conda update --yes conda
-$HOME/miniconda/bin/conda install --yes pip numpy scipy nose traits networkx
-$HOME/miniconda/bin/conda install --yes dateutil ipython-notebook matplotlib
-$HOME/miniconda/bin/pip install nibabel --use-mirrors
-$HOME/miniconda/bin/pip install https://github.com/RDFLib/rdflib/archive/master.zip
-$HOME/miniconda/bin/pip install https://github.com/trungdong/prov/archive/rdf.zip
-$HOME/miniconda/bin/pip install https://github.com/nipy/nipype/archive/master.zip
+$HOME/miniconda3/bin/conda update --yes conda
+$HOME/miniconda3/bin/conda install --yes pip scipy nose networkx lxml future simplejson
+$HOME/miniconda3/bin/conda install --yes python-dateutil jupyter matplotlib
+$HOME/miniconda3/bin/pip install nibabel
+$HOME/miniconda3/bin/pip install prov
+$HOME/miniconda3/bin/pip install xvfbwrapper
+$HOME/miniconda3/bin/pip install traits
+$HOME/miniconda3/bin/pip install https://github.com/nipy/nipype/archive/master.zip
 SCRIPT
 
 Vagrant.configure(VAGRANTFILE_API_VERSION) do |config|
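
After the updated provisioning script runs, the new dependency set can be sanity-checked from the VM's Python. A minimal sketch follows; it assumes the miniconda3 installation added to PATH above is active, and the exact set of imports is illustrative rather than part of this commit.

# quick import check for the packages installed by the Vagrant provisioning script
import nipype
import networkx
import lxml
import simplejson
import nibabel
import prov
import traits

print('nipype', nipype.__version__)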

circle.yml

Lines changed: 1 addition & 1 deletion
@@ -12,7 +12,7 @@ dependencies:
   - bash <(wget -q -O- http://neuro.debian.net/_files/neurodebian-travis.sh)
   override:
   # Install apt packages
-  - sudo apt-get install -y fsl-core fsl-atlases fsl-mni152-templates fsl-feeds afni swig python-vtk xvfb
+  - sudo apt-get install -y fsl-core fsl-atlases fsl-mni152-templates fsl-feeds afni swig python-vtk xvfb fusefat
   - echo 'source /etc/fsl/fsl.sh' >> $HOME/.profile
   - echo 'source /etc/afni/afni.sh' >> $HOME/.profile
   - mkdir -p ~/examples/ && ln -sf /usr/share/fsl-feeds/ ~/examples/feeds

doc/_templates/layout.html

Lines changed: 6 additions & 0 deletions
@@ -14,6 +14,12 @@
   ga('create', 'UA-339450-7', 'nipy.org/nipype');
   ga('send', 'pageview');
 </script>
+<script>
+  ((window.gitter = {}).chat = {}).options = {
+    room: 'nipy/nipype'
+  };
+</script>
+<script src="https://sidecar.gitter.im/dist/sidecar.v1.js" async defer></script>
 {% endblock %}
 
 {% block header %}

examples/README

Lines changed: 1 addition & 1 deletion
@@ -1,4 +1,4 @@
 A dataset for use with these scripts can be downloaded from the nipype
 website. At the time of writing, it's at:
 
-http://nipy.sourceforge.net/nipype/users/pipeline_tutorial.html
+http://nipy.org/nipype/users/pipeline_tutorial.html

examples/smri_fsreconall.py

Lines changed: 90 additions & 0 deletions
@@ -0,0 +1,90 @@
+#!/usr/bin/env python
+"""
+================
+sMRI: FSReconAll
+================
+
+This script, smri_fsreconall.py, demonstrates the ability to use the
+reconall nipype workflow with a set of subjects and then make an average
+subject::
+
+    python smri_fsreconall.py
+
+Import necessary modules from nipype.
+"""
+
+import os
+
+import nipype.pipeline.engine as pe
+import nipype.interfaces.io as nio
+from nipype.workflows.smri.freesurfer import create_reconall_workflow
+from nipype.interfaces.freesurfer.utils import MakeAverageSubject
+from nipype.interfaces.utility import IdentityInterface
+
+"""
+Assign the tutorial directory
+"""
+
+tutorial_dir = os.path.abspath('smri_fsreconall_tutorial')
+if not os.path.isdir(tutorial_dir):
+    os.mkdir(tutorial_dir)
+
+"""
+Define the workflow directories
+"""
+
+subject_list = ['s1', 's3']
+data_dir = os.path.abspath('data')
+subjects_dir = os.path.join(tutorial_dir, 'subjects_dir')
+if not os.path.exists(subjects_dir):
+    os.mkdir(subjects_dir)
+
+wf = pe.Workflow(name="l1workflow")
+wf.base_dir = os.path.join(tutorial_dir, 'workdir')
+
+"""
+Create inputspec
+"""
+
+inputspec = pe.Node(interface=IdentityInterface(['subject_id']),
+                    name="inputspec")
+inputspec.iterables = ("subject_id", subject_list)
+
+"""
+Grab data
+"""
+
+datasource = pe.Node(interface=nio.DataGrabber(infields=['subject_id'],
+                                               outfields=['struct']),
+                     name='datasource')
+datasource.inputs.base_directory = data_dir
+datasource.inputs.template = '%s/%s.nii'
+datasource.inputs.template_args = dict(struct=[['subject_id', 'struct']])
+datasource.inputs.subject_id = subject_list
+datasource.inputs.sort_filelist = True
+
+wf.connect(inputspec, 'subject_id', datasource, 'subject_id')
+
+"""
+Run recon-all
+"""
+
+recon_all = create_reconall_workflow()
+recon_all.inputs.inputspec.subjects_dir = subjects_dir
+
+wf.connect(datasource, 'struct', recon_all, 'inputspec.T1_files')
+wf.connect(inputspec, 'subject_id', recon_all, 'inputspec.subject_id')
+
+"""
+Make average subject
+"""
+
+average = pe.JoinNode(interface=MakeAverageSubject(),
+                      joinsource="inputspec",
+                      joinfield="subjects_ids",
+                      name="average")
+average.inputs.subjects_dir = subjects_dir
+
+wf.connect(recon_all, 'postdatasink_outputspec.subject_id', average, 'subjects_ids')
+
+wf.run("MultiProc", plugin_args={'n_procs': 4})
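
For debugging the new example, the same workflow object can be run serially or its graph written to disk using nipype's standard Workflow API. A minimal sketch, assuming the script above has already built wf; the plugin choice and graph settings here are illustrative and not part of this commit.

# run on a single core instead of MultiProc, useful when tracking down a
# failing recon-all node
wf.run(plugin='Linear')

# write the expanded execution graph to disk for visual inspection
wf.write_graph(graph2use='exec', format='png')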

nipype/algorithms/tests/test_auto_Overlap.py

Lines changed: 0 additions & 47 deletions
This file was deleted.
