Skip to content

Commit 99f4a53

Browse files
author
bpinsard
committed
Merge branch 'master' into fsl_model
* master: (53 commits) Added a test for SPM bool conversion. Call super for all SPM _format_args() call super Make all Booleans to be formated as 1,0 in SPM updated changelog fix: added test for deepcopy bug fix: added back deepcopy fix: thanks to @rkern we now have a passing test with dynamic traits retaining their type through pickling fix: test that fails when traitedspec is not pickled properly updated changelog Added a default to maintain backwards compatibility utils.Merge option for not flattening the output list utils.Merge option for not flattening the output list utils.Merge option for not flattening the output list added mention about the new interface to the changelog Added tests stub files Make check-before-commit Significantly faster smoothing in tessellation_tutorial with MeshFix, CFF output option Interface for MeshFix by Marco Attene, Mirko Windhoff, Axel Thielscher Fixed deprecated config option. ...
2 parents 0dfcd56 + 993deb3 commit 99f4a53

File tree

38 files changed

+620
-206
lines changed

38 files changed

+620
-206
lines changed

.travis.yml

Lines changed: 22 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,22 @@
1+
# vim ft=yaml
2+
# travis-ci.org definition for nipy build
3+
#
4+
# We pretend to be erlang because we can't use the python support in
5+
# travis-ci; it uses virtualenvs, they do not have numpy, scipy, matplotlib,
6+
# and it is impractical to build them
7+
language: erlang
8+
env:
9+
- PYTHON=python PYSUF=''
10+
# - PYTHON=python3 PYSUF=3 : python3-numpy not currently available
11+
install:
12+
- sudo apt-get install $PYTHON-dev
13+
- sudo apt-get install $PYTHON-numpy
14+
- sudo apt-get install $PYTHON-scipy
15+
- sudo apt-get install $PYTHON-networkx
16+
- sudo apt-get install $PYTHON-traits
17+
- sudo apt-get install $PYTHON-setuptools
18+
- sudo easy_install$PYSUF nibabel # Latest pypi
19+
- sudo apt-get install $PYTHON-nose
20+
script:
21+
# Change into an innocuous directory and find tests from installation
22+
- make test

CHANGES

Lines changed: 8 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,3 +1,11 @@
1+
Next release
2+
============
3+
4+
*ENH: New interfaces: ICC, Meshfix
5+
*ENH: added no_flatten option to Merge
6+
7+
*FIX: fixed dynamic traits bug
8+
19
Release 0.6.0 (Jun 30, 2012)
210
============================
311

Makefile

Lines changed: 2 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -5,13 +5,8 @@
55
PYTHON ?= python
66
NOSETESTS ?= nosetests
77

8-
zipdoc:
9-
@echo "Clean documentation directory."
10-
python setup.py clean
11-
@echo "Build documentation.zip..."
12-
python setup.py build_sphinx
13-
@echo "Clean documentation directory."
14-
python setup.py clean
8+
zipdoc: html
9+
zip documentation.zip doc/_build/html
1510

1611
sdist: zipdoc
1712
@echo "Building source distribution..."
@@ -75,6 +70,3 @@ check-before-commit: trailing-spaces html test
7570
@echo "removed spaces"
7671
@echo "built docs"
7772
@echo "ran test"
78-
79-
80-

build_docs.py

Lines changed: 58 additions & 56 deletions
Original file line numberDiff line numberDiff line change
@@ -18,9 +18,6 @@
1818
from distutils.cmd import Command
1919
from distutils.command.clean import clean
2020

21-
# Sphinx import.
22-
from sphinx.setup_command import BuildDoc
23-
2421
_info_fname = pjoin(os.path.dirname(__file__), 'nipype', 'info.py')
2522
INFO_VARS = {}
2623
exec(open(_info_fname, 'rt').read(), {}, INFO_VARS)
@@ -104,61 +101,66 @@ def relative_path(filename):
104101

105102
################################################################################
106103
# Distutils Command class build the docs
107-
class MyBuildDoc(BuildDoc):
108-
""" Sub-class the standard sphinx documentation building system, to
109-
add logics for API generation and matplotlib's plot directive.
110-
"""
111-
112-
def run(self):
113-
self.run_command('api_docs')
114-
# We need to be in the doc directory for the plot_directive
115-
# and API generation to work
116-
"""
117-
os.chdir('doc')
118-
try:
119-
BuildDoc.run(self)
120-
finally:
121-
os.chdir('..')
122-
"""
123-
# It puts the build in a doc/doc/_build directory with the
124-
# above?!?! I'm leaving the code above here but commented out
125-
# in case I'm missing something?
126-
BuildDoc.run(self)
127-
self.zip_docs()
128-
129-
def zip_docs(self):
130-
if not os.path.exists(DOC_BUILD_DIR):
131-
raise OSError, 'Doc directory does not exist.'
132-
target_file = os.path.join('doc', 'documentation.zip')
133-
# ZIP_DEFLATED actually compresses the archive. However, there
134-
# will be a RuntimeError if zlib is not installed, so we check
135-
# for it. ZIP_STORED produces an uncompressed zip, but does not
136-
# require zlib.
137-
try:
138-
zf = zipfile.ZipFile(target_file, 'w',
139-
compression=zipfile.ZIP_DEFLATED)
140-
except RuntimeError:
141-
warnings.warn('zlib not installed, storing the docs '
142-
'without compression')
143-
zf = zipfile.ZipFile(target_file, 'w',
144-
compression=zipfile.ZIP_STORED)
145-
146-
for root, dirs, files in os.walk(DOC_BUILD_DIR):
147-
relative = relative_path(root)
148-
if not relative.startswith('.doctrees'):
149-
for f in files:
150-
zf.write(os.path.join(root, f),
151-
os.path.join(relative, 'html_docs', f))
152-
zf.close()
153-
154-
155-
def finalize_options(self):
156-
""" Override the default for the documentation build
157-
directory.
104+
# Sphinx import.
105+
try:
106+
from sphinx.setup_command import BuildDoc
107+
except:
108+
MyBuildDoc = None
109+
else:
110+
class MyBuildDoc(BuildDoc):
111+
""" Sub-class the standard sphinx documentation building system, to
112+
add logics for API generation and matplotlib's plot directive.
158113
"""
159-
self.build_dir = os.path.join(*DOC_BUILD_DIR.split(os.sep)[:-1])
160-
BuildDoc.finalize_options(self)
161114

115+
def run(self):
116+
self.run_command('api_docs')
117+
# We need to be in the doc directory for the plot_directive
118+
# and API generation to work
119+
"""
120+
os.chdir('doc')
121+
try:
122+
BuildDoc.run(self)
123+
finally:
124+
os.chdir('..')
125+
"""
126+
# It puts the build in a doc/doc/_build directory with the
127+
# above?!?! I'm leaving the code above here but commented out
128+
# in case I'm missing something?
129+
BuildDoc.run(self)
130+
self.zip_docs()
131+
132+
def zip_docs(self):
133+
if not os.path.exists(DOC_BUILD_DIR):
134+
raise OSError, 'Doc directory does not exist.'
135+
target_file = os.path.join('doc', 'documentation.zip')
136+
# ZIP_DEFLATED actually compresses the archive. However, there
137+
# will be a RuntimeError if zlib is not installed, so we check
138+
# for it. ZIP_STORED produces an uncompressed zip, but does not
139+
# require zlib.
140+
try:
141+
zf = zipfile.ZipFile(target_file, 'w',
142+
compression=zipfile.ZIP_DEFLATED)
143+
except RuntimeError:
144+
warnings.warn('zlib not installed, storing the docs '
145+
'without compression')
146+
zf = zipfile.ZipFile(target_file, 'w',
147+
compression=zipfile.ZIP_STORED)
148+
149+
for root, dirs, files in os.walk(DOC_BUILD_DIR):
150+
relative = relative_path(root)
151+
if not relative.startswith('.doctrees'):
152+
for f in files:
153+
zf.write(os.path.join(root, f),
154+
os.path.join(relative, 'html_docs', f))
155+
zf.close()
156+
157+
158+
def finalize_options(self):
159+
""" Override the default for the documentation build
160+
directory.
161+
"""
162+
self.build_dir = os.path.join(*DOC_BUILD_DIR.split(os.sep)[:-1])
163+
BuildDoc.finalize_options(self)
162164

163165
################################################################################
164166
# Distutils Command class to clean

examples/fmri_fsl.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -612,7 +612,7 @@ def subjectinfo(subject_id):
612612

613613
l1pipeline = pe.Workflow(name= "level1")
614614
l1pipeline.base_dir = os.path.abspath('./fsl/workingdir')
615-
l1pipeline.config = dict(crashdump_dir=os.path.abspath('./fsl/crashdumps'))
615+
l1pipeline.config = {"execution": {"crashdump_dir":os.path.abspath('./fsl/crashdumps')}}
616616

617617
l1pipeline.connect([(infosource, datasource, [('subject_id', 'subject_id')]),
618618
(infosource, firstlevel, [(('subject_id', subjectinfo), 'modelfit.modelspec.subject_info')]),

examples/tessellation_tutorial.py

Lines changed: 26 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -35,6 +35,7 @@
3535
Import the necessary modules and workflow from nipype.
3636
"""
3737
import nipype.pipeline.engine as pe # pypeline engine
38+
import nipype.interfaces.cmtk as cmtk
3839
import nipype.interfaces.io as nio # Data i/o
3940
import os, os.path as op
4041
from nipype.workflows.smri.freesurfer import create_tessellation_flow
@@ -56,13 +57,27 @@
5657
======
5758
5859
Create the tessellation workflow and set inputs
60+
Here we will choose Gifti (gii) as the output format, because
61+
we want to be able to view the surface in ConnectomeViewer.
62+
63+
If you intend to view the meshes in gmsh or Blender, you should change
64+
the workflow creation to use stereolithographic (stl) format.
5965
"""
6066

61-
tessflow = create_tessellation_flow(name='tessflow')
67+
tessflow = create_tessellation_flow(name='tessflow', out_format='gii')
6268
tessflow.inputs.inputspec.subject_id = 'fsaverage'
6369
tessflow.inputs.inputspec.subjects_dir = subjects_dir
6470
tessflow.inputs.inputspec.lookup_file = lookup_file
6571

72+
"""
73+
We also create a conditional node to package the surfaces for ConnectomeViewer.
74+
Simply set cff to "False" to ignore this step.
75+
"""
76+
77+
cff = True
78+
if cff:
79+
cff = pe.Node(interface=cmtk.CFFConverter(), name='cff')
80+
cff.inputs.out_file = 'Meshes.cff'
6681
"""
6782
Outputs
6883
=======
@@ -85,4 +100,14 @@
85100
tesspipe = pe.Workflow(name='tessellate_tutorial')
86101
tesspipe.base_dir = output_dir
87102
tesspipe.connect([(tessflow, datasink,[('outputspec.meshes', '@meshes.all')])])
103+
104+
"""
105+
If the surfaces are to be packaged, this will connect the CFFConverter
106+
node to the tessellation and smoothing workflow, as well as to the datasink.
107+
"""
108+
109+
if cff:
110+
tesspipe.connect([(tessflow, cff,[('outputspec.meshes', 'gifti_surfaces')])])
111+
tesspipe.connect([(cff, datasink,[('connectome_file', '@cff')])])
112+
88113
tesspipe.run()

nipype/algorithms/icc.py

Lines changed: 129 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,129 @@
1+
from numpy import ones, kron, mean, eye, hstack, dot, tile
2+
from scipy.linalg import pinv
3+
from ..interfaces.base import BaseInterfaceInputSpec, TraitedSpec, \
4+
BaseInterface, traits, File
5+
import nibabel as nb
6+
import numpy as np
7+
import os
8+
9+
10+
class ICCInputSpec(BaseInterfaceInputSpec):
11+
subjects_sessions = traits.List(traits.List(File(exists=True)),
12+
desc="n subjects m sessions 3D stat files",
13+
mandatory=True)
14+
mask = File(exists=True, mandatory=True)
15+
16+
17+
class ICCOutputSpec(TraitedSpec):
18+
icc_map = File(exists=True)
19+
session_var_map = File(exists=True, desc="variance between sessions")
20+
subject_var_map = File(exists=True, desc="variance between subjects")
21+
22+
23+
class ICC(BaseInterface):
24+
'''
25+
Calculates the Intraclass Correlation Coefficient (3,1) as defined in
26+
P. E. Shrout & Joseph L. Fleiss (1979). "Intraclass Correlations: Uses in
27+
Assessing Rater Reliability". Psychological Bulletin 86 (2): 420-428. This
28+
particular implementation is aimed at reliability (test-retest) studies.
29+
'''
30+
input_spec = ICCInputSpec
31+
output_spec = ICCOutputSpec
32+
33+
def _run_interface(self, runtime):
34+
maskdata = nb.load(self.inputs.mask).get_data()
35+
maskdata = np.logical_not(np.logical_or(maskdata == 0, np.isnan(maskdata)))
36+
37+
session_datas = [[nb.load(fname).get_data()[maskdata].reshape(-1, 1) for fname in sessions] for sessions in self.inputs.subjects_sessions]
38+
list_of_sessions = [np.dstack(session_data) for session_data in session_datas]
39+
all_data = np.hstack(list_of_sessions)
40+
icc = np.zeros(session_datas[0][0].shape)
41+
session_F = np.zeros(session_datas[0][0].shape)
42+
session_var = np.zeros(session_datas[0][0].shape)
43+
subject_var = np.zeros(session_datas[0][0].shape)
44+
45+
for x in range(icc.shape[0]):
46+
Y = all_data[x, :, :]
47+
icc[x], subject_var[x], session_var[x], session_F[x], _, _ = ICC_rep_anova(Y)
48+
49+
nim = nb.load(self.inputs.subjects_sessions[0][0])
50+
new_data = np.zeros(nim.get_shape())
51+
new_data[maskdata] = icc.reshape(-1,)
52+
new_img = nb.Nifti1Image(new_data, nim.get_affine(), nim.get_header())
53+
nb.save(new_img, 'icc_map.nii')
54+
55+
new_data = np.zeros(nim.get_shape())
56+
new_data[maskdata] = session_var.reshape(-1,)
57+
new_img = nb.Nifti1Image(new_data, nim.get_affine(), nim.get_header())
58+
nb.save(new_img, 'session_var_map.nii')
59+
60+
new_data = np.zeros(nim.get_shape())
61+
new_data[maskdata] = subject_var.reshape(-1,)
62+
new_img = nb.Nifti1Image(new_data, nim.get_affine(), nim.get_header())
63+
nb.save(new_img, 'subject_var_map.nii')
64+
65+
return runtime
66+
67+
def _list_outputs(self):
68+
outputs = self._outputs().get()
69+
outputs['icc_map'] = os.path.abspath('icc_map.nii')
70+
outputs['sessions_F_map'] = os.path.abspath('sessions_F_map.nii')
71+
outputs['session_var_map'] = os.path.abspath('session_var_map.nii')
72+
outputs['subject_var_map'] = os.path.abspath('subject_var_map.nii')
73+
return outputs
74+
75+
76+
def ICC_rep_anova(Y):
77+
'''
78+
the data Y are entered as a 'table' ie subjects are in rows and repeated
79+
measures in columns
80+
81+
--------------------------------------------------------------------------
82+
One Sample Repeated measure ANOVA
83+
Y = XB + E with X = [Factor / Subjects]
84+
--------------------------------------------------------------------------
85+
'''
86+
87+
[nb_subjects, nb_conditions] = Y.shape
88+
dfc = nb_conditions - 1
89+
dfe = (nb_subjects - 1) * dfc
90+
dfr = nb_subjects - 1
91+
92+
# Compute the repeated measure effect
93+
# ------------------------------------
94+
95+
# Sum Square Total
96+
mean_Y = mean(Y)
97+
SST = ((Y - mean_Y) ** 2).sum()
98+
99+
# create the design matrix for the different levels
100+
x = kron(eye(nb_conditions), ones((nb_subjects, 1))) # sessions
101+
x0 = tile(eye(nb_subjects), (nb_conditions, 1)) # subjects
102+
X = hstack([x, x0])
103+
104+
# Sum Square Error
105+
predicted_Y = dot(dot(dot(X, pinv(dot(X.T, X))), X.T), Y.flatten('F'))
106+
residuals = Y.flatten('F') - predicted_Y
107+
SSE = (residuals ** 2).sum()
108+
109+
residuals.shape = Y.shape
110+
111+
MSE = SSE / dfe
112+
113+
# Sum square session effect - between columns/sessions
114+
SSC = ((mean(Y, 0) - mean_Y) ** 2).sum() * nb_subjects
115+
MSC = SSC / dfc / nb_subjects
116+
117+
session_effect_F = MSC / MSE
118+
119+
# Sum Square subject effect - between rows/subjects
120+
SSR = SST - SSC - SSE
121+
MSR = SSR / dfr
122+
123+
# ICC(3,1) = (mean square subject - mean square error) / (mean square subject + (k-1)*mean square error)
124+
ICC = (MSR - MSE) / (MSR + dfc * MSE)
125+
126+
e_var = MSE #variance of error
127+
r_var = (MSR - MSE)/nb_conditions #variance between subjects
128+
129+
return ICC, r_var, e_var, session_effect_F, dfc, dfe

0 commit comments

Comments
 (0)