
Commit 4bd6ca2

new version of fix, complete rewrite
1 parent: 0d56d0b


nipype/interfaces/fsl/fix.py

Lines changed: 243 additions & 91 deletions
@@ -1,146 +1,298 @@
 # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
 # vi: set ft=python sts=4 ts=4 sw=4 et:
 """The fix module provides classes for interfacing with the `FSL FIX
-<http://fsl.fmrib.ox.ac.uk/fsl/fslwiki/FIX/index.html>`_ command line tools.
+<http://fsl.fmrib.ox.ac.uk/fsl/fslwiki/FIX/index.html>` command line tools.
+
 This was written to work with FSL version v5.0
-Change directory to provide relative paths for doctests
->>> import os
->>> filepath = os.path.dirname( os.path.realpath( __file__ ) )
->>> datadir = os.path.realpath(os.path.join(filepath, '../../testing/data'))
->>> os.chdir(datadir)

-Example Usage:
+The following example assumes that melodic has already been run, so
+the datagrabber is configured to start from there (a list of melodic
+output directories). If no hand_labels_noise.txt exists already, this
+will fail and comment on that.
+
+EXAMPLE:
+subject_list = ['1','2','3']
+
+fix_pipeline = pe.Workflow(name='fix_pipeline')
+fix_pipeline.base_dir = os.path.abspath('./')
+
+info = dict(mel_ica=[['subject_id']])

-def flatten(l):
-    # turn 2D list into 1D
-    l = sum(l, [])
-    return(l)
+datasource = pe.Node(interface=nio.DataGrabber(infields=['subject_id'], outfields=['mel_ica']), name='datasource')
+datasource.inputs.base_directory = os.path.abspath('<path_to_base_directory>')
+datasource.inputs.template = '%s/<path_to_melodic_ica>'
+datasource.inputs.template_args = info
+datasource.inputs.subject_id = subject_list
+datasource.inputs.sort_filelist = True
+datasource.iterables = ('subject_id', subject_list)

-# extract features
-extract_features = pe.MapNode(interface=fix.FeatureExtractor(), name='extract_features', iterfield=['mel_ica'])
-preproc.connect(feat, 'feat_dir', extract_features, 'mel_ica')
+# create training set by looking into which mel_icas have hand_labels_noise.txt files in them
+create_training_set = pe.JoinNode(interface=fix.TrainingSetCreator(), joinfield=['mel_icas_in'], joinsource='datasource', name='trainingset_creator')

-# the next two nodes are simply for assembling a training set for the classifier. This looks for handlabeled noise txt files in all the specified feat_dirs
-training_input = pe.JoinNode(interface=util.IdentityInterface(fields=['mel_ica']), joinfield=['mel_ica'], joinsource='datasource', name='training_input')
-preproc.connect(extract_features, 'mel_ica', training_input, 'mel_ica')
+# train the classifier
+train_node = pe.Node(interface=fix.Training(trained_wts_filestem='foo'), name='train_node')

-create_training_set = pe.Node(interface=fix.TrainingSetCreator(), name='trainingset_creator')
-preproc.connect(training_input, ('mel_ica', flatten), create_training_set, 'mel_icas_in')
+# test accuracy. Probably not necessary, and also failing on my setup because of fix itself (no error msg)
+accuracy_tester = pe.Node(interface=fix.AccuracyTester(output_directory='more_foo'), name='accuracy_tester')

-# now train the classifier
-train_node = pe.Node(interface=fix.Training(trained_wts_filestem='core_shell_py'), name='train_node')
-preproc.connect(create_training_set, 'mel_icas_out', train_node, 'mel_icas')
+# classify components
+classify_node = pe.Node(interface=fix.Classifier(), name='classify')

-# ask classifier to label ICA components as noise or signal
-classify_node = pe.MapNode(interface=fix.Classifier(thresh=5), name='classify', iterfield=['mel_ica'])
-preproc.connect(train_node, 'trained_wts_file', classify_node, 'trained_wts_file')
-preproc.connect(feat, 'feat_dir', classify_node, 'mel_ica')
+# apply cleanup
+cleaner_node = pe.Node(interface=fix.Cleaner(), name='cleaner')

-# remove noise
-cleaner_node = pe.MapNode(interface=fix.Cleaner(cleanup_motion=True,), name='cleaner', iterfield=['artifacts_list_file'])
-preproc.connect(classify_node, 'artifacts_list_file', cleaner_node, 'artifacts_list_file')
+fix_pipeline.connect(datasource, 'mel_ica', create_training_set, 'mel_icas_in')
+fix_pipeline.connect(create_training_set, 'mel_icas_out', train_node, 'mel_icas')
+fix_pipeline.connect(train_node, 'trained_wts_file', accuracy_tester, 'trained_wts_file')
+fix_pipeline.connect(datasource, 'mel_ica', accuracy_tester, 'mel_icas')
+fix_pipeline.connect(train_node, 'trained_wts_file', classify_node, 'trained_wts_file')
+fix_pipeline.connect(datasource, 'mel_ica', classify_node, 'mel_ica')
+fix_pipeline.connect(classify_node, 'artifacts_list_file', cleaner_node, 'artifacts_list_file')

-# extract mean func
-meanfunc = pe.MapNode(interface=fsl.ImageMaths(op_string = '-Tmean', suffix='_mean'), name='meanfunc', iterfield = ['in_file'])
-preproc.connect(cleaner_node, 'cleaned_functional_file', meanfunc, 'in_file')
+fix_pipeline.write_graph()
+outgraph = fix_pipeline.run()

 """

 from nipype.interfaces.base import (
     TraitedSpec,
     CommandLineInputSpec,
     CommandLine,
-    Directory,
     InputMultiPath,
-    traits,
-    File
+    OutputMultiPath,
+    BaseInterface,
+    BaseInterfaceInputSpec,
+    traits
+)
+from nipype.interfaces.traits_extension import (
+    Directory,
+    File,
+    isdefined
 )
 import os

-class FIXInputSpec(CommandLineInputSpec):
-    mel_ica = InputMultiPath(Directory(exists=True), copyfile=False,
-                             desc='Melodic output directory or directories',
+class TrainingSetCreatorInputSpec(BaseInterfaceInputSpec):
+    mel_icas_in = InputMultiPath(Directory(exists=True), copyfile=False,
+                                 desc='Melodic output directories',
                                  argstr='%s', position=-1)

-
-    # Different modes of operation, which are pretty much mutually exclusive
-    _xor_inputs = ('extract_features', 'classify', 'apply_cleanup', 'train', 'test_accuracy')
+class TrainingSetCreatorOutputSpec(TraitedSpec):
+    mel_icas_out = OutputMultiPath(Directory(exists=True), copyfile=False,
+                                   desc='Hand labels for noise vs signal',
+                                   argstr='%s', position=-1)

-    # /usr/local/fix/fix -f <mel.ica>
-    extract_features = traits.Bool(desc='Extract features (for later training and/or classifying)',
-                                   argstr='-f', xor=_xor_inputs, requires='mel_ica')

-    # /usr/local/fix/fix -c <mel.ica> <training.RData> <thresh>
-    classify = traits.Bool(desc='Classify ICA components using a specific training dataset (<thresh> is in the range 0-100, typically 5-20)',
-                           argstr='-c', xor=_xor_inputs, requires='mel_ica')
+class TrainingSetCreator(BaseInterface):
+    '''Goes through the set of provided melodic output directories, to find all
+    the ones that have a hand_labels_noise.txt file in them.

-    # /usr/local/fix/fix -a <mel.ica/fix4melview_TRAIN_thr.txt> [-m [-h <highpass>]] [-A] [-x <confound>] [-x <confound2>] etc.
-    apply_cleanup = traits.Bool(desc='Apply cleanup, using artefacts listed in the .txt file',
-                                argstr='-a', xor=_xor_inputs, requires='artifacts_list_file') # todo, optional args, required inputs
+    This is outsourced as a separate class, so that the pipeline is
+    rerun every time a hand-labelled file has been changed, or a new one
+    created.

-    train = traits.Bool(desc='Train the classifier based on your own FEAT/MELODIC output directory',
-                        argstr='-t %s', value="training", xor=_xor_inputs) # todo, optional args
+    '''
+    input_spec = TrainingSetCreatorInputSpec
+    output_spec = TrainingSetCreatorOutputSpec
+    _always_run = True

-    test_accuracy = traits.Bool(desc='Test the accuracy of an existing training dataset on a set of hand-labelled subjects',
-                                argstr='-C', xor=_xor_inputs)
+    def _run_interface(self, runtime):
+        mel_icas = []
+        for item in self.inputs.mel_icas_in:
+            if os.path.exists(os.path.join(item,'hand_labels_noise.txt')):
+                mel_icas.append(item)

+        if len(mel_icas) == 0:
+            raise Exception('%s did not find any hand_labels_noise.txt files in the following directories: %s' % (self.__class__.__name__, mel_icas))

-    # shared args for different modes
-    artifacts_list_file = File(desc='Text file listing which ICs are artifacts; can be the output from classification or can be created manually', argstr='%s')
+        return runtime

-    trained_wts_file = File(desc='trained-weights file', argstr='%s')
+    def _list_outputs(self):
+        mel_icas = []
+        for item in self.inputs.mel_icas_in:
+            if os.path.exists(os.path.join(item,'hand_labels_noise.txt')):
+                mel_icas.append(item)
+        outputs = self._outputs().get()
+        outputs['mel_icas_out'] = mel_icas
+        return outputs




-    # leave-one-out cross validation
-    loo = traits.Bool(argstr='-l', requires=['train'],
-                      desc='full leave-one-out test with classifier training')
-
-    # args for classify
+class FeatureExtractorInputSpec(CommandLineInputSpec):
+    mel_ica = Directory(exists=True, copyfile=False, desc='Melodic output directory or directories',
+                        argstr='%s', position=-1)

-    highpass = traits.Float(argstr='-m -h %f', requires=['apply_cleanup'],
-                            desc='cleanup motion confounds', value=100, xor=_xor_cleanup)

+class FeatureExtractorOutputSpec(TraitedSpec):
+    mel_ica = Directory(exists=True, copyfile=False, desc='Melodic output directory or directories',
+                        argstr='%s', position=-1)

-    # for apply_cleanup

-    _xor_cleanup = ('cleanup_motion', 'highpass_filter')
+class FeatureExtractor(CommandLine):
+    '''
+    Extract features (for later training and/or classifying)
+    '''
+    input_spec = FeatureExtractorInputSpec
+    output_spec = FeatureExtractorOutputSpec
+    cmd = 'fix -f'

-    cleanup_motion = traits.Bool(argstr='-m', requires=['apply_cleanup'],
-                                 desc='cleanup motion confounds, looks for design.fsf for highpass filter cut-off', xor=_xor_cleanup)
+    def _list_outputs(self):
+        outputs = self.output_spec().get()
+        outputs['mel_ica'] = self.inputs.mel_ica
+        return outputs

-    highpass = traits.Float(argstr='-m -h %f', requires=['apply_cleanup'],
-                            desc='cleanup motion confounds', value=100, xor=_xor_cleanup)

-    aggressive = traits.Bool(argstr='-A', requires=['apply_cleanup'],
-                             desc='Apply aggressive (full variance) cleanup, instead of the default less-aggressive (unique variance) cleanup.')
+class TrainingInputSpec(CommandLineInputSpec):
+    mel_icas = InputMultiPath(Directory(exists=True), copyfile=False,
+                              desc='Melodic output directories',
+                              argstr='%s', position=-1)

-    confound_file = traits.File(argstr='-x %s', requires=['apply_cleanup'],
-                                desc='Include additional confound file.')
+    trained_wts_filestem = traits.Str(desc='trained-weights filestem, used for trained_wts_file and output directories', argstr='%s', position=1)

-    confound_file_1 = traits.File(argstr='-x %s', requires=['apply_cleanup'],
-                                  desc='Include additional confound file.')
+    loo = traits.Bool(argstr='-l', desc='full leave-one-out test with classifier training', position=2)

-    confound_file_2 = traits.File(argstr='-x %s', requires=['apply_cleanup'],
-                                  desc='Include additional confound file.')

-
+class TrainingOutputSpec(TraitedSpec):
+    trained_wts_file = File(exists=True, desc='Trained-weights file')
+
+
+class Training(CommandLine):
+    '''
+    Train the classifier based on your own FEAT/MELODIC output directory.
+    '''
+    input_spec = TrainingInputSpec
+    output_spec = TrainingOutputSpec
+    cmd = 'fix -t'
+
+    def _list_outputs(self):
+        outputs = self.output_spec().get()
+        if isdefined(self.inputs.trained_wts_filestem):
+            outputs['trained_wts_file'] = os.path.abspath(self.inputs.trained_wts_filestem + '.RData')
+        else:
+            outputs['trained_wts_file'] = os.path.abspath('trained_wts_file.RData')
+        return outputs
+
+
+
+
+class AccuracyTesterInputSpec(CommandLineInputSpec):
+    mel_icas = InputMultiPath(Directory(exists=True), copyfile=False,
+                              desc='Melodic output directories',
+                              argstr='%s', position=3, mandatory=True)
+
+    trained_wts_file = File(desc='trained-weights file', argstr='%s', position=1, mandatory=True)
+
+    output_directory = Directory(desc='Path to folder in which to store the results of the accuracy test.', argstr='%s', position=2, mandatory=True)
+
+
+class AccuracyTesterOutputSpec(TraitedSpec):
+    output_directory = Directory(desc='Path to folder in which to store the results of the accuracy test.', argstr='%s', position=1)
+
+
+class AccuracyTester(CommandLine):
+    '''
+    Test the accuracy of an existing training dataset on a set of hand-labelled subjects.
+    Note: This may or may not be working; it could not presently be confirmed because fix fails on this (even outside of nipype) without leaving an error message.
+    '''
+    input_spec = AccuracyTesterInputSpec
+    output_spec = AccuracyTesterOutputSpec
+    cmd = 'fix -C'

-class FIXOutputSpec(TraitedSpec):
-    output_file = File(desc = "Zip file", exists = True)
+    def _list_outputs(self):
+        outputs = self.output_spec().get()
+        if isdefined(self.inputs.output_directory):
+            outputs['output_directory'] = Directory(exists=False, value=self.inputs.output_directory)
+        else:
+            outputs['output_directory'] = Directory(exists=False, value='accuracy_test')
+        return outputs
+
+
+
+class ClassifierInputSpec(CommandLineInputSpec):
+    mel_ica = Directory(exists=True, copyfile=False, desc='Melodic output directory or directories',
+                        argstr='%s', position=1)
+
+    trained_wts_file = File(exists=True, desc='trained-weights file', argstr='%s', position=2, mandatory=True, copyfile=False)
+
+    thresh = traits.Int(argstr='%d', desc='Threshold for cleanup.', position=-1, mandatory=True)
+
+    artifacts_list_file = File(desc='Text file listing which ICs are artifacts; can be the output from classification or can be created manually')

-class FIX(CommandLine):
-    input_spec = FIXInputSpec
-    output_spec = FIXOutputSpec
-    cmd = 'fix'
+class ClassifierOutputSpec(TraitedSpec):
+    artifacts_list_file = File(desc='Text file listing which ICs are artifacts; can be the output from classification or can be created manually')
+
+
+class Classifier(CommandLine):
+    '''
+    Classify ICA components using a specific training dataset (<thresh> is in the range 0-100, typically 5-20).
+    '''
+    input_spec = ClassifierInputSpec
+    output_spec = ClassifierOutputSpec
+    cmd = 'fix -c'
+
+    def _gen_artifacts_list_file(self, mel_ica, thresh):
+
+        _, trained_wts_file = os.path.split(self.inputs.trained_wts_file)
+        trained_wts_filestem = trained_wts_file.split('.')[0]
+        filestem = 'fix4melview_' + trained_wts_filestem + '_thr'
+
+        fname = os.path.join(mel_ica, filestem + str(thresh) + '.txt')
+        return fname

     def _list_outputs(self):
-        outputs = self.output_spec().get()
-        return outputs
+        outputs = self.output_spec().get()
+        outputs['artifacts_list_file'] = self._gen_artifacts_list_file(self.inputs.mel_ica, self.inputs.thresh)
+
+        return outputs
+
+

-if __name__ == '__main__':

-    fix = FIX()
-    print fix.cmdline
-    fix.run()
+class CleanerInputSpec(CommandLineInputSpec):
+    artifacts_list_file = File(exists=True, argstr='%s', position=1, mandatory=True, desc='Text file listing which ICs are artifacts; can be the output from classification or can be created manually')
+
+    cleanup_motion = traits.Bool(argstr='-m',
+                                 desc='cleanup motion confounds, looks for design.fsf for highpass filter cut-off',
+                                 position=2)
+
+    highpass = traits.Float(argstr='-m -h %f',
+                            desc='cleanup motion confounds', value=100, position=2)
+
+    aggressive = traits.Bool(argstr='-A',
+                             desc='Apply aggressive (full variance) cleanup, instead of the default less-aggressive (unique variance) cleanup.', position=3)
+
+    confound_file = traits.File(argstr='-x %s',
+                                desc='Include additional confound file.', position=4)
+
+    confound_file_1 = traits.File(argstr='-x %s',
+                                  desc='Include additional confound file.', position=5)
+
+    confound_file_2 = traits.File(argstr='-x %s',
+                                  desc='Include additional confound file.', position=6)
+
+
+class CleanerOutputSpec(TraitedSpec):
+    cleaned_functional_file = File(exists=True, desc='Cleaned session data')
+
+
+class Cleaner(CommandLine):
+    '''
+    Apply cleanup, using artefacts listed in the .txt file
+    '''
+    input_spec = CleanerInputSpec
+    output_spec = CleanerOutputSpec
+    cmd = 'fix -a'
+
+    def _get_cleaned_functional_filename(self, artifacts_list_filename):
+        ''' extract the proper filename from the first line of the artifacts file '''
+        artifacts_list_file = open(artifacts_list_filename, 'r')
+        functional_filename, extension = artifacts_list_file.readline().split('.')
+        artifacts_list_file_path, artifacts_list_filename = os.path.split(artifacts_list_filename)
+
+        return(os.path.join(artifacts_list_file_path, functional_filename + '_clean.nii.gz'))
+
+    def _list_outputs(self):
+        outputs = self.output_spec().get()
+        outputs['cleaned_functional_file'] = self._get_cleaned_functional_filename(self.inputs.artifacts_list_file)
+        return outputs
+
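The module docstring above wires the new interfaces into a workflow. As a minimal standalone sketch (not part of this commit), the training path could also be driven directly; the directory paths and the 'my_study' filestem below are hypothetical placeholders:

from nipype.interfaces.fsl import fix

# Keep only those melodic directories that contain a hand_labels_noise.txt file.
selector = fix.TrainingSetCreator()
selector.inputs.mel_icas_in = ['/data/sub001/run1.ica', '/data/sub002/run1.ica']
labelled_dirs = selector.run().outputs.mel_icas_out

# Train on the hand-labelled directories; the output weights file is <filestem>.RData.
trainer = fix.Training(mel_icas=labelled_dirs, trained_wts_filestem='my_study')
print(trainer.cmdline)                         # fix -t my_study <dir1> <dir2> ...
print(trainer.run().outputs.trained_wts_file)  # .../my_study.RData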
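A similar sketch (again with placeholder paths) of the classify-and-clean path: Classifier derives the artifacts list file name inside the melodic directory from the weights filestem and the threshold (fix4melview_<filestem>_thr<thresh>.txt), and Cleaner reads the first line of that file to name the cleaned output:

from nipype.interfaces.fsl import fix

classify = fix.Classifier(mel_ica='/data/sub001/run1.ica',
                          trained_wts_file='/data/my_study.RData',
                          thresh=10)
print(classify.cmdline)  # fix -c /data/sub001/run1.ica /data/my_study.RData 10
classified = classify.run()

# e.g. /data/sub001/run1.ica/fix4melview_my_study_thr10.txt
cleaner = fix.Cleaner(artifacts_list_file=classified.outputs.artifacts_list_file,
                      cleanup_motion=True)
print(cleaner.run().outputs.cleaned_functional_file)  # ..._clean.nii.gz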
