
Commit ff97f6a

Shotgunosine (Dylan) authored and committed
[ENH] Add minimal 3dbucket interface
1 parent ea74b63 commit ff97f6a

2 files changed: 87 additions, 1 deletion

nipype/interfaces/afni/__init__.py

Lines changed: 2 additions & 1 deletion

@@ -18,7 +18,8 @@
                          Seg, SkullStrip, TCorr1D, TCorrMap, TCorrelate,
                          TShift, Volreg, Warp, QwarpPlusMinus, Qwarp)
 from .svm import (SVMTest, SVMTrain)
-from .utils import (AFNItoNIFTI, Autobox, Axialize, BrickStat, Calc, Cat, Copy,
+from .utils import (AFNItoNIFTI, Autobox, Axialize, BrickStat, Bucket,
+                    Calc, Cat, Copy,
                     Edge3, Eval, FWHMx, MaskTool, Merge, Notes, NwarpApply,
                     Refit, Resample, TCat, TStat, To3D, Unifize, ZCutUp, GCOR,
                     Zcat, Zeropad)
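
With this import change, the new Bucket class is re-exported at the afni package level alongside the other utils interfaces. A minimal, hedged sketch of how it can then be imported, assuming a nipype checkout that includes this commit:

>>> from nipype.interfaces.afni import Bucket  # resolved via the new .utils import
>>> Bucket._cmd
'3dbucket'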

nipype/interfaces/afni/utils.py

Lines changed: 85 additions & 0 deletions

@@ -270,6 +270,91 @@ def aggregate_outputs(self, runtime=None, needed_outputs=None):
 
         return outputs
 
+class BucketInputSpec(AFNICommandInputSpec):
+    in_file = traits.List(
+        traits.Tuple(
+            (File(
+                exists=True,
+                desc='input file',
+                copyfile=False),
+             traits.Str(argstr="'%s'")),
+            artstr="%s%s"),
+        position=-1,
+        mandatory=True,
+        argstr="%s",
+        desc='List of tuples of input datasets and subbrick selection strings'
+             'as described in more detail in the following afni help string'
+             'Input dataset specified using one of these forms:'
+             ' \'prefix+view\', \'prefix+view.HEAD\', or \'prefix+view.BRIK\'.'
+             'You can also add a sub-brick selection list after the end of the'
+             'dataset name. This allows only a subset of the sub-bricks to be'
+             'included into the output (by default, all of the input dataset'
+             'is copied into the output). A sub-brick selection list looks like'
+             'one of the following forms:'
+             ' fred+orig[5] ==> use only sub-brick #5'
+             ' fred+orig[5,9,17] ==> use #5, #9, and #17'
+             ' fred+orig[5..8] or [5-8] ==> use #5, #6, #7, and #8'
+             ' fred+orig[5..13(2)] or [5-13(2)] ==> use #5, #7, #9, #11, and #13'
+             'Sub-brick indexes start at 0. You can use the character \'$\''
+             'to indicate the last sub-brick in a dataset; for example, you'
+             'can select every third sub-brick by using the selection list'
+             ' fred+orig[0..$(3)]'
+             'N.B.: The sub-bricks are output in the order specified, which may'
+             ' not be the order in the original datasets. For example, using'
+             ' fred+orig[0..$(2),1..$(2)]'
+             ' will cause the sub-bricks in fred+orig to be output into the'
+             ' new dataset in an interleaved fashion. Using'
+             ' fred+orig[$..0]'
+             ' will reverse the order of the sub-bricks in the output.'
+             'N.B.: Bucket datasets have multiple sub-bricks, but do NOT have'
+             ' a time dimension. You can input sub-bricks from a 3D+time dataset'
+             ' into a bucket dataset. You can use the \'3dinfo\' program to see'
+             ' how many sub-bricks a 3D+time or a bucket dataset contains.'
+             'N.B.: In non-bucket functional datasets (like the \'fico\' datasets'
+             ' output by FIM, or the \'fitt\' datasets output by 3dttest), sub-brick'
+             ' [0] is the \'intensity\' and sub-brick [1] is the statistical parameter'
+             ' used as a threshold. Thus, to create a bucket dataset using the'
+             ' intensity from dataset A and the threshold from dataset B, and'
+             ' calling the output dataset C, you would type'
+             ' 3dbucket -prefix C -fbuc \'A+orig[0]\' -fbuc \'B+orig[1]\''
+             'WARNING: using this program, it is possible to create a dataset that'
+             ' has different basic datum types for different sub-bricks'
+             ' (e.g., shorts for brick 0, floats for brick 1).'
+             ' Do NOT do this! Very few AFNI programs will work correctly'
+             ' with such datasets!')
+    out_file = File(
+        argstr='-prefix %s',
+        name_template='buck')
+
+
+class Bucket(AFNICommand):
+    """Concatenate sub-bricks from input datasets into one big
+    'bucket' dataset.
+
+    For complete details, see the `3dbucket Documentation.
+    <https://afni.nimh.nih.gov/pub/dist/doc/program_help/3dbucket.html>`_
+
+    Examples
+    ========
+
+    >>> from nipype.interfaces import afni
+    >>> bucket = afni.Bucket()
+    >>> bucket.inputs.in_file = [('functional.nii',"{2..$}"), ('functional.nii',"{1}")]
+    >>> bucket.inputs.out_file = 'vr_base'
+    >>> bucket.cmdline # doctest: +ALLOW_UNICODE
+    "3dbucket -prefix vr_base functional.nii'{2..$}' functional.nii'{1}'"
+    >>> res = bucket.run()  # doctest: +SKIP
+
+    """
+
+    _cmd = '3dbucket'
+    input_spec = BucketInputSpec
+    output_spec = AFNICommandOutputSpec
+
+    def _format_arg(self, name, spec, value):
+        if name == 'in_file':
+            return spec.argstr%(' '.join([i[0]+"'"+i[1]+"'" for i in value]))
+        return super(Bucket, self)._format_arg(name, spec, value)
 
 class CalcInputSpec(AFNICommandInputSpec):
     in_file_a = File(
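
As a rough, standalone illustration (not nipype code itself) of what the overridden _format_arg above does with the in_file tuples: each (dataset, selector) pair is collapsed to dataset'selector', the pieces are space-joined, and the result is substituted into the trait's "%s" argstr. The file name and selector strings below are simply the ones from the docstring example.

# Standalone sketch mirroring Bucket._format_arg for the 'in_file' trait.
in_file = [('functional.nii', "{2..$}"), ('functional.nii', "{1}")]
argstr = "%s"  # the argstr declared on the in_file trait
formatted = argstr % ' '.join([f + "'" + sel + "'" for f, sel in in_file])
print('3dbucket -prefix vr_base ' + formatted)
# prints: 3dbucket -prefix vr_base functional.nii'{2..$}' functional.nii'{1}'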
