
Commit 4a5ec02

FIX: Update DVARS headers, remove vxwise-stdDVARS, fix carpetplot entries
1 parent 02d13a7

2 files changed: +35, −17 lines


fmriprep/interfaces/confounds.py

Lines changed: 15 additions & 6 deletions
@@ -10,6 +10,7 @@
 
 """
 import os
+import re
 import shutil
 import numpy as np
 import pandas as pd
@@ -27,6 +28,7 @@
 class GatherConfoundsInputSpec(BaseInterfaceInputSpec):
     signals = File(exists=True, desc='input signals')
     dvars = File(exists=True, desc='file containing DVARS')
+    std_dvars = File(exists=True, desc='file containing standardized DVARS')
     fd = File(exists=True, desc='input framewise displacement')
     tcompcor = File(exists=True, desc='input tCompCorr')
     acompcor = File(exists=True, desc='input aCompCorr')
@@ -124,7 +126,7 @@ def _run_interface(self, runtime):
         return runtime
 
 
-def _gather_confounds(signals=None, dvars=None, fdisp=None,
+def _gather_confounds(signals=None, dvars=None, std_dvars=None, fdisp=None,
                       tcompcor=None, acompcor=None, cos_basis=None,
                       motion=None, aroma=None, newpath=None):
     """
@@ -134,16 +136,16 @@ def _gather_confounds(signals=None, dvars=None, fdisp=None,
     >>> from tempfile import TemporaryDirectory
     >>> tmpdir = TemporaryDirectory()
     >>> os.chdir(tmpdir.name)
-    >>> pd.DataFrame({'a': [0.1]}).to_csv('signals.tsv', index=False, na_rep='n/a')
-    >>> pd.DataFrame({'b': [0.2]}).to_csv('dvars.tsv', index=False, na_rep='n/a')
+    >>> pd.DataFrame({'Global Signal': [0.1]}).to_csv('signals.tsv', index=False, na_rep='n/a')
+    >>> pd.DataFrame({'stdDVARS': [0.2]}).to_csv('dvars.tsv', index=False, na_rep='n/a')
     >>> out_file, confound_list = _gather_confounds('signals.tsv', 'dvars.tsv')
     >>> confound_list
     ['Global signals', 'DVARS']
 
     >>> pd.read_csv(out_file, sep='\s+', index_col=None,
     ...             engine='python')  # doctest: +NORMALIZE_WHITESPACE
-         a    b
-    0  0.1  0.2
+       global_signal  std_dvars
+    0            0.1        0.2
     >>> tmpdir.cleanup()
 
 
@@ -153,6 +155,12 @@ def less_breakable(a_string):
         ''' hardens the string to different envs (i.e. case insensitive, no whitespace, '#' '''
         return ''.join(a_string.split()).strip('#')
 
+    # Taken from https://stackoverflow.com/questions/1175208/
+    # If we end up using it more than just here, probably worth pulling in a well-tested package
+    def camel_to_snake(name):
+        s1 = re.sub('(.)([A-Z][a-z]+)', r'\1_\2', name)
+        return re.sub('([a-z0-9])([A-Z])', r'\1_\2', s1).lower()
+
     def _adjust_indices(left_df, right_df):
         # This forces missing values to appear at the beggining of the DataFrame
         # instead of the end
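
For reference, the new helper maps the old confound headers onto the snake_case names used below. A standalone sketch with both helpers lifted to module level (in the diff they are nested inside _gather_confounds):

import re

def less_breakable(a_string):
    # Same helper as in the diff: strips whitespace and leading/trailing '#'
    return ''.join(a_string.split()).strip('#')

def camel_to_snake(name):
    # Same regex-based conversion as the function added above
    s1 = re.sub('(.)([A-Z][a-z]+)', r'\1_\2', name)
    return re.sub('([a-z0-9])([A-Z])', r'\1_\2', s1).lower()

for old in ('Global Signal', 'stdDVARS', 'FramewiseDisplacement', 'WhiteMatter', 'CSF'):
    print(old, '->', camel_to_snake(less_breakable(old)))
# Global Signal -> global_signal
# stdDVARS -> std_dvars
# FramewiseDisplacement -> framewise_displacement
# WhiteMatter -> white_matter
# CSF -> csf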
@@ -167,6 +175,7 @@ def _adjust_indices(left_df, right_df):
     all_files = []
     confounds_list = []
     for confound, name in ((signals, 'Global signals'),
+                           (std_dvars, 'Standardized DVARS'),
                            (dvars, 'DVARS'),
                            (fdisp, 'Framewise displacement'),
                            (tcompcor, 'tCompCor'),
@@ -183,7 +192,7 @@ def _adjust_indices(left_df, right_df):
     for file_name in all_files:  # assumes they all have headings already
         new = pd.read_csv(file_name, sep="\t")
         for column_name in new.columns:
-            new.rename(columns={column_name: less_breakable(column_name)},
+            new.rename(columns={column_name: camel_to_snake(less_breakable(column_name))},
                        inplace=True)
 
         _adjust_indices(confounds_data, new)

fmriprep/workflows/bold/confounds.py

Lines changed: 20 additions & 11 deletions
@@ -171,7 +171,7 @@ def init_bold_confs_wf(mem_gb, metadata, name="bold_confs_wf"):
     tcc_msk = pe.Node(niu.Function(function=_maskroi), name='tcc_msk')
 
     # DVARS
-    dvars = pe.Node(nac.ComputeDVARS(save_all=True, remove_zerovariance=True),
+    dvars = pe.Node(nac.ComputeDVARS(save_nstd=True, save_std=True, remove_zerovariance=True),
                     name="dvars", mem_gb=mem_gb)
 
     # Frame displacement
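
For context, the same nipype interface can be exercised outside the workflow. A minimal sketch, assuming a preprocessed BOLD series and a brain mask are at hand (the file names below are hypothetical):

from nipype.algorithms.confounds import ComputeDVARS

dvars = ComputeDVARS(save_nstd=True, save_std=True, remove_zerovariance=True)
dvars.inputs.in_file = 'bold_preproc.nii.gz'    # hypothetical preprocessed BOLD series
dvars.inputs.in_mask = 'bold_brainmask.nii.gz'  # hypothetical brain mask
result = dvars.run()
# With save_nstd/save_std enabled, the interface exposes separate series:
#   result.outputs.out_nstd -> non-standardized DVARS (fed to add_dvars_header below)
#   result.outputs.out_std  -> standardized DVARS (fed to add_std_dvars_header below)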
@@ -200,9 +200,15 @@ def init_bold_confs_wf(mem_gb, metadata, name="bold_confs_wf"):
                       name="signals", mem_gb=mem_gb)
 
     # Arrange confounds
-    add_header = pe.Node(
+    add_dvars_header = pe.Node(
+        AddTSVHeader(columns=["dvars"]),
+        name="add_dvars_header", mem_gb=0.01, run_without_submitting=True)
+    add_std_dvars_header = pe.Node(
+        AddTSVHeader(columns=["std_dvars"]),
+        name="add_std_dvars_header", mem_gb=0.01, run_without_submitting=True)
+    add_motion_headers = pe.Node(
         AddTSVHeader(columns=["trans_x", "trans_y", "trans_z", "rot_x", "rot_y", "rot_z"]),
-        name="add_header", mem_gb=0.01, run_without_submitting=True)
+        name="add_motion_headers", mem_gb=0.01, run_without_submitting=True)
     concat = pe.Node(GatherConfounds(), name="concat", mem_gb=0.01, run_without_submitting=True)
 
     # Generate reportlet
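
The new AddTSVHeader nodes only label the DVARS and motion series before GatherConfounds merges them. A rough pandas equivalent of that step, assuming the upstream outputs are headerless, whitespace-separated text files (an assumption about those outputs, and an illustrative helper rather than fmriprep's implementation):

import pandas as pd

def add_tsv_header(in_file, columns, out_file):
    # Read a headerless series and write it back as a TSV with the requested header row.
    data = pd.read_csv(in_file, header=None, sep=r'\s+')
    data.columns = columns
    data.to_csv(out_file, sep='\t', index=False, na_rep='n/a')
    return out_file

# e.g. label the standardized DVARS series before concatenation:
# add_tsv_header('std_dvars.txt', ['std_dvars'], 'std_dvars.tsv')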
@@ -275,14 +281,17 @@ def _pick_wm(files):
         (mrg_lbl, signals, [('out', 'label_files')]),
 
         # Collate computed confounds together
-        (inputnode, add_header, [('movpar_file', 'in_file')]),
+        (inputnode, add_motion_headers, [('movpar_file', 'in_file')]),
+        (dvars, add_dvars_header, [('out_nstd', 'in_file')]),
+        (dvars, add_std_dvars_header, [('out_std', 'in_file')]),
         (signals, concat, [('out_file', 'signals')]),
-        (dvars, concat, [('out_all', 'dvars')]),
         (fdisp, concat, [('out_file', 'fd')]),
         (tcompcor, concat, [('components_file', 'tcompcor'),
                             ('pre_filter_file', 'cos_basis')]),
         (acompcor, concat, [('components_file', 'acompcor')]),
-        (add_header, concat, [('out_file', 'motion')]),
+        (add_motion_headers, concat, [('out_file', 'motion')]),
+        (add_dvars_header, concat, [('out_file', 'dvars')]),
+        (add_std_dvars_header, concat, [('out_file', 'std_dvars')]),
 
         # Set outputs
         (concat, outputnode, [('confounds_file', 'confounds_file')]),
@@ -358,11 +367,11 @@ def init_carpetplot_wf(mem_gb, metadata, name="bold_carpet_wf"):
     conf_plot = pe.Node(FMRISummary(
         tr=metadata['RepetitionTime'],
         confounds_list=[
-            ('GlobalSignal', None, 'GS'),
-            ('CSF', None, 'GSCSF'),
-            ('WhiteMatter', None, 'GSWM'),
-            ('stdDVARS', None, 'DVARS'),
-            ('FramewiseDisplacement', 'mm', 'FD')]),
+            ('global_signal', None, 'GS'),
+            ('csf', None, 'GSCSF'),
+            ('white_matter', None, 'GSWM'),
+            ('std_dvars', None, 'DVARS'),
+            ('framewise_displacement', 'mm', 'FD')]),
         name='conf_plot', mem_gb=mem_gb)
 
     ds_report_bold_conf = pe.Node(
         DerivativesDataSink(suffix='carpetplot'),