Skip to content

Commit bd6609a

Browse files
committed
ENH: Add MCRIBS surface processing workflow
1 parent 9d070eb commit bd6609a

File tree

1 file changed

+139
-40
lines changed

1 file changed

+139
-40
lines changed

nibabies/workflows/anatomical/surfaces.py

Lines changed: 139 additions & 40 deletions
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,140 @@
1-
# Use infant_recon_all to generate subcortical segmentations and cortical parcellations
1+
"""Anatomical surface projections"""
2+
from nipype.interfaces import freesurfer as fs
3+
from nipype.interfaces import io as nio
4+
from nipype.interfaces import utility as niu
5+
from nipype.pipeline import engine as pe
6+
from niworkflows.engine.workflows import LiterateWorkflow
7+
from niworkflows.interfaces.freesurfer import PatchedLTAConvert as LTAConvert
8+
from niworkflows.interfaces.freesurfer import PatchedRobustRegister as RobustRegister
9+
from smriprep.workflows.surfaces import init_gifti_surface_wf
10+
11+
# Field names shared by the surface-reconstruction workflows in this module,
# kept in sync with smriprep.workflows.surfaces.init_surface_recon_wf.
SURFACE_INPUTS = [
    "subjects_dir", "subject_id",
    # anatomical inputs
    "t1w", "t2w", "flair",
    "skullstripped_t1", "corrected_t1",
    # ANTs-derived segmentation
    "ants_segs",
]
SURFACE_OUTPUTS = [
    "subjects_dir", "subject_id",
    # fsnative <-> T1w transforms
    "t1w2fsnative_xfm", "fsnative2t1w_xfm",
    # GIFTI surfaces and morphometrics
    "surfaces", "morphometrics",
    # segmentation / parcellation volumes
    "out_aseg", "out_aparc",
]
31+
32+
33+
def init_mcribs_surface_recon_wf(*, mcribs_dir=None, name="mcribs_surface_recon_wf"):
    """
    Reconstruct cortical surfaces with M-CRIB-S ``MCRIBReconAll``.

    The preprocessed T2w and the ANTs-derived segmentation are reoriented to
    LAS, FreeSurfer ``aseg`` labels are remapped to the M-CRIB-S labeling
    scheme, ``MCRIBReconAll`` performs surface reconstruction, and the
    fsnative<->T1w transforms plus GIFTI surfaces/morphometrics are derived.

    Parameters
    ----------
    mcribs_dir : :obj:`str` or None
        Path to a precomputed MCRIBS output directory; when provided it is
        forwarded to ``MCRIBReconAll`` so prior results can be reused.
    name : :obj:`str`
        Name of the workflow (default: ``mcribs_surface_recon_wf``).
    """
    from niworkflows.interfaces.nibabel import MapLabels, ReorientImage

    from ...interfaces.mcribs import MCRIBReconAll

    inputnode = pe.Node(niu.IdentityInterface(fields=SURFACE_INPUTS), name='inputnode')
    outputnode = pe.Node(niu.IdentityInterface(fields=SURFACE_OUTPUTS), name='outputnode')

    wf = LiterateWorkflow(name=name)
    # No interpolated fields in the boilerplate, so a plain (non-f) string suffices.
    wf.__desc__ = """\
Brain surfaces were reconstructed using `MCRIBReconAll` [M-CRIB-S, @mcribs],
leveraging the masked, preprocessed T2w and remapped anatomical segmentation.
"""

    # dictionary to map labels from FS to M-CRIB-S
    aseg2mcrib = {
        2: 51,
        3: 21,
        4: 49,
        5: 0,
        7: 17,
        8: 17,
        10: 43,
        11: 41,
        12: 47,
        13: 47,
        14: 0,
        15: 0,
        16: 19,
        17: 1,
        18: 3,
        26: 41,
        28: 45,
        31: 49,
        41: 52,
        42: 20,
        43: 50,
        44: 0,
        46: 18,
        47: 18,
        49: 42,
        50: 40,
        51: 46,
        52: 46,
        53: 2,
        54: 4,
        58: 40,
        60: 44,
        63: 50,
        253: 48,
    }
    map_labels = pe.Node(MapLabels(mappings=aseg2mcrib), name="map_labels")

    # MCRIBS expects LAS-oriented inputs; reorient both T2w and segmentation
    t2w_las = pe.Node(ReorientImage(target_orientation="LAS"), name="t2w_las")
    seg_las = t2w_las.clone(name="seg_las")

    mcribs_recon = pe.Node(
        MCRIBReconAll(surfrecon=True, autorecon_after_surf=True), name="mcribs_recon"
    )
    if mcribs_dir:
        # Reuse a precomputed MCRIBS working directory when one is supplied
        mcribs_recon.inputs.outdir = mcribs_dir

    fssource = pe.Node(nio.FreeSurferSource(), name='fssource', run_without_submitting=True)
    norm2nii = pe.Node(fs.MRIConvert(out_type="niigz"), name="norm2nii")

    fsnative2t1w_xfm = pe.Node(
        RobustRegister(auto_sens=True, est_int_scale=True),
        name='fsnative2t1w_xfm',
    )

    t1w2fsnative_xfm = pe.Node(
        LTAConvert(out_lta=True, invert=True),
        name="t1w2fsnative_xfm",
    )
    gifti_surface_wf = init_gifti_surface_wf()

    # fmt:off
    wf.connect([
        (inputnode, t2w_las, [("t2w", "in_file")]),
        # BUG FIX: a trailing comma was missing here, which turned the tuple
        # above into a (failing) call expression at workflow-construction time
        (inputnode, map_labels, [("ants_segs", "in_file")]),
        (map_labels, seg_las, [("out_file", "in_file")]),
        (inputnode, mcribs_recon, [
            ("subjects_dir", "subjects_dir"),
            ("subject_id", "subject_id")]),
        (t2w_las, mcribs_recon, [("out_file", "t2w_file")]),
        (seg_las, mcribs_recon, [("out_file", "segmentation_file")]),
        (map_labels, outputnode, [("out_file", "out_aseg")]),

        # copied from infantFS workflow
        (inputnode, fsnative2t1w_xfm, [('skullstripped_t1', 'target_file')]),
        (fssource, norm2nii, [('norm', 'in_file')]),
        (norm2nii, fsnative2t1w_xfm, [('out_file', 'source_file')]),
        (fsnative2t1w_xfm, t1w2fsnative_xfm, [('out_reg_file', 'in_lta')]),
        (inputnode, gifti_surface_wf, [
            ("subjects_dir", "subjects_dir"),
            ("subject_id", "subject_id")]),
        (fsnative2t1w_xfm, gifti_surface_wf, [
            ('out_reg_file', 'inputnode.fsnative2t1w_xfm')]),
        (gifti_surface_wf, outputnode, [
            ('outputnode.surfaces', 'surfaces'),
            ('outputnode.morphometrics', 'morphometrics'),
        ]),
    ])
    # fmt:on
    return wf
2138

3139
from nipype.interfaces import fsl
4140
from nipype.interfaces import utility as niu
@@ -9,49 +145,12 @@
9145

10146

11147
def init_infant_surface_recon_wf(*, age_months, use_aseg=False, name="infant_surface_recon_wf"):
12-
from nipype.interfaces import freesurfer as fs
13-
from nipype.interfaces import io as nio
14-
from niworkflows.engine.workflows import LiterateWorkflow
15-
from niworkflows.interfaces.freesurfer import PatchedLTAConvert as LTAConvert
16-
from niworkflows.interfaces.freesurfer import (
17-
PatchedRobustRegister as RobustRegister,
18-
)
19-
from smriprep.workflows.surfaces import init_gifti_surface_wf
20-
21148
from nibabies.interfaces.freesurfer import InfantReconAll
22149

23150
# Synchronized inputs to smriprep.workflows.surfaces.init_surface_recon_wf
24151
wf = LiterateWorkflow(name=name)
25-
inputnode = pe.Node(
26-
niu.IdentityInterface(
27-
fields=[
28-
"subjects_dir",
29-
"subject_id",
30-
"t1w",
31-
"t2w",
32-
"flair",
33-
"skullstripped_t1",
34-
"corrected_t1",
35-
"ants_segs",
36-
],
37-
),
38-
name="inputnode",
39-
)
40-
outputnode = pe.Node(
41-
niu.IdentityInterface(
42-
fields=[
43-
"subjects_dir",
44-
"subject_id",
45-
"t1w2fsnative_xfm",
46-
"fsnative2t1w_xfm",
47-
"surfaces",
48-
"morphometrics",
49-
"out_aseg",
50-
"out_aparc",
51-
]
52-
),
53-
name="outputnode",
54-
)
152+
inputnode = pe.Node(niu.IdentityInterface(fields=SURFACE_INPUTS), name="inputnode")
153+
outputnode = pe.Node(niu.IdentityInterface(fields=SURFACE_OUTPUTS), name="outputnode")
55154

56155
wf.__desc__ = f"""\
57156
Brain surfaces were reconstructed using `infant_recon_all` [FreeSurfer

0 commit comments

Comments
 (0)