
Commit c726c73

Merge pull request #53 from akeshavan/incorporate_eddy_quad
Incorporate eddy quad
2 parents 5fdca31 + cad2e63 commit c726c73

10 files changed: +117 additions, −26 deletions

.gitignore

Lines changed: 3 additions & 0 deletions
@@ -103,3 +103,6 @@ ENV/
 
 # Mac OS nonsense:
 .DS_Store
+
+#kubernetes stuff
+kubernetes/jobs/

dmriprep/data.py

Lines changed: 15 additions & 0 deletions
@@ -425,6 +425,21 @@ def postprocess(self, subject):
                         stdout=fnull,
                         stderr=subprocess.STDOUT)
 
+        # if the site is CBIC, then the freesurfer directory has an additional level.
+        # move that level up by 1 (e.g. removing the T1w_VNavNorm folder
+        if subject.site == 'Site-CBIC':
+            newpath = freesurfer_path.replace('T1w_VNavNorm/', '')
+            move_cmd = 'mv {oldpath} {newpath}'.format(oldpath=freesurfer_path, newpath=newpath)
+            fnull1 = open(os.devnull, 'w')
+            subprocess.call(move_cmd.split(),
+                            stdout=fnull1,
+                            stderr=subprocess.STDOUT)
+
+        # now check that the AP/PA files are named correctly
+        # eg it should look like "sub-{id}_dir-{dir}_acq-dwi_epi.nii.gz
+        # but sometimes it looks like sub-{id}_acq-dwi_run-01_epi.nii.gz
+        # which is silly. the direction should be in the filename.
+
 
 class Subject:
     """A single dMRI study subject"""

dmriprep/qc.py

Lines changed: 4 additions & 2 deletions
@@ -3,7 +3,7 @@
 import os.path as op
 from dipy.segment.mask import median_otsu
 from io import BytesIO
-from nipype.utils.filemanip import save_json
+from nipype.utils.filemanip import save_json, load_json
 import base64
 import matplotlib
 matplotlib.use('agg')
@@ -212,6 +212,7 @@ def createB0_ColorFA_Mask_Sprites(b0_file, colorFA_file, mask_file):
 def create_report_json(dwi_corrected_file, eddy_rms, eddy_report,
                        color_fa_file, anat_mask_file,
                        outlier_indices,
+                       eddy_qc_file,
                        outpath=op.abspath('./report.json')):
 
     report = {}
@@ -229,6 +230,7 @@ def create_report_json(dwi_corrected_file, eddy_rms, eddy_report,
         report['eddy_report'] = f.readlines()
 
     report['eddy_params'] = np.genfromtxt(eddy_rms).tolist()
-
+    eddy_qc = load_json(eddy_qc_file)
+    report['eddy_quad'] = eddy_qc
     save_json(outpath, report)
     return outpath
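
For context, a minimal sketch (not part of this commit) of what the change means downstream: the report JSON now carries the full eddy_quad/QUAD output under an 'eddy_quad' key, next to the existing 'eddy_params' and 'eddy_report' entries. The report path below is illustrative:

    # Minimal sketch: inspect a report written by the updated create_report_json.
    from nipype.utils.filemanip import load_json

    report = load_json('report.json')   # illustrative path
    quad = report['eddy_quad']          # dict loaded from eddy_quad's qc.json
    print(sorted(report.keys()))        # includes 'eddy_params', 'eddy_quad', 'eddy_report', ...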

dmriprep/run.py

Lines changed: 51 additions & 8 deletions
@@ -301,7 +301,8 @@ def run_dmriprep_pe(subject_id, dwi_file, dwi_file_AP, dwi_file_PA,
 
     # write the graph (this is saved to the working dir)
     wf.write_graph()
-
+    wf.config['execution']['remove_unnecessary_outputs'] = False
+    wf.config['execution']['keep_inputs'] = True
     wf.run()
 
 
@@ -363,6 +364,31 @@ def get_dmriprep_pe_workflow():
     import multiprocessing
     eddy.inputs.num_threads = multiprocessing.cpu_count()
 
+    eddy_quad = pe.Node(fsl.EddyQuad(verbose=True), name="eddy_quad")
+    get_path = lambda x: x.split('.nii.gz')[0]
+    wf.connect(prep, ('fsl_eddy.out_corrected', get_path), eddy_quad, "base_name")
+    wf.connect(inputspec, 'bval_file', eddy_quad, 'bval_file')
+    wf.connect(prep, 'Rotate_Bvec.out_file', eddy_quad, 'bvec_file')
+    wf.connect(prep, 'peb_correction.topup.out_field', eddy_quad, 'field')
+    wf.connect(prep, 'gen_index.out_file', eddy_quad, 'idx_file')
+    wf.connect(prep, 'peb_correction.topup.out_enc_file', eddy_quad, 'param_file')
+
+    # need a mask file for eddy_quad. lets get it from the B0.
+    def get_b0_mask_fn(b0_file):
+        import nibabel as nib
+        from nipype.utils.filemanip import fname_presuffix
+        from dipy.segment.mask import median_otsu
+        import os
+
+        mask_file = fname_presuffix(b0_file, suffix="_mask", newpath=os.path.abspath('.'))
+        img = nib.load(b0_file)
+        data, aff = img.get_data(), img.affine
+        _, mask = median_otsu(data, 2, 1)
+        nib.Nifti1Image(mask.astype(float), aff).to_filename(mask_file)
+        return mask_file
+
+
+
     def id_outliers_fn(outlier_report, threshold, dwi_file):
         """Get list of scans that exceed threshold for number of outliers
 
@@ -437,13 +463,19 @@ def num_outliers(scan, outliers):
     wf.connect(inputspec, 'dwi_file_pa', list_merge, 'in2')
 
     merge = pe.Node(fsl.Merge(dimension='t'), name="mergeAPPA")
-    # merge.inputs.in_files = [dwi_file_ap, dwi_file_pa]
     wf.connect(merge, 'merged_file', prep, 'inputnode.alt_file')
     wf.connect(list_merge, 'out', merge, 'in_files')
 
     fslroi = pe.Node(fsl.ExtractROI(t_min=0, t_size=1), name="fslroi")
     wf.connect(prep, "outputnode.out_file", fslroi, "in_file")
 
+    b0mask_node = pe.Node(niu.Function(input_names=['b0_file'],
+                                       output_names=['mask_file'],
+                                       function=get_b0_mask_fn),
+                          name="getB0Mask")
+    wf.connect(fslroi, 'roi_file', b0mask_node, 'b0_file')
+    wf.connect(b0mask_node, 'mask_file', eddy_quad, 'mask_file')
+
     bbreg = pe.Node(fs.BBRegister(contrast_type="t2", init="coreg",
                                   out_fsl_file=True,
                                   # subjects_dir=subjects_dir,
@@ -487,7 +519,7 @@ def drop_outliers_fn(in_file, in_bval, in_bvec, drop_scans):
         from nipype.utils.filemanip import fname_presuffix
 
         img = nib.load(op.abspath(in_file))
-        img_data = img.get_fdata()
+        img_data = img.get_data()
         img_data_thinned = np.delete(img_data,
                                      drop_scans,
                                      axis=3)
@@ -679,7 +711,7 @@ def binarize_aparc(aparc_aseg):
     wf.connect(prep, "fsl_eddy.out_residuals",
                datasink, "dmriprep.qc.@eddyresid")
 
-    # the file that told us which volumes to trop
+    # the file that told us which volumes to drop
     wf.connect(id_outliers_node, "outpath", datasink, "dmriprep.qc.@droppedscans")
 
     # the tensors of the dropped volumes dwi
@@ -700,6 +732,15 @@ def binarize_aparc(aparc_aseg):
     wf.connect(get_tensor_eddy, "color_fa_file", datasink, "dmriprep.dti_eddy.@colorfa")
     wf.connect(scale_tensor_eddy, "out_file", datasink, "dmriprep.dti_eddy.@scaled_tensor")
 
+    # all the eddy_quad stuff
+    wf.connect(eddy_quad, 'out_qc_json', datasink, "dmriprep.qc.@eddyquad_json")
+    wf.connect(eddy_quad, 'out_qc_pdf', datasink, "dmriprep.qc.@eddyquad_pdf")
+    wf.connect(eddy_quad, 'out_avg_b_png', datasink, "dmriprep.qc.@eddyquad_bpng")
+    wf.connect(eddy_quad, 'out_avg_b0_png', datasink, "dmriprep.qc.@eddyquad_b0png")
+    wf.connect(eddy_quad, 'out_cnr_png', datasink, "dmriprep.qc.@eddyquad_cnr")
+    wf.connect(eddy_quad, 'out_vdm_png', datasink, "dmriprep.qc.@eddyquad_vdm")
+    wf.connect(eddy_quad, 'out_residuals', datasink, 'dmriprep.qc.@eddyquad_resid')
+
     # anatomical registration stuff
     wf.connect(bbreg, "min_cost_file", datasink, "dmriprep.reg.@mincost")
     wf.connect(bbreg, "out_fsl_file", datasink, "dmriprep.reg.@fslfile")
@@ -711,23 +752,26 @@ def binarize_aparc(aparc_aseg):
     wf.connect(reslice_orig_to_dwi, 'out_file', datasink, 'dmriprep.anat.@T1w')
 
     def report_fn(dwi_corrected_file, eddy_rms, eddy_report,
-                  color_fa_file, anat_mask_file, outlier_indices):
+                  color_fa_file, anat_mask_file, outlier_indices,
+                  eddy_qc_file):
         from dmriprep.qc import create_report_json
 
         report = create_report_json(dwi_corrected_file, eddy_rms, eddy_report,
-                                    color_fa_file, anat_mask_file, outlier_indices)
+                                    color_fa_file, anat_mask_file, outlier_indices,
+                                    eddy_qc_file)
        return report
 
     report_node = pe.Node(niu.Function(
         input_names=['dwi_corrected_file', 'eddy_rms',
                      'eddy_report', 'color_fa_file',
-                     'anat_mask_file', 'outlier_indices'],
+                     'anat_mask_file', 'outlier_indices', 'eddy_qc_file'],
         output_names=['report'],
         function=report_fn
     ), name="reportJSON")
 
     # for the report, lets show the eddy corrected (full volume) image
     wf.connect(voltransform, "transformed_file", report_node, 'dwi_corrected_file')
+    wf.connect(eddy_quad, 'out_qc_json', report_node, 'eddy_qc_file')
 
     # add the rms movement output from eddy
     wf.connect(prep, "fsl_eddy.out_movement_rms", report_node, 'eddy_rms')
@@ -762,7 +806,6 @@ def name_files_nicely(dwi_file, subject_id):
         ("art.eddy_corrected_outliers", dwi_fname.replace("dwi", "outliers")),
         ("color_fa", "colorfa"),
         ("orig_out", dwi_fname.replace("_dwi", "_T1w")),
-        # ("eddy_corrected_", dwi_fname.replace("dwi", "")),
         ("stats.eddy_corrected", dwi_fname.replace("dwi", "artStats")),
         ("eddy_corrected.eddy_parameters", dwi_fname + ".eddy_parameters"),
         ("qc/eddy_corrected", "qc/" + dwi_fname),

docker/Dockerfile

Lines changed: 8 additions & 8 deletions
@@ -39,8 +39,8 @@ RUN export ND_ENTRYPOINT="/neurodocker/startup.sh" \
 
 ENTRYPOINT ["/neurodocker/startup.sh"]
 
-ENV FSLDIR="/opt/fsl-6.0.0" \
-    PATH="/opt/fsl-6.0.0/bin:$PATH"
+ENV FSLDIR="/opt/fsl-5.0.11" \
+    PATH="/opt/fsl-5.0.11/bin:$PATH"
 RUN apt-get update -qq \
     && apt-get install -y -q --no-install-recommends \
            bc \
@@ -63,15 +63,15 @@ RUN apt-get update -qq \
     && apt-get clean \
     && rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/* \
     && echo "Downloading FSL ..." \
-    && mkdir -p /opt/fsl-6.0.0 \
-    && curl -fsSL --retry 5 https://fsl.fmrib.ox.ac.uk/fsldownloads/fsl-6.0.0-centos6_64.tar.gz \
-    | tar -xz -C /opt/fsl-6.0.0 --strip-components 1 \
+    && mkdir -p /opt/fsl-5.0.11 \
+    && curl -fsSL --retry 5 https://fsl.fmrib.ox.ac.uk/fsldownloads/fsl-5.0.11-centos6_64.tar.gz \
+    | tar -xz -C /opt/fsl-5.0.11 --strip-components 1 \
     && sed -i '$iecho Some packages in this Docker container are non-free' $ND_ENTRYPOINT \
     && sed -i '$iecho If you are considering commercial use of this container, please consult the relevant license:' $ND_ENTRYPOINT \
     && sed -i '$iecho https://fsl.fmrib.ox.ac.uk/fsl/fslwiki/Licence' $ND_ENTRYPOINT \
     && sed -i '$isource $FSLDIR/etc/fslconf/fsl.sh' $ND_ENTRYPOINT \
     && echo "Installing FSL conda environment ..." \
-    && bash /opt/fsl-6.0.0/etc/fslconf/fslpython_install.sh -f /opt/fsl-6.0.0
+    && bash /opt/fsl-5.0.11/etc/fslconf/fslpython_install.sh -f /opt/fsl-5.0.11
 
 ENV CONDA_DIR="/opt/miniconda-latest" \
     PATH="/opt/miniconda-latest/bin:$PATH"
@@ -134,8 +134,8 @@ COPY ./license.txt /opt/freesurfer-6.0.0/license.txt
 
 #&& sync && conda clean -tipsy && sync
 
-ADD environment.yml environment.yml
 RUN apt-get update && apt-get install -y git gcc libopenblas-base
+ADD environment.yml environment.yml
 RUN conda env create -f environment.yml
 
 ENV LD_LIBRARY_PATH=/usr/lib/openblas-base/
@@ -153,7 +153,7 @@ RUN echo '{ \
     \n  [ \
     \n    "fsl", \
     \n    { \
-    \n      "version": "6.0.0" \
+    \n      "version": "5.0.11" \
     \n    } \
     \n  ], \
     \n  [ \

docker/environment.yml

Lines changed: 2 additions & 1 deletion
@@ -11,6 +11,7 @@ dependencies:
   - pandas
   - tqdm
   - pip:
-    - "--editable=git+https://github.com/nipy/nipype@35b58cdb59a900430f4b9639e220c8408341805d#egg=nipype"
+    - "--editable=git+https://github.com/akeshavan/nipype@7b5e7ab92183eb561980ec713d86eb69f28733e1#egg=nipype"
+    - "--editable=git+https://git.fmrib.ox.ac.uk/matteob/eddy_qc_release@57bb11da6a634c4195593fbc439ba9f8998157b0#egg=eddy_qc"
   - bids
   - duecredit

kubernetes/create_kube_job.py

Lines changed: 27 additions & 0 deletions
@@ -0,0 +1,27 @@
+#!/usr/bin/env python
+import sys
+from boto3 import Session
+
+if __name__ == "__main__":
+    if len(sys.argv) > 1:
+        subject = sys.argv[1]
+
+    session = Session()
+    credentials = session.get_credentials()
+    # Credentials are refreshable, so accessing your access key / secret key
+    # separately can lead to a race condition. Use this to get an actual matched
+    # set.
+    current_credentials = credentials.get_frozen_credentials()
+
+    access_key = current_credentials.access_key
+    secret_key = current_credentials.secret_key
+
+    with open("run_dmriprep.yml.tmpl", 'r') as template:
+        with open("jobs/job_{}.yml".format(subject), 'w') as f:
+            all_text = "\n".join(template.readlines())
+            all_text = all_text.replace("{{subject_lower}}", subject.lower())
+            all_text = all_text.replace("{{subject}}", subject)
+            all_text = all_text.replace("{{access_key}}", access_key)
+            all_text = all_text.replace("{{secret_key}}", secret_key)
+            f.write(all_text)
+
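
Presumably one job file is generated per subject and then submitted with kubectl, e.g. python create_kube_job.py sub-NDARXXXXXXXX followed by kubectl apply -f jobs/job_sub-NDARXXXXXXXX.yml; neither invocation appears in this commit, so the exact commands are an assumption. Note that the generated job file embeds AWS credentials, which is presumably why kubernetes/jobs/ is added to .gitignore in this same commit.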

kubernetes/docker/build_tag_push.sh

Lines changed: 3 additions & 3 deletions
@@ -1,4 +1,4 @@
 #!/bin/bash
-docker build -t dmriprep:kube1 -f dockerfile-dmriprep-kube1 .
-docker tag dmriprep:kube1 gcr.io/dmriprep/dmriprep:kube1
-docker push gcr.io/dmriprep/dmriprep:kube1
+docker build -t dmriprep:kube3 -f dockerfile-dmriprep-kube .
+docker tag dmriprep:kube3 gcr.io/dmriprep/dmriprep:kube3
+docker push gcr.io/dmriprep/dmriprep:kube3
(unnamed file in this view — from its contents, the Kubernetes job template that create_kube_job.py fills in)

Lines changed: 3 additions & 3 deletions
@@ -1,13 +1,13 @@
 apiVersion: batch/v1
 kind: Job
 metadata:
-  name: dmriprep0
+  name: dmriprep-{{subject_lower}}
 spec:
   template:
     spec:
       containers:
       - name: dmriprep-subject
-        image: gcr.io/dmriprep/dmriprep:kube1
-        command: ["/neurodocker/startup.sh", "./dmriprep_all.sh", "sub-NDARED632KNG", "{{access_key}}", "{{secret_key}}"]
+        image: gcr.io/dmriprep/dmriprep:kube3
+        command: ["/neurodocker/startup.sh", "./dmriprep_all.sh", "{{subject}}", "{{access_key}}", "{{secret_key}}"]
       restartPolicy: Never
   backoffLimit: 1

kubernetes/setup_gcp_kubernetes_dmriprep.sh

Lines changed: 1 addition & 1 deletion
@@ -15,4 +15,4 @@ CLUSTERNAME=dmriprep
 gcloud config set compute/zone $ZONE
 
 # start the cluster!
-gcloud beta container clusters create $CLUSTERNAME --machine-type n1-highmem-4 --enable-autoscaling --max-nodes=$MAX_NODES --num-nodes 2 --cluster-version latest --node-labels dmriprep/node-purpose=core
+gcloud beta container clusters create $CLUSTERNAME --machine-type n1-highmem-4 --enable-autoscaling --max-nodes=$MAX_NODES --num-nodes 1 --cluster-version latest --node-labels dmriprep/node-purpose=core
