diff --git a/dmriprep/__init__.py b/dmriprep/__init__.py
index 6150760..fa3abe6 100644
--- a/dmriprep/__init__.py
+++ b/dmriprep/__init__.py
@@ -16,11 +16,6 @@
warnings.filterwarnings("ignore", message="numpy.dtype size changed")
warnings.filterwarnings("ignore", message="numpy.ufunc size changed")
-from . import data
-from . import io
-from . import qc
-from . import run
-
module_logger = logging.getLogger(__name__)
# get the log level from environment variable
diff --git a/dmriprep/b02b0.cnf b/dmriprep/b02b0.cnf
new file mode 100644
index 0000000..77c4cd5
--- /dev/null
+++ b/dmriprep/b02b0.cnf
@@ -0,0 +1,26 @@
+# Resolution (knot-spacing) of warps in mm
+--warpres=20,16,14,12,10,6,4,4,4
+# Subsampling level (a value of 2 indicates that a 2x2x2 neighbourhood is collapsed to 1 voxel)
+--subsamp=2,2,2,2,2,1,1,1,1
+# FWHM of gaussian smoothing
+--fwhm=8,6,4,3,3,2,1,0,0
+# Maximum number of iterations
+--miter=5,5,5,5,5,10,10,20,20
+# Relative weight of regularisation
+--lambda=0.005,0.001,0.0001,0.000015,0.000005,0.0000005,0.00000005,0.0000000005,0.00000000001
+# If set to 1 lambda is multiplied by the current average squared difference
+--ssqlambda=1
+# Regularisation model
+--regmod=bending_energy
+# If set to 1 movements are estimated along with the field
+--estmov=1,1,1,1,1,0,0,0,0
+# 0=Levenberg-Marquardt, 1=Scaled Conjugate Gradient
+--minmet=0,0,0,0,0,1,1,1,1
+# Quadratic or cubic splines
+--splineorder=3
+# Precision for calculation and storage of Hessian
+--numprec=double
+# Linear or spline interpolation
+--interp=spline
+# If set to 1 the images are individually scaled to a common mean intensity
+--scale=1
\ No newline at end of file
diff --git a/dmriprep/b02b0_1.cnf b/dmriprep/b02b0_1.cnf
new file mode 100644
index 0000000..a15519e
--- /dev/null
+++ b/dmriprep/b02b0_1.cnf
@@ -0,0 +1,26 @@
+# Resolution (knot-spacing) of warps in mm
+--warpres=20,16,14,12,10,6,4,4,4
+# Subsampling level (a value of 2 indicates that a 2x2x2 neighbourhood is collapsed to 1 voxel)
+--subsamp=1,1,1,1,1,1,1,1,1
+# FWHM of gaussian smoothing
+--fwhm=8,6,4,3,3,2,1,0,0
+# Maximum number of iterations
+--miter=5,5,5,5,5,10,10,20,20
+# Relative weight of regularisation
+--lambda=0.0005,0.0001,0.00001,0.0000015,0.0000005,0.0000005,0.00000005,0.0000000005,0.00000000001
+# If set to 1 lambda is multiplied by the current average squared difference
+--ssqlambda=1
+# Regularisation model
+--regmod=bending_energy
+# If set to 1 movements are estimated along with the field
+--estmov=1,1,1,1,1,0,0,0,0
+# 0=Levenberg-Marquardt, 1=Scaled Conjugate Gradient
+--minmet=0,0,0,0,0,1,1,1,1
+# Quadratic or cubic splines
+--splineorder=3
+# Precision for calculation and storage of Hessian
+--numprec=double
+# Linear or spline interpolation
+--interp=spline
+# If set to 1 the images are individually scaled to a common mean intensity
+--scale=1
\ No newline at end of file
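# Sketch of how FSL consumes the two .cnf files above, via nipype's TOPUP
# interface (assumptions: FSL is installed; file names are hypothetical --
# b02b0_1.cnf is the no-subsampling variant selected for odd-dimensioned data):
from nipype.interfaces import fsl

topup = fsl.TOPUP(
    in_file="topup_imain.nii.gz",   # 4D stack of b0 volumes
    encoding_file="acqparams.txt",  # one phase-encoding line per b0
    config="b02b0_1.cnf",
)
# topup.run()  # requires FSL and the input files above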
diff --git a/dmriprep/cli.py b/dmriprep/cli.py
index 8b45d7e..48a4f98 100644
--- a/dmriprep/cli.py
+++ b/dmriprep/cli.py
@@ -18,7 +18,7 @@
@click.command()
-@click.option('--participant-label',
+@click.option('--participant_label',
help="The label(s) of the participant(s) that should be"
"analyzed. The label corresponds to"
"sub- from the BIDS spec (so it does"
@@ -26,29 +26,38 @@
"all subjects will be analyzed. Multiple participants"
"can be specified with a space separated list.",
default=None)
-@click.option('--eddy-niter',
- help="Fixed number of eddy iterations. See "
- "https://fsl.fmrib.ox.ac.uk/fsl/fslwiki/eddy/UsersGuide"
- "#A--niter",
- default=5, type=(int))
-@click.option('--slice-outlier-threshold',
+@click.option('--session',
+              help="The session number of the participant's data, in the case of multiple sessions.",
+ default=1)
+@click.option('--denoise_strategy',
+ help="Denoising strategy. Options are: mppca, localpca, or nlmeans.",
+ default="mppca")
+@click.option('--slice_outlier_threshold',
help="Number of allowed outlier slices per volume. "
"If this is exceeded the volume is dropped from analysis. "
"If an int is provided, it is treated as number of allowed "
"outlier slices. If a float between 0 and 1 "
"(exclusive) is provided, it is treated the fraction of "
"allowed outlier slices.",
- default=0.02)
+ default=0.05)
@click.argument('bids_dir',
)
@click.argument('output_dir',
)
+@click.option('--plugin',
+ help="Scheduling plugin type. Default is MultiProc.",
+ default="MultiProc")
+@click.option('--vox',
+ help="Voxel resolution. Default is 2mm.",
+ default='2mm')
+@click.option('--verbose',
+              help="Verbose logging. Default is False.",
+              is_flag=True, default=False)
@click.argument('analysis_level',
type=click.Choice(['participant', 'group']),
default='participant')
-def main(participant_label, bids_dir, output_dir,
- eddy_niter=5, slice_outlier_threshold=0.02,
- analysis_level="participant"):
+def main(participant_label, session, bids_dir, output_dir, plugin='MultiProc', verbose=False,
+         denoise_strategy="mppca", slice_outlier_threshold=0.05, vox='2mm', analysis_level="participant"):
"""
BIDS_DIR: The directory with the input dataset formatted according to
the BIDS standard.
@@ -66,20 +75,24 @@ def main(participant_label, bids_dir, output_dir,
raise NotImplementedError('The only valid analysis level for dmriprep '
'is participant at the moment.')
- inputs = io.get_bids_files(participant_label, bids_dir)
+ from dmriprep.io import get_bids_layout
- for subject_inputs in inputs:
- run.run_dmriprep_pe(**subject_inputs,
- working_dir=os.path.join(output_dir, 'scratch'),
- out_dir=output_dir,
- eddy_niter=eddy_niter,
- slice_outlier_threshold=slice_outlier_threshold)
+ sub_dict = get_bids_layout(bids_dir, participant_label, session)
+ dwi = sub_dict[session]['dwi']
+ fbvec = sub_dict[session]['bvec']
+ fbval = sub_dict[session]['bval']
+ metadata = sub_dict[session]['metadata']
+
+    wf = run.init_single_subject_wf(participant_label, session, dwi, fbval, fbvec, metadata,
+                                    out_dir=output_dir, strategy=denoise_strategy, vox_size=vox,
+                                    plugin_type=plugin, outlier_thresh=slice_outlier_threshold,
+                                    verbose=verbose)
+ wf.run()
return 0
+
@click.command()
-@click.argument('output_dir',
- )
+@click.argument('output_dir')
@click.option('--subject', help="subject id to download (will choose 1 subject if not specified",
default="sub-NDARBA507GCT")
@click.option('--study', help="which study to download. Right now we only support the HBN dataset",
@@ -126,7 +139,7 @@ def upload(output_dir, bucket, access_key, secret_key, provider='s3', subject=No
output_dir += '/'
if provider == 's3' or provider == 'S3':
- client = boto3.client('s3', aws_access_key_id=access_key, aws_secret_access_key=secret_key)
+ client = boto3.client('s3', aws_access_key_id=access_key, aws_secret_access_key=secret_key)
if subject is not None:
assert os.path.exists(os.path.join(output_dir, subject)), 'this subject id does not exist!'
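# Sketch of a participant-level invocation of the CLI above through click's
# test runner (hypothetical paths; assumes the package and its dependencies
# are installed):
from click.testing import CliRunner
from dmriprep.cli import main

runner = CliRunner()
result = runner.invoke(main, [
    "--participant_label", "01",
    "--session", "1",
    "--denoise_strategy", "mppca",
    "/data/bids", "/data/out", "participant",
])
print(result.exit_code)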
diff --git a/dmriprep/core.py b/dmriprep/core.py
new file mode 100644
index 0000000..2a297fb
--- /dev/null
+++ b/dmriprep/core.py
@@ -0,0 +1,508 @@
+import os
+
+import nibabel as nib
+import numpy as np
+
+
+def make_gtab(fbval, fbvec, sesdir):
+ from dipy.io import save_pickle
+ from dipy.core.gradients import gradient_table
+ from dipy.io import read_bvals_bvecs
+
+ if fbval and fbvec:
+ bvals, bvecs = read_bvals_bvecs(fbval, fbvec)
+ else:
+ raise ValueError("Either bvals or bvecs files not found (or rescaling failed)!")
+
+ namer_dir = sesdir + "/dmri_tmp"
+ if not os.path.isdir(namer_dir):
+ os.mkdir(namer_dir)
+
+ gtab_file = "%s%s" % (namer_dir, "/gtab.pkl")
+
+ # Creating the gradient table
+ gtab = gradient_table(bvals, bvecs)
+
+ # Correct b0 threshold
+ gtab.b0_threshold = min(bvals)
+
+    # Show info (gtab.info is a dipy property that prints the summary itself)
+    gtab.info
+
+ # Save gradient table to pickle
+ save_pickle(gtab_file, gtab)
+
+ return gtab_file, gtab
+
+
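# Usage sketch for make_gtab (hypothetical paths):
from dmriprep.core import make_gtab

gtab_file, gtab = make_gtab("sub-01_dwi.bval", "sub-01_dwi.bvec", "/out/sub-01/ses-1")
print(gtab.b0s_mask)  # boolean mask marking the b0 volumes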
+def rescale_bvec(bvec, bvec_rescaled):
+ """
+ Normalizes b-vectors to be of unit length for the non-zero b-values. If the
+ b-value is 0, the vector is untouched.
+
+ Parameters
+ ----------
+ bvec : str
+ File name of the original b-vectors file.
+ bvec_rescaled : str
+ File name of the new (normalized) b-vectors file. Must have extension `.bvec`.
+
+ Returns
+ -------
+ bvec_rescaled : str
+ File name of the new (normalized) b-vectors file. Must have extension `.bvec`.
+ """
+ bv1 = np.array(np.loadtxt(bvec))
+ # Enforce proper dimensions
+ bv1 = bv1.T if bv1.shape[0] == 3 else bv1
+
+ # Normalize values not close to norm 1
+ bv2 = [
+ b / np.linalg.norm(b) if not np.isclose(np.linalg.norm(b), 0) else b
+ for b in bv1
+ ]
+ np.savetxt(bvec_rescaled, bv2)
+ return bvec_rescaled
+
+
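# Worked example of the normalization rule in rescale_bvec above: zero (b0)
# rows are left untouched, everything else is scaled to unit length:
import numpy as np

bv = np.array([[0.0, 0.0, 0.0], [2.0, 0.0, 0.0]])
bv_norm = [b / np.linalg.norm(b) if not np.isclose(np.linalg.norm(b), 0) else b for b in bv]
# -> [array([0., 0., 0.]), array([1., 0., 0.])]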
+def correct_vecs_and_make_b0s(fbval, fbvec, dwi_file, sesdir):
+ from dipy.io import read_bvals_bvecs
+ from dmriprep.core import make_gtab, rescale_bvec
+ from dmriprep.utils import is_hemispherical
+
+ namer_dir = sesdir + "/dmri_tmp"
+ if not os.path.isdir(namer_dir):
+ os.mkdir(namer_dir)
+
+ bvec_rescaled = "%s%s" % (namer_dir, "/bvec_scaled.bvec")
+
+ # loading bvecs/bvals
+ bvals, bvecs = read_bvals_bvecs(fbval, fbvec)
+    bvecs[np.any(np.abs(bvecs) >= 10, axis=1)] = [1, 0, 0]
+ bvecs[np.where(bvals == 0)] = 0
+ if (
+ len(
+ bvecs[
+ np.where(
+ np.logical_and(
+ bvals > 50, np.all(abs(bvecs) == np.array([0, 0, 0]), axis=1)
+ )
+ )
+ ]
+ )
+ > 0
+ ):
+ raise ValueError(
+ "WARNING: Encountered potentially corrupted bval/bvecs. Check to ensure volumes with a "
+ "diffusion weighting are not being treated as B0's along the bvecs"
+ )
+ np.savetxt(fbval, bvals)
+ np.savetxt(fbvec, bvecs)
+ bvec_rescaled = rescale_bvec(fbvec, bvec_rescaled)
+ vecs_rescaled = np.genfromtxt(bvec_rescaled)
+ vecs = np.round(vecs_rescaled, 8)[~(np.round(vecs_rescaled, 8) == 0).all(1)]
+ [is_hemi, pole] = is_hemispherical(vecs)
+ if is_hemi is True:
+ raise ValueError(
+ "B-vectors for this data are hemispherical and therefore use of topup/eddy routines is not "
+ "advised"
+ )
+
+ [gtab_file, gtab] = make_gtab(fbval, bvec_rescaled, sesdir)
+
+ # Get b0 indices
+    b0s = np.where(gtab.bvals <= gtab.b0_threshold)[0].tolist()
+ print("%s%s" % ("b0's found at: ", b0s))
+ firstb0 = b0s[0]
+
+ # Extract and Combine all b0s collected
+ print("Extracting b0's...")
+ b0_vols = []
+ dwi_img = nib.load(dwi_file)
+ dwi_data = dwi_img.get_data()
+ for b0 in b0s:
+ print(b0)
+ b0_vols.append(dwi_data[:, :, :, b0])
+
+ firstb0_file = sesdir + "/firstb0.nii.gz"
+ nib.save(
+ nib.Nifti1Image(
+            b0_vols[0].astype(np.float32), dwi_img.affine, dwi_img.header
+ ),
+ firstb0_file,
+ )
+
+ return firstb0, firstb0_file, gtab_file, b0_vols, b0s
+
+
+def topup_inputs_from_dwi_files(dwi, sesdir, spec_acqp, b0_vols, topup_config_odd):
+    """Create a datain spec and a slspec from a list of dwi files."""
+    from collections import defaultdict
+
+ # Write the datain.txt file
+ datain_lines = []
+ spec_counts = defaultdict(int)
+
+ dwi_img = nib.load(dwi)
+ imain_data = []
+ for b0_vol in b0_vols:
+ num_trs = 1 if len(b0_vol.shape) < 4 else b0_vol.shape[3]
+ datain_lines.extend([spec_acqp] * num_trs)
+ spec_counts[spec_acqp] += num_trs
+ imain_data.append(b0_vol)
+
+ # Make a 4d series
+ imain_output = sesdir + "/topup_imain.nii.gz"
+ imain_data_4d = [imain_vol[..., np.newaxis] for imain_vol in imain_data]
+ imain_img = nib.Nifti1Image(
+ np.concatenate(imain_data_4d, 3), dwi_img.affine, dwi_img.header
+ )
+ assert imain_img.shape[3] == len(datain_lines)
+ imain_img.to_filename(imain_output)
+
+ # Write the datain text file
+ datain_file = sesdir + "/acqparams.txt"
+ with open(datain_file, "w") as f:
+ f.write("\n".join(datain_lines))
+
+ example_b0 = b0_vols[0]
+ topup_config = "b02b0.cnf"
+ if 1 in (example_b0.shape[0] % 2, example_b0.shape[1] % 2, example_b0.shape[2] % 2):
+ print("Using slower b02b0_1.cnf because an axis has an odd number of slices")
+ topup_config = topup_config_odd
+
+ return datain_file, imain_output, example_b0, datain_lines, topup_config
+
+
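# Each line of the acqparams/datain file written above is "x y z TotalReadoutTime";
# e.g. for PhaseEncodingDirection "j-" and a (hypothetical) readout time of 0.05 s:
spec_acqp = "0 -1 0 %.6f" % 0.05
print(spec_acqp)  # -> 0 -1 0 0.050000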
+def eddy_inputs_from_dwi_files(sesdir, gtab_file):
+ from dipy.io import load_pickle
+
+ b0s_mask_all = []
+ gtab = load_pickle(gtab_file)
+ b0s_mask = gtab.b0s_mask
+ b0s_mask_all.append(b0s_mask)
+
+ # Create the index file
+ index_file = sesdir + "/index.txt"
+ ix_vec = []
+ i = 1
+ pastfirst0s = False
+ for val in gtab.bvals:
+ if val > gtab.b0_threshold:
+ pastfirst0s = True
+ elif val <= gtab.b0_threshold and pastfirst0s is True:
+ i = i + 1
+ ix_vec.append(i)
+ with open(index_file, "w") as f:
+ f.write(" ".join(map(str, ix_vec)))
+
+ return index_file
+
+
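# Worked example of the index logic above: every volume is assigned to the most
# recent block of b0s, so bvals [0, 1000, 1000, 0, 1000] yield "1 1 1 2 2":
ix_vec, i, pastfirst0s = [], 1, False
for val in [0, 1000, 1000, 0, 1000]:
    if val > 0:
        pastfirst0s = True
    elif pastfirst0s:
        i += 1
    ix_vec.append(i)
print(" ".join(map(str, ix_vec)))  # -> 1 1 1 2 2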
+def id_outliers_fn(outlier_report, threshold, dwi_file):
+ """Get list of scans that exceed threshold for number of outliers
+ Parameters
+ ----------
+ outlier_report: string
+ Path to the fsl_eddy outlier report
+ threshold: int or float
+ If threshold is an int, it is treated as number of allowed outlier
+ slices. If threshold is a float between 0 and 1 (exclusive), it is
+ treated the fraction of allowed outlier slices before we drop the
+ whole volume.
+ dwi_file: string
+ Path to nii dwi file to determine total number of slices
+ Returns
+ -------
+ drop_scans: numpy.ndarray
+ List of scan indices to drop
+ """
+ import nibabel as nib
+ import numpy as np
+ import os.path as op
+ import parse
+
+ with open(op.abspath(outlier_report), "r") as fp:
+ lines = fp.readlines()
+
+ p = parse.compile(
+ "Slice {slice:d} in scan {scan:d} is an outlier with "
+ "mean {mean_sd:f} standard deviations off, and mean "
+ "squared {mean_sq_sd:f} standard deviations off."
+ )
+
+ outliers = [p.parse(l).named for l in lines]
+ scans = {d["scan"] for d in outliers}
+
+ def num_outliers(scan, outliers):
+ return len([d for d in outliers if d["scan"] == scan])
+
+ if 0 < threshold < 1:
+ img = nib.load(dwi_file)
+ try:
+ threshold *= img.header.get_n_slices()
+ except nib.spatialimages.HeaderDataError:
+ print(
+ "WARNING. We are not sure which dimension has the "
+ "slices in this image. So we are using the 3rd dim.",
+ img.shape,
+ )
+ threshold *= img.shape[2]
+
+ drop_scans = np.array([s for s in scans if num_outliers(s, outliers) > threshold])
+
+ outpath = op.abspath("dropped_scans.txt")
+ np.savetxt(outpath, drop_scans, fmt="%d")
+
+ return drop_scans, outpath
+
+
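# Example of the report parsing above against a made-up eddy outlier line:
import parse

p = parse.compile(
    "Slice {slice:d} in scan {scan:d} is an outlier with "
    "mean {mean_sd:f} standard deviations off, and mean "
    "squared {mean_sq_sd:f} standard deviations off."
)
line = ("Slice 42 in scan 7 is an outlier with mean 4.10 standard deviations "
        "off, and mean squared 5.00 standard deviations off.")
print(p.parse(line).named["scan"])  # -> 7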
+def drop_outliers_fn(in_file, in_bval, in_bvec, drop_scans):
+ """Drop outlier volumes from dwi file
+ Parameters
+ ----------
+ in_file: string
+ Path to nii dwi file to drop outlier volumes from
+ in_bval: string
+ Path to bval file to drop outlier volumes from
+ in_bvec: string
+ Path to bvec file to drop outlier volumes from
+ drop_scans: numpy.ndarray or str
+ List of scan indices to drop. If str, then assume path to text file
+ listing outlier volumes.
+
+ Returns
+ -------
+ out_file: string
+ Path to "thinned" output dwi file
+ out_bval: string
+ Path to "thinned" output bval file
+ out_bvec: string
+ Path to "thinned" output bvec file
+ """
+ import nibabel as nib
+ import numpy as np
+ import os.path as op
+ from nipype.utils.filemanip import fname_presuffix
+
+ if isinstance(drop_scans, str):
+ try:
+ drop_scans = np.genfromtxt(drop_scans)
+        except (TypeError, ValueError):
+            raise ValueError(
+                "Unrecognized file format. Unable to load vector from drop_scans file."
+            )
+
+ img = nib.load(op.abspath(in_file))
+ img_data = img.get_data()
+ img_data_thinned = np.delete(img_data, drop_scans, axis=3)
+ if isinstance(img, nib.nifti1.Nifti1Image):
+ img_thinned = nib.Nifti1Image(
+ img_data_thinned.astype(np.float64), img.affine, header=img.header
+ )
+ elif isinstance(img, nib.nifti2.Nifti2Image):
+ img_thinned = nib.Nifti2Image(
+ img_data_thinned.astype(np.float64), img.affine, header=img.header
+ )
+ else:
+ raise TypeError("in_file does not contain Nifti image datatype.")
+
+ out_file = fname_presuffix(in_file, suffix="_thinned", newpath=op.abspath("."))
+ nib.save(img_thinned, op.abspath(out_file))
+
+ bval = np.loadtxt(in_bval)
+ bval_thinned = np.delete(bval, drop_scans, axis=0)
+ out_bval = fname_presuffix(in_bval, suffix="_thinned", newpath=op.abspath("."))
+ np.savetxt(out_bval, bval_thinned)
+
+    bvec = np.loadtxt(in_bvec)
+    # bvecs may be stored N x 3 or 3 x N (FSL); drop along the volume axis either way
+    bvec_thinned = np.delete(bvec, drop_scans, axis=1 if bvec.shape[0] == 3 else 0)
+ out_bvec = fname_presuffix(in_bvec, suffix="_thinned", newpath=op.abspath("."))
+ np.savetxt(out_bvec, bvec_thinned)
+
+ print("%s%s" % ('Dropping outlier volumes:\n', drop_scans))
+ return out_file, out_bval, out_bvec
+
+
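# The volume axis of a 4D dwi series is axis 3, so np.delete in drop_outliers_fn
# above removes whole volumes, e.g.:
import numpy as np

data4d = np.zeros((96, 96, 60, 10))
print(np.delete(data4d, [3, 7], axis=3).shape)  # -> (96, 96, 60, 8)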
+def get_params(A):
+ """This is a copy of spm's spm_imatrix where
+ we already know the rotations and translations matrix,
+ shears and zooms (as outputs from fsl FLIRT/avscale)
+ Let A = the 4x4 rotation and translation matrix
+ R = [ c5*c6, c5*s6, s5]
+ [-s4*s5*c6-c4*s6, -s4*s5*s6+c4*c6, s4*c5]
+ [-c4*s5*c6+s4*s6, -c4*s5*s6-s4*c6, c4*c5]
+ """
+
+ def rang(b):
+ a = min(max(b, -1), 1)
+ return a
+
+ Ry = np.arcsin(A[0, 2])
+ # Rx = np.arcsin(A[1, 2] / np.cos(Ry))
+ # Rz = np.arccos(A[0, 1] / np.sin(Ry))
+
+ if (abs(Ry) - np.pi / 2) ** 2 < 1e-9:
+ Rx = 0
+ Rz = np.arctan2(-rang(A[1, 0]), rang(-A[2, 0] / A[0, 2]))
+ else:
+ c = np.cos(Ry)
+ Rx = np.arctan2(rang(A[1, 2] / c), rang(A[2, 2] / c))
+ Rz = np.arctan2(rang(A[0, 1] / c), rang(A[0, 0] / c))
+
+ rotations = [Rx, Ry, Rz]
+ translations = [A[0, 3], A[1, 3], A[2, 3]]
+
+ return rotations, translations
+
+
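# Sanity check for get_params: the identity transform decodes to zero rotation
# and zero translation:
import numpy as np
from dmriprep.core import get_params

rotations, translations = get_params(np.eye(4))
print(rotations, translations)  # -> [0.0, 0.0, 0.0] [0.0, 0.0, 0.0]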
+def get_flirt_motion_parameters(flirt_mats):
+ import os.path as op
+ from nipype.interfaces.fsl.utils import AvScale
+ from dmriprep.core import get_params
+
+ motion_params = open(op.abspath("motion_parameters.par"), "w")
+ for mat in flirt_mats:
+ res = AvScale(mat_file=mat).run()
+ A = np.asarray(res.outputs.rotation_translation_matrix)
+ rotations, translations = get_params(A)
+ for i in rotations + translations:
+ motion_params.write("%f " % i)
+ motion_params.write("\n")
+ motion_params.close()
+ motion_params = op.abspath("motion_parameters.par")
+ return motion_params
+
+
+def read_nifti_sidecar(json_file):
+ import json
+
+ with open(json_file, "r") as f:
+ metadata = json.load(f)
+ pe_dir = metadata["PhaseEncodingDirection"]
+ slice_times = metadata.get("SliceTiming")
+ trt = metadata.get("TotalReadoutTime")
+    if trt is None:
+        print("WARNING: TotalReadoutTime missing from sidecar; acqparams for topup/eddy cannot be built.")
+
+ return {
+ "PhaseEncodingDirection": pe_dir,
+ "SliceTiming": slice_times,
+ "TotalReadoutTime": trt,
+ }
+
+
+def extract_metadata(metadata):
+ from dmriprep.core import read_nifti_sidecar
+
+ acqp_lines = {
+ "i": "1 0 0 %.6f",
+ "j": "0 1 0 %.6f",
+ "k": "0 0 1 %.6f",
+ "i-": "-1 0 0 %.6f",
+ "j-": "0 -1 0 %.6f",
+ "k-": "0 0 -1 %.6f",
+ }
+ spec = read_nifti_sidecar(metadata)
+ spec_line = acqp_lines[spec["PhaseEncodingDirection"]]
+ spec_acqp = spec_line % spec["TotalReadoutTime"]
+ spec_slice = spec["SliceTiming"]
+ return spec_line, spec_acqp, spec_slice
+
+
+def check_shelled(gtab_file):
+ from dipy.io import load_pickle
+
+ # Check whether data is shelled
+ gtab = load_pickle(gtab_file)
+ if len(np.unique(gtab.bvals)) > 2:
+ is_shelled = True
+ else:
+ is_shelled = False
+ return is_shelled
+
+
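# The shell check above simply counts unique b-values (b=0 included), e.g.:
import numpy as np

print(len(np.unique([0, 1000, 1000])) > 2)        # False: single-shell
print(len(np.unique([0, 1000, 2000, 3000])) > 2)  # True: multi-shell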
+def make_mean_b0(in_file):
+ import time
+
+ b0_img = nib.load(in_file)
+ b0_img_data = b0_img.get_data()
+ mean_b0 = np.mean(b0_img_data, axis=3, dtype=b0_img_data.dtype)
+ mean_file_out = in_file.split(".nii.gz")[0] + "_mean.nii.gz"
+ nib.save(
+ nib.Nifti1Image(mean_b0, affine=b0_img.affine, header=b0_img.header),
+ mean_file_out,
+ )
+ while os.path.isfile(mean_file_out) is False:
+ time.sleep(1)
+ return mean_file_out
+
+
+def suppress_gibbs(in_file, sesdir):
+ from time import time
+ from dipy.denoise.gibbs import gibbs_removal
+
+ t = time()
+ img = nib.load(in_file)
+ img_data = img.get_data()
+ gibbs_corr_data = gibbs_removal(img_data)
+ print("Time taken for gibbs suppression: ", -t + time())
+ gibbs_free_file = sesdir + "/gibbs_free_data.nii.gz"
+ nib.save(
+ nib.Nifti1Image(gibbs_corr_data.astype(np.float32), img.affine, img.header),
+ gibbs_free_file,
+ )
+ return gibbs_corr_data, gibbs_free_file
+
+
+def denoise(
+ in_file,
+ sesdir,
+ gtab_file,
+ mask,
+ strategy,
+ N=4,
+ patch_radius=2,
+ smooth_factor=3,
+ tau_factor=2.3,
+ block_radius=1,
+):
+ from time import time
+ from dipy.denoise.noise_estimate import estimate_sigma
+ from dipy.denoise.pca_noise_estimate import pca_noise_estimate
+ from dipy.denoise.localpca import localpca, mppca
+ from dipy.denoise.nlmeans import nlmeans
+ from dipy.io import load_pickle
+
+ gtab = load_pickle(gtab_file)
+ t = time()
+ img = nib.load(in_file)
+ img_data = img.get_data()
+ if strategy == "mppca":
+ img_data_den = mppca(img_data, patch_radius=patch_radius)
+ elif strategy == "localpca":
+ sigma = pca_noise_estimate(
+ img_data, gtab, correct_bias=True, smooth=smooth_factor
+ )
+ img_data_den = localpca(
+ img_data, sigma, tau_factor=tau_factor, patch_radius=patch_radius
+ )
+ elif strategy == "nlmeans":
+ sigma = estimate_sigma(img_data, N=N)
+ img_data_den = nlmeans(
+ img_data,
+ sigma=sigma,
+ mask=mask,
+ patch_radius=patch_radius,
+ block_radius=block_radius,
+ rician=True,
+ )
+ else:
+ raise ValueError("Denoising strategy not recognized.")
+ print("Time taken for denoising: ", -t + time())
+ denoised_file = sesdir + "/preprocessed_data_denoised_" + strategy + ".nii.gz"
+ nib.save(
+ nib.Nifti1Image(img_data_den.astype(np.float32), img.affine, img.header),
+ denoised_file,
+ )
+ return img_data_den, denoised_file
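# Usage sketch for denoise (hypothetical paths; the mask argument is only used
# by the "nlmeans" strategy):
from dmriprep.core import denoise

img_data_den, denoised_file = denoise(
    "eddy_corrected.nii.gz",
    "/out/sub-01/ses-1",
    "/out/sub-01/ses-1/dmri_tmp/gtab.pkl",
    mask=None,
    strategy="mppca",
)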
diff --git a/dmriprep/data.py b/dmriprep/data.py
index d06767e..7f1a3d7 100644
--- a/dmriprep/data.py
+++ b/dmriprep/data.py
@@ -16,7 +16,6 @@
from dask.diagnostics import ProgressBar
from tqdm.auto import tqdm
-
mod_logger = logging.getLogger(__name__)
@@ -179,12 +178,13 @@ def _cumulative_paths(path_parts, add_ext=""):
except IndexError:
pass
- return [''.join(path_parts[:i+1]) + add_ext
+ return [''.join(path_parts[:i + 1]) + add_ext
for i in range(len(path_parts))]
class Study:
"""A dMRI based study with a BIDS compliant directory structure"""
+
def __init__(self, study_id, bucket, s3_prefix, subjects=None):
"""Initialize a Study instance
@@ -293,7 +293,6 @@ def __init__(self, study_id, bucket, s3_prefix, subjects=None):
else:
self._n_discarded = len([s for s in subjects if not s.valid])
-
@property
def study_id(self):
"""An identifier string for the study"""
@@ -391,6 +390,7 @@ class S3BidsStudy(Study):
"""
"""
+
def __init__(self, study_id, bucket, s3_prefix=None,
subjects=None):
"""
@@ -427,8 +427,6 @@ def list_all_subjects(self):
# XXX Ariel will figure this out.
-
-
class HBN(Study):
"""The HBN study
@@ -436,6 +434,7 @@ class HBN(Study):
--------
dmriprep.data.Study
"""
+
def __init__(self, subjects=None):
"""Initialize the HBN instance
@@ -463,6 +462,7 @@ def list_all_subjects(self):
dict
dict with participant_id as keys and site_id as values
"""
+
def get_site_tsv_keys(site_id):
pre = 'data/Projects/HBN/MRI/'
raw = pre + f'{site_id}/participants.tsv'
@@ -568,6 +568,7 @@ def postprocess(self, subject):
class Subject:
"""A single dMRI study subject"""
+
def __init__(self, subject_id, study, site=None):
"""Initialize a Subject instance
diff --git a/dmriprep/dmriprep.py b/dmriprep/dmriprep.py
index 47911d9..0e30d86 100644
--- a/dmriprep/dmriprep.py
+++ b/dmriprep/dmriprep.py
@@ -3,11 +3,5 @@
"""Main module."""
import logging
-import os
-import os.path as op
-import subprocess
-
-from .run import run_dmriprep
-
mod_logger = logging.getLogger(__name__)
diff --git a/dmriprep/eddy_params.json b/dmriprep/eddy_params.json
new file mode 100644
index 0000000..631aad8
--- /dev/null
+++ b/dmriprep/eddy_params.json
@@ -0,0 +1,19 @@
+{
+ "flm": "linear",
+ "slm": "linear",
+ "fep": false,
+ "interp": "spline",
+ "nvoxhp": 1000,
+ "fudge_factor": 10,
+ "dont_sep_offs_move": false,
+ "dont_peas": false,
+ "niter": 5,
+ "method": "jac",
+ "repol": true,
+ "num_threads": 1,
+ "is_shelled": true,
+ "use_cuda": false,
+ "cnr_maps": true,
+ "residuals": false,
+ "output_type": "NIFTI_GZ"
+}
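# Sketch: these keys are assumed to mirror nipype fsl.Eddy input names, so the
# file can be applied directly to an (Extended)Eddy node:
import json
from dmriprep.interfaces import ExtendedEddy

with open("eddy_params.json") as f:
    eddy_args = json.load(f)

eddy = ExtendedEddy()
for name, value in eddy_args.items():
    setattr(eddy.inputs, name, value)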
diff --git a/dmriprep/interfaces.py b/dmriprep/interfaces.py
new file mode 100644
index 0000000..1458735
--- /dev/null
+++ b/dmriprep/interfaces.py
@@ -0,0 +1,120 @@
+import os
+import os.path as op
+
+from nipype.interfaces import fsl
+from nipype.interfaces.base import isdefined
+
+
+class ExtendedEddyOutputSpec(fsl.epi.EddyOutputSpec):
+ from nipype.interfaces.base import File
+
+ shell_PE_translation_parameters = File(
+ exists=True,
+ desc="the translation along the PE-direction between the different shells",
+ )
+ outlier_map = File(
+ exists=True,
+ desc="All numbers are either 0, meaning that scan-slice "
+ "is not an outliers, or 1 meaning that it is.",
+ )
+ outlier_n_stdev_map = File(
+ exists=True,
+ desc="how many standard deviations off the mean difference "
+ "between observation and prediction is.",
+ )
+ outlier_n_sqr_stdev_map = File(
+ exists=True,
+ desc="how many standard deviations off the square root of the "
+ "mean squared difference between observation and prediction is.",
+ )
+ outlier_free_data = File(
+ exists=True,
+ desc=" the original data given by --imain not corrected for "
+ "susceptibility or EC-induced distortions or subject movement, but with "
+ "outlier slices replaced by the Gaussian Process predictions.",
+ )
+
+
+class ExtendedEddy(fsl.Eddy):
+ output_spec = ExtendedEddyOutputSpec
+ _num_threads = 1
+
+ def __init__(self, **inputs):
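+        # NOTE: fsl.Eddy.__init__ is deliberately skipped here; its trait
+        # handlers for num_threads/use_cuda are re-registered below so this
+        # subclass keeps that behavior while using the extended output spec.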
+ super(fsl.Eddy, self).__init__(**inputs)
+ self.inputs.on_trait_change(self._num_threads_update, "num_threads")
+ if not isdefined(self.inputs.num_threads):
+ self.inputs.num_threads = self._num_threads
+ else:
+ self._num_threads_update()
+ self.inputs.on_trait_change(self._use_cuda, "use_cuda")
+ if isdefined(self.inputs.use_cuda):
+ self._use_cuda()
+
+ def _list_outputs(self):
+ outputs = self.output_spec().get()
+ outputs["out_corrected"] = os.path.abspath("%s.nii.gz" % self.inputs.out_base)
+ outputs["out_parameter"] = os.path.abspath(
+ "%s.eddy_parameters" % self.inputs.out_base
+ )
+
+ # File generation might depend on the version of EDDY
+ out_rotated_bvecs = os.path.abspath(
+ "%s.eddy_rotated_bvecs" % self.inputs.out_base
+ )
+ out_movement_rms = os.path.abspath(
+ "%s.eddy_movement_rms" % self.inputs.out_base
+ )
+ out_restricted_movement_rms = os.path.abspath(
+ "%s.eddy_restricted_movement_rms" % self.inputs.out_base
+ )
+ out_shell_alignment_parameters = os.path.abspath(
+ "%s.eddy_post_eddy_shell_alignment_parameters" % self.inputs.out_base
+ )
+ shell_PE_translation_parameters = op.abspath(
+ "%s.eddy_post_eddy_shell_PE_translation_parameters" % self.inputs.out_base
+ )
+ out_outlier_report = os.path.abspath(
+ "%s.eddy_outlier_report" % self.inputs.out_base
+ )
+ outlier_map = op.abspath("%s.eddy_outlier_map" % self.inputs.out_base)
+ outlier_n_stdev_map = op.abspath(
+ "%s.eddy_outlier_n_stdev_map" % self.inputs.out_base
+ )
+ outlier_n_sqr_stdev_map = op.abspath(
+ "%s.eddy_outlier_n_sqr_stdev_map" % self.inputs.out_base
+ )
+
+ if isdefined(self.inputs.cnr_maps) and self.inputs.cnr_maps:
+ out_cnr_maps = os.path.abspath(
+ "%s.eddy_cnr_maps.nii.gz" % self.inputs.out_base
+ )
+ if os.path.exists(out_cnr_maps):
+ outputs["out_cnr_maps"] = out_cnr_maps
+ if isdefined(self.inputs.residuals) and self.inputs.residuals:
+ out_residuals = os.path.abspath(
+ "%s.eddy_residuals.nii.gz" % self.inputs.out_base
+ )
+ if os.path.exists(out_residuals):
+ outputs["out_residuals"] = out_residuals
+
+ if os.path.exists(out_rotated_bvecs):
+ outputs["out_rotated_bvecs"] = out_rotated_bvecs
+ if os.path.exists(out_movement_rms):
+ outputs["out_movement_rms"] = out_movement_rms
+ if os.path.exists(out_restricted_movement_rms):
+ outputs["out_restricted_movement_rms"] = out_restricted_movement_rms
+ if os.path.exists(out_shell_alignment_parameters):
+ outputs["out_shell_alignment_parameters"] = out_shell_alignment_parameters
+ if os.path.exists(out_outlier_report):
+ outputs["out_outlier_report"] = out_outlier_report
+
+ if op.exists(shell_PE_translation_parameters):
+ outputs["shell_PE_translation_parameters"] = shell_PE_translation_parameters
+ if op.exists(outlier_map):
+ outputs["outlier_map"] = outlier_map
+ if op.exists(outlier_n_stdev_map):
+ outputs["outlier_n_stdev_map"] = outlier_n_stdev_map
+ if op.exists(outlier_n_sqr_stdev_map):
+ outputs["outlier_n_sqr_stdev_map"] = outlier_n_sqr_stdev_map
+
+ return outputs
diff --git a/dmriprep/io.py b/dmriprep/io.py
index a154387..aa5e0c3 100644
--- a/dmriprep/io.py
+++ b/dmriprep/io.py
@@ -9,6 +9,8 @@
import bids
+# TODO: Update the below and replace with get_bids_layout. Will need to be able to fetch data from multiple runs for
+# separate AP/PA acquisitions and for discretized multishell acquisitions (e.g. HCP)
def get_bids_subject_input_files(subject_id, bids_input_directory):
"""
Function to return the needed files for dmriprep based on subject id and a bids directory.
@@ -51,14 +53,14 @@ def get_bids_subject_input_files(subject_id, bids_input_directory):
bvec_file = [d.path for d in dwi_files if d.filename.endswith('.bvec')]
assert len(bvec_file) == 1, 'found {} bvec files and we need just 1'.format(len(bvec_file))
- subjects_dir = op.join(bids_input_directory, 'derivatives', 'sub-'+subject_id)
+ subjects_dir = op.join(bids_input_directory, 'derivatives', 'sub-' + subject_id)
if not op.exists(op.join(subjects_dir, 'freesurfer')):
raise NotImplementedError('we have not yet implemented a version of dmriprep that runs freesurfer for you.'
'please run freesurfer and try again'
)
- outputs = dict(subject_id="sub-"+subject_id,
+ outputs = dict(subject_id="sub-" + subject_id,
dwi_file=dwi_file[0],
dwi_file_AP=ap_file[0].path,
dwi_file_PA=pa_file[0].path,
@@ -82,3 +84,56 @@ def get_bids_files(subject_id, bids_input_directory):
return [get_bids_subject_input_files(sub, bids_input_directory) for sub in subjects]
else:
return [get_bids_subject_input_files(subject_id, bids_input_directory)]
+
+
+def get_bids_layout(bdir, subj=None, sesh=None):
+ from dmriprep.utils import merge_dicts
+ import bids
+ layout = bids.BIDSLayout(bdir, validate=False)
+ # get all files matching the specific modality we are using
+ if not subj:
+ # list of all the subjects
+ subjs = layout.get_subjects()
+ else:
+ # make it a list so we can iterate
+        assert subj in layout.get_subjects(), "subject {} is not in the bids folder".format(subj)
+        subjs = [subj]
+ for sub in subjs:
+ if not sesh:
+ seshs = layout.get_sessions(subject=sub, derivatives=False)
+ # in case there are non-session level inputs
+ seshs += [None]
+ else:
+ # make a list so we can iterate
+ seshs = [sesh]
+ print('\n')
+ print("%s%s" % ('Subject:', sub))
+ print("%s%s" % ('Sessions:', seshs))
+ print('\n')
+ # all the combinations of sessions and tasks that are possible
+ sub_dict = dict()
+ for ses in seshs:
+ # the attributes for our modality img
+ mod_attributes = [sub, ses]
+ # the keys for our modality img
+ mod_keys = ['subject', 'session']
+ # our query we will use for each modality img
+ mod_query = {'modality': 'dwi'}
+
+ for attr, key in zip(mod_attributes, mod_keys):
+ if attr:
+ mod_query[key] = attr
+
+ dwi = layout.get(**merge_dicts(mod_query, {'extensions': 'nii.gz|nii'}))
+ bval = layout.get(**merge_dicts(mod_query, {'extensions': 'bval'}))
+ bvec = layout.get(**merge_dicts(mod_query, {'extensions': 'bvec'}))
+ jso = layout.get(**merge_dicts(mod_query, {'extensions': 'json'}))
+
+        sub_dict[ses] = {}
+ sub_dict[ses]['dwi'] = dwi[0][0]
+ sub_dict[ses]['bval'] = bval[0][0]
+ sub_dict[ses]['bvec'] = bvec[0][0]
+ sub_dict[ses]['metadata'] = jso[0][0]
+
+ return sub_dict
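# Usage sketch against a hypothetical BIDS tree:
from dmriprep.io import get_bids_layout

sub_dict = get_bids_layout("/data/bids", subj="01", sesh="1")
dwi, fbval, fbvec = sub_dict["1"]["dwi"], sub_dict["1"]["bval"], sub_dict["1"]["bvec"]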
diff --git a/dmriprep/qc.py b/dmriprep/qc.py
index b6663f2..8e8774b 100644
--- a/dmriprep/qc.py
+++ b/dmriprep/qc.py
@@ -1,32 +1,276 @@
import base64
-import os.path as op
-from io import BytesIO
import matplotlib
-matplotlib.use('agg')
+
+from io import BytesIO
+
+matplotlib.use("agg")
import matplotlib.pyplot as plt
import nibabel as nib
import numpy as np
from dipy.segment.mask import median_otsu
-from nipype.utils.filemanip import save_json, load_json
def reorient_array(data, aff):
    # rearrange the matrix to RAS orientation
    orientation = nib.orientations.io_orientation(aff)
    data_RAS = nib.orientations.apply_orientation(data, orientation)
    # In RAS
    return nib.orientations.apply_orientation(
        data_RAS,
        nib.orientations.axcodes2ornt("IPL")
    )
+def normalize_xform(img):
+ """ Set identical, valid qform and sform matrices in an image
+ Selects the best available affine (sform > qform > shape-based), and
+ coerces it to be qform-compatible (no shears).
+ The resulting image represents this same affine as both qform and sform,
+ and is marked as NIFTI_XFORM_ALIGNED_ANAT, indicating that it is valid,
+ not aligned to template, and not necessarily preserving the original
+ coordinates.
+ If header would be unchanged, returns input image.
+ """
+ # Let nibabel convert from affine to quaternions, and recover xform
+ tmp_header = img.header.copy()
+ tmp_header.set_qform(img.affine)
+ xform = tmp_header.get_qform()
+ xform_code = 2
+
+ # Check desired codes
+ qform, qform_code = img.get_qform(coded=True)
+ sform, sform_code = img.get_sform(coded=True)
+ if all(
+ (
+ qform is not None and np.allclose(qform, xform),
+ sform is not None and np.allclose(sform, xform),
+ int(qform_code) == xform_code,
+ int(sform_code) == xform_code,
+ )
+ ):
+ return img
+
+ new_img = img.__class__(img.get_data(), xform, img.header)
+ # Unconditionally set sform/qform
+ new_img.set_sform(xform, xform_code)
+ new_img.set_qform(xform, xform_code)
+
+ return new_img
+
+
+def reorient_dwi(dwi_prep, bvecs, out_dir):
+ """
+ A function to reorient any dwi image and associated bvecs to RAS+.
+
+ Parameters
+ ----------
+ dwi_prep : str
+ File path to a dwi Nifti1Image.
+ bvecs : str
+ File path to corresponding bvecs file.
+ out_dir : str
+ Path to output directory.
+
+ Returns
+ -------
+ out_fname : str
+ File path to the reoriented dwi Nifti1Image.
+ out_bvec_fname : str
+ File path to corresponding reoriented bvecs file.
+ """
+ from dmriprep.qc import normalize_xform
+
+ fname = dwi_prep
+ bvec_fname = bvecs
+ out_bvec_fname = "%s%s" % (out_dir, "/bvecs_reor.bvec")
+
+ input_img = nib.load(fname)
+ input_axcodes = nib.aff2axcodes(input_img.affine)
+ reoriented = nib.as_closest_canonical(input_img)
+ normalized = normalize_xform(reoriented)
+ # Is the input image oriented how we want?
+ new_axcodes = ("R", "A", "S")
+ if normalized is not input_img:
+ out_fname = "%s%s%s%s" % (
+ out_dir,
+ "/",
+ dwi_prep.split("/")[-1].split(".nii.gz")[0],
+ "_reor_RAS.nii.gz",
+ )
+ print("%s%s%s" % ("Reorienting ", dwi_prep, " to RAS+..."))
+
+ # Flip the bvecs
+ input_orientation = nib.orientations.axcodes2ornt(input_axcodes)
+ desired_orientation = nib.orientations.axcodes2ornt(new_axcodes)
+ transform_orientation = nib.orientations.ornt_transform(
+ input_orientation, desired_orientation
+ )
+ bvec_array = np.loadtxt(bvec_fname)
+ if bvec_array.shape[0] != 3:
+ bvec_array = bvec_array.T
+ if not bvec_array.shape[0] == transform_orientation.shape[0]:
+ raise ValueError("Unrecognized bvec format")
+ output_array = np.zeros_like(bvec_array)
+ for this_axnum, (axnum, flip) in enumerate(transform_orientation):
+ output_array[this_axnum] = bvec_array[int(axnum)] * float(flip)
+ np.savetxt(out_bvec_fname, output_array, fmt="%.8f ")
+ else:
+ out_fname = "%s%s%s%s" % (
+ out_dir,
+ "/",
+ dwi_prep.split("/")[-1].split(".nii.gz")[0],
+ "_noreor_RAS.nii.gz",
+ )
+ out_bvec_fname = bvec_fname
+
+ normalized.to_filename(out_fname)
+
+ return out_fname, out_bvec_fname
+
+
+def reorient_img(img, out_dir):
+ """
+ A function to reorient any non-dwi image to RAS+.
+
+ Parameters
+ ----------
+ img : str
+ File path to a Nifti1Image.
+ out_dir : str
+ Path to output directory.
+
+ Returns
+ -------
+ out_name : str
+ File path to reoriented Nifti1Image.
+ """
+ from dmriprep.qc import normalize_xform
+
+ # Load image, orient as RAS
+ orig_img = nib.load(img)
+ reoriented = nib.as_closest_canonical(orig_img)
+ normalized = normalize_xform(reoriented)
+
+ # Image may be reoriented
+ if normalized is not orig_img:
+ print("%s%s%s" % ("Reorienting ", img, " to RAS+..."))
+ out_name = "%s%s%s%s" % (
+ out_dir,
+ "/",
+ img.split("/")[-1].split(".nii.gz")[0],
+ "_reor_RAS.nii.gz",
+ )
+ else:
+ out_name = "%s%s%s%s" % (
+ out_dir,
+ "/",
+ img.split("/")[-1].split(".nii.gz")[0],
+ "_noreor_RAS.nii.gz",
+ )
+
+ normalized.to_filename(out_name)
+
+ return out_name
+
+
+def match_target_vox_res(img_file, vox_size, out_dir):
+ """
+ A function to resample an image to a given isotropic voxel resolution.
+
+ Parameters
+ ----------
+ img_file : str
+ File path to a Nifti1Image.
+ vox_size : str
+ Voxel size in mm. (e.g. 2mm).
+ out_dir : str
+ Path to output directory.
+
+ Returns
+ -------
+ img_file : str
+ File path to resampled Nifti1Image.
+ """
+ import os
+ from dipy.align.reslice import reslice
+
+ # Check dimensions
+ img = nib.load(img_file)
+ data = img.get_fdata()
+ affine = img.affine
+ hdr = img.header
+ zooms = hdr.get_zooms()[:3]
+ if vox_size == "1mm":
+ new_zooms = (1.0, 1.0, 1.0)
+    elif vox_size == "2mm":
+        new_zooms = (2.0, 2.0, 2.0)
+    else:
+        raise ValueError("Unsupported vox_size %r; expected '1mm' or '2mm'" % vox_size)
+
+ if (abs(zooms[0]), abs(zooms[1]), abs(zooms[2])) != new_zooms:
+ print("Reslicing image " + img_file + " to " + vox_size + "...")
+ img_file_res = "%s%s%s%s%s%s" % (
+ out_dir,
+ "/",
+ os.path.basename(img_file).split(".nii.gz")[0],
+ "_res",
+ vox_size,
+ ".nii.gz",
+ )
+ data2, affine2 = reslice(data, affine, zooms, new_zooms)
+ img2 = nib.Nifti1Image(data2, affine=affine2)
+ nib.save(img2, img_file_res)
+ img_file = img_file_res
+ else:
+ img_file_nores = "%s%s%s%s%s%s" % (
+ out_dir,
+ "/",
+ os.path.basename(img_file).split(".nii.gz")[0],
+ "_nores",
+ vox_size,
+ ".nii.gz",
+ )
+ nib.save(img, img_file_nores)
+ img_file = img_file_nores
+
+ return img_file
+
+
+def check_orient_and_dims(infile, vox_size, bvecs, outdir, overwrite=True):
+ """
+ An API to reorient any image to RAS+ and resample any image to a given voxel resolution.
+
+ Parameters
+ ----------
+ infile : str
+ File path to a dwi Nifti1Image.
+ vox_size : str
+ Voxel size in mm. (e.g. 2mm).
+ bvecs : str
+ File path to corresponding bvecs file if infile is a dwi.
+ outdir : str
+ Path to output directory.
+ overwrite : bool
+ Boolean indicating whether to overwrite existing outputs. Default is True.
+
+ Returns
+ -------
+ outfile : str
+ File path to the reoriented and/or resample Nifti1Image.
+ bvecs : str
+ File path to corresponding reoriented bvecs file if outfile is a dwi.
+ """
+ import warnings
+
+ warnings.filterwarnings("ignore")
+ from dmriprep.qc import reorient_dwi, match_target_vox_res
+
+    # Check orientation (dwi case)
+ if ("RAS" not in infile) or (overwrite is True):
+ [infile, bvecs] = reorient_dwi(infile, bvecs, outdir)
+    # Check dimensions
+    if ("reor" not in infile) or (overwrite is True):
+        outfile = match_target_vox_res(infile, vox_size, outdir)
+        print(outfile)
+    else:
+        outfile = infile
+
+ return outfile, bvecs
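# Usage sketch (hypothetical inputs): reorient a dwi to RAS+ and resample it to
# 2mm isotropic in one call:
from dmriprep.qc import check_orient_and_dims

outfile, out_bvecs = check_orient_and_dims(
    "sub-01_dwi.nii.gz", "2mm", "sub-01_dwi.bvec", "/out/sub-01/ses-1"
)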
def mplfig(data, outfile=None, as_bytes=False):
fig = plt.figure(frameon=False, dpi=data.shape[0])
- fig.set_size_inches(float(data.shape[1])/data.shape[0], 1)
- ax = plt.Axes(fig, [0., 0., 1., 1.])
+ fig.set_size_inches(float(data.shape[1]) / data.shape[0], 1)
+ ax = plt.Axes(fig, [0.0, 0.0, 1.0, 1.0])
ax.set_axis_off()
fig.add_axes(ax)
ax.imshow(data, aspect=1, cmap=plt.cm.Greys_r) # previous aspect="normal"
@@ -36,7 +280,7 @@ def mplfig(data, outfile=None, as_bytes=False):
return outfile
if as_bytes:
IObytes = BytesIO()
- plt.savefig(IObytes, format='png', dpi=data.shape[0], transparent=True)
+ plt.savefig(IObytes, format="png", dpi=data.shape[0], transparent=True)
IObytes.seek(0)
base64_jpgData = base64.b64encode(IObytes.read())
return base64_jpgData.decode("ascii")
@@ -44,8 +288,8 @@ def mplfig(data, outfile=None, as_bytes=False):
def mplfigcontour(data, outfile=None, as_bytes=False):
fig = plt.figure(frameon=False)
- fig.set_size_inches(float(data.shape[1])/data.shape[0], 1)
- ax = plt.Axes(fig, [0., 0., 1., 1.])
+ fig.set_size_inches(float(data.shape[1]) / data.shape[0], 1)
+ ax = plt.Axes(fig, [0.0, 0.0, 1.0, 1.0])
ax.set_axis_off()
fig.add_axes(ax)
@@ -59,45 +303,49 @@ def mplfigcontour(data, outfile=None, as_bytes=False):
return outfile
if as_bytes:
IObytes = BytesIO()
- plt.savefig(IObytes, format='png', dpi=data.shape[0], transparent=True)
+ plt.savefig(IObytes, format="png", dpi=data.shape[0], transparent=True)
IObytes.seek(0)
base64_jpgData = base64.b64encode(IObytes.read())
return base64_jpgData.decode("ascii")
def load_and_reorient(filename):
    img = nib.load(filename)
    data, aff = img.get_data(), img.affine
    data = reorient_array(data, aff)
    return data


def reshape3D(data, n=256):
- return np.pad(data, (
+ return np.pad(
+ data,
(
- (n-data.shape[0]) // 2,
- ((n-data.shape[0]) + (data.shape[0] % 2 > 0)) // 2
+ (
+ (n - data.shape[0]) // 2,
+ ((n - data.shape[0]) + (data.shape[0] % 2 > 0)) // 2,
+ ),
+ (
+ (n - data.shape[1]) // 2,
+ ((n - data.shape[1]) + (data.shape[1] % 2 > 0)) // 2,
+ ),
+ (0, 0),
),
- (
- (n-data.shape[1]) // 2,
- ((n-data.shape[1]) + (data.shape[1] % 2 > 0)) // 2
- ),
- (0, 0)
- ), "constant", constant_values=(0, 0))
+ "constant",
+ constant_values=(0, 0),
+ )
def reshape4D(data, n=256):
- return np.pad(data, (
+ return np.pad(
+ data,
(
- (n-data.shape[0]) // 2,
- ((n-data.shape[0]) + (data.shape[0] % 2 > 0)) // 2
+ (
+ (n - data.shape[0]) // 2,
+ ((n - data.shape[0]) + (data.shape[0] % 2 > 0)) // 2,
+ ),
+ (
+ (n - data.shape[1]) // 2,
+ ((n - data.shape[1]) + (data.shape[1] % 2 > 0)) // 2,
+ ),
+ (0, 0),
+ (0, 0),
),
- (
- (n-data.shape[1]) // 2,
- ((n-data.shape[1]) + (data.shape[1] % 2 > 0)) // 2
- ),
- (0, 0), (0, 0)
- ), "constant", constant_values=(0, 0))
+ "constant",
+ constant_values=(0, 0),
+ )
def get_middle_slices(data, slice_direction):
@@ -117,7 +365,7 @@ def get_middle_slices(data, slice_direction):
def nearest_square(limit):
answer = 0
- while (answer+1)**2 < limit:
+ while (answer + 1) ** 2 < limit:
answer += 1
if (answer ** 2) == limit:
return answer
@@ -128,17 +376,17 @@ def nearest_square(limit):
def create_sprite_from_tiles(tile, out_file=None, as_bytes=False):
num_slices = tile.shape[-1]
N = nearest_square(num_slices)
- M = int(np.ceil(num_slices/N))
+ M = int(np.ceil(num_slices / N))
# tile is square, so just make a big arr
pix = tile.shape[0]
if len(tile.shape) == 3:
- mosaic = np.zeros((N*tile.shape[0], M*tile.shape[0]))
+ mosaic = np.zeros((N * tile.shape[0], M * tile.shape[0]))
else:
- mosaic = np.zeros((N*tile.shape[0], M*tile.shape[0], tile.shape[-2]))
+ mosaic = np.zeros((N * tile.shape[0], M * tile.shape[0], tile.shape[-2]))
mosaic[:] = np.nan
- helper = np.arange(N*M).reshape((N, M))
+ helper = np.arange(N * M).reshape((N, M))
for t in range(num_slices):
x, y = np.nonzero(helper == t)
@@ -163,7 +411,6 @@ def create_sprite_from_tiles(tile, out_file=None, as_bytes=False):
def createSprite4D(dwi_file):
-
# initialize output dict
output = []
@@ -171,13 +418,13 @@ def createSprite4D(dwi_file):
dwi = load_and_reorient(dwi_file)[:, :, :, 1:]
# create tiles from center slice on each orientation
- for orient in ['sag', 'ax', 'cor']:
+ for orient in ["sag", "ax", "cor"]:
tile = get_middle_slices(dwi, orient)
# create sprite images for each
results = create_sprite_from_tiles(tile, as_bytes=True)
- results['img_type'] = '4dsprite'
- results['orientation'] = orient
+ results["img_type"] = "4dsprite"
+ results["orientation"] = orient
output.append(results)
return output
@@ -194,45 +441,45 @@ def createB0_ColorFA_Mask_Sprites(b0_file, colorFA_file, mask_file):
b0 = reshape3D(b0, N)
_, mask = median_otsu(b0)
outb0 = create_sprite_from_tiles(b0, as_bytes=True)
- outb0['img_type'] = 'brainsprite'
+ outb0["img_type"] = "brainsprite"
# make a colorFA sprite, masked by b0
Q = reshape4D(colorfa, N)
Q[np.logical_not(mask)] = np.nan
- Q = np.moveaxis(Q, -2, -1)
+ Q = np.moveaxis(Q, -2, -1)
outcolorFA = create_sprite_from_tiles(Q, as_bytes=True)
- outcolorFA['img_type'] = 'brainsprite'
+ outcolorFA["img_type"] = "brainsprite"
# make an anat mask contour sprite
outmask = create_sprite_from_tiles(reshape3D(anat_mask, N))
img = mplfigcontour(outmask.pop("mosaic"), as_bytes=True)
- outmask['img'] = img
+ outmask["img"] = img
return outb0, outcolorFA, outmask
-def create_report_json(dwi_corrected_file, eddy_rms, eddy_report,
- color_fa_file, anat_mask_file,
- outlier_indices,
- eddy_qc_file,
- outpath=op.abspath('./report.json')):
-
- report = {}
- report['dwi_corrected'] = createSprite4D(dwi_corrected_file)
-
- b0, colorFA, mask = createB0_ColorFA_Mask_Sprites(dwi_corrected_file,
- color_fa_file,
- anat_mask_file)
- report['b0'] = b0
- report['colorFA'] = colorFA
- report['anat_mask'] = mask
- report['outlier_volumes'] = outlier_indices.tolist()
-
- with open(eddy_report, 'r') as f:
- report['eddy_report'] = f.readlines()
-
- report['eddy_params'] = np.genfromtxt(eddy_rms).tolist()
- eddy_qc = load_json(eddy_qc_file)
- report['eddy_quad'] = eddy_qc
- save_json(outpath, report)
- return outpath
+# def create_report_json(dwi_corrected_file, eddy_rms, eddy_report,
+# color_fa_file, anat_mask_file,
+# outlier_indices,
+# eddy_qc_file,
+# outpath=op.abspath('./report.json')):
+#
+# report = {}
+# report['dwi_corrected'] = createSprite4D(dwi_corrected_file)
+#
+# b0, colorFA, mask = createB0_ColorFA_Mask_Sprites(dwi_corrected_file,
+# color_fa_file,
+# anat_mask_file)
+# report['b0'] = b0
+# report['colorFA'] = colorFA
+# report['anat_mask'] = mask
+# report['outlier_volumes'] = outlier_indices.tolist()
+#
+# with open(eddy_report, 'r') as f:
+# report['eddy_report'] = f.readlines()
+#
+# report['eddy_params'] = np.genfromtxt(eddy_rms).tolist()
+# eddy_qc = load_json(eddy_qc_file)
+# report['eddy_quad'] = eddy_qc
+# save_json(outpath, report)
+# return outpath
diff --git a/dmriprep/run.py b/dmriprep/run.py
index 9ac70d8..6a3c180 100644
--- a/dmriprep/run.py
+++ b/dmriprep/run.py
@@ -1,855 +1,422 @@
import os
-import os.path as op
-from shutil import copyfile
-def run_dmriprep(dwi_file, bvec_file, bval_file,
- subjects_dir, working_dir, out_dir):
+def init_single_subject_wf(
+ sub,
+ ses,
+ dwi,
+ fbval,
+ fbvec,
+ metadata,
+ out_dir,
+ strategy="mppca",
+ vox_size="2mm",
+ plugin_type="MultiProc",
+ outlier_thresh=0.10,
+ verbose=False
+):
+ import json
+ from pathlib import Path
+ from nipype.pipeline import engine as pe
+ from nipype.interfaces import utility as niu
+ from nipype.interfaces import fsl
+ from nipype.algorithms.rapidart import ArtifactDetect
+ from dmriprep import interfaces, core, qc
+
+ import_list = [
+ "import sys",
+ "import os",
+ "import numpy as np",
+ "import nibabel as nib",
+ "import warnings",
+ 'warnings.filterwarnings("ignore")',
+ ]
- """
- Runs dmriprep for acquisitions with just one PE direction.
+ subdir = "%s%s%s" % (out_dir, "/", sub)
+ if not os.path.isdir(subdir):
+ os.mkdir(subdir)
+ sesdir = "%s%s%s%s%s" % (out_dir, "/", sub, "/ses-", ses)
+ if not os.path.isdir(sesdir):
+ os.mkdir(sesdir)
+
+ eddy_cfg_file = "%s%s" % (str(Path(__file__).parent), "/eddy_params.json")
+ topup_config_odd = "%s%s" % (str(Path(__file__).parent), "/b02b0_1.cnf")
+
+ # Create dictionary of eddy args
+ with open(eddy_cfg_file, "r") as f:
+ eddy_args = json.load(f)
+
+ wf = pe.Workflow(name="single_subject_dmri")
+ wf.base_dir = sesdir
+ inputnode = pe.Node(
+ niu.IdentityInterface(
+ fields=[
+ "dwi",
+ "fbvec",
+ "fbval",
+ "metadata",
+ "sub",
+ "ses",
+ "sesdir",
+ "strategy",
+ "vox_size",
+ "outlier_thresh",
+ "eddy_cfg_file",
+ "topup_config_odd",
+ ]
+ ),
+ name="inputnode",
+ )
+ inputnode.inputs.dwi = dwi
+ inputnode.inputs.fbvec = fbvec
+ inputnode.inputs.fbval = fbval
+ inputnode.inputs.metadata = metadata
+ inputnode.inputs.sub = sub
+ inputnode.inputs.ses = ses
+ inputnode.inputs.sesdir = sesdir
+ inputnode.inputs.strategy = strategy
+ inputnode.inputs.vox_size = vox_size
+ inputnode.inputs.outlier_thresh = outlier_thresh
+ inputnode.inputs.eddy_cfg_file = eddy_cfg_file
+ inputnode.inputs.topup_config_odd = topup_config_odd
+
+ check_orient_and_dims_dwi_node = pe.Node(
+ niu.Function(
+ input_names=["infile", "vox_size", "bvecs", "outdir"],
+ output_names=["outfile", "bvecs"],
+ function=qc.check_orient_and_dims,
+ imports=import_list,
+ ),
+ name="check_orient_and_dims_dwi_node",
+ )
- """
- from glob import glob
- import nibabel as nib
- import nipype.interfaces.freesurfer as fs
- import nipype.interfaces.fsl as fsl
- import nipype.interfaces.io as nio
- import nipype.interfaces.utility as niu
- import nipype.pipeline.engine as pe
- import numpy as np
- from nipype.algorithms.rapidart import ArtifactDetect
- from nipype.interfaces.dipy import DTI
- from nipype.interfaces.fsl.utils import AvScale
- from nipype.utils.filemanip import fname_presuffix
- from nipype.workflows.dmri.fsl.epi import create_dmri_preprocessing
-
- wf = create_dmri_preprocessing(name='dmriprep',
- use_fieldmap=False,
- fieldmap_registration=False)
- wf.inputs.inputnode.ref_num = 0
- wf.inputs.inputnode.in_file = dwi_file
- wf.inputs.inputnode.in_bvec = bvec_file
-
- dwi_fname = op.split(dwi_file)[1].split(".nii.gz")[0]
- bids_sub_name = dwi_fname.split("_")[0]
- assert bids_sub_name.startswith("sub-")
-
- # inputnode = wf.get_node("inputnode")
- outputspec = wf.get_node("outputnode")
-
- # QC: FLIRT translation and rotation parameters
- flirt = wf.get_node("motion_correct.coregistration")
- # flirt.inputs.save_mats = True
-
- get_tensor = pe.Node(DTI(), name="dipy_tensor")
- wf.connect(outputspec, "dmri_corrected", get_tensor, "in_file")
- # wf.connect(inputspec2,"bvals", get_tensor, "in_bval")
- get_tensor.inputs.in_bval = bval_file
- wf.connect(outputspec, "bvec_rotated", get_tensor, "in_bvec")
-
- scale_tensor = pe.Node(name='scale_tensor', interface=fsl.BinaryMaths())
- scale_tensor.inputs.operation = 'mul'
- scale_tensor.inputs.operand_value = 1000
- wf.connect(get_tensor, 'out_file', scale_tensor, 'in_file')
-
- fslroi = pe.Node(fsl.ExtractROI(t_min=0, t_size=1), name="fslroi")
- wf.connect(outputspec, "dmri_corrected", fslroi, "in_file")
-
- bbreg = pe.Node(fs.BBRegister(contrast_type="t2", init="fsl",
- out_fsl_file=True, subjects_dir=subjects_dir,
- epi_mask=True), name="bbreg")
- # wf.connect(inputspec2,"fsid", bbreg,"subject_id")
- bbreg.inputs.subject_id = 'freesurfer' # bids_sub_name
- wf.connect(fslroi, "roi_file", bbreg, "source_file")
-
- voltransform = pe.Node(fs.ApplyVolTransform(inverse=True),
- iterfield=['source_file', 'reg_file'],
- name='transform')
- voltransform.inputs.subjects_dir = subjects_dir
-
- vt2 = voltransform.clone("transform_aparcaseg")
- vt2.inputs.interp = "nearest"
-
- def binarize_aparc(aparc_aseg):
- img = nib.load(aparc_aseg)
- data, aff = img.get_data(), img.affine
- outfile = fname_presuffix(
- aparc_aseg, suffix="bin.nii.gz",
- newpath=op.abspath("."), use_ext=False
- )
- nib.Nifti1Image((data > 0).astype(float), aff).to_filename(outfile)
- return outfile
-
- # wf.connect(inputspec2, "mask_nii", voltransform, "target_file")
- create_mask = pe.Node(niu.Function(input_names=["aparc_aseg"],
- output_names=["outfile"],
- function=binarize_aparc),
- name="bin_aparc")
-
- def get_aparc_aseg(subjects_dir, sub):
- return op.join(subjects_dir, sub, "mri", "aparc+aseg.mgz")
-
- create_mask.inputs.aparc_aseg = get_aparc_aseg(subjects_dir, 'freesurfer')
- wf.connect(create_mask, "outfile", voltransform, "target_file")
-
- wf.connect(fslroi, "roi_file", voltransform, "source_file")
- wf.connect(bbreg, "out_reg_file", voltransform, "reg_file")
-
- vt2.inputs.target_file = get_aparc_aseg(subjects_dir, 'freesurfer')
- # wf.connect(inputspec2, "aparc_aseg", vt2, "target_file")
- wf.connect(fslroi, "roi_file", vt2, "source_file")
- wf.connect(bbreg, "out_reg_file", vt2, "reg_file")
-
- # AK (2017): THIS NODE MIGHT NOT BE NECESSARY
- # AK (2018) doesn't know why she said that above..
- threshold2 = pe.Node(fs.Binarize(min=0.5, out_type='nii.gz', dilate=1),
- iterfield=['in_file'],
- name='threshold2')
- wf.connect(voltransform, "transformed_file", threshold2, "in_file")
-
- # wf.connect(getmotion, "motion_params", datasink, "dti.@motparams")
-
- def get_flirt_motion_parameters(flirt_out_mats):
- def get_params(A):
- """This is a copy of spm's spm_imatrix where
- we already know the rotations and translations matrix,
- shears and zooms (as outputs from fsl FLIRT/avscale)
- Let A = the 4x4 rotation and translation matrix
- R = [ c5*c6, c5*s6, s5]
- [-s4*s5*c6-c4*s6, -s4*s5*s6+c4*c6, s4*c5]
- [-c4*s5*c6+s4*s6, -c4*s5*s6-s4*c6, c4*c5]
- """
- def rang(b):
- a = min(max(b, -1), 1)
- return a
- Ry = np.arcsin(A[0, 2])
- # Rx = np.arcsin(A[1, 2] / np.cos(Ry))
- # Rz = np.arccos(A[0, 1] / np.sin(Ry))
-
- if (abs(Ry)-np.pi/2)**2 < 1e-9:
- Rx = 0
- Rz = np.arctan2(-rang(A[1, 0]), rang(-A[2, 0]/A[0, 2]))
- else:
- c = np.cos(Ry)
- Rx = np.arctan2(rang(A[1, 2]/c), rang(A[2, 2]/c))
- Rz = np.arctan2(rang(A[0, 1]/c), rang(A[0, 0]/c))
-
- rotations = [Rx, Ry, Rz]
- translations = [A[0, 3], A[1, 3], A[2, 3]]
-
- return rotations, translations
-
- motion_params = open(op.abspath('motion_parameters.par'), 'w')
- for mat in flirt_out_mats:
- res = AvScale(mat_file=mat).run()
- A = np.asarray(res.outputs.rotation_translation_matrix)
- rotations, translations = get_params(A)
- for i in rotations+translations:
- motion_params.write('%f ' % i)
- motion_params.write('\n')
- motion_params.close()
- motion_params = op.abspath('motion_parameters.par')
- return motion_params
-
- getmotion = pe.Node(
- niu.Function(input_names=["flirt_out_mats"],
- output_names=["motion_params"],
- function=get_flirt_motion_parameters),
- name="get_motion_parameters",
- iterfield="flirt_out_mats"
+    # Correct the b-vectors, build the gradient table, and get the b0 indices
+ correct_vecs_and_make_b0s_node = pe.Node(
+ niu.Function(
+ input_names=["fbval", "fbvec", "dwi_file", "sesdir"],
+ output_names=["firstb0", "firstb0_file", "gtab_file", "b0_vols", "b0s"],
+ function=core.correct_vecs_and_make_b0s,
+ imports=import_list,
+ ),
+ name="correct_vecs_and_make_b0s",
)
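+
+    # NOTE: core.correct_vecs_and_make_b0s is assumed to reorient the
+    # b-vectors to match the image axes, build a dipy gradient table, and
+    # return the first b0 (index and file), the gradient-table file, and
+    # the extracted b0 volumes.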
- wf.connect(flirt, "out_matrix_file", getmotion, "flirt_out_mats")
-
- art = pe.Node(interface=ArtifactDetect(), name="art")
- art.inputs.use_differences = [True, True]
- art.inputs.save_plot = False
- art.inputs.use_norm = True
- art.inputs.norm_threshold = 3
- art.inputs.zintensity_threshold = 9
- art.inputs.mask_type = 'spm_global'
- art.inputs.parameter_source = 'FSL'
-
- wf.connect(getmotion, "motion_params", art, "realignment_parameters")
- wf.connect(outputspec, "dmri_corrected", art, "realigned_files")
-
- datasink = pe.Node(nio.DataSink(), name="sinker")
- datasink.inputs.base_directory = out_dir
- datasink.inputs.substitutions = [
- ("vol0000_flirt_merged.nii.gz", dwi_fname + '.nii.gz'),
- ("stats.vol0000_flirt_merged.txt", dwi_fname + ".art.json"),
- ("motion_parameters.par", dwi_fname + ".motion.txt"),
- ("_rotated.bvec", ".bvec"),
- ("aparc+aseg_warped_out", dwi_fname.replace("_dwi", "_aparc+aseg")),
- ("art.vol0000_flirt_merged_outliers.txt", dwi_fname + ".outliers.txt"),
- ("vol0000_flirt_merged", dwi_fname),
- ("_roi_bbreg_freesurfer", "_register"),
- ("aparc+asegbin_warped_thresh", dwi_fname.replace("_dwi", "_mask")),
- ("derivatives/dmriprep", "derivatives/{}/dmriprep".format(bids_sub_name))
- ]
+ btr_node_premoco = pe.Node(fsl.BET(), name="bet_pre_moco")
+ btr_node_premoco.inputs.mask = True
+ btr_node_premoco.inputs.frac = 0.2
- wf.connect(art, "statistic_files", datasink, "dmriprep.art.@artstat")
- wf.connect(art, "outlier_files", datasink, "dmriprep.art.@artoutlier")
- wf.connect(outputspec, "dmri_corrected", datasink, "dmriprep.dwi.@corrected")
- wf.connect(outputspec, "bvec_rotated", datasink, "dmriprep.dwi.@rotated")
- wf.connect(getmotion, "motion_params", datasink, "dmriprep.art.@motion")
-
- wf.connect(get_tensor, "out_file", datasink, "dmriprep.dti.@tensor")
- wf.connect(get_tensor, "fa_file", datasink, "dmriprep.dti.@fa")
- wf.connect(get_tensor, "md_file", datasink, "dmriprep.dti.@md")
- wf.connect(get_tensor, "ad_file", datasink, "dmriprep.dti.@ad")
- wf.connect(get_tensor, "rd_file", datasink, "dmriprep.dti.@rd")
- wf.connect(get_tensor, "out_file", datasink, "dmriprep.dti.@scaled_tensor")
-
- wf.connect(bbreg, "min_cost_file", datasink, "dmriprep.reg.@mincost")
- wf.connect(bbreg, "out_fsl_file", datasink, "dmriprep.reg.@fslfile")
- wf.connect(bbreg, "out_reg_file", datasink, "dmriprep.reg.@reg")
- wf.connect(threshold2, "binary_file", datasink, "dmriprep.anat.@mask")
- # wf.connect(vt2, "transformed_file", datasink, "dwi.@aparc_aseg")
-
- convert = pe.Node(fs.MRIConvert(out_type="niigz"), name="convert2nii")
- wf.connect(vt2, "transformed_file", convert, "in_file")
- wf.connect(convert, "out_file", datasink, "dmriprep.anat.@aparc_aseg")
-
- wf.base_dir = working_dir
- wf.run()
-
- copyfile(bval_file, op.join(
- out_dir, bids_sub_name, "dmriprep", "dwi",
- op.split(bval_file)[1]
- ))
-
- dmri_corrected = glob(op.join(out_dir, '*/dmriprep/dwi', '*.nii.gz'))[0]
- bvec_rotated = glob(op.join(out_dir, '*/dmriprep/dwi', '*.bvec'))[0]
- bval_file = glob(op.join(out_dir, '*/dmriprep/dwi', '*.bval'))[0]
- art_file = glob(op.join(out_dir, '*/dmriprep/art', '*.art.json'))[0]
- motion_file = glob(op.join(out_dir, '*/dmriprep/art', '*.motion.txt'))[0]
- outlier_file = glob(op.join(out_dir, '*/dmriprep/art', '*.outliers.txt'))[0]
- return dmri_corrected, bvec_rotated, art_file, motion_file, outlier_file
-
-
-def run_dmriprep_pe(subject_id, dwi_file, dwi_file_AP, dwi_file_PA,
- bvec_file, bval_file,
- subjects_dir, working_dir, out_dir,
- eddy_niter=5, slice_outlier_threshold=0.02):
- """Run the dmriprep (phase encoded) nipype workflow
-
- Parameters
- ----------
- subject_id : str
- Subject identifier
-
- dwi_file : str
- Path to dwi nifti file
-
- dwi_file_AP : str
- Path to EPI nifti file (anterior-posterior)
-
- dwi_file_PA : str
- Path to EPI nifti file (posterior-anterior)
-
- bvec_file : str
- Path to bvec file
-
- bval_file : str
- Path to bval file
-
- subjects_dir : str
- Path to subject's freesurfer directory
-
- working_dir : str
- Path to workflow working directory
-
- out_dir : str
- Path to output directory
-
- eddy_niter : int, default=5
- Fixed number of eddy iterations. See
- https://fsl.fmrib.ox.ac.uk/fsl/fslwiki/eddy/UsersGuide#A--niter
-
-    slice_outlier_threshold : int or float
-        Threshold on the number of outlier slices per volume; if it is
-        exceeded, the volume is dropped from the analysis. An int is
-        treated as an absolute number of allowed outlier slices; a float
-        between 0 and 1 (exclusive) is treated as the allowed fraction of
-        outlier slices.
-
- Notes
- -----
- This assumes that there are scans with phase-encode directions AP/PA for
- topup.
-
- See Also
- --------
- dmriprep.run.get_dmriprep_pe_workflow
- """
- wf = get_dmriprep_pe_workflow()
- wf.base_dir = op.join(op.abspath(working_dir), subject_id)
-
- inputspec = wf.get_node('inputspec')
- inputspec.inputs.subject_id = subject_id
- inputspec.inputs.dwi_file = dwi_file
- inputspec.inputs.dwi_file_ap = dwi_file_AP
- inputspec.inputs.dwi_file_pa = dwi_file_PA
- inputspec.inputs.bvec_file = bvec_file
- inputspec.inputs.bval_file = bval_file
- inputspec.inputs.subjects_dir = subjects_dir
- inputspec.inputs.out_dir = op.abspath(out_dir)
- inputspec.inputs.eddy_niter = eddy_niter
- inputspec.inputs.slice_outlier_threshold = slice_outlier_threshold
-
- # write the graph (this is saved to the working dir)
- wf.write_graph()
- wf.config['execution']['remove_unnecessary_outputs'] = False
- wf.config['execution']['keep_inputs'] = True
- wf.run()
-
-
-def get_dmriprep_pe_workflow():
- """Return the dmriprep (phase encoded) nipype workflow
-
- Parameters
- ----------
-
-
- Returns
- -------
- wf : nipype.pipeline.engine.Workflow
- Nipype dmriprep workflow
-
- Notes
- -----
- This assumes that there are scans with phase-encode directions AP/PA for
- topup.
- """
- import nipype.interfaces.freesurfer as fs
- import nipype.interfaces.fsl as fsl
- import nipype.interfaces.io as nio
- import nipype.interfaces.utility as niu
- import nipype.pipeline.engine as pe
- from nipype.interfaces.dipy import DTI
- from nipype.workflows.dmri.fsl.artifacts import all_fsl_pipeline
-
- inputspec = pe.Node(niu.IdentityInterface(fields=[
- 'subject_id',
- 'dwi_file',
- 'dwi_file_ap',
- 'dwi_file_pa',
- 'bvec_file',
- 'bval_file',
- 'subjects_dir',
- 'out_dir',
- 'eddy_niter',
- 'slice_outlier_threshold'
- ]), name="inputspec")
-
- # AK: watch out, other datasets might be encoded LR
- epi_ap = {'echospacing': 66.5e-3, 'enc_dir': 'y-'}
- epi_pa = {'echospacing': 66.5e-3, 'enc_dir': 'y'}
- prep = all_fsl_pipeline(epi_params=epi_ap, altepi_params=epi_pa)
-
- # initialize an overall workflow
- wf = pe.Workflow(name="dmriprep")
-
- wf.connect(inputspec, 'dwi_file', prep, 'inputnode.in_file')
- wf.connect(inputspec, 'bvec_file', prep, 'inputnode.in_bvec')
- wf.connect(inputspec, 'bval_file', prep, 'inputnode.in_bval')
- wf.connect(inputspec, 'eddy_niter', prep, 'fsl_eddy.niter')
-
- eddy = prep.get_node('fsl_eddy')
- eddy.inputs.repol = True
- eddy.inputs.cnr_maps = True
- eddy.inputs.residuals = True
- import multiprocessing
- eddy.inputs.num_threads = multiprocessing.cpu_count()
- import numba.cuda
- try:
- if numba.cuda.gpus:
- eddy.inputs.use_cuda = True
- except:
- eddy.inputs.use_cuda = False
-
- topup = prep.get_node('peb_correction.topup')
- topup.inputs.checksize = True
-
- applytopup = prep.get_node('peb_correction.unwarp')
- applytopup.inputs.checksize = True
-
- eddy.inputs.checksize = True
-
- eddy_quad = pe.Node(fsl.EddyQuad(verbose=True, checksize=True), name="eddy_quad")
- get_path = lambda x: x.split('.nii.gz')[0].split('_fix')[0]
- get_qc_path = lambda x: x.split('.nii.gz')[0] + '.qc'
- wf.connect(prep, ('fsl_eddy.out_corrected', get_path), eddy_quad, 'base_name')
- wf.connect(inputspec, 'bval_file', eddy_quad, 'bval_file')
- wf.connect(prep, 'Rotate_Bvec.out_file', eddy_quad, 'bvec_file')
- wf.connect(prep, 'peb_correction.topup.out_field', eddy_quad, 'field')
- wf.connect(prep, 'gen_index.out_file', eddy_quad, 'idx_file')
- wf.connect(prep, 'peb_correction.topup.out_enc_file', eddy_quad, 'param_file')
- wf.connect(prep, ('fsl_eddy.out_corrected', get_qc_path), eddy_quad, 'output_dir')
-
-    # We need a mask file for eddy_quad; let's get it from the B0.
- def get_b0_mask_fn(b0_file):
- import nibabel as nib
- from nipype.utils.filemanip import fname_presuffix
- from dipy.segment.mask import median_otsu
- import os
-
- mask_file = fname_presuffix(b0_file, suffix="_mask", newpath=os.path.abspath('.'))
- img = nib.load(b0_file)
- data, aff = img.get_data(), img.affine
- _, mask = median_otsu(data, 2, 1)
- nib.Nifti1Image(mask.astype(float), aff).to_filename(mask_file)
- return mask_file
-
-
- def id_outliers_fn(outlier_report, threshold, dwi_file):
- """Get list of scans that exceed threshold for number of outliers
-
- Parameters
- ----------
- outlier_report: string
- Path to the fsl_eddy outlier report
-
-        threshold: int or float
-            If threshold is an int, it is treated as the number of allowed
-            outlier slices. If threshold is a float between 0 and 1
-            (exclusive), it is treated as the fraction of allowed outlier
-            slices before the whole volume is dropped.
-
- dwi_file: string
- Path to nii dwi file to determine total number of slices
-
- Returns
- -------
- drop_scans: numpy.ndarray
- List of scan indices to drop
- """
- import nibabel as nib
- import numpy as np
- import os.path as op
- import parse
-
- with open(op.abspath(outlier_report), 'r') as fp:
- lines = fp.readlines()
-
- p = parse.compile(
- "Slice {slice:d} in scan {scan:d} is an outlier with "
- "mean {mean_sd:f} standard deviations off, and mean "
- "squared {mean_sq_sd:f} standard deviations off."
- )
-
- outliers = [p.parse(l).named for l in lines]
- scans = {d['scan'] for d in outliers}
-
- def num_outliers(scan, outliers):
- return len([d for d in outliers if d['scan'] == scan])
-
- if 0 < threshold < 1:
- img = nib.load(dwi_file)
- try:
- threshold *= img.header.get_n_slices()
- except nib.spatialimages.HeaderDataError:
- print('WARNING. We are not sure which dimension has the '
- 'slices in this image. So we are using the 3rd dim.', img.shape)
- threshold *= img.shape[2]
-
- drop_scans = np.array([
- s for s in scans
- if num_outliers(s, outliers) > threshold
- ])
-
- outpath = op.abspath("dropped_scans.txt")
- np.savetxt(outpath, drop_scans, fmt="%d")
-
- return drop_scans, outpath
-
- id_outliers_node = pe.Node(niu.Function(
- input_names=["outlier_report", "threshold", "dwi_file"],
- output_names=["drop_scans", "outpath"],
- function=id_outliers_fn),
- name="id_outliers_node"
+ apply_mask_premoco_node = pe.Node(fsl.ApplyMask(), name="apply_mask_pre_moco")
+
+ # Detect and remove motion outliers
+ fsl_split_node = pe.Node(fsl.Split(dimension="t"), name="fsl_split")
+
+ pick_ref = pe.Node(niu.Select(), name="pick_ref")
+
+ coreg = pe.MapNode(
+ fsl.FLIRT(no_search=True, interp="spline", padding_size=1, dof=6),
+ name="coregistration",
+ iterfield=["in_file"],
+ )
+
+ get_motion_params_node = pe.Node(
+ niu.Function(
+ input_names=["flirt_mats"],
+ output_names=["motion_params"],
+ function=core.get_flirt_motion_parameters,
+ imports=import_list,
+ ),
+ name="get_motion_params",
+ )
+
+ fsl_merge_node = pe.Node(fsl.Merge(dimension="t"), name="fsl_merge")
+
+ art_node = pe.Node(interface=ArtifactDetect(), name="art")
+ art_node.inputs.use_differences = [True, True]
+ art_node.inputs.save_plot = False
+ art_node.inputs.use_norm = True
+ art_node.inputs.norm_threshold = 3
+ art_node.inputs.zintensity_threshold = 9
+ art_node.inputs.mask_type = "spm_global"
+ art_node.inputs.parameter_source = "FSL"
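+    # NOTE: with use_norm=True, rapidart flags volumes whose composite motion
+    # exceeds norm_threshold (mm) or whose global-intensity z-score exceeds
+    # zintensity_threshold; those flagged volumes become drop_scans below.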
+
+ drop_outliers_fn_node = pe.Node(
+ niu.Function(
+ input_names=["in_file", "in_bval", "in_bvec", "drop_scans"],
+ output_names=["out_file", "out_bval", "out_bvec"],
+ function=core.drop_outliers_fn,
+ imports=import_list,
+ ),
+ name="drop_outliers_fn",
)
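+
+    # NOTE: core.drop_outliers_fn is assumed to behave like the inline
+    # drop_outliers_fn removed elsewhere in this diff: it deletes the flagged
+    # volumes from the dwi image (axis 3), the bvals (axis 0), and the bvecs
+    # (axis 1).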
- wf.connect(inputspec, 'dwi_file', id_outliers_node, 'dwi_file')
- wf.connect(inputspec, 'slice_outlier_threshold', id_outliers_node, 'threshold')
-
- wf.connect(prep, "fsl_eddy.out_outlier_report",
- id_outliers_node, "outlier_report")
-
- list_merge = pe.Node(niu.Merge(numinputs=2), name="list_merge")
- wf.connect(inputspec, 'dwi_file_ap', list_merge, 'in1')
- wf.connect(inputspec, 'dwi_file_pa', list_merge, 'in2')
-
- merge = pe.Node(fsl.Merge(dimension='t'), name="mergeAPPA")
- wf.connect(merge, 'merged_file', prep, 'inputnode.alt_file')
- wf.connect(list_merge, 'out', merge, 'in_files')
-
- fslroi = pe.Node(fsl.ExtractROI(t_min=0, t_size=1), name="fslroi")
- wf.connect(prep, "outputnode.out_file", fslroi, "in_file")
-
- b0mask_node = pe.Node(niu.Function(input_names=['b0_file'],
- output_names=['mask_file'],
- function=get_b0_mask_fn),
- name="getB0Mask")
- wf.connect(fslroi, 'roi_file', b0mask_node, 'b0_file')
- wf.connect(b0mask_node, 'mask_file', eddy_quad, 'mask_file')
-
- bbreg = pe.Node(fs.BBRegister(contrast_type="t2", init="coreg",
- out_fsl_file=True,
- # subjects_dir=subjects_dir,
- epi_mask=True),
- name="bbreg")
- bbreg.inputs.subject_id = 'freesurfer' # bids_sub_name
- wf.connect(fslroi, "roi_file", bbreg, "source_file")
- wf.connect(inputspec, 'subjects_dir', bbreg, 'subjects_dir')
-
- def drop_outliers_fn(in_file, in_bval, in_bvec, drop_scans):
- """Drop outlier volumes from dwi file
-
- Parameters
- ----------
- in_file: string
- Path to nii dwi file to drop outlier volumes from
-
- in_bval: string
- Path to bval file to drop outlier volumes from
-
- in_bvec: string
- Path to bvec file to drop outlier volumes from
-
- drop_scans: numpy.ndarray
- List of scan indices to drop
-
- Returns
- -------
- out_file: string
- Path to "thinned" output dwi file
-
- out_bval: string
- Path to "thinned" output bval file
-
- out_bvec: string
- Path to "thinned" output bvec file
- """
- import nibabel as nib
- import numpy as np
- import os.path as op
- from nipype.utils.filemanip import fname_presuffix
-
- img = nib.load(op.abspath(in_file))
- img_data = img.get_data()
- img_data_thinned = np.delete(img_data,
- drop_scans,
- axis=3)
- if isinstance(img, nib.nifti1.Nifti1Image):
- img_thinned = nib.Nifti1Image(img_data_thinned.astype(np.float64),
- img.affine,
- header=img.header)
- elif isinstance(img, nib.nifti2.Nifti2Image):
- img_thinned = nib.Nifti2Image(img_data_thinned.astype(np.float64),
- img.affine,
- header=img.header)
- else:
- raise TypeError("in_file does not contain Nifti image datatype.")
-
- out_file = fname_presuffix(in_file, suffix="_thinned", newpath=op.abspath('.'))
- nib.save(img_thinned, op.abspath(out_file))
-
- bval = np.loadtxt(in_bval)
- bval_thinned = np.delete(bval, drop_scans, axis=0)
- out_bval = fname_presuffix(in_bval, suffix="_thinned", newpath=op.abspath('.'))
- np.savetxt(out_bval, bval_thinned)
-
- bvec = np.loadtxt(in_bvec)
- bvec_thinned = np.delete(bvec, drop_scans, axis=1)
- out_bvec = fname_presuffix(in_bvec, suffix="_thinned", newpath=op.abspath('.'))
- np.savetxt(out_bvec, bvec_thinned)
-
- return out_file, out_bval, out_bvec
-
- drop_outliers_node = pe.Node(niu.Function(
- input_names=["in_file", "in_bval", "in_bvec", "drop_scans"],
- output_names=["out_file", "out_bval", "out_bvec"],
- function=drop_outliers_fn),
- name="drop_outliers_node"
+ make_gtab_node = pe.Node(
+ niu.Function(
+ input_names=["fbval", "fbvec", "sesdir"],
+ output_names=["gtab_file", "gtab"],
+ function=core.make_gtab,
+ imports=import_list,
+ ),
+ name="make_gtab",
)
-    # Align the output of drop_outliers_node (and the eddy-corrected version)
-    # to anatomical space without resampling. Then resample the aparc+aseg
-    # and the mask to the larger voxel size of the B0 image from fslroi. The
-    # same transformation must be applied to both bvec sets (dropped &
-    # eddied); the affine can be loaded from bbreg (sio.loadmat) and applied
-    # as np.dot(coord, aff) for each coordinate in the bvecs.
-
- def get_orig(subjects_dir, sub='freesurfer'):
- import os.path as op
- return op.join(subjects_dir, sub, "mri", "orig.mgz")
-
- def get_aparc_aseg(subjects_dir, sub='freesurfer'):
- import os.path as op
- return op.join(subjects_dir, sub, "mri", "aparc+aseg.mgz")
-
- # transform the dropped volume version to anat space w/ out resampling
- voltransform = pe.Node(fs.ApplyVolTransform(no_resample=True),
- iterfield=['source_file', 'reg_file'],
- name='transform')
-
- wf.connect(inputspec, 'subjects_dir', voltransform, 'subjects_dir')
- wf.connect(inputspec, ('subjects_dir', get_aparc_aseg), voltransform, 'target_file')
- wf.connect(prep, "outputnode.out_file", voltransform, "source_file")
- wf.connect(bbreg, "out_reg_file", voltransform, "reg_file")
-
- def apply_transform_to_bvecs_fn(bvec_file, reg_mat_file):
- import numpy as np
- import nipype.utils.filemanip as fm
- import os
-
- aff = np.loadtxt(reg_mat_file)
- bvecs = np.loadtxt(bvec_file)
- bvec_trans = []
- for bvec in bvecs.T:
- coord = np.hstack((bvec, [1]))
- coord_trans = np.dot(coord, aff)[:-1]
- bvec_trans.append(coord_trans)
- out_bvec = fm.fname_presuffix(bvec_file, suffix="anat_space", newpath=os.path.abspath('.'))
- np.savetxt(out_bvec, np.asarray(bvec_trans).T)
- return out_bvec
-
- apply_transform_to_bvecs_node = pe.Node(niu.Function(input_names=['bvec_file', 'reg_mat_file'],
- output_names=['out_bvec'],
- function=apply_transform_to_bvecs_fn),
- name="applyTransformToBvecs")
- wf.connect(bbreg, 'out_fsl_file', apply_transform_to_bvecs_node, 'reg_mat_file')
- wf.connect(prep, 'outputnode.out_bvec', apply_transform_to_bvecs_node, 'bvec_file')
-
-    # OK, now let's apply the outlier threshold.
-
- wf.connect(id_outliers_node, "drop_scans", drop_outliers_node, "drop_scans")
- wf.connect(voltransform, "transformed_file", drop_outliers_node, "in_file")
- wf.connect(inputspec, 'bval_file', drop_outliers_node, 'in_bval')
- wf.connect(apply_transform_to_bvecs_node, "out_bvec", drop_outliers_node, "in_bvec")
-
- # lets compute the tensor on both the dropped volume scan
- # and also the original, eddy corrected one.
- get_tensor = pe.Node(DTI(), name="dipy_tensor")
- wf.connect(drop_outliers_node, "out_file", get_tensor, "in_file")
- wf.connect(drop_outliers_node, "out_bval", get_tensor, "in_bval")
- wf.connect(drop_outliers_node, "out_bvec", get_tensor, "in_bvec")
-
- get_tensor_eddy = get_tensor.clone('dipy_tensor_eddy')
- wf.connect(voltransform, 'transformed_file', get_tensor_eddy, "in_file")
- wf.connect(apply_transform_to_bvecs_node, 'out_bvec', get_tensor_eddy, "in_bvec")
- wf.connect(inputspec, 'bval_file', get_tensor_eddy, 'in_bval')
-
-    # AK: Is this some vestigial node from a previous workflow?
-    # It is unclear why the tensor gets scaled, but scale it for both the
-    # dropped and eddy-corrected versions to be safe.
- scale_tensor = pe.Node(name='scale_tensor', interface=fsl.BinaryMaths())
- scale_tensor.inputs.operation = 'mul'
- scale_tensor.inputs.operand_value = 1000
- wf.connect(get_tensor, 'out_file', scale_tensor, 'in_file')
-
- scale_tensor_eddy = scale_tensor.clone('scale_tensor_eddy')
- wf.connect(get_tensor_eddy, 'out_file', scale_tensor_eddy, 'in_file')
-
- # OK now that anatomical stuff (segmentation & mask)
- # We'll need:
- # 1. the B0 image in anat space (fslroi the 'transformed file' of voltransform
- # 2. the aparc aseg resampled-like the B0 image (mri_convert)
- # 3. the resample aparc_aseg binarized to be the mask image.
-
- def binarize_aparc(aparc_aseg):
- import nibabel as nib
- from nipype.utils.filemanip import fname_presuffix
- import os.path as op
-
- img = nib.load(aparc_aseg)
- data, aff = img.get_data(), img.affine
- outfile = fname_presuffix(
- aparc_aseg, suffix="bin.nii.gz",
- newpath=op.abspath("."), use_ext=False
- )
- nib.Nifti1Image((data > 0).astype(float), aff).to_filename(outfile)
- return outfile
-
- create_mask = pe.Node(niu.Function(input_names=["aparc_aseg"],
- output_names=["outfile"],
- function=binarize_aparc),
- name="bin_aparc")
-
- get_b0_anat = fslroi.clone('get_b0_anat')
- wf.connect(voltransform, 'transformed_file', get_b0_anat, 'in_file')
-
- # reslice the anat-space aparc+aseg to the DWI resolution
- reslice_to_dwi = pe.Node(fs.MRIConvert(resample_type="nearest"),
- name="reslice_to_dwi")
- wf.connect(get_b0_anat, 'roi_file', reslice_to_dwi, 'reslice_like')
- wf.connect(inputspec, ('subjects_dir', get_aparc_aseg), reslice_to_dwi, 'in_file')
-
- # also reslice the orig i suppose
- reslice_orig_to_dwi = reslice_to_dwi.clone('resliceT1wToDwi')
- wf.connect(inputspec, ('subjects_dir', get_orig), reslice_orig_to_dwi, 'in_file')
- # reslice_orig_to_dwi.inputs.in_file = get_orig(subjects_dir, 'freesurfer')
- reslice_orig_to_dwi.inputs.out_type = 'niigz'
- wf.connect(get_b0_anat, 'roi_file', reslice_orig_to_dwi, 'reslice_like')
-
-    # We assume the FreeSurfer output follows the BIDS derivatives layout,
-    # i.e. it lives in /path/to/derivatives/sub-whatever/freesurfer,
-    # which means the subjects_dir is /path/to/derivatives/sub-whatever.
-    # reslice_to_dwi.inputs.in_file = get_aparc_aseg(subjects_dir, 'freesurfer')
-
- # now we have a nice aparc+aseg still in anat space but resliced like the dwi file
- # lets create a mask file from it.
-
- wf.connect(reslice_to_dwi, 'out_file', create_mask, 'aparc_aseg')
-
- # save all the things
- datasink = pe.Node(nio.DataSink(), name="sinker")
- wf.connect(inputspec, 'out_dir', datasink, 'base_directory')
- wf.connect(inputspec, 'subject_id', datasink, 'container')
-
- wf.connect(drop_outliers_node, "out_file", datasink, "dmriprep.dwi.@thinned")
- wf.connect(drop_outliers_node, "out_bval", datasink, "dmriprep.dwi.@bval_thinned")
- wf.connect(drop_outliers_node, "out_bvec", datasink, "dmriprep.dwi.@bvec_thinned")
-
- # eddy corrected files
- wf.connect(prep, "outputnode.out_file", datasink, "dmriprep.dwi_eddy.@corrected")
- wf.connect(prep, "outputnode.out_bvec", datasink, "dmriprep.dwi_eddy.@rotated")
- wf.connect(inputspec, 'bval_file', datasink, 'dmriprep.dwi_eddy.@bval')
-
- # all the eddy stuff except the corrected files
- wf.connect(prep, "fsl_eddy.out_movement_rms",
- datasink, "dmriprep.qc.@eddyparamsrms")
- wf.connect(prep, "fsl_eddy.out_outlier_report",
- datasink, "dmriprep.qc.@eddyparamsreport")
- wf.connect(prep, "fsl_eddy.out_restricted_movement_rms",
- datasink, "dmriprep.qc.@eddyparamsrestrictrms")
- wf.connect(prep, "fsl_eddy.out_shell_alignment_parameters",
- datasink, "dmriprep.qc.@eddyparamsshellalign")
- wf.connect(prep, "fsl_eddy.out_parameter",
- datasink, "dmriprep.qc.@eddyparams")
- wf.connect(prep, "fsl_eddy.out_cnr_maps",
- datasink, "dmriprep.qc.@eddycndr")
- wf.connect(prep, "fsl_eddy.out_residuals",
- datasink, "dmriprep.qc.@eddyresid")
-
- # the file that told us which volumes to drop
- wf.connect(id_outliers_node, "outpath", datasink, "dmriprep.qc.@droppedscans")
-
- # the tensors of the dropped volumes dwi
- wf.connect(get_tensor, "out_file", datasink, "dmriprep.dti.@tensor")
- wf.connect(get_tensor, "fa_file", datasink, "dmriprep.dti.@fa")
- wf.connect(get_tensor, "md_file", datasink, "dmriprep.dti.@md")
- wf.connect(get_tensor, "ad_file", datasink, "dmriprep.dti.@ad")
- wf.connect(get_tensor, "rd_file", datasink, "dmriprep.dti.@rd")
- wf.connect(get_tensor, "color_fa_file", datasink, "dmriprep.dti.@colorfa")
- wf.connect(scale_tensor, "out_file", datasink, "dmriprep.dti.@scaled_tensor")
-
- # the tensors of the eddied volumes dwi
- wf.connect(get_tensor_eddy, "out_file", datasink, "dmriprep.dti_eddy.@tensor")
- wf.connect(get_tensor_eddy, "fa_file", datasink, "dmriprep.dti_eddy.@fa")
- wf.connect(get_tensor_eddy, "md_file", datasink, "dmriprep.dti_eddy.@md")
- wf.connect(get_tensor_eddy, "ad_file", datasink, "dmriprep.dti_eddy.@ad")
- wf.connect(get_tensor_eddy, "rd_file", datasink, "dmriprep.dti_eddy.@rd")
- wf.connect(get_tensor_eddy, "color_fa_file", datasink, "dmriprep.dti_eddy.@colorfa")
- wf.connect(scale_tensor_eddy, "out_file", datasink, "dmriprep.dti_eddy.@scaled_tensor")
-
- # all the eddy_quad stuff
- wf.connect(eddy_quad, 'qc_json', datasink, "dmriprep.qc.@eddyquad_json")
- wf.connect(eddy_quad, 'qc_pdf', datasink, "dmriprep.qc.@eddyquad_pdf")
- wf.connect(eddy_quad, 'avg_b_png', datasink, "dmriprep.qc.@eddyquad_bpng")
- wf.connect(eddy_quad, 'avg_b0_pe_png',
- datasink, "dmriprep.qc.@eddyquad_b0png")
- wf.connect(eddy_quad, 'cnr_png', datasink, "dmriprep.qc.@eddyquad_cnr")
- wf.connect(eddy_quad, 'vdm_png', datasink, "dmriprep.qc.@eddyquad_vdm")
- wf.connect(eddy_quad, 'residuals', datasink, 'dmriprep.qc.@eddyquad_resid')
-
- # anatomical registration stuff
- wf.connect(bbreg, "min_cost_file", datasink, "dmriprep.reg.@mincost")
- wf.connect(bbreg, "out_fsl_file", datasink, "dmriprep.reg.@fslfile")
- wf.connect(bbreg, "out_reg_file", datasink, "dmriprep.reg.@reg")
-
- # anatomical files resliced
- wf.connect(reslice_to_dwi, 'out_file', datasink, 'dmriprep.anat.@segmentation')
- wf.connect(create_mask, 'outfile', datasink, 'dmriprep.anat.@mask')
- wf.connect(reslice_orig_to_dwi, 'out_file', datasink, 'dmriprep.anat.@T1w')
-
- def report_fn(dwi_corrected_file, eddy_rms, eddy_report,
- color_fa_file, anat_mask_file, outlier_indices,
- eddy_qc_file):
- from dmriprep.qc import create_report_json
-
- report = create_report_json(dwi_corrected_file, eddy_rms, eddy_report,
- color_fa_file, anat_mask_file, outlier_indices,
- eddy_qc_file)
- return report
-
- report_node = pe.Node(niu.Function(
- input_names=['dwi_corrected_file', 'eddy_rms',
- 'eddy_report', 'color_fa_file',
- 'anat_mask_file', 'outlier_indices', 'eddy_qc_file'],
- output_names=['report'],
- function=report_fn
- ), name="reportJSON")
-
- # for the report, lets show the eddy corrected (full volume) image
- wf.connect(voltransform, "transformed_file", report_node, 'dwi_corrected_file')
- wf.connect(eddy_quad, 'qc_json', report_node, 'eddy_qc_file')
-
- # add the rms movement output from eddy
- wf.connect(prep, "fsl_eddy.out_movement_rms", report_node, 'eddy_rms')
- wf.connect(prep, "fsl_eddy.out_outlier_report", report_node, 'eddy_report')
- wf.connect(id_outliers_node, 'drop_scans', report_node, 'outlier_indices')
-
- # the mask file to check our registration, and the colorFA file go in the report
- wf.connect(create_mask, "outfile", report_node, 'anat_mask_file')
- wf.connect(get_tensor, "color_fa_file", report_node, 'color_fa_file')
-
- # save that report!
- wf.connect(report_node, 'report', datasink, 'dmriprep.report.@report')
-
- # this part is done last, to get the filenames *just right*
- # its super annoying.
- def name_files_nicely(dwi_file, subject_id):
- import os.path as op
-
- dwi_fname = op.split(dwi_file)[1].split(".nii.gz")[0]
- substitutions = [
- ("vol0000_flirt_merged.nii.gz", dwi_fname + '.nii.gz'),
- ("stats.vol0000_flirt_merged.txt", dwi_fname + ".art.json"),
- ("motion_parameters.par", dwi_fname + ".motion.txt"),
- ("_rotated.bvec", ".bvec"),
- ("art.vol0000_flirt_merged_outliers.txt", dwi_fname + ".outliers.txt"),
- ("vol0000_flirt_merged", dwi_fname),
- ("_roi_bbreg_freesurfer", "_register"),
- ("dwi/eddy_corrected", "dwi/%s" % dwi_fname),
- ("dti/eddy_corrected", "dti/%s" % dwi_fname.replace("_dwi", "")),
- ("reg/eddy_corrected", "reg/%s" % dwi_fname.replace("_dwi", "")),
- ("aparc+aseg_outbin", dwi_fname.replace("_dwi", "_mask")),
- ("aparc+aseg_out", dwi_fname.replace("_dwi", "_aparc+aseg")),
- ("art.eddy_corrected_outliers", dwi_fname.replace("dwi", "outliers")),
- ("color_fa", "colorfa"),
- ("orig_out", dwi_fname.replace("_dwi", "_T1w")),
- ("stats.eddy_corrected", dwi_fname.replace("dwi", "artStats")),
- ("eddy_corrected.eddy_parameters", dwi_fname + ".eddy_parameters"),
- ("qc/eddy_corrected", "qc/" + dwi_fname),
- ("derivatives/dmriprep", "derivatives/{}/dmriprep".format(subject_id)),
- ("_rotatedanat_space_thinned", ""),
- ("_thinned", ""),
- ("eddy_corrected", dwi_fname),
- ("_warped", ""),
- ("_maths", "_scaled"),
- ("dropped_scans", dwi_fname.replace("_dwi", "_dwi_dropped_scans")),
- ("report.json", dwi_fname.replace("_dwi", "_dwi_report.json"))
- ]
- return substitutions
-
- node_name_files_nicely = pe.Node(niu.Function(input_names=['dwi_file', 'subject_id'],
- output_names=['substitutions'],
- function=name_files_nicely),
- name="name_files_nicely")
- wf.connect(inputspec, 'dwi_file', node_name_files_nicely, 'dwi_file')
- wf.connect(inputspec, 'subject_id', node_name_files_nicely, 'subject_id')
- wf.connect(node_name_files_nicely, 'substitutions', datasink, 'substitutions')
+ extract_metadata_node = pe.Node(
+ niu.Function(
+ input_names=["metadata"],
+ output_names=["spec_line", "spec_acqp", "spec_slice"],
+ function=core.extract_metadata,
+ imports=import_list,
+ ),
+ name="extract_metadata",
+ )
+
+ # Gather TOPUP/EDDY inputs
+ check_shelled_node = pe.Node(
+ niu.Function(
+ input_names=["gtab_file"],
+ output_names=["check_shelled"],
+ function=core.check_shelled,
+ imports=import_list,
+ ),
+ name="check_shelled",
+ )
+
+ get_topup_inputs_node = pe.Node(
+ niu.Function(
+ input_names=["dwi", "sesdir", "spec_acqp", "b0_vols", "topup_config_odd"],
+ output_names=[
+ "datain_file",
+ "imain_output",
+ "example_b0",
+ "datain_lines",
+ "topup_config",
+ ],
+ function=core.topup_inputs_from_dwi_files,
+ imports=import_list,
+ ),
+ name="get_topup_inputs",
+ )
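+    # NOTE: topup_inputs_from_dwi_files is assumed to write the --datain
+    # (acqp) text file that TOPUP and EDDY expect: one row per b0 volume,
+    # e.g. "0 1 0 0.0646" (phase-encode vector plus total readout time in
+    # seconds), derived here from the BIDS metadata carried in spec_acqp.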
+
+ get_eddy_inputs_node = pe.Node(
+ niu.Function(
+ input_names=["sesdir", "gtab_file"],
+ output_names=["index_file"],
+ function=core.eddy_inputs_from_dwi_files,
+ imports=import_list,
+ ),
+ name="get_eddy_inputs",
+ )
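+    # NOTE: eddy_inputs_from_dwi_files is assumed to write eddy's --index
+    # file: one integer per volume mapping it to a row of the acqp file (a
+    # column of 1s when every volume shares a single phase-encode setting).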
+
+ # Run TOPUP
+    topup_node = pe.Node(fsl.TOPUP(), name="topup", mem_gb=4, n_procs=1)
+
+ # Run BET on mean b0 of topup-corrected output
+ make_mean_b0_node = pe.Node(
+ niu.Function(
+ input_names=["in_file"],
+ output_names=["mean_file_out"],
+ function=core.make_mean_b0,
+ imports=import_list,
+ ),
+ name="make_mean_b0",
+ )
+ btr_node = pe.Node(fsl.BET(), name="bet")
+ btr_node.inputs.mask = True
+ btr_node.inputs.frac = 0.2
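+    # frac=0.2 gives a more liberal brain outline than BET's default of 0.5,
+    # which helps avoid clipping cortex on the low-contrast mean b0.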
+
+ # Run EDDY
+    eddy_node = pe.Node(
+        interfaces.ExtendedEddy(**eddy_args), name="eddy", mem_gb=8, n_procs=4
+    )
+    eddy_node.inputs.num_threads = 4
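+
+    # NOTE: interfaces.ExtendedEddy is assumed to wrap fsl.Eddy with the
+    # extra is_shelled input and the outlier-report output consumed below;
+    # eddy_args is presumably the dict of user-specified eddy options
+    # assembled upstream.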
+
+ # Drop outlier volumes
+ id_outliers_fn_node = pe.Node(
+ niu.Function(
+ input_names=["outlier_report", "threshold", "dwi_file"],
+ output_names=["drop_scans", "outpath"],
+ function=core.id_outliers_fn,
+ imports=import_list,
+ ),
+ name="id_outliers_fn",
+ )
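+
+    # NOTE: core.id_outliers_fn is assumed to match the inline id_outliers_fn
+    # removed elsewhere in this diff: it parses eddy's outlier report and
+    # returns the indices of volumes whose outlier-slice count exceeds
+    # `threshold` (an absolute count, or a fraction of slices when
+    # 0 < threshold < 1).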
+
+ drop_outliers_fn_posteddy_node = pe.Node(
+ niu.Function(
+ input_names=["in_file", "in_bval", "in_bvec", "drop_scans"],
+ output_names=["out_file", "out_bval", "out_bvec"],
+ function=core.drop_outliers_fn,
+ imports=import_list,
+ ),
+ name="drop_outliers_fn_posteddy",
+ )
+
+ make_gtab_node_final = pe.Node(
+ niu.Function(
+ input_names=["fbval", "fbvec", "sesdir"],
+ output_names=["gtab_file", "gtab"],
+ function=core.make_gtab,
+ imports=import_list,
+ ),
+ name="make_gtab_final",
+ )
+
+ apply_mask_node = pe.Node(fsl.ApplyMask(), name="apply_mask")
+
+    # Suppress Gibbs ringing and denoise
+    suppress_gibbs_node = pe.Node(
+        niu.Function(
+            input_names=["in_file", "sesdir"],
+            output_names=["gibbs_corr_data", "gibbs_free_file"],
+            function=core.suppress_gibbs,
+            imports=import_list,
+        ),
+        name="suppress_gibbs",
+        mem_gb=2,
+        n_procs=1,
+    )
+
+    denoise_node = pe.Node(
+        niu.Function(
+            input_names=["in_file", "sesdir", "gtab_file", "mask", "strategy"],
+            output_names=["img_data_den", "denoised_file"],
+            function=core.denoise,
+            imports=import_list,
+        ),
+        name="denoise",
+        mem_gb=4,
+        n_procs=1,
+    )
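+
+    # NOTE: core.denoise is presumably dispatching on `strategy` to a DIPY
+    # denoising routine (e.g. MP-PCA or non-local means), restricted to the
+    # BET mask.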
+
+ outputnode = pe.Node(
+ niu.IdentityInterface(fields=["preprocessed_data", "final_bvec", "final_bval"]),
+ name="outputnode",
+ )
+
+ wf.connect([
+ (inputnode, check_orient_and_dims_dwi_node, [("fbvec", "bvecs"),
+ ("dwi", "infile"),
+ ("vox_size", "vox_size"),
+ ("sesdir", "outdir")]),
+ (inputnode, correct_vecs_and_make_b0s_node, [("fbval", "fbval"),
+ ("sesdir", "sesdir")]),
+ (check_orient_and_dims_dwi_node, correct_vecs_and_make_b0s_node, [("bvecs", "fbvec"),
+ ("outfile", "dwi_file")]),
+ (correct_vecs_and_make_b0s_node, btr_node_premoco, [("firstb0_file", "in_file")]),
+ (btr_node_premoco, apply_mask_premoco_node, [("mask_file", "mask_file")]),
+ (check_orient_and_dims_dwi_node, apply_mask_premoco_node, [("outfile", "in_file")]),
+ (apply_mask_premoco_node, fsl_split_node, [("out_file", "in_file")]),
+ (correct_vecs_and_make_b0s_node, pick_ref, [("firstb0", "index")]),
+ (fsl_split_node, pick_ref, [("out_files", "inlist")]),
+ (pick_ref, coreg, [("out", "reference")]),
+ (fsl_split_node, coreg, [("out_files", "in_file")]),
+ (coreg, get_motion_params_node, [("out_matrix_file", "flirt_mats")]),
+ (coreg, fsl_merge_node, [("out_file", "in_files")]),
+ (get_motion_params_node, art_node, [("motion_params", "realignment_parameters")]),
+ (fsl_merge_node, art_node, [("merged_file", "realigned_files")]),
+ (check_orient_and_dims_dwi_node, drop_outliers_fn_node, [("bvecs", "in_bvec"),
+ ("outfile", "in_file")]),
+ (inputnode, drop_outliers_fn_node, [("fbval", "in_bval")]),
+ (art_node, drop_outliers_fn_node, [("outlier_files", "drop_scans")]),
+ (inputnode, extract_metadata_node, [("metadata", "metadata")]),
+ (correct_vecs_and_make_b0s_node, check_shelled_node, [("gtab_file", "gtab_file")]),
+ (drop_outliers_fn_node, get_topup_inputs_node, [("out_file", "dwi")]),
+ (inputnode, get_topup_inputs_node, [("sesdir", "sesdir"),
+ ("topup_config_odd", "topup_config_odd")]),
+ (correct_vecs_and_make_b0s_node, get_topup_inputs_node, [("b0_vols", "b0_vols")]),
+ (extract_metadata_node, get_topup_inputs_node, [("spec_acqp", "spec_acqp")]),
+ (inputnode, get_eddy_inputs_node, [("sesdir", "sesdir")]),
+ (drop_outliers_fn_node, make_gtab_node, [("out_bvec", "fbvec"),
+ ("out_bval", "fbval")]),
+ (inputnode, make_gtab_node, [("sesdir", "sesdir")]),
+ (make_gtab_node, get_eddy_inputs_node, [("gtab_file", "gtab_file")]),
+ (get_topup_inputs_node, topup_node, [("datain_file", "encoding_file"),
+ ("imain_output", "in_file"),
+ ("topup_config", "config")]),
+ (topup_node, make_mean_b0_node, [("out_corrected", "in_file")]),
+ (make_mean_b0_node, btr_node, [("mean_file_out", "in_file")]),
+ (check_shelled_node, eddy_node, [("check_shelled", "is_shelled")]),
+ (btr_node, eddy_node, [("mask_file", "in_mask")]),
+ (get_eddy_inputs_node, eddy_node, [("index_file", "in_index")]),
+ (get_topup_inputs_node, eddy_node, [("datain_file", "in_acqp")]),
+ (topup_node, eddy_node, [("out_movpar", "in_topup_movpar"),
+ ("out_fieldcoef", "in_topup_fieldcoef")]),
+ (drop_outliers_fn_node, eddy_node, [("out_file", "in_file"),
+ ("out_bval", "in_bval"),
+ ("out_bvec", "in_bvec")]),
+ (eddy_node, id_outliers_fn_node, [("out_outlier_report", "outlier_report"),
+ ("out_corrected", "dwi_file")]),
+ (inputnode, id_outliers_fn_node, [("outlier_thresh", "threshold")]),
+ (drop_outliers_fn_node, drop_outliers_fn_posteddy_node, [("out_bval", "in_bval")]),
+ (eddy_node, drop_outliers_fn_posteddy_node, [("out_corrected", "in_file"),
+ ("out_rotated_bvecs", "in_bvec")]),
+ (id_outliers_fn_node, drop_outliers_fn_posteddy_node, [("drop_scans", "drop_scans")]),
+ (drop_outliers_fn_posteddy_node, apply_mask_node, [("out_file", "in_file")]),
+ (btr_node, apply_mask_node, [("mask_file", "mask_file")]),
+ (apply_mask_node, suppress_gibbs_node, [("out_file", "in_file")]),
+ (inputnode, denoise_node, [("strategy", "strategy")]),
+ (btr_node, denoise_node, [("mask_file", "mask")]),
+ (drop_outliers_fn_posteddy_node, make_gtab_node_final, [("out_bvec", "fbvec"),
+ ("out_bval", "fbval")]),
+ (inputnode, make_gtab_node_final, [("sesdir", "sesdir")]),
+ (make_gtab_node_final, denoise_node, [("gtab_file", "gtab_file")]),
+ (suppress_gibbs_node, denoise_node, [("gibbs_free_file", "in_file")]),
+ (inputnode, suppress_gibbs_node, [("sesdir", "sesdir")]),
+ (inputnode, denoise_node, [("sesdir", "sesdir")]),
+ (denoise_node, outputnode, [("denoised_file", "preprocessed_data")]),
+ (drop_outliers_fn_posteddy_node, outputnode, [("out_bvec", "final_bvec"),
+ ("out_bval", "final_bval")])
+ ])
+
+    if verbose:
+        # Import nipype's logging under an alias so the stdlib `logging`
+        # import below does not shadow it.
+        from nipype import config, logging as nipype_logging
+        cfg_v = dict(logging={'workflow_level': 'DEBUG', 'utils_level': 'DEBUG', 'interface_level': 'DEBUG',
+                              'log_directory': str(wf.base_dir), 'log_to_file': True},
+                     monitoring={'enabled': True, 'sample_frequency': '0.1', 'summary_append': True,
+                                 'summary_file': str(wf.base_dir)})
+        # Apply the config before refreshing the loggers that read from it.
+        config.update_config(cfg_v)
+        nipype_logging.update_logging(config)
+        config.enable_debug_mode()
+        config.enable_resource_monitor()
+
+        import logging
+        callback_log_path = "%s%s" % (wf.base_dir, '/run_stats.log')
+        logger = logging.getLogger('callback')
+        logger.setLevel(logging.DEBUG)
+        handler = logging.FileHandler(callback_log_path)
+        logger.addHandler(handler)
+
+ # Set runtime/logging configurations
+ cfg = dict(
+ execution={
+ "stop_on_first_crash": True,
+ "hash_method": "content",
+ "crashfile_format": "txt",
+ "display_variable": ":0",
+ "job_finished_timeout": 65,
+ "matplotlib_backend": "Agg",
+ "plugin": str(plugin_type),
+ "use_relative_paths": True,
+ "parameterize_dirs": True,
+ "remove_unnecessary_outputs": False,
+ "remove_node_directories": False,
+ "raise_insufficient": True,
+ "poll_sleep_duration": 0.01,
+ }
+ )
+ for key in cfg.keys():
+ for setting, value in cfg[key].items():
+ wf.config[key][setting] = value
+
+    try:
+        wf.write_graph(graph2use="colored", format='png')
+    except Exception:
+        # Graph rendering needs graphviz; skip quietly if it is unavailable.
+        pass
return wf
diff --git a/dmriprep/utils.py b/dmriprep/utils.py
index cd92cc1..7e90d39 100644
--- a/dmriprep/utils.py
+++ b/dmriprep/utils.py
@@ -2,10 +2,8 @@
Utility functions for other submodules
"""
-import itertools
-
import numpy as np
-
+from nipype import logging
mod_logger = logging.getLogger(__name__)
@@ -33,6 +31,8 @@ def is_hemispherical(vecs):
----------
https://rstudio-pubs-static.s3.amazonaws.com/27121_a22e51b47c544980bad594d5e0bb2d04.html # noqa
"""
+ import itertools
+
if vecs.shape[1] != 3:
raise ValueError("Input vectors must be 3D vectors")
if not np.allclose(1, np.linalg.norm(vecs, axis=1)):
@@ -64,3 +64,14 @@ def is_hemispherical(vecs):
else:
pole = np.array([0.0, 0.0, 0.0])
return is_hemi, pole
+
+
+def merge_dicts(x, y):
+ """
+    A function to merge two dictionaries, making it easier to build
+    modality-specific queries for dwi images (since they have variable
+    extensions: a .nii.gz, a .bval, and a .bvec file).
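+
+    Examples
+    --------
+    A minimal illustration with hypothetical query keys:
+
+    >>> merge_dicts({"datatype": "dwi"}, {"extension": "bvec"})
+    {'datatype': 'dwi', 'extension': 'bvec'}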
+ """
+ z = x.copy()
+ z.update(y)
+ return z
diff --git a/requirements.txt b/requirements.txt
index b780e72..e6e47c6 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,11 +1,11 @@
Click>=6.0
-dask==1.2.2
-dipy==0.16.0
-nipype==1.2.0
-pandas==0.24.2
-parse==1.12.0
-tqdm==4.32.1
-pybids==0.8.0
-matplotlib==3.1.0
-cytoolz==0.9.0.1
-numba==0.43.1
\ No newline at end of file
+dask>=1.2.2
+dipy>=1.0.0
+nipype>=1.2.0
+pandas>=0.24.2
+parse>=1.12.0
+tqdm>=4.32.1
+pybids>=0.8.0
+matplotlib>=3.1.0
+cytoolz>=0.9.0.1
+numba>=0.43.1