6 changes: 3 additions & 3 deletions docs/notebooks/data_structures.ipynb
@@ -93,7 +93,7 @@
}
],
"source": [
"plot_gradients(dwi.gradients.T);"
"plot_gradients(dwi.gradients);"
]
},
{
@@ -192,7 +192,7 @@
}
],
"source": [
"plot_gradients(dwi.gradients.T);"
"plot_gradients(dwi.gradients);"
]
},
{
@@ -213,7 +213,7 @@
],
"source": [
"# Select a b-value\n",
"b2000_gradientmask = dwi.gradients[-1, ...] == 2000\n",
"b2000_gradientmask = dwi.gradients[:, -1] == 2000\n",
"\n",
"# Select b=2000\n",
"data, _, grad = dwi[b2000_gradientmask]\n",
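For readers following the notebook change above, a minimal sketch of shell selection under the new row-major gradient layout (it assumes `dwi` is the DWI object loaded earlier in the notebook; the variable names are illustrative):

    # b-values are stored in the last column of the (N, 4) gradient table.
    b2000_mask = dwi.gradients[:, -1] == 2000
    # Indexing a DWI object returns the data block, motion affines, and gradients.
    data, _, grad = dwi[b2000_mask]
    print(grad.shape)  # expected: (number of b=2000 volumes, 4)
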
38 changes: 19 additions & 19 deletions docs/notebooks/pet_motion_estimation.ipynb
@@ -2428,10 +2428,11 @@
]
},
{
"metadata": {},
"cell_type": "code",
"outputs": [],
"execution_count": null,
"id": "d3627d44376b27f4",
"metadata": {},
"outputs": [],
"source": [
"import numpy as np\n",
"import pandas as pd\n",
@@ -2441,20 +2442,20 @@
"# Assume `affines` is the list of affine matrices computed earlier\n",
"motion_parameters = []\n",
"\n",
"for idx, affine in enumerate(affines):\n",
"for _idx, affine in enumerate(affines):\n",
" tx, ty, tz, rx, ry, rz = extract_motion_parameters(affine)\n",
" motion_parameters.append([tx, ty, tz, rx, ry, rz])\n",
"\n",
"motion_parameters = np.array(motion_parameters)\n",
"estimated_fd = compute_fd_from_motion(motion_parameters)"
],
"id": "d3627d44376b27f4"
]
},
{
"metadata": {},
"cell_type": "code",
"outputs": [],
"execution_count": null,
"id": "4f141ebdb1643673",
"metadata": {},
"outputs": [],
"source": [
"# Set up the matplotlib figure\n",
"import matplotlib.pyplot as plt\n",
@@ -2466,20 +2467,20 @@
"plot_volumewise_motion(np.arange(len(estimated_fd)), motion_parameters)\n",
"\n",
"plt.show()"
],
"id": "4f141ebdb1643673"
]
},
{
"metadata": {},
"cell_type": "markdown",
"source": "For the dataset used in this example, we have access to the ground truth motion parameters that were used to corrupt the motion-free dataset. Let's now plot the ground truth motion to enable a visual comparison with the estimated motion.",
"id": "e3f45164598d16f0"
"id": "e3f45164598d16f0",
"metadata": {},
"source": "For the dataset used in this example, we have access to the ground truth motion parameters that were used to corrupt the motion-free dataset. Let's now plot the ground truth motion to enable a visual comparison with the estimated motion."
},
{
"metadata": {},
"cell_type": "code",
"outputs": [],
"execution_count": null,
"id": "1009ea77e1bdd0ee",
"metadata": {},
"outputs": [],
"source": [
"from nifreeze.viz.motion_viz import plot_volumewise_motion\n",
"\n",
@@ -2505,14 +2506,13 @@
"\n",
"plt.tight_layout()\n",
"plt.show()"
],
"id": "1009ea77e1bdd0ee"
]
},
{
"metadata": {},
"cell_type": "markdown",
"source": "Let's plot the estimated and the ground truth framewise displacement.",
"id": "113b4b4d1361b5ec"
"id": "113b4b4d1361b5ec",
"metadata": {},
"source": "Let's plot the estimated and the ground truth framewise displacement."
},
{
"cell_type": "code",
9 changes: 5 additions & 4 deletions docs/usage.rst
@@ -34,10 +34,11 @@ To utilize *NiFreeze* functionalities within your Python module or script, follow
Use the appropriate parameters for the particular imaging modality (e.g.
dMRI, fMRI, or PET) that you are using.

For example, for dMRI data, ensure the gradient table is provided. It
should have one column per diffusion-weighted image. The first three rows
represent the gradient directions, and the last row indicates the timing
and strength of the gradients in units of s/mm² ``[ R A S+ b ]``.
For example, for dMRI data, ensure the gradient table is provided. NiFreeze
expects the table to have one row per diffusion-weighted image, with the
first three columns storing the gradient direction components and the last
column indicating the timing and strength of the gradients in units of
s/mm² ``[ R A S+ b ]``.

.. code-block:: python

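As an illustrative sketch (file names are placeholders), such a table can be assembled from FSL-style ``.bvec``/``.bval`` files by transposing the b-vectors and appending the b-values as the last column:

.. code-block:: python

    import numpy as np

    bvecs = np.loadtxt("dwi.bvec")  # FSL layout: 3 rows x N columns
    bvals = np.loadtxt("dwi.bval")  # N b-values
    gradients = np.column_stack((bvecs.T, bvals))  # shape (N, 4): [ R A S+ b ]
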
2 changes: 1 addition & 1 deletion src/nifreeze/data/base.py
@@ -100,7 +100,7 @@ def __len__(self) -> int:

def _getextra(self, idx: int | slice | tuple | np.ndarray) -> tuple[Unpack[Ts]]:
"""
Extracts extra fields synchronized with the indexed access of the corresponding data object.
Extract extra fields for a given index of the corresponding data object.

Parameters
----------
115 changes: 80 additions & 35 deletions src/nifreeze/data/dmri.py
@@ -71,12 +71,43 @@ class DWI(BaseDataset[np.ndarray]):
bzero: np.ndarray = attrs.field(default=None, repr=_data_repr, eq=attrs.cmp_using(eq=_cmp))
"""A *b=0* reference map, preferably obtained by some smart averaging."""
gradients: np.ndarray = attrs.field(default=None, repr=_data_repr, eq=attrs.cmp_using(eq=_cmp))
"""A 2D numpy array of the gradient table (4xN)."""
"""A 2D numpy array of the gradient table (``N`` orientations x ``C`` components)."""
eddy_xfms: list = attrs.field(default=None)
"""List of transforms to correct for estimated eddy current distortions."""

def __attrs_post_init__(self) -> None:
self._normalize_gradients()

def _normalize_gradients(self) -> None:
if self.gradients is None:
return

gradients = np.asarray(self.gradients)
if gradients.ndim != 2:
raise ValueError("Gradient table must be a 2D array")

n_volumes = None
if self.dataobj is not None:
Review comment (Member Author): @jhlegarreta I think we disallowed this right (dataobj to be None)?

Review reply (Contributor): Not fully yet: see #302 and PR #305.

try:
n_volumes = self.dataobj.shape[-1]
except Exception: # pragma: no cover - extremely defensive
n_volumes = None

if n_volumes is not None and gradients.shape[0] != n_volumes:
if gradients.shape[1] == n_volumes:
gradients = gradients.T
else:
raise ValueError(
"Gradient table shape does not match the number of diffusion volumes: "
f"expected {n_volumes} rows, found {gradients.shape[0]}"
)
elif n_volumes is None and gradients.shape[1] > gradients.shape[0]:
gradients = gradients.T

self.gradients = gradients

def _getextra(self, idx: int | slice | tuple | np.ndarray) -> tuple[np.ndarray]:
return (self.gradients[..., idx],)
return (self.gradients[idx, ...],)

# For the sake of the docstring
def __getitem__(
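As an aside, a minimal sketch (not part of the PR) of what `_normalize_gradients` does at construction time, assuming `DWI` accepts the keyword arguments used elsewhere in this diff and using a tiny synthetic dataset:

    import numpy as np
    from nifreeze.data.dmri import DWI

    rng = np.random.default_rng(0)
    data = rng.random((2, 2, 2, 5), dtype=np.float32)  # 5 diffusion-weighted volumes
    legacy_table = rng.random((4, 5))                  # legacy column-major (4, N) layout

    dwi = DWI(dataobj=data, affine=np.eye(4), gradients=legacy_table)
    print(dwi.gradients.shape)  # (5, 4): transposed to one row per volume
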
@@ -99,8 +130,8 @@ def __getitem__(
motion_affine : :obj:`~numpy.ndarray` or ``None``
The corresponding per-volume motion affine(s) or ``None`` if identity transform(s).
gradient : :obj:`~numpy.ndarray`
The corresponding gradient(s), which may have shape ``(4,)`` if a single volume
or ``(4, k)`` if multiple volumes, or ``None`` if gradients are not available.
The corresponding gradient(s), which may have shape ``(C,)`` if a single volume
or ``(k, C)`` if multiple volumes, or ``None`` if gradients are not available.

"""

@@ -126,11 +157,11 @@ def from_filename(cls, filename: Path | str) -> Self:

@property
def bvals(self):
return self.gradients[-1, ...]
return self.gradients[..., -1]

@property
def bvecs(self):
return self.gradients[:-1, ...]
return self.gradients[..., :-1]

def get_shells(
self,
@@ -160,14 +191,12 @@ def get_shells(
"""

_, bval_groups, bval_estimated = find_shelling_scheme(
self.gradients[-1, ...],
self.bvals,
num_bins=num_bins,
multishell_nonempty_bin_count_thr=multishell_nonempty_bin_count_thr,
bval_cap=bval_cap,
)
indices = [
np.hstack(np.where(np.isin(self.gradients[-1, ...], bvals))) for bvals in bval_groups
]
indices = [np.where(np.isin(self.bvals, bvals))[0] for bvals in bval_groups]
return list(zip(bval_estimated, indices, strict=True))

def to_filename(
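For context, a sketch of how the updated get_shells output might be consumed (it assumes the keyword arguments have defaults, as their pass-through to find_shelling_scheme suggests, and that `dwi` is a multi-shell DWI instance):

    # Each entry pairs an estimated shell b-value with the indices of its volumes.
    for shell_bval, shell_indices in dwi.get_shells():
        shell_data, _, shell_grad = dwi[shell_indices]
        print(shell_bval, shell_grad.shape)  # e.g. 2000.0, (n_volumes_in_shell, 4)
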
@@ -232,16 +261,14 @@ def to_nifti(
bvals = self.bvals

# Rotate b-vectors if self.motion_affines is not None
bvecs = (
np.array(
[
transform_fsl_bvec(bvec, affine, self.affine, invert=True)
for bvec, affine in zip(self.gradients.T, self.motion_affines, strict=True)
]
).T
if self.motion_affines is not None
else self.bvecs
)
if self.motion_affines is not None:
rotated = [
transform_fsl_bvec(gradient, affine, self.affine, invert=True)
for gradient, affine in zip(self.gradients, self.motion_affines, strict=True)
]
bvecs = np.asarray(rotated)
else:
bvecs = self.bvecs

# Parent's to_nifti to handle the primary NIfTI export.
nii = super().to_nifti(
@@ -266,7 +293,7 @@ def to_nifti(
# If inserting a b0 volume is requested, add the corresponding null
# gradient value to the bval/bvec pair
bvals = np.concatenate((np.zeros(1), bvals))
bvecs = np.concatenate((np.zeros(3)[:, np.newaxis], bvecs), axis=-1)
bvecs = np.vstack((np.zeros((1, bvecs.shape[1])), bvecs))

if filename is not None:
# Convert filename to a Path object.
@@ -279,9 +306,8 @@ def to_nifti(
bvecs_file = out_root.with_suffix(".bvec")
bvals_file = out_root.with_suffix(".bval")

# Save bvecs and bvals to text files
# Each row of bvecs is one direction (3 rows, N columns).
np.savetxt(bvecs_file, bvecs, fmt=f"%.{bvecs_dec_places}f")
# Save bvecs and bvals to text files. BIDS expects 3 rows x N columns.
np.savetxt(bvecs_file, bvecs.T, fmt=f"%.{bvecs_dec_places}f")
np.savetxt(bvals_file, bvals[np.newaxis, :], fmt=f"%.{bvals_dec_places}f")

return nii
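A small sketch (not from the PR) contrasting the on-disk FSL/BIDS layout written above with the in-memory row-major layout; `dwi` and the output path are placeholders:

    import numpy as np

    # The exporter above writes bvecs.T, i.e. 3 rows x N columns on disk.
    np.savetxt("sub-01_dwi.bvec", dwi.bvecs.T, fmt="%.6f")
    bvecs_on_disk = np.loadtxt("sub-01_dwi.bvec")
    assert bvecs_on_disk.shape == (3, len(dwi))  # FSL/BIDS text-file convention
    assert dwi.bvecs.shape == (len(dwi), 3)      # row-major in memory
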
@@ -313,10 +339,12 @@ def from_nii(
motion_file : :obj:`os.pathlike`, optional
A file containing head motion affine matrices (linear)
gradients_file : :obj:`os.pathlike`, optional
A text file containing the gradients table, shape (4, N) or (N, 4).
If provided, it supersedes any .bvec / .bval combination.
A text file containing the gradients table, shape (N, C) where the last column
stores the b-values. The legacy column-major layout (C, N) is also accepted and
will be transposed automatically. If provided, it supersedes any .bvec / .bval
combination.
bvec_file : :obj:`os.pathlike`, optional
A text file containing b-vectors, shape (3, N).
A text file containing b-vectors, shape (N, 3) or (3, N).
bval_file : :obj:`os.pathlike`, optional
A text file containing b-values, shape (N,).
b0_file : :obj:`os.pathlike`, optional
@@ -359,31 +387,48 @@ def from_nii(
stacklevel=2,
)
elif bvec_file and bval_file:
bvecs = np.loadtxt(bvec_file, dtype="float32") # shape (3, N)
if bvecs.shape[0] != 3 and bvecs.shape[1] == 3:
bvecs = np.loadtxt(bvec_file, dtype="float32")
if bvecs.ndim == 1:
bvecs = bvecs[np.newaxis, :]
if bvecs.shape[1] != 3 and bvecs.shape[0] == 3:
bvecs = bvecs.T

bvals = np.loadtxt(bval_file, dtype="float32") # shape (N,)
# Stack to shape (4, N)
grad = np.vstack((bvecs, bvals))
bvals = np.loadtxt(bval_file, dtype="float32")
if bvals.ndim > 1:
bvals = np.squeeze(bvals)
grad = np.column_stack((bvecs, bvals))
else:
raise RuntimeError(
"No gradient data provided. "
"Please specify either a gradients_file or (bvec_file & bval_file)."
)

if grad.ndim == 1:
grad = grad[np.newaxis, :]

if grad.shape[1] < 2:
raise ValueError("Gradient table must have at least two columns (direction + b-value).")

if grad.shape[1] != 4:
if grad.shape[0] == 4:
grad = grad.T
else:
raise ValueError(
"Gradient table must have four columns (3 direction components and one b-value)."
)

# 3) Create the DWI instance. We'll filter out volumes where b-value > b0_thres
# as "DW volumes" if the user wants to store only the high-b volumes here
gradmsk = (grad[-1] if grad.shape[0] == 4 else grad[:, -1]) > b0_thres
gradmsk = grad[:, -1] > b0_thres

# The shape checking is somewhat flexible: (4, N) or (N, 4)
dwi_obj = DWI(
dataobj=fulldata[..., gradmsk],
affine=img.affine,
# We'll assign the filtered gradients below.
)

dwi_obj.gradients = grad[:, gradmsk] if grad.shape[0] == 4 else grad[gradmsk, :].T
dwi_obj.gradients = grad[gradmsk, :]
dwi_obj._normalize_gradients()

# 4) b=0 volume (bzero)
# If the user provided a b0_file, load it
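Putting the pieces together, a hedged usage sketch of the loader (file names are placeholders; from_nii is assumed to be importable from nifreeze.data.dmri, as its direct instantiation of DWI above suggests):

    from nifreeze.data.dmri import from_nii

    dwi = from_nii(
        "sub-01_dwi.nii.gz",
        bvec_file="sub-01_dwi.bvec",
        bval_file="sub-01_dwi.bval",
    )
    print(dwi.gradients.shape)  # (N, 4): one row per retained diffusion-weighted volume
    print(dwi.bvals.shape)      # (N,)   -> dwi.gradients[..., -1]
    print(dwi.bvecs.shape)      # (N, 3) -> dwi.gradients[..., :-1]
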
2 changes: 1 addition & 1 deletion src/nifreeze/data/filtering.py
@@ -240,7 +240,7 @@ def dwi_select_shells(
Parameters
----------
gradients : :obj:`~numpy.ndarray`
Gradients.
Gradients arranged as ``(N, C)`` with the last column storing b-values.
index : :obj:`int`
Index of the shell data.
atol_low : :obj:`float`, optional
18 changes: 16 additions & 2 deletions src/nifreeze/model/_dipy.py
@@ -38,6 +38,20 @@
)


def _cartesian_components(gtab: GradientTable | np.ndarray) -> np.ndarray:
"""Return the gradient directions as Cartesian components."""

if hasattr(gtab, "bvecs"):
components = gtab.bvecs
else:
components = np.asarray(gtab)
if components.ndim == 1:
components = components[np.newaxis, :]
if components.shape[-1] > 3:
components = components[:, :-1]
return components


def gp_prediction(
model: GaussianProcessRegressor,
gtab: GradientTable | np.ndarray,
@@ -69,7 +83,7 @@ def gp_prediction(

"""

X = gtab.bvecs.T if hasattr(gtab, "bvecs") else np.asarray(gtab)
X = _cartesian_components(gtab)

# Check it's fitted as they do in sklearn internally
# https://github.com/scikit-learn/scikit-learn/blob/972e17fe1aa12d481b120ad4a3dc076bae736931/\
@@ -167,7 +181,7 @@ def fit(
# Extract b-vecs: scikit-learn wants (n_samples, n_features)
# where n_features is 3, and n_samples the different diffusion-encoding
# gradient orientations.
X = gtab.bvecs if hasattr(gtab, "bvecs") else np.asarray(gtab)
X = _cartesian_components(gtab)

# Data must have shape (n_samples, n_targets) where n_samples is
# the number of diffusion-encoding gradient orientations, and n_targets
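Finally, a small sketch (not part of the PR) of the new helper's behavior on a plain row-major gradient table; the numbers are illustrative only:

    import numpy as np

    from nifreeze.model._dipy import _cartesian_components

    grad = np.array(
        [
            [1.0, 0.0, 0.0, 1000.0],
            [0.0, 1.0, 0.0, 2000.0],
        ]
    )
    print(_cartesian_components(grad).shape)  # (2, 3): trailing b-value column dropped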