From b766072b6813c5888225d6046262e09c3a0b36a9 Mon Sep 17 00:00:00 2001 From: chaithyagr Date: Fri, 5 Jul 2024 09:32:38 +0200 Subject: [PATCH 01/19] Added src --- pyproject.toml | 1 + src/mri/cli/reconstruct.py | 21 +++- src/mri/cli/retro_recon.py | 115 ++++++++++++++++++ src/mri/operators/fourier/utils/processing.py | 9 +- src/mri/optimizers/utils/metrics.py | 19 +++ 5 files changed, 156 insertions(+), 9 deletions(-) create mode 100644 src/mri/cli/retro_recon.py create mode 100644 src/mri/optimizers/utils/metrics.py diff --git a/pyproject.toml b/pyproject.toml index 3e9ed8c8..5f0070d2 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -32,6 +32,7 @@ scripts=["hydra-zen", "pymrt", "nibabel", "hydra-callbacks"] [project.scripts] recon = "mri.cli.reconstruct:run_recon" dc_adjoint = "mri.cli.reconstruct:run_adjoint" +retro_recon = "mri.cli.retro_recon:run_retro_recon" [project.urls] Homepage = "https://github.com/CEA-COSMIC/pysap-mri" diff --git a/src/mri/cli/reconstruct.py b/src/mri/cli/reconstruct.py index 55a3a621..3152d34c 100644 --- a/src/mri/cli/reconstruct.py +++ b/src/mri/cli/reconstruct.py @@ -17,14 +17,14 @@ save_data_hydra = lambda x, *args, **kwargs: save_data(get_outdir_path(x), *args, **kwargs) -def dc_adjoint(obs_file: str, traj_file: str, coil_compress: str|int, debug: int, +def dc_adjoint(obs_file: str|np.ndarray, traj_file: str, coil_compress: str|int, debug: int, obs_reader, traj_reader, fourier, output_filename: str = "dc_adjoint.pkl"): """ Reconstructs an image using the adjoint operator. Parameters ---------- - obs_file : str + obs_file : str or np.ndarray Path to the observed kspace data file. traj_file : str Path to the trajectory file or the folder holding trajectory file. 
@@ -138,7 +138,7 @@ def dc_adjoint(obs_file: str, traj_file: str, coil_compress: str|int, debug: int def recon(obs_file: str, traj_file: str, mu: float, num_iterations: int, coil_compress: str|int, algorithm: str, debug: int, obs_reader, traj_reader, fourier, linear, sparsity, - output_filename: str = "recon.pkl"): + output_filename: str = "recon.pkl", validation_recon: np.ndarray = None, metrics: dict = None): """Reconstructs an MRI image using the given parameters. Parameters @@ -169,6 +169,10 @@ def recon(obs_file: str, traj_file: str, mu: float, num_iterations: int, coil_co Object representing the sparsity operator. output_filename : str, optional Path to save the reconstructed image, by default "recon.pkl" + validation_recon: np.ndarray, optional + The validation reconstruction to compare the results with, by default None + metrics: dict, optional + List of metrics to evaluate the reconstruction, by default None """ recon_adjoint, additional_data = dc_adjoint( obs_file, @@ -193,14 +197,21 @@ def recon(obs_file: str, traj_file: str, mu: float, num_iterations: int, coil_co lipschitz_cst=fourier_op.impl.get_lipschitz_cst(), ) log.info("Starting reconstruction") - recon, costs, metrics = reconstructor.reconstruct( + recon, costs, metrics_iter = reconstructor.reconstruct( kspace_data=kspace_data, optimization_alg=algorithm, x_init=recon_adjoint, # gain back the first step by initializing with DC Adjoint num_iterations=num_iterations, ) + if validation_recon is not None: + log.info("getting metrics of the reconstruction") + final_metrics = {} + for metric, function in metrics.items(): + final_metrics[metric] = function(recon, validation_recon) + log.info(f"Final Metrics: {final_metrics}") + data_header['metrics'] = final_metrics data_header['costs'] = costs - data_header['metrics'] = metrics + data_header['metrics_iter'] = metrics_iter log.info("Saving reconstruction results") save_data_hydra(output_filename, recon, data_header) diff --git 
a/src/mri/cli/retro_recon.py b/src/mri/cli/retro_recon.py new file mode 100644 index 00000000..6d47ce87 --- /dev/null +++ b/src/mri/cli/retro_recon.py @@ -0,0 +1,115 @@ +from hydra_zen import store, zen +import numpy as np + +from mri.cli.utils import traj_config +from mri.operators.fourier.utils import discard_frequency_outliers +import nibabel as nib +import logging +from mri.cli.reconstruct import recon +from mri.optimizers.utils.metrics import box_psnr, box_ssim + + +log = logging.getLogger(__name__) + + +def retro(obs_file: str, traj_file: str, mu: float, num_iterations: int, coil_compress: str|int, + algorithm: str, debug: int, traj_reader, fourier, forward, linear, sparsity, + output_filename: str = "recon.pkl"): + """Perform retrospective reconstruction on MRI data. + This function takes MRI data and performs retrospective reconstruction using the specified parameters. + + Parameters + ---------- + obs_file : str + Path to the observed MRI data file. + traj_file : str + Path to the trajectory file. + mu : float + Regularization parameter. + num_iterations : int + Number of iterations for the reconstruction algorithm. + coil_compress : str | int + Coil compression method or factor. + algorithm : str + Reconstruction algorithm to use. + debug : int + Debug level for the reconstruction process. + traj_reader : callable + Function to read the trajectory file. + fourier : callable + Fourier transform function. + forward : callable + Forward operator function. + linear : callable + Linear operator function. + sparsity : callable + Sparsity operator function. + output_filename : str, optional + Output filename for the reconstructed data, by default "recon.pkl". 
+ """ + image = nib.load(obs_file).get_fdata(dtype=np.complex64) + shots, traj_params = traj_reader( + traj_file, + dwell_time='min_osf', + ) + kspace_loc = discard_frequency_outliers(shots.reshape(-1, traj_params["dimension"])) + forward_op = forward(kspace_loc, traj_params["img_size"], n_coils=image.shape[0]) + kspace_data = forward_op.op(image) + data_header = { + "n_coils": image.shape[0], + "shifts": [0, 0, 0], + "type": "retro_recon", + "n_adc_samples": shots.shape[1]*traj_params['min_osf'], + "n_slices": 1, + "n_contrasts": 1, + } + recon( + obs_file="", + traj_file=traj_file, + mu=mu, + num_iterations=num_iterations, + coil_compress=coil_compress, + algorithm=algorithm, + debug=debug, + obs_reader=lambda x: (kspace_data, data_header), + traj_reader=traj_reader, + fourier=fourier, + linear=linear, + sparsity=sparsity, + output_filename=output_filename, + validation_recon=np.linalg.norm(image, axis=0), + metrics={ + "psnr": box_psnr, + "ssim": box_ssim, + } + ) + +store( + retro, + traj_reader=traj_config, + algorithm="pogm", + num_iterations=30, + coil_compress=5, + debug=1, + hydra_defaults=[ + "_self_", + {"forward": "gpu"}, + {"fourier": "gpu"}, + {"fourier/density_comp": "pipe_lowmem"}, + {"fourier/smaps": "low_frequency"}, + + ], + name="retro_recon", +) + +# Setup the Hydra Config and callbacks. 
+store.add_to_hydra_store() + + +def run_retro_recon(): + zen(retro).hydra_main( + config_name="retro_recon", + config_path=None, + version_base="1.3", + ) + diff --git a/src/mri/operators/fourier/utils/processing.py b/src/mri/operators/fourier/utils/processing.py index 10dc2865..a13f3a80 100644 --- a/src/mri/operators/fourier/utils/processing.py +++ b/src/mri/operators/fourier/utils/processing.py @@ -102,7 +102,7 @@ def normalize_frequency_locations(samples, Kmax=None): return samples_locations -def discard_frequency_outliers(kspace_loc, kspace_data): +def discard_frequency_outliers(kspace_loc, kspace_data=None): """ This function discards the samples outside [-0.5; 0.5[ for the non-Cartesian case. @@ -125,9 +125,10 @@ def discard_frequency_outliers(kspace_loc, kspace_data): """ kspace_mask = np.all((kspace_loc < 0.5) & (kspace_loc >= -0.5), axis=-1) kspace_loc = kspace_loc[kspace_mask] - kspace_data = kspace_data[:, kspace_mask] - return np.ascontiguousarray(kspace_loc), np.ascontiguousarray(kspace_data) - + if kspace_data is not None: + kspace_data = kspace_data[:, kspace_mask] + return np.ascontiguousarray(kspace_loc), np.ascontiguousarray(kspace_data) + return np.ascontiguousarray(kspace_loc) def gridded_inverse_fourier_transform_nd(kspace_loc, diff --git a/src/mri/optimizers/utils/metrics.py b/src/mri/optimizers/utils/metrics.py new file mode 100644 index 00000000..7edd8b03 --- /dev/null +++ b/src/mri/optimizers/utils/metrics.py @@ -0,0 +1,19 @@ +# -*- coding: utf-8 -*- +########################################################################## +# pySAP - Copyright (C) CEA, 2017 - 2018 +# Distributed under the terms of the CeCILL-B license, as published by +# the CEA-CNRS-INRIA. Refer to the LICENSE file or to +# http://www.cecill.info/licences/Licence_CeCILL-B_V1-en.html +# for details. 
+########################################################################## + +from modopt.math.metrics import ssim, psnr +import numpy as np + + +def box_ssim(y_true, y_pred, mean_factor=0.7): + return ssim(y_true, y_pred, y_true > mean_factor * np.mean(y_true)) + + +def box_psnr(y_true, y_pred, mean_factor=0.7): + return psnr(y_true, y_pred, y_true > mean_factor * np.mean(y_true)) \ No newline at end of file From 363808fc42703309955d9e63fa291dac7caac81d Mon Sep 17 00:00:00 2001 From: chaithyagr Date: Mon, 8 Jul 2024 08:55:01 +0200 Subject: [PATCH 02/19] Added all results --- src/mri/cli/reconstruct.py | 10 +++++++--- src/mri/cli/retro_recon.py | 14 ++++++++------ src/mri/cli/utils.py | 11 ++++++----- 3 files changed, 21 insertions(+), 14 deletions(-) diff --git a/src/mri/cli/reconstruct.py b/src/mri/cli/reconstruct.py index 3152d34c..ba1ef586 100644 --- a/src/mri/cli/reconstruct.py +++ b/src/mri/cli/reconstruct.py @@ -18,7 +18,7 @@ def dc_adjoint(obs_file: str|np.ndarray, traj_file: str, coil_compress: str|int, debug: int, - obs_reader, traj_reader, fourier, output_filename: str = "dc_adjoint.pkl"): + obs_reader, traj_reader, fourier, output_filename: str = "dc_adjoint.nii"): """ Reconstructs an image using the adjoint operator. 
@@ -92,7 +92,11 @@ def dc_adjoint(obs_file: str|np.ndarray, traj_file: str, coil_compress: str|int, ) if kspace_loc.max() > 0.5 or kspace_loc.min() < 0.5: log.warn(f"K-space locations are above the unity range, discarding the outlier data") - kspace_loc, kspace_data = discard_frequency_outliers(kspace_loc, np.squeeze(raw_data)) + if data_header["type"] == "retro_recon": + kspace_loc = discard_frequency_outliers(kspace_loc) + kspace_data = np.squeeze(raw_data) + else: + kspace_loc, kspace_data = discard_frequency_outliers(kspace_loc, np.squeeze(raw_data)) kspace_data = kspace_data.astype(np.complex64) kspace_loc = kspace_loc.astype(np.float32) log.info(f"Phase shifting raw data for Normalized shifts: {normalized_shifts}") @@ -138,7 +142,7 @@ def dc_adjoint(obs_file: str|np.ndarray, traj_file: str, coil_compress: str|int, def recon(obs_file: str, traj_file: str, mu: float, num_iterations: int, coil_compress: str|int, algorithm: str, debug: int, obs_reader, traj_reader, fourier, linear, sparsity, - output_filename: str = "recon.pkl", validation_recon: np.ndarray = None, metrics: dict = None): + output_filename: str = "recon.nii", validation_recon: np.ndarray = None, metrics: dict = None): """Reconstructs an MRI image using the given parameters. 
Parameters diff --git a/src/mri/cli/retro_recon.py b/src/mri/cli/retro_recon.py index 6d47ce87..48ae4adc 100644 --- a/src/mri/cli/retro_recon.py +++ b/src/mri/cli/retro_recon.py @@ -1,20 +1,19 @@ from hydra_zen import store, zen import numpy as np -from mri.cli.utils import traj_config +from mri.cli.utils import traj_config, fourier_op_config from mri.operators.fourier.utils import discard_frequency_outliers import nibabel as nib -import logging +import logging, os from mri.cli.reconstruct import recon from mri.optimizers.utils.metrics import box_psnr, box_ssim - log = logging.getLogger(__name__) def retro(obs_file: str, traj_file: str, mu: float, num_iterations: int, coil_compress: str|int, algorithm: str, debug: int, traj_reader, fourier, forward, linear, sparsity, - output_filename: str = "recon.pkl"): + output_filename: str = "recon.nii"): """Perform retrospective reconstruction on MRI data. This function takes MRI data and performs retrospective reconstruction using the specified parameters. 
@@ -62,6 +61,8 @@ def retro(obs_file: str, traj_file: str, mu: float, num_iterations: int, coil_co "n_adc_samples": shots.shape[1]*traj_params['min_osf'], "n_slices": 1, "n_contrasts": 1, + "oversampling_factor": traj_params['min_osf'], + "trajectory_name": os.path.basename(traj_file), } recon( obs_file="", @@ -89,15 +90,16 @@ def retro(obs_file: str, traj_file: str, mu: float, num_iterations: int, coil_co traj_reader=traj_config, algorithm="pogm", num_iterations=30, + forward=fourier_op_config, coil_compress=5, debug=1, hydra_defaults=[ "_self_", - {"forward": "gpu"}, {"fourier": "gpu"}, {"fourier/density_comp": "pipe_lowmem"}, {"fourier/smaps": "low_frequency"}, - + {"linear": "gpu"}, + {"sparsity": "weighted_sparse"}, ], name="retro_recon", ) diff --git a/src/mri/cli/utils.py b/src/mri/cli/utils.py index d126579c..e8864a76 100644 --- a/src/mri/cli/utils.py +++ b/src/mri/cli/utils.py @@ -38,6 +38,7 @@ fourier_op_config = builds( NonCartesianFFT, populate_full_signature=True, + implementation="gpuNUFFT", zen_exclude=["n_coils"], zen_partial=True, ) @@ -65,11 +66,11 @@ ) fourier_store = store(group="fourier") -fourier_store(fourier_op_config, name="cpu") +fourier_store(fourier_op_config, name="gpu") fourier_store( fourier_op_config, - implementation="gpuNUFFT", - name="gpu", + implementation="finufft", + name="cpu", ) fourier_store( fourier_op_config, @@ -89,7 +90,7 @@ sparsity_store = store(group="sparsity") sparsity_store(sparsity_config, name="weighted_sparse") -def setup_hydra_config(): +def setup_hydra_config(verbose=False): """ Set up the configuration for Hydra. 
@@ -117,7 +118,7 @@ def setup_hydra_config(): '_target_': "hydra_callbacks.RuntimePerformance" }, }, - verbose=True, + verbose=verbose, ) ) From 48cace79f4b1a3a55d46c330b65bd560dbba6371 Mon Sep 17 00:00:00 2001 From: chaithyagr Date: Mon, 8 Jul 2024 13:43:47 +0200 Subject: [PATCH 03/19] Added reader --- src/mri/io/readers.py | 13 +++++++++++++ 1 file changed, 13 insertions(+) create mode 100644 src/mri/io/readers.py diff --git a/src/mri/io/readers.py b/src/mri/io/readers.py new file mode 100644 index 00000000..5e4a8a10 --- /dev/null +++ b/src/mri/io/readers.py @@ -0,0 +1,13 @@ +import nibabel as nib +import numpy as np +import pickle as pkl +from mrinufft.io.siemens import read_siemens_rawdat + + +def load_input_data(obs_file): + if obs_file[:-4] == ".nii": + image = nib.load(obs_file).get_fdata(dtype=np.complex64) + elif obs_file[:-4] == ".pkl": + image = pkl.load(open(obs_file, "rb"))['recon'] + elif obs_file[:-4] == ".dat": + raw_data, data_header = read_siemens_rawdat(obs_file) \ No newline at end of file From 9f1583fbe08d9df5d5476f95ecff5bfdd43f1481 Mon Sep 17 00:00:00 2001 From: chaithyagr Date: Fri, 12 Jul 2024 09:26:43 +0200 Subject: [PATCH 04/19] Save the meterics also as a json file --- src/mri/cli/reconstruct.py | 3 +++ src/mri/io/readers.py | 13 ------------- 2 files changed, 3 insertions(+), 13 deletions(-) delete mode 100644 src/mri/io/readers.py diff --git a/src/mri/cli/reconstruct.py b/src/mri/cli/reconstruct.py index ba1ef586..7efac90f 100644 --- a/src/mri/cli/reconstruct.py +++ b/src/mri/cli/reconstruct.py @@ -7,6 +7,7 @@ from pymrt.recipes.coils import compress_svd from mri.reconstructors import SelfCalibrationReconstructor +import json import numpy as np import pickle as pkl import logging, os, glob @@ -213,6 +214,8 @@ def recon(obs_file: str, traj_file: str, mu: float, num_iterations: int, coil_co for metric, function in metrics.items(): final_metrics[metric] = function(recon, validation_recon) log.info(f"Final Metrics: {final_metrics}") + with 
open(get_outdir_path('metrics.json'), 'w') as f: + f.write(json.dumps(final_metrics, indent=4)) data_header['metrics'] = final_metrics data_header['costs'] = costs data_header['metrics_iter'] = metrics_iter diff --git a/src/mri/io/readers.py b/src/mri/io/readers.py deleted file mode 100644 index 5e4a8a10..00000000 --- a/src/mri/io/readers.py +++ /dev/null @@ -1,13 +0,0 @@ -import nibabel as nib -import numpy as np -import pickle as pkl -from mrinufft.io.siemens import read_siemens_rawdat - - -def load_input_data(obs_file): - if obs_file[:-4] == ".nii": - image = nib.load(obs_file).get_fdata(dtype=np.complex64) - elif obs_file[:-4] == ".pkl": - image = pkl.load(open(obs_file, "rb"))['recon'] - elif obs_file[:-4] == ".dat": - raw_data, data_header = read_siemens_rawdat(obs_file) \ No newline at end of file From c2ec7365f4dc6d76080872dad5c4ecb1ffa9fac4 Mon Sep 17 00:00:00 2001 From: chaithyagr Date: Wed, 24 Jul 2024 09:43:01 +0200 Subject: [PATCH 05/19] Fix noise stuff --- src/mri/cli/reconstruct.py | 1 + src/mri/cli/retro_recon.py | 18 +++++++++++++++- src/mri/cli/utils.py | 44 +++++++++++++++++++++++++------------- 3 files changed, 47 insertions(+), 16 deletions(-) diff --git a/src/mri/cli/reconstruct.py b/src/mri/cli/reconstruct.py index 7efac90f..40803525 100644 --- a/src/mri/cli/reconstruct.py +++ b/src/mri/cli/reconstruct.py @@ -215,6 +215,7 @@ def recon(obs_file: str, traj_file: str, mu: float, num_iterations: int, coil_co final_metrics[metric] = function(recon, validation_recon) log.info(f"Final Metrics: {final_metrics}") with open(get_outdir_path('metrics.json'), 'w') as f: + final_metrics["traj"] = data_header["trajectory_name"] f.write(json.dumps(final_metrics, indent=4)) data_header['metrics'] = final_metrics data_header['costs'] = costs diff --git a/src/mri/cli/retro_recon.py b/src/mri/cli/retro_recon.py index 48ae4adc..3280c2e6 100644 --- a/src/mri/cli/retro_recon.py +++ b/src/mri/cli/retro_recon.py @@ -13,7 +13,7 @@ def retro(obs_file: str, traj_file: 
str, mu: float, num_iterations: int, coil_compress: str|int, algorithm: str, debug: int, traj_reader, fourier, forward, linear, sparsity, - output_filename: str = "recon.nii"): + noise_cov: str = None, output_filename: str = "recon.nii"): """Perform retrospective reconstruction on MRI data. This function takes MRI data and performs retrospective reconstruction using the specified parameters. @@ -54,6 +54,22 @@ def retro(obs_file: str, traj_file: str, mu: float, num_iterations: int, coil_co kspace_loc = discard_frequency_outliers(shots.reshape(-1, traj_params["dimension"])) forward_op = forward(kspace_loc, traj_params["img_size"], n_coils=image.shape[0]) kspace_data = forward_op.op(image) + + # Add noise + if noise_cov is not None: + log.info("Adding noise to the k-space data") + noise_cov_matrix = np.load(noise_cov) + # FIXME: Remove this, this is temporary + noise_cov_matrix = noise_cov_matrix[:kspace_data.shape[0], :kspace_data.shape[0]] + noise_args = { + "mean": np.zeros(image.shape[0]), + "cov": noise_cov_matrix*traj_params['min_osf'], + "size": kspace_data.shape[1:], + "check_valid": "warn", + } + noise = np.random.multivariate_normal(**noise_args) + 1j*np.random.multivariate_normal(**noise_args) + kspace_data += np.moveaxis(noise.astype(np.complex64), -1, 0) + data_header = { "n_coils": image.shape[0], "shifts": [0, 0, 0], diff --git a/src/mri/cli/utils.py b/src/mri/cli/utils.py index e8864a76..b58d9c80 100644 --- a/src/mri/cli/utils.py +++ b/src/mri/cli/utils.py @@ -90,34 +90,48 @@ sparsity_store = store(group="sparsity") sparsity_store(sparsity_config, name="weighted_sparse") -def setup_hydra_config(verbose=False): + +def setup_hydra_config(verbose=False, multirun_gather=False): """ Set up the configuration for Hydra. 
+ Parameters + ---------- + verbose : bool, optional + If True, the verbose mode is enabled, by default False + multirun_gather : bool, optional + If True, the multirun gather is enabled, by default False + Returns ------- None This function does not return anything. """ outdir = os.environ.get('RECON_OUTDIR', 'recon') + callbacks = { + 'git_infos': { + '_target_': "hydra_callbacks.GitInfo", + 'clean': True + }, + 'resource_monitor': { + '_target_': "hydra_callbacks.ResourceMonitor", + 'sample_interval': 1, + 'gpu_monit': True, + }, + 'runtime_perf': { + '_target_': "hydra_callbacks.RuntimePerformance" + }, + } + if multirun_gather: + callbacks['multirun_gather'] = { + '_target_': "hydra_callbacks.MultiRunGatherer", + 'result_file': "metrics.json", + } store( HydraConf( job=JobConf(name="recon"), sweep=SweepDir(dir=os.path.join(outdir, "${hydra.job.name}") + "/${now:%Y-%m-%d-%H-%M-%S}"), - callbacks={ - 'git_infos': { - '_target_': "hydra_callbacks.GitInfo", - 'clean': True - }, - 'resource_monitor': { - '_target_': "hydra_callbacks.ResourceMonitor", - 'sample_interval': 1, - 'gpu_monit': True, - }, - 'runtime_perf': { - '_target_': "hydra_callbacks.RuntimePerformance" - }, - }, + callbacks=callbacks, verbose=verbose, ) ) From 7225035109bfee8aabdccd20d60bb43bb8cbd9cb Mon Sep 17 00:00:00 2001 From: chaithyagr Date: Fri, 26 Jul 2024 12:30:16 +0200 Subject: [PATCH 06/19] All setup --- src/mri/cli/reconstruct.py | 1 + 1 file changed, 1 insertion(+) diff --git a/src/mri/cli/reconstruct.py b/src/mri/cli/reconstruct.py index 40803525..50ec7d64 100644 --- a/src/mri/cli/reconstruct.py +++ b/src/mri/cli/reconstruct.py @@ -213,6 +213,7 @@ def recon(obs_file: str, traj_file: str, mu: float, num_iterations: int, coil_co final_metrics = {} for metric, function in metrics.items(): final_metrics[metric] = function(recon, validation_recon) + final_metrics[f"dc_{metric}"] = function(recon_adjoint, validation_recon) log.info(f"Final Metrics: {final_metrics}") with 
open(get_outdir_path('metrics.json'), 'w') as f: final_metrics["traj"] = data_header["trajectory_name"] From 5a53587e322959d7f787915f800ef899833e8cc6 Mon Sep 17 00:00:00 2001 From: chaithyagr Date: Wed, 31 Jul 2024 11:35:13 +0200 Subject: [PATCH 07/19] Fix eye --- src/mri/cli/reconstruct.py | 1 + src/mri/io/output.py | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/src/mri/cli/reconstruct.py b/src/mri/cli/reconstruct.py index 50ec7d64..a3ec9beb 100644 --- a/src/mri/cli/reconstruct.py +++ b/src/mri/cli/reconstruct.py @@ -13,6 +13,7 @@ import logging, os, glob from functools import partial + log = logging.getLogger(__name__) save_data_hydra = lambda x, *args, **kwargs: save_data(get_outdir_path(x), *args, **kwargs) diff --git a/src/mri/io/output.py b/src/mri/io/output.py index 370ce391..076fb54b 100644 --- a/src/mri/io/output.py +++ b/src/mri/io/output.py @@ -25,7 +25,7 @@ def save_data(filename, recon, header=None): elif extension == 'mat': savemat(filename, save_dict) elif extension == 'nii': - orient = np.ones((4, 4)) + orient = np.eye(4) if 'orientation' in header: orient = header['orientation'] recon = np.abs(recon) From 709ccbc1e93b277facbc6bd52408e9554bb22483 Mon Sep 17 00:00:00 2001 From: chaithyagr Date: Thu, 12 Sep 2024 10:46:43 +0200 Subject: [PATCH 08/19] added support for dc --- src/mri/cli/reconstruct.py | 12 ++++++++++-- src/mri/cli/utils.py | 2 +- src/mri/io/output.py | 2 +- 3 files changed, 12 insertions(+), 4 deletions(-) diff --git a/src/mri/cli/reconstruct.py b/src/mri/cli/reconstruct.py index a3ec9beb..1d7390bf 100644 --- a/src/mri/cli/reconstruct.py +++ b/src/mri/cli/reconstruct.py @@ -123,10 +123,10 @@ def dc_adjoint(obs_file: str|np.ndarray, traj_file: str, coil_compress: str|int, if debug > 0: intermediate = { 'density_comp': fourier_op.impl.density, - 'smaps': fourier_op.impl.smaps, 'traj_params': traj_params, 'data_header': data_header, } + save_data_hydra('smaps.nii', fourier_op.impl.smaps) if coil_compress != -1: 
intermediate['kspace_data'] = kspace_data log.info("Saving Smaps and denisty_comp as intermediates") @@ -144,7 +144,7 @@ def dc_adjoint(obs_file: str|np.ndarray, traj_file: str, coil_compress: str|int, def recon(obs_file: str, traj_file: str, mu: float, num_iterations: int, coil_compress: str|int, algorithm: str, debug: int, obs_reader, traj_reader, fourier, linear, sparsity, - output_filename: str = "recon.nii", validation_recon: np.ndarray = None, metrics: dict = None): + output_filename: str = "recon.nii", remove_dc_for_recon: bool = True, validation_recon: np.ndarray = None, metrics: dict = None): """Reconstructs an MRI image using the given parameters. Parameters @@ -175,6 +175,9 @@ def recon(obs_file: str, traj_file: str, mu: float, num_iterations: int, coil_co Object representing the sparsity operator. output_filename : str, optional Path to save the reconstructed image, by default "recon.pkl" + remove_dc_for_recon: bool, optional + Whether to remove the density compensation for reconstruction, by default True + Note that it will still be used to estimate x_init validation_recon: np.ndarray, optional The validation reconstruction to compare the results with, by default None metrics: dict, optional @@ -191,6 +194,11 @@ def recon(obs_file: str, traj_file: str, mu: float, num_iterations: int, coil_co output_filename='dc_adjoint' + output_filename[-4:], ) fourier_op, kspace_data, traj_params, data_header = additional_data + if remove_dc_for_recon: + fourier_op.impl.density = None + K = fourier_op.op(recon_adjoint) + alpha = np.mean(np.linalg.norm(kspace_data, axis=0)) / np.mean(np.linalg.norm(K, axis=0)) + recon_adjoint *= alpha linear_op = linear(shape=tuple(traj_params["img_size"]), dim=traj_params['dimension']) linear_op.op(recon_adjoint) sparse_op = sparsity(coeffs_shape=linear_op.coeffs_shape, weights=mu) diff --git a/src/mri/cli/utils.py b/src/mri/cli/utils.py index b58d9c80..7092c7e7 100644 --- a/src/mri/cli/utils.py +++ b/src/mri/cli/utils.py @@ -47,7 
+47,7 @@ populate_full_signature=True, zen_partial=True, wavelet_name="sym8", - nb_scale=4, + nb_scale=3, zen_exclude=["shape"] ) sparsity_config = builds( diff --git a/src/mri/io/output.py b/src/mri/io/output.py index 076fb54b..2bd6ce53 100644 --- a/src/mri/io/output.py +++ b/src/mri/io/output.py @@ -26,7 +26,7 @@ def save_data(filename, recon, header=None): savemat(filename, save_dict) elif extension == 'nii': orient = np.eye(4) - if 'orientation' in header: + if header is not None and 'orientation' in header: orient = header['orientation'] recon = np.abs(recon) recon /= np.max(recon) From 46015c65f0d26c6b5ae54ad945cf65599d25ef34 Mon Sep 17 00:00:00 2001 From: chaithyagr Date: Mon, 7 Oct 2024 16:03:45 +0200 Subject: [PATCH 09/19] Added dcm --- src/mri/cli/reconstruct.py | 4 ++- src/mri/io/output.py | 50 ++++++++++++++++++++++++++++++++++++++ 2 files changed, 53 insertions(+), 1 deletion(-) diff --git a/src/mri/cli/reconstruct.py b/src/mri/cli/reconstruct.py index 1d7390bf..2c9df3e4 100644 --- a/src/mri/cli/reconstruct.py +++ b/src/mri/cli/reconstruct.py @@ -55,6 +55,8 @@ def dc_adjoint(obs_file: str|np.ndarray, traj_file: str, coil_compress: str|int, The reconstructed image is saved as 'dc_adjoint.pkl' file. 
""" raw_data, data_header = obs_reader(obs_file) + if obs_reader.keywords['slice_num'] is not None: + data_header['slice_num'] = obs_reader.keywords['slice_num'] log.info(f"Data Header: {data_header}") try: if not os.path.isdir(traj_file) and data_header["trajectory_name"] != os.path.basename(traj_file): @@ -191,7 +193,7 @@ def recon(obs_file: str, traj_file: str, mu: float, num_iterations: int, coil_co obs_reader, traj_reader, fourier, - output_filename='dc_adjoint' + output_filename[-4:], + output_filename='dc_adj_' + output_filename, ) fourier_op, kspace_data, traj_params, data_header = additional_data if remove_dc_for_recon: diff --git a/src/mri/io/output.py b/src/mri/io/output.py index 2bd6ce53..2cf3c235 100644 --- a/src/mri/io/output.py +++ b/src/mri/io/output.py @@ -2,8 +2,53 @@ import pickle as pkl import nibabel as nib import numpy as np +from pydicom.dataset import Dataset, FileDataset +from pydicom import dcmwrite +import pydicom +import datetime +def create_dicom_from_matrix(matrix, filename, slice_index=1): + # Create a new dataset (DICOM object) + ds = Dataset() + + # Populate the required DICOM metadata (minimal example) + ds.PatientName = "Test^Patient" + ds.PatientID = "123456" + ds.Modality = "MR" + ds.StudyInstanceUID = "1.2.3.4.5.6.7.8.9" + ds.SeriesInstanceUID = "1.2.3.4.5.6.7.8.9.1" + ds.SOPInstanceUID = "1.2.3.4.5.6.7.8.9.1.{}".format(slice_index) + ds.SOPClassUID = pydicom.uid.MRImageStorage + ds.InstanceNumber = slice_index + + # Set slice location and spacing if available + ds.SliceLocation = float(slice_index) # Optional: for multi-slice datasets + + # Add the Pixel Data (must be bytes) + ds.Rows, ds.Columns = matrix.shape + ds.PhotometricInterpretation = "MONOCHROME2" + ds.SamplesPerPixel = 1 + ds.BitsAllocated = 16 + ds.BitsStored = 16 + ds.HighBit = 15 + ds.PixelRepresentation = 1 # 1 for signed integers + ds.PixelData = matrix.tobytes() + + # Optional: Add additional DICOM metadata, like image orientation, pixel spacing, etc. 
+ ds.ImagePositionPatient = [0.0, 0.0, float(slice_index)] # Example for slice position + + # Add date and time + dt = datetime.datetime.now() + ds.StudyDate = dt.strftime('%Y%m%d') + ds.StudyTime = dt.strftime('%H%M%S') + # Set DICOM file transfer syntax attributes + ds.is_little_endian = True # Standard for most DICOM files + ds.is_implicit_VR = True # True for implicit VR, False for explicit VR + + # Save the DICOM file + dcmwrite(filename, ds) + def save_data(filename, recon, header=None): """Save reconstructed data to a file. @@ -32,5 +77,10 @@ def save_data(filename, recon, header=None): recon /= np.max(recon) img = nib.Nifti1Image(recon, orient) nib.save(img, filename) + elif extension == 'dcm': + if header is not None and 'slice_num' in header: + create_dicom_from_matrix(recon, filename, header['slice_num']) + else: + raise ValueError("DICOM save is reserved for 2D images.") else: raise ValueError("Unsupported file extension.") From d4713b9be9e04e10c2becb10523b0986f35516e2 Mon Sep 17 00:00:00 2001 From: chaithyagr Date: Mon, 7 Oct 2024 16:48:21 +0200 Subject: [PATCH 10/19] Added support for ggrappa --- src/mri/cli/reconstruct.py | 36 +++++++++++++++++++++++-------- src/mri/cli/utils.py | 13 +++++++++++ src/mri/reconstructors/ggrappa.py | 25 +++++++++++++++++++++ 3 files changed, 65 insertions(+), 9 deletions(-) create mode 100644 src/mri/reconstructors/ggrappa.py diff --git a/src/mri/cli/reconstruct.py b/src/mri/cli/reconstruct.py index 2c9df3e4..15f56ba3 100644 --- a/src/mri/cli/reconstruct.py +++ b/src/mri/cli/reconstruct.py @@ -1,17 +1,19 @@ from hydra_zen import store, zen from mri.io.output import save_data -from mri.cli.utils import raw_config, traj_config, setup_hydra_config, get_outdir_path +from mri.cli.utils import raw_config, traj_config, grappa_config, setup_hydra_config, get_outdir_path from mri.operators.fourier.utils import discard_frequency_outliers from mrinufft.io.utils import add_phase_to_kspace_with_shifts from pymrt.recipes.coils import 
compress_svd from mri.reconstructors import SelfCalibrationReconstructor +from mri.reconstructors.ggrappa import do_grappa_and_append_data, GRAPPA_RECON_AVAILABLE import json import numpy as np import pickle as pkl import logging, os, glob from functools import partial +from typing import Union log = logging.getLogger(__name__) @@ -20,7 +22,8 @@ def dc_adjoint(obs_file: str|np.ndarray, traj_file: str, coil_compress: str|int, debug: int, - obs_reader, traj_reader, fourier, output_filename: str = "dc_adjoint.nii"): + obs_reader, traj_reader, fourier, grappa_recon=None, output_filename: str = "dc_adjoint.nii", + ): """ Reconstructs an image using the adjoint operator. @@ -48,6 +51,8 @@ def dc_adjoint(obs_file: str|np.ndarray, traj_file: str, coil_compress: str|int, It can be: 1) *.pkl / *.mat: Holds the reconstructed results saved in dictionary as `recon`. 2) *.nii : NIFTI file holding the reconstructed images. + grappa_af: Union[list[int], tuple[int, ...]], optional default 1 + The acceleration factor for the GRAPPA reconstruction. 
Returns ------- @@ -86,6 +91,20 @@ def dc_adjoint(obs_file: str|np.ndarray, traj_file: str, coil_compress: str|int, for size in traj_params['img_size'] ]) log.info(f"Trajectory Parameters: {traj_params}") + kspace_data = kspace_data.astype(np.complex64) + kspace_loc = kspace_loc.astype(np.float32) + log.info(f"Phase shifting raw data for Normalized shifts: {normalized_shifts}") + kspace_data = add_phase_to_kspace_with_shifts( + kspace_data, kspace_loc.reshape(-1, traj_params["dimension"]), normalized_shifts + ) + if grappa_recon is not None: + log.info("Performing GRAPPA Reconstruction") + kspace_loc, kspace_data = do_grappa_and_append_data( + kspace_loc, + kspace_data, + traj_params, + grappa_recon, + ) kspace_loc = shots.reshape(-1, traj_params["dimension"]) data_header["shifts"] = data_header['shifts'][:traj_params["dimension"]] normalized_shifts = ( @@ -101,12 +120,6 @@ def dc_adjoint(obs_file: str|np.ndarray, traj_file: str, coil_compress: str|int, kspace_data = np.squeeze(raw_data) else: kspace_loc, kspace_data = discard_frequency_outliers(kspace_loc, np.squeeze(raw_data)) - kspace_data = kspace_data.astype(np.complex64) - kspace_loc = kspace_loc.astype(np.float32) - log.info(f"Phase shifting raw data for Normalized shifts: {normalized_shifts}") - kspace_data = add_phase_to_kspace_with_shifts( - kspace_data, kspace_loc, normalized_shifts - ) if coil_compress != -1: kspace_data = np.ascontiguousarray(compress_svd( kspace_data, @@ -146,7 +159,8 @@ def dc_adjoint(obs_file: str|np.ndarray, traj_file: str, coil_compress: str|int, def recon(obs_file: str, traj_file: str, mu: float, num_iterations: int, coil_compress: str|int, algorithm: str, debug: int, obs_reader, traj_reader, fourier, linear, sparsity, - output_filename: str = "recon.nii", remove_dc_for_recon: bool = True, validation_recon: np.ndarray = None, metrics: dict = None): + output_filename: str = "recon.nii", remove_dc_for_recon: bool = True, validation_recon: np.ndarray = None, metrics: dict = None, + 
grappa_recon=None): """Reconstructs an MRI image using the given parameters. Parameters @@ -246,6 +260,8 @@ def recon(obs_file: str, traj_file: str, mu: float, num_iterations: int, coil_co "_self_", {"fourier": "gpu"}, {"fourier/density_comp": "pipe"}, + {"grappa_recon": "disable"} if GRAPPA_RECON_AVAILABLE else {}, + {}, {"fourier/smaps": "low_frequency"}, ], name="dc_adjoint", @@ -262,6 +278,7 @@ def recon(obs_file: str, traj_file: str, mu: float, num_iterations: int, coil_co "_self_", {"fourier": "gpu"}, {"fourier/density_comp": "pipe"}, + {"grappa_recon": "disable"} if GRAPPA_RECON_AVAILABLE else {}, {"fourier/smaps": "low_frequency"}, {"linear": "gpu"}, {"sparsity": "weighted_sparse"}, @@ -280,6 +297,7 @@ def recon(obs_file: str, traj_file: str, mu: float, num_iterations: int, coil_co hydra_defaults=[ "_self_", {"fourier": "gpu_lowmem"}, + {"grappa_recon": "disable"} if GRAPPA_RECON_AVAILABLE else {}, {"fourier/density_comp": "pipe_lowmem"}, {"fourier/smaps": "low_frequency"}, ], diff --git a/src/mri/cli/utils.py b/src/mri/cli/utils.py index 7092c7e7..b7ae63df 100644 --- a/src/mri/cli/utils.py +++ b/src/mri/cli/utils.py @@ -12,6 +12,19 @@ from modopt.opt.linear import Identity import os +try: + from ggrappa.grappaND import GRAPPA_Recon + grappa_config = builds( + GRAPPA_Recon, + af=1, + zen_exclude=["sig", "acs", "isGolfSparks", "quiet"], + zen_partial=True, + populate_full_signature=True, + ) + grappa_store = store(group="grappa_recon") + grappa_store(grappa_config, name="disable") +except: + pass raw_config = builds(read_arbgrad_rawdat, populate_full_signature=True, zen_partial=True) diff --git a/src/mri/reconstructors/ggrappa.py b/src/mri/reconstructors/ggrappa.py new file mode 100644 index 00000000..af63d4e3 --- /dev/null +++ b/src/mri/reconstructors/ggrappa.py @@ -0,0 +1,25 @@ +import numpy as np + +GRAPPA_RECON_AVAILABLE = False +try: + from ggrappa.grappaND import GRAPPA_Recon + import torch + from ggrappa.utils import get_cart_portion_sparkling, 
get_grappa_filled_data_and_loc +except: + GRAPPA_RECON_AVAILABLE = False + + +def do_grappa_and_append_data(kspace_shots, kspace_data, traj_params, grappa_maker): + if not GRAPPA_RECON_AVAILABLE: + raise ValueError("GRAPPA is not available") + gridded_center = get_grappa_filled_data_and_loc(kspace_shots, kspace_data, traj_params) + grappa_recon, grappa_kernel = grappa_maker( + sig=torch.tensor(gridded_center).permute(0, 2, 3, 1), + acs=None, + isGolfSparks=True, + ) + rec = rec.permute(0, 3, 1, 2).numpy() + extra_loc, extra_data = get_grappa_filled_data_and_loc(gridded_center, grappa_recon, traj_params) + kspace_loc = np.concatenate([kspace_loc.reshape(-1, kspace_loc.shape[-1]), extra_loc], axis=0) + kspace_data = np.concatenate([kspace_data, extra_data], axis=0) + return kspace_loc, kspace_data \ No newline at end of file From 6b843cb84eeb09d43c4bacf887b92c0109da0370 Mon Sep 17 00:00:00 2001 From: chaithyagr Date: Fri, 11 Oct 2024 13:55:37 +0200 Subject: [PATCH 11/19] Added fixes for estimating Grappa --- src/mri/cli/reconstruct.py | 41 +++++++++++++++++-------------- src/mri/cli/utils.py | 3 +-- src/mri/reconstructors/ggrappa.py | 15 ++++++----- 3 files changed, 32 insertions(+), 27 deletions(-) diff --git a/src/mri/cli/reconstruct.py b/src/mri/cli/reconstruct.py index 15f56ba3..4a190c2f 100644 --- a/src/mri/cli/reconstruct.py +++ b/src/mri/cli/reconstruct.py @@ -91,41 +91,44 @@ def dc_adjoint(obs_file: str|np.ndarray, traj_file: str, coil_compress: str|int, for size in traj_params['img_size'] ]) log.info(f"Trajectory Parameters: {traj_params}") - kspace_data = kspace_data.astype(np.complex64) - kspace_loc = kspace_loc.astype(np.float32) + data_header["shifts"] = data_header['shifts'][:traj_params["dimension"]] + normalized_shifts = ( + np.array(data_header["shifts"]) + / np.array(traj_params["FOV"]) + * np.array(traj_params["img_size"]) + / 1000 + ) + kspace_data = np.squeeze(raw_data).astype(np.complex64) + kspace_loc = shots.reshape(-1, 
traj_params["dimension"]).astype(np.float32) log.info(f"Phase shifting raw data for Normalized shifts: {normalized_shifts}") kspace_data = add_phase_to_kspace_with_shifts( kspace_data, kspace_loc.reshape(-1, traj_params["dimension"]), normalized_shifts ) + if coil_compress != -1: + log.info("Compressing coils") + kspace_data = np.ascontiguousarray(compress_svd( + kspace_data, + k_svd=coil_compress, + coil_axis=0 + )).astype(np.complex64) if grappa_recon is not None: - log.info("Performing GRAPPA Reconstruction") + af_string = data_header['trajectory_name'].split('_G')[1].split('_')[0].split('x') + log.info("Performing GRAPPA Reconstruction: AF: %s", af_string) + grappa_recon.keywords['af'] = tuple([int(float(af)) for af in af_string]) + log.info("GRAPPA AF: %s", grappa_recon.keywords['af']) kspace_loc, kspace_data = do_grappa_and_append_data( kspace_loc, kspace_data, traj_params, grappa_recon, ) - kspace_loc = shots.reshape(-1, traj_params["dimension"]) - data_header["shifts"] = data_header['shifts'][:traj_params["dimension"]] - normalized_shifts = ( - np.array(data_header["shifts"]) - / np.array(traj_params["FOV"]) - * np.array(traj_params["img_size"]) - / 1000 - ) if kspace_loc.max() > 0.5 or kspace_loc.min() < 0.5: log.warn(f"K-space locations are above the unity range, discarding the outlier data") if data_header["type"] == "retro_recon": kspace_loc = discard_frequency_outliers(kspace_loc) kspace_data = np.squeeze(raw_data) else: - kspace_loc, kspace_data = discard_frequency_outliers(kspace_loc, np.squeeze(raw_data)) - if coil_compress != -1: - kspace_data = np.ascontiguousarray(compress_svd( - kspace_data, - k_svd=coil_compress, - coil_axis=0 - )) + kspace_loc, kspace_data = discard_frequency_outliers(kspace_loc, kspace_data) fourier.keywords['smaps'] = partial( fourier.keywords['smaps'], kspace_data=kspace_data, @@ -207,6 +210,7 @@ def recon(obs_file: str, traj_file: str, mu: float, num_iterations: int, coil_co obs_reader, traj_reader, fourier, + 
grappa_recon=grappa_recon, output_filename='dc_adj_' + output_filename, ) fourier_op, kspace_data, traj_params, data_header = additional_data @@ -261,7 +265,6 @@ def recon(obs_file: str, traj_file: str, mu: float, num_iterations: int, coil_co {"fourier": "gpu"}, {"fourier/density_comp": "pipe"}, {"grappa_recon": "disable"} if GRAPPA_RECON_AVAILABLE else {}, - {}, {"fourier/smaps": "low_frequency"}, ], name="dc_adjoint", diff --git a/src/mri/cli/utils.py b/src/mri/cli/utils.py index b7ae63df..1428f506 100644 --- a/src/mri/cli/utils.py +++ b/src/mri/cli/utils.py @@ -16,8 +16,7 @@ from ggrappa.grappaND import GRAPPA_Recon grappa_config = builds( GRAPPA_Recon, - af=1, - zen_exclude=["sig", "acs", "isGolfSparks", "quiet"], + zen_exclude=["sig", "acs", "isGolfSparks", "quiet", "af"], zen_partial=True, populate_full_signature=True, ) diff --git a/src/mri/reconstructors/ggrappa.py b/src/mri/reconstructors/ggrappa.py index af63d4e3..0683d873 100644 --- a/src/mri/reconstructors/ggrappa.py +++ b/src/mri/reconstructors/ggrappa.py @@ -5,21 +5,24 @@ from ggrappa.grappaND import GRAPPA_Recon import torch from ggrappa.utils import get_cart_portion_sparkling, get_grappa_filled_data_and_loc + GRAPPA_RECON_AVAILABLE = True except: - GRAPPA_RECON_AVAILABLE = False + pass -def do_grappa_and_append_data(kspace_shots, kspace_data, traj_params, grappa_maker): +def do_grappa_and_append_data(kspace_loc, kspace_data, traj_params, grappa_maker): + kspace_shots = kspace_loc.reshape(traj_params['num_shots'], -1, traj_params['dimension']) + osf = kspace_shots.shape[1] / traj_params['num_samples_per_shot'] if not GRAPPA_RECON_AVAILABLE: raise ValueError("GRAPPA is not available") - gridded_center = get_grappa_filled_data_and_loc(kspace_shots, kspace_data, traj_params) + gridded_center = get_cart_portion_sparkling(kspace_shots, traj_params, kspace_data, osf=int(osf)) grappa_recon, grappa_kernel = grappa_maker( sig=torch.tensor(gridded_center).permute(0, 2, 3, 1), acs=None, isGolfSparks=True, ) - 
rec = rec.permute(0, 3, 1, 2).numpy() + grappa_recon = grappa_recon.permute(0, 3, 1, 2).numpy() extra_loc, extra_data = get_grappa_filled_data_and_loc(gridded_center, grappa_recon, traj_params) - kspace_loc = np.concatenate([kspace_loc.reshape(-1, kspace_loc.shape[-1]), extra_loc], axis=0) - kspace_data = np.concatenate([kspace_data, extra_data], axis=0) + kspace_loc = np.concatenate([kspace_loc, extra_loc], axis=0) + kspace_data = np.hstack([kspace_data, extra_data]) return kspace_loc, kspace_data \ No newline at end of file From 8ae3d40e0e8c17c1ac22d461101cf3826625a1fc Mon Sep 17 00:00:00 2001 From: chaithyagr Date: Fri, 11 Oct 2024 13:56:42 +0200 Subject: [PATCH 12/19] Also save data --- src/mri/cli/reconstruct.py | 1 + 1 file changed, 1 insertion(+) diff --git a/src/mri/cli/reconstruct.py b/src/mri/cli/reconstruct.py index 4a190c2f..2c7ed608 100644 --- a/src/mri/cli/reconstruct.py +++ b/src/mri/cli/reconstruct.py @@ -143,6 +143,7 @@ def dc_adjoint(obs_file: str|np.ndarray, traj_file: str, coil_compress: str|int, 'density_comp': fourier_op.impl.density, 'traj_params': traj_params, 'data_header': data_header, + 'kspace_loc': kspace_loc, } save_data_hydra('smaps.nii', fourier_op.impl.smaps) if coil_compress != -1: From 6971516fd869fd3a9d4b2a302b85f26a49ee9ea3 Mon Sep 17 00:00:00 2001 From: chaithyagr Date: Fri, 11 Oct 2024 16:24:53 +0200 Subject: [PATCH 13/19] A couple of fixes --- src/mri/cli/reconstruct.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/src/mri/cli/reconstruct.py b/src/mri/cli/reconstruct.py index 2c7ed608..f073692f 100644 --- a/src/mri/cli/reconstruct.py +++ b/src/mri/cli/reconstruct.py @@ -111,10 +111,13 @@ def dc_adjoint(obs_file: str|np.ndarray, traj_file: str, coil_compress: str|int, k_svd=coil_compress, coil_axis=0 )).astype(np.complex64) - if grappa_recon is not None: + try: af_string = data_header['trajectory_name'].split('_G')[1].split('_')[0].split('x') - log.info("Performing GRAPPA Reconstruction: AF: %s", 
af_string) grappa_recon.keywords['af'] = tuple([int(float(af)) for af in af_string]) + except: + grappa_recon.keywords['af'] = (1, ) + if grappa_recon is not None and np.prod(grappa_recon.keywords['af'])>1: + log.info("Performing GRAPPA Reconstruction: AF: %s", af_string) log.info("GRAPPA AF: %s", grappa_recon.keywords['af']) kspace_loc, kspace_data = do_grappa_and_append_data( kspace_loc, From cc32592000237aaf022045c32e0cd67969bfb50e Mon Sep 17 00:00:00 2001 From: chaithyagr Date: Mon, 14 Oct 2024 16:52:43 +0200 Subject: [PATCH 14/19] Fix ggrappa --- src/mri/reconstructors/ggrappa.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/src/mri/reconstructors/ggrappa.py b/src/mri/reconstructors/ggrappa.py index 0683d873..0edd616a 100644 --- a/src/mri/reconstructors/ggrappa.py +++ b/src/mri/reconstructors/ggrappa.py @@ -12,10 +12,9 @@ def do_grappa_and_append_data(kspace_loc, kspace_data, traj_params, grappa_maker): kspace_shots = kspace_loc.reshape(traj_params['num_shots'], -1, traj_params['dimension']) - osf = kspace_shots.shape[1] / traj_params['num_samples_per_shot'] if not GRAPPA_RECON_AVAILABLE: raise ValueError("GRAPPA is not available") - gridded_center = get_cart_portion_sparkling(kspace_shots, traj_params, kspace_data, osf=int(osf)) + gridded_center = get_cart_portion_sparkling(kspace_shots, traj_params, kspace_data) grappa_recon, grappa_kernel = grappa_maker( sig=torch.tensor(gridded_center).permute(0, 2, 3, 1), acs=None, From 1cd6c0e96e678de097cdda90e35aeeb417a35fc4 Mon Sep 17 00:00:00 2001 From: chaithyagr Date: Thu, 23 Jan 2025 15:11:11 +0100 Subject: [PATCH 15/19] Add some defaults --- src/mri/cli/reconstruct.py | 4 ++-- src/mri/cli/utils.py | 1 + 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/src/mri/cli/reconstruct.py b/src/mri/cli/reconstruct.py index f073692f..491ede66 100644 --- a/src/mri/cli/reconstruct.py +++ b/src/mri/cli/reconstruct.py @@ -13,8 +13,6 @@ import pickle as pkl import logging, os, glob from functools 
import partial -from typing import Union - log = logging.getLogger(__name__) @@ -280,6 +278,7 @@ def recon(obs_file: str, traj_file: str, mu: float, num_iterations: int, coil_co algorithm="pogm", num_iterations=30, coil_compress=10, + mu=1e-7, debug=1, hydra_defaults=[ "_self_", @@ -300,6 +299,7 @@ def recon(obs_file: str, traj_file: str, mu: float, num_iterations: int, coil_co algorithm="pogm", num_iterations=30, coil_compress=5, + mu=1e-7, debug=1, hydra_defaults=[ "_self_", diff --git a/src/mri/cli/utils.py b/src/mri/cli/utils.py index 1428f506..e51c69d2 100644 --- a/src/mri/cli/utils.py +++ b/src/mri/cli/utils.py @@ -43,6 +43,7 @@ smaps_config = builds( get_smaps("low_frequency"), populate_full_signature=True, + blurr_factor=30.0, # We estimate density, with separate args. It is passed by compute_smaps in mri-nufft zen_exclude=["density"], zen_partial=True, From f4450c6a2d44210acc2d6f97eeeca73bf6161989 Mon Sep 17 00:00:00 2001 From: chaithyagr Date: Mon, 3 Feb 2025 13:52:29 +0100 Subject: [PATCH 16/19] Added function --- src/mri/cli/reconstruct.py | 7 +++++-- src/mri/reconstructors/ggrappa.py | 4 ++-- 2 files changed, 7 insertions(+), 4 deletions(-) diff --git a/src/mri/cli/reconstruct.py b/src/mri/cli/reconstruct.py index 491ede66..f11d9b8e 100644 --- a/src/mri/cli/reconstruct.py +++ b/src/mri/cli/reconstruct.py @@ -1,9 +1,9 @@ from hydra_zen import store, zen from mri.io.output import save_data -from mri.cli.utils import raw_config, traj_config, grappa_config, setup_hydra_config, get_outdir_path +from mri.cli.utils import raw_config, traj_config, setup_hydra_config, get_outdir_path from mri.operators.fourier.utils import discard_frequency_outliers -from mrinufft.io.utils import add_phase_to_kspace_with_shifts +from mrinufft.io.utils import add_phase_to_kspace_with_shifts, remove_extra_kspace_samples from pymrt.recipes.coils import compress_svd from mri.reconstructors import SelfCalibrationReconstructor from mri.reconstructors.ggrappa import 
do_grappa_and_append_data, GRAPPA_RECON_AVAILABLE @@ -98,6 +98,7 @@ def dc_adjoint(obs_file: str|np.ndarray, traj_file: str, coil_compress: str|int, ) kspace_data = np.squeeze(raw_data).astype(np.complex64) kspace_loc = shots.reshape(-1, traj_params["dimension"]).astype(np.float32) + kspace_data = remove_extra_kspace_samples(kspace_data, shots.shape[1]) log.info(f"Phase shifting raw data for Normalized shifts: {normalized_shifts}") kspace_data = add_phase_to_kspace_with_shifts( kspace_data, kspace_loc.reshape(-1, traj_params["dimension"]), normalized_shifts @@ -114,6 +115,7 @@ def dc_adjoint(obs_file: str|np.ndarray, traj_file: str, coil_compress: str|int, grappa_recon.keywords['af'] = tuple([int(float(af)) for af in af_string]) except: grappa_recon.keywords['af'] = (1, ) + grappa_recon.keywords['delta'] = 0 if grappa_recon is not None and np.prod(grappa_recon.keywords['af'])>1: log.info("Performing GRAPPA Reconstruction: AF: %s", af_string) log.info("GRAPPA AF: %s", grappa_recon.keywords['af']) @@ -122,6 +124,7 @@ def dc_adjoint(obs_file: str|np.ndarray, traj_file: str, coil_compress: str|int, kspace_data, traj_params, grappa_recon, + acs=data_header["acs"], # Pass ACS if read in data (external) ) if kspace_loc.max() > 0.5 or kspace_loc.min() < 0.5: log.warn(f"K-space locations are above the unity range, discarding the outlier data") diff --git a/src/mri/reconstructors/ggrappa.py b/src/mri/reconstructors/ggrappa.py index 0edd616a..c0c030cb 100644 --- a/src/mri/reconstructors/ggrappa.py +++ b/src/mri/reconstructors/ggrappa.py @@ -10,14 +10,14 @@ pass -def do_grappa_and_append_data(kspace_loc, kspace_data, traj_params, grappa_maker): +def do_grappa_and_append_data(kspace_loc, kspace_data, traj_params, grappa_maker, acs=None): kspace_shots = kspace_loc.reshape(traj_params['num_shots'], -1, traj_params['dimension']) if not GRAPPA_RECON_AVAILABLE: raise ValueError("GRAPPA is not available") gridded_center = get_cart_portion_sparkling(kspace_shots, traj_params, 
kspace_data) grappa_recon, grappa_kernel = grappa_maker( sig=torch.tensor(gridded_center).permute(0, 2, 3, 1), - acs=None, + acs=torch.tensor(acs).permute(0, 2, 3, 1) if acs is not None else None, isGolfSparks=True, ) grappa_recon = grappa_recon.permute(0, 3, 1, 2).numpy() From 0001cb71cbfe4addff6ca02054c65efa175873e7 Mon Sep 17 00:00:00 2001 From: chaithyagr Date: Mon, 3 Feb 2025 14:01:15 +0100 Subject: [PATCH 17/19] Fixes on dc_adjoint --- src/mri/cli/reconstruct.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/src/mri/cli/reconstruct.py b/src/mri/cli/reconstruct.py index f11d9b8e..11f6c3e6 100644 --- a/src/mri/cli/reconstruct.py +++ b/src/mri/cli/reconstruct.py @@ -21,7 +21,7 @@ def dc_adjoint(obs_file: str|np.ndarray, traj_file: str, coil_compress: str|int, debug: int, obs_reader, traj_reader, fourier, grappa_recon=None, output_filename: str = "dc_adjoint.nii", - ): + return_data=False): """ Reconstructs an image using the adjoint operator. @@ -161,7 +161,8 @@ def dc_adjoint(obs_file: str|np.ndarray, traj_file: str, coil_compress: str|int, log.info("Saving DC Adjoint") data_header['traj_params'] = traj_params save_data_hydra(output_filename, dc_adjoint, data_header) - return dc_adjoint, (fourier_op, kspace_data, traj_params, data_header) + if return_data: + return dc_adjoint, (fourier_op, kspace_data, traj_params, data_header) @@ -217,6 +218,7 @@ def recon(obs_file: str, traj_file: str, mu: float, num_iterations: int, coil_co fourier, grappa_recon=grappa_recon, output_filename='dc_adj_' + output_filename, + return_data=True, ) fourier_op, kspace_data, traj_params, data_header = additional_data if remove_dc_for_recon: From 7643a235b9110389510aab74b656e6a9d75eedac Mon Sep 17 00:00:00 2001 From: chaithyagr Date: Mon, 3 Feb 2025 16:48:20 +0100 Subject: [PATCH 18/19] Added fixes --- src/mri/cli/reconstruct.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/src/mri/cli/reconstruct.py b/src/mri/cli/reconstruct.py index
11f6c3e6..a364e64a 100644 --- a/src/mri/cli/reconstruct.py +++ b/src/mri/cli/reconstruct.py @@ -99,6 +99,7 @@ def dc_adjoint(obs_file: str|np.ndarray, traj_file: str, coil_compress: str|int, kspace_data = np.squeeze(raw_data).astype(np.complex64) kspace_loc = shots.reshape(-1, traj_params["dimension"]).astype(np.float32) kspace_data = remove_extra_kspace_samples(kspace_data, shots.shape[1]) + kspace_data = kspace_data.reshape(kspace_data.shape[0], -1) log.info(f"Phase shifting raw data for Normalized shifts: {normalized_shifts}") kspace_data = add_phase_to_kspace_with_shifts( kspace_data, kspace_loc.reshape(-1, traj_params["dimension"]), normalized_shifts @@ -112,6 +113,10 @@ def dc_adjoint(obs_file: str|np.ndarray, traj_file: str, coil_compress: str|int, )).astype(np.complex64) try: af_string = data_header['trajectory_name'].split('_G')[1].split('_')[0].split('x') + if len(af_string) > 1 and 'd' in af_string[1]: + af_caipi = af_string[1].split('d') + af_string[1] = af_caipi[0] + grappa_recon.keywords['delta'] = int(af_caipi[1]) grappa_recon.keywords['af'] = tuple([int(float(af)) for af in af_string]) except: grappa_recon.keywords['af'] = (1, ) From f9707158eb2bc432c5984a2d60410000871ad769 Mon Sep 17 00:00:00 2001 From: chaithyagr Date: Tue, 4 Feb 2025 14:37:20 +0100 Subject: [PATCH 19/19] recon --- src/mri/cli/reconstruct.py | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/src/mri/cli/reconstruct.py b/src/mri/cli/reconstruct.py index a364e64a..55c2a852 100644 --- a/src/mri/cli/reconstruct.py +++ b/src/mri/cli/reconstruct.py @@ -104,13 +104,6 @@ def dc_adjoint(obs_file: str|np.ndarray, traj_file: str, coil_compress: str|int, kspace_data = add_phase_to_kspace_with_shifts( kspace_data, kspace_loc.reshape(-1, traj_params["dimension"]), normalized_shifts ) - if coil_compress != -1: - log.info("Compressing coils") - kspace_data = np.ascontiguousarray(compress_svd( - kspace_data, - k_svd=coil_compress, - coil_axis=0 - 
)).astype(np.complex64) try: af_string = data_header['trajectory_name'].split('_G')[1].split('_')[0].split('x') if len(af_string) > 1 and 'd' in af_string[1]: @@ -131,6 +124,13 @@ def dc_adjoint(obs_file: str|np.ndarray, traj_file: str, coil_compress: str|int, grappa_recon, acs=data_header["acs"], # Pass ACS if read in data (external) ) + if coil_compress != -1: + log.info("Compressing coils") + kspace_data = np.ascontiguousarray(compress_svd( + kspace_data, + k_svd=coil_compress, + coil_axis=0 + )).astype(np.complex64) if kspace_loc.max() > 0.5 or kspace_loc.min() < 0.5: log.warn(f"K-space locations are above the unity range, discarding the outlier data") if data_header["type"] == "retro_recon":