Skip to content
Open
Show file tree
Hide file tree
Changes from 3 commits
Commits
Show all changes
21 commits
Select commit Hold shift + click to select a range
f950146
Add magic_enum library in CMake
daljit46 Mar 16, 2026
c69c1cb
Add wrapper functions for enum <--> string conversion
daljit46 Mar 16, 2026
ca8cdea
Replace string with enums in cmd code
daljit46 Mar 16, 2026
87ad065
Address clang-tidy warnings
daljit46 Mar 16, 2026
333ba84
Add a new helper to concatenate enums as a string
daljit46 Mar 17, 2026
ff14df8
Add template version of type_choice()
daljit46 Mar 17, 2026
cbc3663
Add helper function get_option_choice for enums
daljit46 Mar 17, 2026
885b8c6
Use enum templated type_choice() for commands + review feedback
daljit46 Mar 17, 2026
470133d
Rename lower_case_enums -> lower_case_enum_names
daljit46 Mar 17, 2026
8489bad
Add comments for new functions in mrtrix.h
daljit46 Mar 17, 2026
8787cc0
Use switch instead of if statement for -weighted option
daljit46 Mar 17, 2026
f769a30
Address clang-tidy const correctness warning
daljit46 Mar 17, 2026
baf6de0
Fix typo
daljit46 Mar 17, 2026
1fe17bc
Fix formatting
daljit46 Mar 17, 2026
4f05359
Fix compilation errors
daljit46 Mar 17, 2026
c0935dd
mrdegibbs: change -mode to use integers rather than choices
daljit46 Mar 18, 2026
8797ef7
mtnormalise: change -number to use integers rather than choices
daljit46 Mar 18, 2026
770f4ec
peaksconvert: use enums instead of strings for choices
daljit46 Mar 18, 2026
0ec0ac9
Change join_enum delimiter to comma followed by space
daljit46 Mar 19, 2026
a977dd1
Update documentation following CLI changes
daljit46 Mar 19, 2026
511fd5e
Move enum functions to new namespace and file
daljit46 Mar 20, 2026
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions CMakeLists.txt
Original file line number Diff line number Diff line change
Expand Up @@ -35,6 +35,7 @@ option(MRTRIX_USE_SYSTEM_GTEST "Use system-installed Google Test library" OFF)
option(MRTRIX_USE_SYSTEM_DAWN "Use system-installed Dawn library" OFF)
option(MRTRIX_USE_SYSTEM_SLANG "Use system-installed Slang library" OFF)
option(MRTRIX_USE_SYSTEM_TCB_SPAN "Use system-installed TCB Span library" OFF)
option(MRTRIX_USE_SYSTEM_MAGIC_ENUM "Use system-installed Magic Enum library" OFF)

if(MRTRIX_BUILD_TESTS)
list(APPEND CMAKE_CTEST_ARGUMENTS "--output-on-failure")
Expand Down
13 changes: 13 additions & 0 deletions cmake/Dependencies.cmake
Original file line number Diff line number Diff line change
Expand Up @@ -230,3 +230,16 @@ else()
add_library(tcb::span ALIAS tcb_span)
endif()

# magic_enum: header-only compile-time enum reflection library,
# used for the enum <-> string name conversion helpers.
set(MAGIC_ENUM_VERSION 0.9.7)
if(MRTRIX_USE_SYSTEM_MAGIC_ENUM)
find_package(magic_enum ${MAGIC_ENUM_VERSION} CONFIG REQUIRED)
else()
# No system package requested: fetch the pinned release tarball from GitHub.
set(magic_enum_url "https://github.com/Neargye/magic_enum/archive/refs/tags/v${MAGIC_ENUM_VERSION}.tar.gz")
FetchContent_Declare(
magic_enum
DOWNLOAD_EXTRACT_TIMESTAMP ON
URL ${magic_enum_url}
)
FetchContent_MakeAvailable(magic_enum)
endif()
2 changes: 1 addition & 1 deletion cpp/cmd/CMakeLists.txt
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,7 @@ if(MRTRIX_USE_PCH)
file(GENERATE OUTPUT ${CMAKE_CURRENT_BINARY_DIR}/pch_cmd.cpp CONTENT "int main(){}")
add_executable(pch_cmd ${CMAKE_CURRENT_BINARY_DIR}/pch_cmd.cpp)
target_include_directories(pch_cmd PRIVATE ${CMAKE_CURRENT_SOURCE_DIR}/../core)
target_link_libraries(pch_cmd PRIVATE Eigen3::Eigen mrtrix::common)
target_link_libraries(pch_cmd PRIVATE Eigen3::Eigen mrtrix::common magic_enum::magic_enum)
target_precompile_headers(pch_cmd PRIVATE
[["app.h"]]
[["image.h"]]
Expand Down
18 changes: 10 additions & 8 deletions cpp/cmd/connectome2tck.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -39,7 +39,8 @@ using namespace MR::DWI;
using namespace MR::DWI::Tractography;
using namespace MR::DWI::Tractography::Connectome;

const std::vector<std::string> file_outputs = {"per_edge", "per_node", "single"};
// Output layouts for the -files option; the CLI choice strings are the
// lower-cased enumerator names ("per_edge", "per_node", "single").
enum class FileOutput { PER_EDGE, PER_NODE, SINGLE };
const std::vector<std::string> file_outputs = lower_case_enums<FileOutput>();

// clang-format off
const OptionGroup TrackOutputOptions = OptionGroup ("Options for determining the content / format of output files")
Expand Down Expand Up @@ -252,7 +253,8 @@ void run() {
if (exclusive && !manual_node_list)
WARN("List of nodes of interest not provided; -exclusive option will have no effect");

const int file_format = get_option_value("files", 0);
opt = get_options("files");
const FileOutput file_format = opt.empty() ? FileOutput::PER_EDGE : enum_from_name<FileOutput>(opt[0][0]);

opt = get_options("exemplars");
if (!opt.empty()) {
Expand Down Expand Up @@ -335,7 +337,7 @@ void run() {
generator.finalize();

// Get exemplars to the output file(s), depending on the requested format
if (file_format == 0) { // One file per edge
if (file_format == FileOutput::PER_EDGE) { // One file per edge
if (exclusive) {
ProgressBar progress("writing exemplars to files", nodes.size() * (nodes.size() - 1) / 2);
for (size_t i = 0; i != nodes.size(); ++i) {
Expand All @@ -362,14 +364,14 @@ void run() {
}
}
}
} else if (file_format == 1) { // One file per node
} else if (file_format == FileOutput::PER_NODE) { // One file per node
ProgressBar progress("writing exemplars to files", nodes.size());
for (std::vector<node_t>::const_iterator n = nodes.begin(); n != nodes.end(); ++n) {
generator.write(
*n, prefix + str(*n) + ".tck", !weights_prefix.empty() ? (weights_prefix + str(*n) + ".csv") : "");
++progress;
}
} else if (file_format == 2) { // Single file
} else if (file_format == FileOutput::SINGLE) { // Single file
std::string path = prefix;
if (path.rfind(".tck") != path.size() - 4)
path += ".tck";
Expand All @@ -384,7 +386,7 @@ void run() {
WriterExtraction writer(properties, nodes, exclusive, keep_self);

switch (file_format) {
case 0: // One file per edge
case FileOutput::PER_EDGE: // One file per edge
for (size_t i = 0; i != nodes.size(); ++i) {
const node_t one = nodes[i];
if (exclusive) {
Expand All @@ -406,12 +408,12 @@ void run() {
}
INFO("A total of " + str(writer.file_count()) + " output track files will be generated (one for each edge)");
break;
case 1: // One file per node
case FileOutput::PER_NODE: // One file per node
for (std::vector<node_t>::const_iterator i = nodes.begin(); i != nodes.end(); ++i)
writer.add(*i, prefix + str(*i) + ".tck", !weights_prefix.empty() ? (weights_prefix + str(*i) + ".csv") : "");
INFO("A total of " + str(writer.file_count()) + " output track files will be generated (one for each node)");
break;
case 2: // Single file
case FileOutput::SINGLE: // Single file
std::string path = prefix;
if (path.rfind(".tck") != path.size() - 4)
path += ".tck";
Expand Down
18 changes: 9 additions & 9 deletions cpp/cmd/connectomeedit.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -23,8 +23,8 @@ using namespace MR::Connectome;
using namespace MR::Math;
using namespace App;

const std::vector<std::string> operations = {
"to_symmetric", "upper_triangular", "lower_triangular", "transpose", "zero_diagonal"};
// Connectome transformations selectable as the "operation" argument; the CLI
// choice strings are the lower-cased enumerator names (e.g. "to_symmetric").
enum class Operation { TO_SYMMETRIC, UPPER_TRIANGULAR, LOWER_TRIANGULAR, TRANSPOSE, ZERO_DIAGONAL };
const std::vector<std::string> operations = lower_case_enums<Operation>();

// clang-format off
void usage() {
Expand All @@ -47,26 +47,26 @@ void usage() {
void run() {
MR::Connectome::matrix_type connectome = File::Matrix::load_matrix(argument[0]);
MR::Connectome::check(connectome);
const int op = argument[1];
const Operation op = enum_from_name<Operation>(argument[1]);
const std::string_view output_path = argument[2];

INFO("Applying \'" + str(operations[op]) + "\' transformation to the input connectome.");
INFO("Applying \'" + lowercase_enum_name(op) + "\' transformation to the input connectome.");

switch (op) {
case 0:
case Operation::TO_SYMMETRIC:
MR::Connectome::to_symmetric(connectome);
break;
case 1:
case Operation::UPPER_TRIANGULAR:
MR::Connectome::to_upper(connectome);
break;
case 2:
case Operation::LOWER_TRIANGULAR:
MR::Connectome::to_upper(connectome);
connectome.transposeInPlace();
break;
case 3:
case Operation::TRANSPOSE:
connectome.transposeInPlace();
break;
case 4:
case Operation::ZERO_DIAGONAL:
connectome.matrix().diagonal().setZero();
break;
default:
Expand Down
14 changes: 8 additions & 6 deletions cpp/cmd/connectomestats.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -41,7 +41,9 @@ using Math::Stats::matrix_type;
using Math::Stats::vector_type;
using Stats::PermTest::count_matrix_type;

const std::vector<std::string> algorithms = {"nbs", "tfnbs", "none"};
// Statistical enhancement algorithms selectable on the command line; the CLI
// choice strings are the lower-cased enumerator names ("nbs", "tfnbs", "none").
// NOTE(review): "None" breaks the UPPER_SNAKE convention used by the other
// enumerators here and in sibling commands; renaming it to NONE would be more
// consistent, but requires updating every use site (e.g. the switch in run()),
// so it is flagged rather than changed in place.
enum class Algorithm { NBS, TFNBS, None };

const std::vector<std::string> algorithms = lower_case_enums<Algorithm>();

constexpr default_type default_tfnbs_dh = 0.1;
constexpr default_type default_tfnbs_e = 0.4;
Expand Down Expand Up @@ -184,21 +186,21 @@ void run() {

// Initialise enhancement algorithm
std::shared_ptr<Stats::EnhancerBase> enhancer;
switch (static_cast<MR::App::ParsedArgument::IntType>(argument[1])) {
case 0: {
switch (enum_from_name<Algorithm>(argument[1])) {
case Algorithm::NBS: {
auto opt = get_options("threshold");
if (opt.empty())
throw Exception("For NBS algorithm, -threshold option must be provided");
enhancer.reset(new MR::Connectome::Enhance::NBS(num_nodes, opt[0][0]));
} break;
case 1: {
case Algorithm::TFNBS: {
std::shared_ptr<Stats::TFCE::EnhancerBase> base(new MR::Connectome::Enhance::NBS(num_nodes));
enhancer.reset(new Stats::TFCE::Wrapper(base));
load_tfce_parameters(*(dynamic_cast<Stats::TFCE::Wrapper *>(enhancer.get())));
if (!get_options("threshold").empty())
WARN(std::string(argument[1]) + " is a threshold-free algorithm; -threshold option ignored");
WARN(lowercase_enum_name(Algorithm::TFNBS) + " is a threshold-free algorithm; -threshold option ignored");
} break;
case 2: {
case Algorithm::None: {
enhancer.reset(new MR::Connectome::Enhance::PassThrough());
if (!get_options("threshold").empty())
WARN("No enhancement algorithm being used; -threshold option ignored");
Expand Down
22 changes: 12 additions & 10 deletions cpp/cmd/dwi2fod.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -29,7 +29,8 @@
using namespace MR;
using namespace App;

const std::vector<std::string> algorithms = {"csd", "msmt_csd"};
// Spherical-deconvolution algorithms selectable as the first argument; the
// CLI choice strings are the lower-cased enumerator names ("csd", "msmt_csd").
enum class Algorithm { CSD, MSMT_CSD };
const std::vector<std::string> algorithms = lower_case_enums<Algorithm>();

// clang-format off
const OptionGroup CommonOptions = OptionGroup ("Options common to more than one algorithm")
Expand Down Expand Up @@ -269,9 +270,9 @@ void run() {
if (opt.size())
dwi_modelled = Image<float>::create(opt[0][0], header_out);

int algorithm = argument[0];
if (algorithm == 0) {

const Algorithm algorithm = enum_from_name<Algorithm>(argument[0]);
switch (algorithm) {
case Algorithm::CSD: {
if (argument.size() != 4)
throw Exception("CSD algorithm expects a single input response function and single output FOD image");

Expand All @@ -294,9 +295,9 @@ void run() {
CSD_Processor processor(shared, mask, dwi_modelled);
auto dwi = header_in.get_image<float>().with_direct_io(3);
ThreadedLoop("performing constrained spherical deconvolution", dwi, 0, 3).run(processor, dwi, fod);

} else if (algorithm == 1) {

break;
}
case Algorithm::MSMT_CSD: {
if (argument.size() % 2)
throw Exception(
"MSMT_CSD algorithm expects pairs of (input response function & output FOD image) to be provided");
Expand Down Expand Up @@ -337,8 +338,9 @@ void run() {
0,
3)
.run(processor, dwi);

} else {
assert(0);
break;
}
default:
throw Exception("Unsupported deconvolution algorithm");
}
}
19 changes: 13 additions & 6 deletions cpp/cmd/dwidenoise.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -23,8 +23,10 @@
using namespace MR;
using namespace App;

const std::vector<std::string> dtypes = {"float32", "float64"};
const std::vector<std::string> estimators = {"exp1", "exp2"};
// Processing precision (-datatype) and noise-level estimator (-estimator)
// choices; the CLI strings are the lower-cased enumerator names.
enum class DType { FLOAT32, FLOAT64 };
enum class Estimator { EXP1, EXP2 };
const std::vector<std::string> dtypes = lower_case_enums<DType>();
const std::vector<std::string> estimators = lower_case_enums<Estimator>();

// clang-format off
void usage() {
Expand Down Expand Up @@ -339,7 +341,10 @@ void run() {
}
INFO("selected patch size: " + str(extent[0]) + " x " + str(extent[1]) + " x " + str(extent[2]) + ".");

bool exp1 = get_option_value("estimator", 1) == 0; // default: Exp2 (unbiased estimator)
opt = get_options("estimator");
const Estimator estimator =
opt.empty() ? Estimator::EXP2 : enum_from_name<Estimator>(opt[0][0]); // default: Exp2 (unbiased estimator)
const bool exp1 = estimator == Estimator::EXP1;

if (std::min<uint32_t>(dwi.size(3), extent[0] * extent[1] * extent[2]) < 15) {
WARN("The number of volumes or the patch size is small. This may lead to discretisation effects "
Expand All @@ -365,15 +370,17 @@ void run() {
rank = Image<uint16_t>::create(opt[0][0], header);
}

int prec = get_option_value("datatype", 0); // default: single precision
opt = get_options("datatype");
const DType precision = opt.empty() ? DType::FLOAT32 : enum_from_name<DType>(opt[0][0]); // default: single precision
int prec = static_cast<int>(precision);
if (dwi.datatype().is_complex())
prec += 2; // support complex input data
switch (prec) {
case 0:
case static_cast<int>(DType::FLOAT32):
INFO("select real float32 for processing");
process_image<float>(dwi, mask, noise, rank, argument[1], extent, exp1);
break;
case 1:
case static_cast<int>(DType::FLOAT64):
INFO("select real float64 for processing");
process_image<double>(dwi, mask, noise, rank, argument[1], extent, exp1);
break;
Expand Down
9 changes: 5 additions & 4 deletions cpp/cmd/dwirecon.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -40,7 +40,8 @@ constexpr default_type condition_number_product_threshold = 1000.0;
// Other future operations that might be applicable here:
// - "leave-one-out": Predict each intensity based on all observations excluding that one
// - SHARD recon
const std::vector<std::string> operations = {"combine_pairs", "combine_predicted"};
// Reconstruction operations selectable on the command line; the CLI choice
// strings are the lower-cased enumerator names ("combine_pairs", "combine_predicted").
enum class Operation { COMBINE_PAIRS, COMBINE_PREDICTED };
const std::vector<std::string> operations = lower_case_enums<Operation>();
constexpr default_type default_combinepredicted_exponent = 1.0;

// clang-format off
Expand Down Expand Up @@ -928,14 +929,14 @@ void run() {
header_out.datatype().set_byte_order_native();
header_out.name() = std::string(argument[2]);

switch (static_cast<uint64_t>(argument[1])) {
switch (enum_from_name<Operation>(argument[1])) {

case 0:
case Operation::COMBINE_PAIRS:
Metadata::PhaseEncoding::clear_scheme(header_out.keyval());
run_combine_pairs(dwi_in, grad_in, pe_in, header_out);
break;

case 1:
case Operation::COMBINE_PREDICTED:
// TODO Diagnose errors with predictions
// Getting very wacky results in test data
// Consider adding command-line options to save:
Expand Down
Loading
Loading