Skip to content

Commit 1b5c373

Browse files
committed
implemented tests for newly added tools
1 parent 759d542 commit 1b5c373

26 files changed

+1352
-4
lines changed

src/styles/workflowMenuItem.css

Lines changed: 8 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -13,6 +13,7 @@
1313
box-shadow 0.2s ease, border 0.2s ease;
1414
box-shadow: 0 2px 4px rgba(0, 0, 0, 0.1);
1515
border: 1px solid rgba(255, 255, 255, 0.1);
16+
overflow: hidden;
1617
}
1718

1819
/* Dynamic font sizing for long tool names */
@@ -24,6 +25,13 @@
2425
font-size: clamp(0.5rem, 0.75vw, 0.65rem);
2526
}
2627

28+
.workflow-menu-item .tool-name {
29+
display: block;
30+
overflow: hidden;
31+
text-overflow: ellipsis;
32+
white-space: nowrap;
33+
}
34+
2735
.workflow-menu-item:hover {
2836
background-color: #505050;
2937
transform: scale(1.05);

utils/amico_tests/.gitignore

Lines changed: 6 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,6 @@
1+
jobs/
2+
out/
3+
logs/
4+
data/
5+
derived/
6+
summary.tsv

utils/amico_tests/README.md

Lines changed: 68 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,68 @@
1+
# AMICO CWL Test Suite
2+
3+
CWL test script for AMICO NODDI (Neurite Orientation Dispersion and Density Imaging).
4+
5+
## Prerequisites
6+
7+
| Dependency | Install |
8+
|-----------|---------|
9+
| `cwltool` | `pip install cwltool` |
10+
| `python3` + `nibabel` + `numpy` | `pip install nibabel numpy` |
11+
| `docker` | [docs.docker.com](https://docs.docker.com/get-docker/) |
12+
13+
Docker images are pulled automatically on first run:
14+
- `cookpa/amico-noddi:latest`
15+
16+
## Test Data
17+
18+
Synthetic multi-shell DWI data is generated automatically on first run:
19+
- **DWI**: 16x16x8 volume, 35 directions (5x b=0, 15x b=1000, 15x b=2000)
20+
- **bvals/bvecs**: Matching b-value and b-vector files
21+
- **Mask**: All-ones brain mask
22+
23+
No manual downloads are required. Data generation uses nibabel and numpy.
24+
25+
## Running Tests
26+
27+
```bash
28+
bash utils/amico_tests/test_amico_noddi.sh
29+
```
30+
31+
## What the Test Does
32+
33+
1. Sources `_common.sh` (shared functions, data prep)
34+
2. Generates synthetic multi-shell DWI data (if not present)
35+
3. Generates a YAML template via `cwltool --make-template`
36+
4. Writes a job YAML with concrete parameter values
37+
5. Validates the CWL file (`cwltool --validate`)
38+
6. Runs AMICO NODDI via Docker (`cwltool --outdir`)
39+
7. Verifies expected outputs: `FIT_ICVF.nii.gz`, `FIT_OD.nii.gz`, `FIT_ISOVF.nii.gz`
40+
41+
## Output Structure
42+
43+
All runtime artifacts are gitignored:
44+
45+
```
46+
utils/amico_tests/
47+
├── jobs/ # Generated YAML files (templates + job inputs)
48+
├── out/amico_noddi/   # Tool outputs + outputs.json
49+
├── logs/ # cwltool stderr
50+
├── derived/ # Synthetic DWI data (dwi.nii.gz, bvals, bvecs, mask)
51+
└── summary.tsv # PASS/FAIL results
52+
```
53+
54+
## Expected Outputs
55+
56+
| File | Description |
57+
|------|-------------|
58+
| `FIT_ICVF.nii.gz` | Neurite Density Index (intracellular volume fraction) |
59+
| `FIT_OD.nii.gz` | Orientation Dispersion Index |
60+
| `FIT_ISOVF.nii.gz` | Isotropic Volume Fraction (CSF compartment) |
61+
62+
## Environment Variables
63+
64+
| Variable | Default | Description |
65+
|----------|---------|-------------|
66+
| `AMICO_DOCKER_IMAGE` | `cookpa/amico-noddi:latest` | AMICO Docker image |
67+
| `DOCKER_PLATFORM` | *(empty)* | Docker platform override |
68+
| `CWLTOOL_BIN` | `cwltool` | Path to cwltool binary |

utils/amico_tests/_common.sh

Lines changed: 105 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,105 @@
1+
#!/usr/bin/env bash
# Shared infrastructure for AMICO CWL test scripts.
# Source this file at the top of every test_*.sh script.

# Chain to the structural MRI common infrastructure.
# BUGFIX: the original expansion ${BASH_SOURCE[1]:-${BASH_SOURCE[0]}"} had a
# misplaced quote before the closing brace (a bash syntax error), and indexed
# BASH_SOURCE[1] (the sourcing script) when locating a directory relative to
# THIS file. BASH_SOURCE[0] always names the file currently being sourced,
# which is what we need to find the sibling structural_mri_tests directory.
source "$(cd "$(dirname "${BASH_SOURCE[0]}")/../structural_mri_tests" && pwd)/_common.sh"

# Docker image used for all AMICO runs; override via AMICO_DOCKER_IMAGE.
AMICO_IMAGE="${AMICO_DOCKER_IMAGE:-cookpa/amico-noddi:latest}"

#######################################
# Run a command inside the AMICO Docker image.
# Globals:   AMICO_IMAGE (read)
# Arguments: command and args, forwarded to _docker_run
#            (defined in structural_mri_tests/_common.sh)
#######################################
docker_amico() {
  _docker_run "$AMICO_IMAGE" "$@"
}
14+
15+
# ── Synthetic multi-shell DWI data generation ──────────────────

#######################################
# Generate (or reuse) synthetic multi-shell DWI test data for AMICO.
# Globals:
#   DERIVED_DIR (read)  - directory that receives the generated files
#   AMICO_DWI, AMICO_BVALS, AMICO_BVECS, AMICO_MASK (written)
# Outputs:  progress messages to stdout, errors to stderr
# Returns:  0 on success; non-zero if generation fails
#######################################
prepare_amico_data() {
  local amico_data="${DERIVED_DIR}"
  local dwi="${amico_data}/dwi.nii.gz"
  local bvals="${amico_data}/dwi.bval"
  local bvecs="${amico_data}/dwi.bvec"
  local mask="${amico_data}/mask.nii.gz"

  # Reuse previously generated data when the full set is present.
  if [[ -f "$dwi" && -f "$bvals" && -f "$bvecs" && -f "$mask" ]]; then
    AMICO_DWI="$dwi"
    AMICO_BVALS="$bvals"
    AMICO_BVECS="$bvecs"
    AMICO_MASK="$mask"
    return 0
  fi

  echo "Generating synthetic multi-shell DWI data for AMICO..."

  # BUGFIX: numpy was imported outside the try/ImportError guard, so a missing
  # numpy crashed with a raw traceback instead of the intended error message.
  # Also seed the RNG so the generated fixture (and test results) are
  # reproducible across runs, and propagate a python failure to the caller.
  python3 - "$amico_data" <<'PY' || return 1
import os
import sys

try:
    import nibabel as nib
    import numpy as np
except ImportError as exc:
    print(f"ERROR: numpy and nibabel are required for AMICO test data generation ({exc})",
          file=sys.stderr)
    sys.exit(1)

outdir = sys.argv[1]
os.makedirs(outdir, exist_ok=True)

# Fixed seed: deterministic test data.
rng = np.random.default_rng(42)

# Volume dimensions: 16x16x8, 35 directions
# b-values: 5x b=0, 15x b=1000, 15x b=2000
nx, ny, nz = 16, 16, 8
nb0, nb1, nb2 = 5, 15, 15
nvols = nb0 + nb1 + nb2

bvals = [0]*nb0 + [1000]*nb1 + [2000]*nb2

# b-vectors: zero columns for b=0 volumes, random unit vectors otherwise.
bvecs = np.zeros((3, nvols))
for i in range(nb0, nvols):
    v = rng.standard_normal(3)
    v /= np.linalg.norm(v)
    bvecs[:, i] = v

# Mono-exponential decay S = S0 * exp(-b * D), plus Gaussian noise and
# clipping so no voxel goes negative.
D = 0.001  # diffusion coefficient
S0 = 1000.0
data = np.zeros((nx, ny, nz, nvols), dtype=np.float32)
for v in range(nvols):
    signal = S0 * np.exp(-bvals[v] * D)
    base = rng.normal(signal, signal * 0.05, (nx, ny, nz)).astype(np.float32)
    data[:, :, :, v] = np.clip(base, 0, None)

# All-ones brain mask.
mask = np.ones((nx, ny, nz), dtype=np.uint8)

# 2 mm isotropic voxel affine.
affine = np.eye(4) * 2.0
affine[3, 3] = 1.0

nib.save(nib.Nifti1Image(data, affine), os.path.join(outdir, "dwi.nii.gz"))
nib.save(nib.Nifti1Image(mask, affine), os.path.join(outdir, "mask.nii.gz"))

# FSL-style bvals: a single space-separated line.
with open(os.path.join(outdir, "dwi.bval"), "w") as f:
    f.write(" ".join(str(b) for b in bvals) + "\n")

# FSL-style bvecs: three rows (x, y, z components).
with open(os.path.join(outdir, "dwi.bvec"), "w") as f:
    for row in range(3):
        f.write(" ".join(f"{bvecs[row, i]:.6f}" for i in range(nvols)) + "\n")

print(f"  Created DWI: {nx}x{ny}x{nz}x{nvols}")
print(f"  b-values: {nb0}x b=0, {nb1}x b=1000, {nb2}x b=2000")
PY

  # Verify generation actually produced the full file set before exporting.
  if [[ ! -f "$dwi" || ! -f "$bvals" || ! -f "$bvecs" || ! -f "$mask" ]]; then
    echo "ERROR: AMICO test data generation did not produce all files" >&2
    return 1
  fi

  AMICO_DWI="$dwi"
  AMICO_BVALS="$bvals"
  AMICO_BVECS="$bvecs"
  AMICO_MASK="$mask"
}
Lines changed: 55 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,55 @@
1+
#!/usr/bin/env bash
# Test: AMICO NODDI (Neurite Orientation Dispersion and Density Imaging)
set -euo pipefail
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
source "${SCRIPT_DIR}/_common.sh"

TOOL="amico_noddi"
LIB="amico"
CWL="${CWL_DIR}/${LIB}/${TOOL}.cwl"

setup_dirs
prepare_amico_data

make_template "$CWL" "$TOOL"

# Job inputs: synthetic DWI fixture prepared by prepare_amico_data.
cat > "${JOB_DIR}/${TOOL}.yml" <<EOF
dwi:
  class: File
  path: ${AMICO_DWI}
bvals:
  class: File
  path: ${AMICO_BVALS}
bvecs:
  class: File
  path: ${AMICO_BVECS}
mask:
  class: File
  path: ${AMICO_MASK}
EOF

run_tool "$TOOL" "${JOB_DIR}/${TOOL}.yml" "$CWL"

# ── Verify outputs ─────────────────────────────────────────────
dir="${OUT_DIR}/${TOOL}"
expected_outputs=("FIT_ICVF.nii.gz" "FIT_OD.nii.gz" "FIT_ISOVF.nii.gz")
found_count=0

for expected in "${expected_outputs[@]}"; do
  # AMICO may nest results under AMICO/NODDI/ or flatten them to the top level.
  found_file=""
  for candidate in "${dir}/AMICO/NODDI/${expected}" "${dir}/${expected}"; do
    if [[ -f "$candidate" ]]; then
      found_file="$candidate"
      break
    fi
  done

  if [[ -n "$found_file" ]]; then
    if [[ ! -s "$found_file" ]]; then
      echo "  FAIL: zero-byte output: $found_file"; exit 1
    fi
    echo "  OK: ${expected} ($(wc -c < "$found_file") bytes)"
    found_count=$((found_count + 1))
  else
    # Individual misses stay warnings to tolerate layout variation...
    echo "  WARN: ${expected} not found"
  fi
done

# BUGFIX: ...but a run that produced NO expected outputs at all previously
# still exited 0, letting a completely failed tool invocation "pass".
if (( found_count == 0 )); then
  echo "  FAIL: no expected NODDI outputs were produced in ${dir}" >&2
  exit 1
fi

utils/dmri_tests/README.md

Lines changed: 3 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
# dMRI CWL Tool Tests
22

3-
Validation tests for all 18 diffusion MRI CWL tool definitions across FSL, MRtrix3, and FreeSurfer.
3+
Validation tests for all 19 diffusion MRI CWL tool definitions across FSL, MRtrix3, and FreeSurfer.
44

55
## Prerequisites
66

@@ -62,6 +62,7 @@ bash test_tbss_1_preproc.sh # standalone
6262
bash test_tbss_2_reg.sh # depends on tbss_1
6363
bash test_tbss_3_postreg.sh # depends on tbss_2
6464
bash test_tbss_4_prestats.sh # depends on tbss_3
65+
bash test_tbss_non_FA.sh # depends on tbss_4 (projects non-FA data onto skeleton)
6566
```
6667

6768
### Run FSL diffusion pipeline in order
@@ -84,7 +85,7 @@ FSL Diffusion:
8485
bedpostx ──────> probtrackx2
8586
8687
FSL TBSS:
87-
tbss_1_preproc > tbss_2_reg > tbss_3_postreg > tbss_4_prestats
88+
tbss_1_preproc > tbss_2_reg > tbss_3_postreg > tbss_4_prestats > tbss_non_FA
8889
8990
MRtrix3:
9091
dwidenoise ────┐
Lines changed: 112 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,112 @@
1+
#!/usr/bin/env bash
set -euo pipefail

# Test: FSL tbss_non_FA - TBSS Step 5: Project non-FA data onto FA skeleton
# CWL: public/cwl/fsl/tbss_non_FA.cwl
# DEPENDS: tbss_4_prestats output (FA/ and stats/ directories)

source "$(dirname "${BASH_SOURCE[0]}")/common.sh"

check_prerequisites
check_test_data

TOOL_NAME="tbss_non_FA"
CWL_FILE="$CWL_DIR/fsl/tbss_non_FA.cwl"
OUTPUT_DIR="$(setup_output_dir "$TOOL_NAME")"
RESULTS_FILE="$OUTPUT_DIR/results.txt"

echo "=== Testing $TOOL_NAME ===" | tee "$RESULTS_FILE"
echo "Date: $(date)" | tee -a "$RESULTS_FILE"

#######################################
# Point FA_INPUT / STATS_INPUT at the TBSS step-4 output, preferring the
# intermediate cache and falling back to the direct tool output directory.
# (Replaces two verbatim-duplicated fallback stanzas in the original.)
# Globals: INTERMEDIATE_DIR, OUTPUT_BASE (read); FA_INPUT, STATS_INPUT (written)
#######################################
resolve_step4_inputs() {
  FA_INPUT="$INTERMEDIATE_DIR/tbss_FA_step4"
  STATS_INPUT="$INTERMEDIATE_DIR/tbss_stats_step4"
  [[ -d "$FA_INPUT" ]] || FA_INPUT="$OUTPUT_BASE/tbss_4_prestats/FA"
  [[ -d "$STATS_INPUT" ]] || STATS_INPUT="$OUTPUT_BASE/tbss_4_prestats/stats"
}

# Ensure tbss_4 output exists (FA and stats directories with skeleton);
# run the upstream step once if neither location has it yet.
resolve_step4_inputs
if [[ ! -d "$FA_INPUT" || ! -d "$STATS_INPUT" ]]; then
  echo "Running tbss_4_prestats first..." | tee -a "$RESULTS_FILE"
  bash "$SCRIPT_DIR/test_tbss_4_prestats.sh"
  resolve_step4_inputs
fi

if [[ ! -d "$FA_INPUT" || ! -d "$STATS_INPUT" ]]; then
  echo -e "${RED}FAIL: Cannot find TBSS step 4 output (FA/ and stats/)${NC}" | tee -a "$RESULTS_FILE"
  exit 1
fi

# Create synthetic MD data: tbss_non_FA expects all_<measure>.nii.gz in the
# stats directory, so reuse all_FA as a stand-in for all_MD.
SYNTH_STATS="$INTERMEDIATE_DIR/tbss_stats_nonFA"
if [[ ! -d "$SYNTH_STATS" ]]; then
  echo "Creating synthetic MD data for tbss_non_FA..." | tee -a "$RESULTS_FILE"
  cp -r "$STATS_INPUT" "$SYNTH_STATS"
  if [[ -f "$SYNTH_STATS/all_FA.nii.gz" ]]; then
    cp "$SYNTH_STATS/all_FA.nii.gz" "$SYNTH_STATS/all_MD.nii.gz"
  fi
fi

# Step 1: Validate CWL
validate_cwl "$CWL_FILE" "$RESULTS_FILE" || exit 1

# Step 2: Generate template
echo "--- Generating template ---" | tee -a "$RESULTS_FILE"
cwltool --make-template "$CWL_FILE" > "$OUTPUT_DIR/template.yml" 2>/dev/null
echo "Template saved to $OUTPUT_DIR/template.yml" | tee -a "$RESULTS_FILE"

# Step 3: Create job YAML
cat > "$OUTPUT_DIR/job.yml" << EOF
measure: MD
fa_directory:
  class: Directory
  path: $FA_INPUT
stats_directory:
  class: Directory
  path: $SYNTH_STATS
EOF

# Step 4: Run tool
echo "--- Running $TOOL_NAME ---" | tee -a "$RESULTS_FILE"
PASS=true
if cwltool --outdir "$OUTPUT_DIR" "$CWL_FILE" "$OUTPUT_DIR/job.yml" >> "$RESULTS_FILE" 2>&1; then
  echo -e "${GREEN}PASS: $TOOL_NAME execution${NC}" | tee -a "$RESULTS_FILE"
else
  echo -e "${RED}FAIL: $TOOL_NAME execution${NC}" | tee -a "$RESULTS_FILE"
  PASS=false
fi

# Step 5: Check outputs (skeletonised file may be under stats/ or top-level)
echo "--- Output validation ---" | tee -a "$RESULTS_FILE"
SKEL_FILE="$OUTPUT_DIR/stats/all_MD_skeletonised.nii.gz"
if [[ ! -f "$SKEL_FILE" ]]; then
  SKEL_FILE="$OUTPUT_DIR/all_MD_skeletonised.nii.gz"
fi
check_file_exists "$SKEL_FILE" "all_MD_skeletonised" "$RESULTS_FILE" || PASS=false
check_file_nonempty "$SKEL_FILE" "all_MD_skeletonised" "$RESULTS_FILE" || PASS=false

# Step 6: Header checks
echo "--- Header checks ---" | tee -a "$RESULTS_FILE"
if [[ -f "$SKEL_FILE" ]]; then
  check_nifti_header "$SKEL_FILE" "all_MD_skeletonised" "$RESULTS_FILE" || PASS=false
fi

# Summary
echo "" | tee -a "$RESULTS_FILE"
if $PASS; then
  echo -e "${GREEN}=== $TOOL_NAME: ALL TESTS PASSED ===${NC}" | tee -a "$RESULTS_FILE"
else
  echo -e "${RED}=== $TOOL_NAME: SOME TESTS FAILED ===${NC}" | tee -a "$RESULTS_FILE"
  # BUGFIX: the original fell through here and exited 0 even on failure,
  # so CI would report success for a failed test.
  exit 1
fi

0 commit comments

Comments
 (0)