
Commit d69ec05

Implement assertive algorithm detection with skip_missing_algs flag
- Add skip_missing_algs parameter (default false) to autotune_setup
- By default, error when expected algorithms are missing from compatible systems
- RFLUFactorization errors if missing (hard dependency)
- GPU algorithms error if hardware detected but packages not loaded
- Platform-specific warnings for Apple Accelerate on macOS
- Pass skip_missing_algs=true to get warnings instead of errors
- Update documentation with missing algorithm handling section
- More assertive approach ensures users get all compatible algorithms

This makes autotune more assertive about finding all available algorithms instead of silently skipping them, improving benchmark comprehensiveness.

🤖 Generated with [Claude Code](https://claude.ai/code)

Co-Authored-By: Claude <[email protected]>
1 parent 8dc9f84 commit d69ec05
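
For reviewer context, the user-facing call pattern this change is aiming at looks roughly like the following. This is a minimal sketch based on the docs change further down; the `using` line and environment setup are assumed rather than part of the diff:

```julia
using LinearSolve, LinearSolveAutotune

# Default: assertive detection. If an expected algorithm (e.g. RFLUFactorization)
# cannot be constructed on a compatible system, autotune_setup throws an error up front.
results = autotune_setup()

# Lenient mode: missing algorithms only emit warnings, and the benchmark
# proceeds with whatever algorithms were successfully detected.
results = autotune_setup(skip_missing_algs = true)
```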

File tree: 3 files changed, +70 -16 lines changed

**docs/src/tutorials/autotune.md** (18 additions, 0 deletions)

````diff
@@ -94,6 +94,24 @@ results = autotune_setup(set_preferences = false)
 results = autotune_setup(make_plot = false)
 ```
 
+### Missing Algorithm Handling
+
+By default, autotune is assertive about finding all expected algorithms:
+
+```julia
+# Default behavior: error if expected algorithms are missing
+results = autotune_setup() # Will error if RFLUFactorization missing
+
+# Allow missing algorithms (useful for incomplete setups)
+results = autotune_setup(skip_missing_algs = true) # Will warn instead of error
+```
+
+**When algorithms might be missing:**
+- RFLUFactorization should always be available (hard dependency)
+- GPU algorithms require CUDA.jl or Metal.jl to be loaded
+- Apple Accelerate should work on macOS systems
+- MKL algorithms require MKL.jl package
+
 ## Understanding Algorithm Compatibility
 
 The autotuner automatically detects which algorithms work with which element types:
````

**lib/LinearSolveAutotune/src/LinearSolveAutotune.jl** (10 additions, 4 deletions)

````diff
@@ -33,7 +33,8 @@ include("preferences.jl")
         set_preferences::Bool = true,
         samples::Int = 5,
         seconds::Float64 = 0.5,
-        eltypes = (Float32, Float64, ComplexF32, ComplexF64))
+        eltypes = (Float32, Float64, ComplexF32, ComplexF64),
+        skip_missing_algs::Bool = false)
 
 Run a comprehensive benchmark of all available LU factorization methods and optionally:
 
@@ -52,6 +53,7 @@ Run a comprehensive benchmark of all available LU factorization methods and opti
 - `samples::Int = 5`: Number of benchmark samples per algorithm/size
 - `seconds::Float64 = 0.5`: Maximum time per benchmark
 - `eltypes = (Float32, Float64, ComplexF32, ComplexF64)`: Element types to benchmark
+- `skip_missing_algs::Bool = false`: If false, error when expected algorithms are missing; if true, warn instead
 
 # Returns
 
@@ -75,6 +77,9 @@ results = autotune_setup(eltypes = (Float64, ComplexF64))
 
 # Test with BigFloat (note: most BLAS algorithms will be excluded)
 results = autotune_setup(eltypes = (BigFloat,), telemetry = false)
+
+# Allow missing algorithms (useful for incomplete setups)
+results = autotune_setup(skip_missing_algs = true)
 ```
 """
 function autotune_setup(;
@@ -84,7 +89,8 @@ function autotune_setup(;
         set_preferences::Bool = true,
         samples::Int = 5,
         seconds::Float64 = 0.5,
-        eltypes = (Float32, Float64, ComplexF32, ComplexF64))
+        eltypes = (Float32, Float64, ComplexF32, ComplexF64),
+        skip_missing_algs::Bool = false)
     @info "Starting LinearSolve.jl autotune setup..."
     @info "Configuration: large_matrices=$large_matrices, telemetry=$telemetry, make_plot=$make_plot, set_preferences=$set_preferences"
     @info "Element types to benchmark: $(join(eltypes, ", "))"
@@ -104,11 +110,11 @@
     @info "System detected: $(system_info["os"]) $(system_info["arch"]) with $(system_info["num_cores"]) cores"
 
     # Get available algorithms
-    cpu_algs, cpu_names = get_available_algorithms()
+    cpu_algs, cpu_names = get_available_algorithms(; skip_missing_algs = skip_missing_algs)
    @info "Found $(length(cpu_algs)) CPU algorithms: $(join(cpu_names, ", "))"
 
     # Add GPU algorithms if available
-    gpu_algs, gpu_names = get_gpu_algorithms()
+    gpu_algs, gpu_names = get_gpu_algorithms(; skip_missing_algs = skip_missing_algs)
     if !isempty(gpu_algs)
         @info "Found $(length(gpu_algs)) GPU algorithms: $(join(gpu_names, ", "))"
     end
````
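
The new keyword is threaded straight through to the two detection helpers. As a rough sketch of what that forwarding enables, assuming the helpers remain internal and unexported (the module-qualified calls below are illustrative, not a public API):

```julia
using LinearSolveAutotune

# Lenient detection: collect whatever CPU algorithms can be constructed,
# warning (rather than erroring) about anything that is missing.
cpu_algs, cpu_names = LinearSolveAutotune.get_available_algorithms(; skip_missing_algs = true)
@info "Detected CPU algorithms" cpu_names

# Strict detection (the default): the same call errors if an expected
# algorithm such as RFLUFactorization cannot be constructed.
cpu_algs, cpu_names = LinearSolveAutotune.get_available_algorithms(; skip_missing_algs = false)
```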

**lib/LinearSolveAutotune/src/algorithms.jl** (42 additions, 12 deletions)

````diff
@@ -1,11 +1,12 @@
 # Algorithm detection and creation functions
 
 """
-    get_available_algorithms()
+    get_available_algorithms(; skip_missing_algs::Bool = false)
 
 Returns a list of available LU factorization algorithms based on the system and loaded packages.
+If skip_missing_algs=false, errors when expected algorithms are missing; if true, warns instead.
 """
-function get_available_algorithms()
+function get_available_algorithms(; skip_missing_algs::Bool = false)
     algs = []
     alg_names = String[]
 
@@ -22,20 +23,38 @@ function get_available_algorithms()
         push!(alg_names, "MKLLUFactorization")
     end
 
-    # Apple Accelerate if available
+    # Apple Accelerate if available (should be available on macOS)
     if LinearSolve.appleaccelerate_isavailable()
         push!(algs, AppleAccelerateLUFactorization())
         push!(alg_names, "AppleAccelerateLUFactorization")
+    else
+        # Check if we're on macOS and Apple Accelerate should be available
+        if Sys.isapple() && !skip_missing_algs
+            msg = "macOS system detected but Apple Accelerate not available. This is unexpected."
+            @warn msg
+        end
     end
 
-    # RecursiveFactorization if loaded
+    # RecursiveFactorization - should always be available as it's a hard dependency
     try
         if LinearSolve.userecursivefactorization(nothing)
             push!(algs, RFLUFactorization())
             push!(alg_names, "RFLUFactorization")
+        else
+            msg = "RFLUFactorization should be available (RecursiveFactorization.jl is a hard dependency)"
+            if skip_missing_algs
+                @warn msg
+            else
+                error(msg * ". Pass `skip_missing_algs=true` to continue with warning instead.")
+            end
+        end
+    catch e
+        msg = "RFLUFactorization failed to load: $e"
+        if skip_missing_algs
+            @warn msg
+        else
+            error(msg * ". Pass `skip_missing_algs=true` to continue with warning instead.")
         end
-    catch
-        # RFLUFactorization not available
     end
 
     # SimpleLU always available
@@ -46,11 +65,12 @@
 end
 
 """
-    get_gpu_algorithms()
+    get_gpu_algorithms(; skip_missing_algs::Bool = false)
 
 Returns GPU-specific algorithms if GPU hardware and packages are available.
+If skip_missing_algs=false, errors when GPU hardware is detected but algorithms are missing; if true, warns instead.
 """
-function get_gpu_algorithms()
+function get_gpu_algorithms(; skip_missing_algs::Bool = false)
     gpu_algs = []
     gpu_names = String[]
 
@@ -59,8 +79,13 @@ function get_gpu_algorithms()
         try
             push!(gpu_algs, CudaOffloadFactorization())
             push!(gpu_names, "CudaOffloadFactorization")
-        catch
-            # CUDA extension not loaded
+        catch e
+            msg = "CUDA hardware detected but CudaOffloadFactorization not available: $e. Load CUDA.jl package."
+            if skip_missing_algs
+                @warn msg
+            else
+                error(msg * " Pass `skip_missing_algs=true` to continue with warning instead.")
+            end
         end
     end
 
@@ -69,8 +94,13 @@ function get_gpu_algorithms()
         try
             push!(gpu_algs, MetalLUFactorization())
             push!(gpu_names, "MetalLUFactorization")
-        catch
-            # Metal extension not loaded
+        catch e
+            msg = "Metal hardware detected but MetalLUFactorization not available: $e. Load Metal.jl package."
+            if skip_missing_algs
+                @warn msg
+            else
+                error(msg * " Pass `skip_missing_algs=true` to continue with warning instead.")
+            end
         end
     end
 
````
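
The warn-or-error branch now appears four times in algorithms.jl. One possible follow-up, sketched here with a hypothetical helper name that is not part of this commit, would be to centralize that decision:

```julia
# Hypothetical helper (not in this commit): one place for the repeated
# "warn if skip_missing_algs, otherwise error" logic.
function _warn_or_error(msg::AbstractString, skip_missing_algs::Bool)
    if skip_missing_algs
        @warn msg
    else
        error(msg * " Pass `skip_missing_algs=true` to continue with warning instead.")
    end
end

# Example call site, mirroring the CUDA branch above:
# _warn_or_error("CUDA hardware detected but CudaOffloadFactorization not available. Load CUDA.jl package.", skip_missing_algs)
```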
