
Commit 57bcc04

Fix NLopt crash with gradient-based algorithms when no AD backend specified
This fixes the issue reported in https://discourse.julialang.org/t/error-when-using-multistart-optimization/133174.

## Problem

When using NLopt's gradient-based algorithms (such as LD_LBFGS) without specifying an AD backend in `OptimizationFunction`, the code would crash with:

`MethodError: objects of type Nothing are not callable`

This occurred because the NLopt wrapper tried to call `cache.f.grad(G, θ)` at line 181, but `cache.f.grad` was `nothing` when no AD backend was specified.

## Solution

Added a check in the `__solve` method to verify that, if the algorithm requires gradients, `cache.f.grad` is not `nothing`. If it is `nothing`, we now throw a helpful `IncompatibleOptimizerError` that guides users to:

1. Use `OptimizationFunction` with an AD backend (e.g., `AutoForwardDiff()`), or
2. Provide gradients manually via the `grad` kwarg.

## Changes

1. **OptimizationNLopt.jl**: Added a gradient-availability check before attempting to use gradients, providing a clear error message for users.
2. **runtests.jl**: Added comprehensive tests to verify that:
   - an error is thrown when gradient-based algorithms are used without AD,
   - an error is thrown with both `NLopt.LD_LBFGS()` and `NLopt.Opt(:LD_LBFGS, 2)`,
   - gradient-free algorithms still work without an AD backend,
   - gradient-based algorithms work correctly when AD is provided.
3. **multistartoptimization.md**: Fixed the documentation example to include an AD backend.

## Test Results

All tests pass, including the new test that reproduces the discourse issue.

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude <[email protected]>
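For orientation, here is a minimal sketch of the user-facing behavior this commit changes, assembled from the docs example and the new tests (it assumes Optimization, OptimizationNLopt, and ForwardDiff are installed; it is not part of the commit itself):

```julia
using Optimization, OptimizationNLopt

rosenbrock(x, p) = (p[1] - x[1])^2 + p[2] * (x[2] - x[1]^2)^2
x0 = zeros(2)
p = [1.0, 100.0]

# No AD backend: previously crashed with `MethodError: objects of type Nothing
# are not callable`; after this commit it throws an `IncompatibleOptimizerError`.
f_no_ad = OptimizationFunction(rosenbrock)
prob_no_ad = OptimizationProblem(f_no_ad, x0, p, lb = [-1.0, -1.0], ub = [1.0, 1.0])
# solve(prob_no_ad, NLopt.LD_LBFGS())  # -> OptimizationBase.IncompatibleOptimizerError

# With an AD backend, the gradient-based algorithm works as expected.
f_ad = OptimizationFunction(rosenbrock, Optimization.AutoForwardDiff())
prob_ad = OptimizationProblem(f_ad, x0, p, lb = [-1.0, -1.0], ub = [1.0, 1.0])
sol = solve(prob_ad, NLopt.LD_LBFGS())
```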
1 parent 6fe78dc commit 57bcc04

3 files changed (+45, -3 lines)

docs/src/optimization_packages/multistartoptimization.md

Lines changed: 1 addition & 1 deletion

@@ -36,7 +36,7 @@ using Optimization, OptimizationMultistartOptimization, OptimizationNLopt
 rosenbrock(x, p) = (p[1] - x[1])^2 + p[2] * (x[2] - x[1]^2)^2
 x0 = zeros(2)
 p = [1.0, 100.0]
-f = OptimizationFunction(rosenbrock)
+f = OptimizationFunction(rosenbrock, Optimization.AutoForwardDiff())
 prob = Optimization.OptimizationProblem(f, x0, p, lb = [-1.0, -1.0], ub = [1.0, 1.0])
 sol = solve(prob, MultistartOptimization.TikTak(100), NLopt.LD_LBFGS())
 ```

lib/OptimizationNLopt/src/OptimizationNLopt.jl

Lines changed: 13 additions & 2 deletions

@@ -12,7 +12,8 @@ SciMLBase.allowsbounds(opt::Union{NLopt.Algorithm, NLopt.Opt}) = true
     SciMLBase.supports_opt_cache_interface(opt::Union{NLopt.Algorithm, NLopt.Opt}) = true
 end
 @static if isdefined(OptimizationBase, :supports_opt_cache_interface)
-    OptimizationBase.supports_opt_cache_interface(opt::Union{NLopt.Algorithm, NLopt.Opt}) = true
+    OptimizationBase.supports_opt_cache_interface(opt::Union{
+        NLopt.Algorithm, NLopt.Opt}) = true
 end
 
 function SciMLBase.requiresgradient(opt::Union{NLopt.Algorithm, NLopt.Opt})

@@ -70,7 +71,8 @@ function __map_optimizer_args!(cache::OptimizationBase.OptimizationCache, opt::N
         kwargs...)
 
     # Check if AUGLAG algorithm requires local_method
-    if opt.algorithm ∈ (NLopt.LN_AUGLAG, NLopt.LD_AUGLAG, NLopt.AUGLAG) && local_method === nothing
+    if opt.algorithm ∈ (NLopt.LN_AUGLAG, NLopt.LD_AUGLAG, NLopt.AUGLAG) &&
+       local_method === nothing
         error("NLopt.$(opt.algorithm) requires a local optimization method. " *
               "Please specify a local_method, e.g., solve(prob, NLopt.$(opt.algorithm)(); " *
               "local_method = NLopt.LN_NELDERMEAD())")

@@ -167,6 +169,15 @@ function SciMLBase.__solve(cache::OptimizationBase.OptimizationCache{
 }
     local x
 
+    # Check if algorithm requires gradients but none are provided
+    opt = cache.opt isa NLopt.Opt ? cache.opt.algorithm : cache.opt
+    if SciMLBase.requiresgradient(opt) && isnothing(cache.f.grad)
+        throw(OptimizationBase.IncompatibleOptimizerError(
+            "The NLopt algorithm $(opt) requires gradients, but no gradient function is available. " *
+            "Please use `OptimizationFunction` with an automatic differentiation backend, " *
+            "e.g., `OptimizationFunction(f, AutoForwardDiff())`, or provide gradients manually via the `grad` kwarg."))
+    end
+
     _loss = function (θ)
         x = cache.f(θ, cache.p)
         opt_state = OptimizationBase.OptimizationState(u = θ, p = cache.p, objective = x[1])
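
The new error message also points to supplying gradients manually via the `grad` kwarg. Below is a minimal sketch of that path, assuming the in-place `grad(G, u, p)` convention of SciMLBase's `OptimizationFunction`; the analytic gradient is written here for illustration and is not part of this commit:

```julia
using Optimization, OptimizationNLopt

rosenbrock(x, p) = (p[1] - x[1])^2 + p[2] * (x[2] - x[1]^2)^2

# Hand-written gradient of the Rosenbrock function above, filled in-place.
# Sketch only: assumes the standard SciMLBase `grad(G, u, p)` signature.
function rosenbrock_grad!(G, x, p)
    G[1] = -2 * (p[1] - x[1]) - 4 * p[2] * x[1] * (x[2] - x[1]^2)
    G[2] = 2 * p[2] * (x[2] - x[1]^2)
    return G
end

# No AD backend is needed when a gradient function is supplied directly.
f = OptimizationFunction(rosenbrock; grad = rosenbrock_grad!)
prob = OptimizationProblem(f, zeros(2), [1.0, 100.0], lb = [-1.0, -1.0], ub = [1.0, 1.0])
sol = solve(prob, NLopt.LD_LBFGS())
```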

lib/OptimizationNLopt/test/runtests.jl

Lines changed: 31 additions & 0 deletions

@@ -174,4 +174,35 @@ using Test, Random
         @test sol.retcode == ReturnCode.MaxIters
         @test sol.objective < l1
     end
+
+    @testset "gradient-based algorithm without AD backend" begin
+        # Test that gradient-based algorithms throw a helpful error when no AD backend is specified
+        # This reproduces the issue from https://discourse.julialang.org/t/error-when-using-multistart-optimization/133174
+        rosenbrock_test(x, p) = (p[1] - x[1])^2 + p[2] * (x[2] - x[1]^2)^2
+        x0_test = zeros(2)
+        p_test = [1.0, 100.0]
+
+        # Create OptimizationFunction WITHOUT specifying an AD backend
+        f_no_ad = OptimizationFunction(rosenbrock_test)
+        prob_no_ad = OptimizationProblem(
+            f_no_ad, x0_test, p_test, lb = [-1.0, -1.0], ub = [1.5, 1.5])
+
+        # Test with LD_LBFGS (gradient-based algorithm) - should throw IncompatibleOptimizerError
+        @test_throws OptimizationBase.IncompatibleOptimizerError solve(prob_no_ad, NLopt.LD_LBFGS())
+
+        # Test with NLopt.Opt interface - should also throw IncompatibleOptimizerError
+        @test_throws OptimizationBase.IncompatibleOptimizerError solve(prob_no_ad, NLopt.Opt(:LD_LBFGS, 2))
+
+        # Test that gradient-free algorithms still work without AD backend
+        sol = solve(prob_no_ad, NLopt.LN_NELDERMEAD())
+        @test sol.retcode == ReturnCode.Success
+
+        # Test that with AD backend, gradient-based algorithms work correctly
+        f_with_ad = OptimizationFunction(rosenbrock_test, OptimizationBase.AutoZygote())
+        prob_with_ad = OptimizationProblem(
+            f_with_ad, x0_test, p_test, lb = [-1.0, -1.0], ub = [1.5, 1.5])
+        sol = solve(prob_with_ad, NLopt.LD_LBFGS())
+        @test sol.retcode == ReturnCode.Success
+        @test sol.objective < 1.0
+    end
 end
