diff --git a/copier-answers.jso.yml b/.copier-answers.jso.yml
similarity index 100%
rename from copier-answers.jso.yml
rename to .copier-answers.jso.yml
diff --git a/src/trust-region/tron-trust-region.jl b/src/trust-region/tron-trust-region.jl
index b7a635a..49b236c 100644
--- a/src/trust-region/tron-trust-region.jl
+++ b/src/trust-region/tron-trust-region.jl
@@ -92,9 +92,10 @@ function aredpred!(
   Δm::T,
   x_trial::V,
   step::V,
-  slope::T,
+  slope::T;
+  kwargs...,
 ) where {T, V}
-  ared, pred, tr.good_grad = aredpred_common(nlp, f, f_trial, Δm, x_trial, step, tr.gt, slope)
+  ared, pred, tr.good_grad = aredpred_common(nlp, f, f_trial, Δm, x_trial, step, tr.gt, slope; kwargs...)
   γ = f_trial - f - slope
   tr.quad_min = γ <= 0 ? tr.increase_factor : max(tr.large_decrease_factor, -slope / γ / 2)
   return ared, pred
@@ -108,10 +109,11 @@ function aredpred!(
   Δm::T,
   x_trial::V,
   step::V,
-  slope::T,
+  slope::T;
+  kwargs...
 ) where {T, V}
   Fx = similar(x_trial, nls.nls_meta.nequ)
-  return aredpred!(tr, nls, Fx, f, f_trial, Δm, x_trial, step, slope)
+  return aredpred!(tr, nls, Fx, f, f_trial, Δm, x_trial, step, slope; kwargs...)
 end
 
 function aredpred!(
@@ -123,9 +125,10 @@ function aredpred!(
   Δm::T,
   x_trial::V,
   step::V,
-  slope::T,
+  slope::T;
+  kwargs...
 ) where {T, V}
-  ared, pred, tr.good_grad = aredpred_common(nls, Fx, f, f_trial, Δm, x_trial, step, tr.gt, slope)
+  ared, pred, tr.good_grad = aredpred_common(nls, Fx, f, f_trial, Δm, x_trial, step, tr.gt, slope; kwargs...)
   γ = f_trial - f - slope
   tr.quad_min = γ <= 0 ? tr.increase_factor : max(tr.large_decrease_factor, -slope / γ / 2)
   return ared, pred
diff --git a/src/trust-region/trust-region.jl b/src/trust-region/trust-region.jl
index 94ee970..e33b95e 100644
--- a/src/trust-region/trust-region.jl
+++ b/src/trust-region/trust-region.jl
@@ -44,7 +44,8 @@ function aredpred_common(
   x_trial::V,
   step::V,
   g_trial::V,
-  slope::T,
+  slope::T;
+  use_only_objgrad::Bool = false,
 ) where {T, V}
   absf = abs(f)
   ϵ = eps(T)
@@ -54,7 +55,11 @@ function aredpred_common(
   ared = f_trial - f + max(one(T), absf) * 10 * ϵ
   if (abs(Δm) < 10_000 * ϵ) || (abs(ared) < 10_000 * ϵ * absf)
     # correct for roundoff error
-    grad!(nlp, x_trial, g_trial)
+    if use_only_objgrad
+      objgrad!(nlp, x_trial, g_trial)
+    else
+      grad!(nlp, x_trial, g_trial)
+    end
     good_grad = true
     slope_trial = dot(g_trial, step)
     ared = (slope_trial + slope) / 2
@@ -71,7 +76,8 @@ function aredpred_common(
   x_trial::V,
   step::V,
   g_trial::V,
-  slope::T,
+  slope::T;
+  kwargs...,
 ) where {T, V}
   absf = abs(f)
   ϵ = eps(T)
@@ -110,9 +116,10 @@ function aredpred!(
   Δm::T,
   x_trial::V,
   step::V,
-  slope::T,
+  slope::T;
+  kwargs...
 ) where {T, V}
-  ared, pred, tr.good_grad = aredpred_common(nlp, f, f_trial, Δm, x_trial, step, tr.gt, slope)
+  ared, pred, tr.good_grad = aredpred_common(nlp, f, f_trial, Δm, x_trial, step, tr.gt, slope; kwargs...)
   return ared, pred
 end
 
@@ -124,10 +131,11 @@ function aredpred!(
   Δm::T,
   x_trial::V,
   step::V,
-  slope::T,
+  slope::T;
+  kwargs...
 ) where {T, V}
   Fx = similar(x_trial, nls.nls_meta.nequ)
-  ared, pred, tr.good_grad = aredpred_common(nls, Fx, f, f_trial, Δm, x_trial, step, tr.gt, slope)
+  ared, pred, tr.good_grad = aredpred_common(nls, Fx, f, f_trial, Δm, x_trial, step, tr.gt, slope; kwargs...)
   return ared, pred
 end
 
@@ -140,9 +148,10 @@ function aredpred!(
   Δm::T,
   x_trial::V,
   step::V,
-  slope::T,
+  slope::T;
+  kwargs...
 ) where {T, V}
-  ared, pred, tr.good_grad = aredpred_common(nls, Fx, f, f_trial, Δm, x_trial, step, tr.gt, slope)
+  ared, pred, tr.good_grad = aredpred_common(nls, Fx, f, f_trial, Δm, x_trial, step, tr.gt, slope; kwargs...)
   return ared, pred
 end
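
Usage note (not part of the patch): the new use_only_objgrad keyword is only consumed by aredpred_common, and the kwargs... added to every aredpred! method simply forwards it. A minimal sketch of a call site, assuming the leading positional arguments are the trust-region state and the model (their exact order is not visible in the hunks above) and that the solver loop has already defined the other bindings:

    # Hypothetical call inside a solver iteration; tr, nlp, f, f_trial, Δm,
    # x_trial, step, and slope are assumed to exist in the caller.
    # With use_only_objgrad = true, the roundoff-error correction re-evaluates
    # the gradient via objgrad! instead of grad!, which suits models that only
    # provide a fused objective/gradient evaluation.
    ared, pred = aredpred!(tr, nlp, f, f_trial, Δm, x_trial, step, slope; use_only_objgrad = true)

The default use_only_objgrad = false preserves the previous behavior, so existing callers that omit the keyword are unaffected.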