19 changes: 19 additions & 0 deletions docs/src/user/minimization.md
@@ -219,3 +219,22 @@ line search errors if `initial_x` is a stationary point. Notice, that this is only
a first order check. If `initial_x` is any type of stationary point, `g_converged`
will be true. This includes local minima, saddle points, and local maxima. If `iterations` is `0`
and `g_converged` is `true`, the user needs to keep this point in mind.

## Iterator interface
For multivariate optimization, an iterator interface is provided through the `Optim.optimizing`
function. Using this interface, `optimize(args...; kwargs...)` is equivalent to

```jl
let istate
for istate′ in Optim.optimizing(args...; kwargs...)
istate = istate′
end
Optim.OptimizationResults(istate)
end
```

The iterator returned by `Optim.optimizing` yields an iterator state for each iteration
step.

Functions that can be called on the result object (e.g. `minimizer` and `iterations`; see
[Complete list of functions](@ref)) can also be called on the iteration state `istate`.
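
As a hedged sketch of how this might look in practice (the Rosenbrock objective and `BFGS`
below are illustrative choices, not part of this diff, and the sketch assumes that accessors
such as `iterations` and `minimum` are extended to the iteration state as described above):

```jl
using Optim

# Rosenbrock function -- an illustrative objective, not part of this diff.
f(x) = (1.0 - x[1])^2 + 100.0 * (x[2] - x[1]^2)^2

result = let istate
    for istate′ in Optim.optimizing(f, zeros(2), BFGS())
        istate = istate′
        # Result-style accessors can be queried mid-run on the iteration state.
        println(Optim.iterations(istate), ": ", Optim.minimum(istate))
    end
    Optim.OptimizationResults(istate)
end

Optim.minimizer(result)  # should be close to [1.0, 1.0]
```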
35 changes: 21 additions & 14 deletions src/api.jl
@@ -1,10 +1,13 @@
Base.summary(r::OptimizationResults) = summary(r.method) # might want to do more here than just return summary of the method used
Base.summary(r::Union{OptimizationResults, IteratorState}) =
summary(AbstractOptimizer(r)) # might want to do more here than just return summary of the method used
minimizer(r::OptimizationResults) = r.minimizer
minimum(r::OptimizationResults) = r.minimum
iterations(r::OptimizationResults) = r.iterations
iteration_limit_reached(r::OptimizationResults) = r.iteration_converged
trace(r::OptimizationResults) = length(r.trace) > 0 ? r.trace : error("No trace in optimization results. To get a trace, run optimize() with store_trace = true.")

AbstractOptimizer(r::OptimizationResults) = r.method

function x_trace(r::UnivariateOptimizationResults)
tr = trace(r)
!haskey(tr[1].metadata, "minimizer") && error("Trace does not contain x. To get a trace of x, run optimize() with extended_trace = true")
@@ -23,41 +26,45 @@ function x_upper_trace(r::UnivariateOptimizationResults)
end
x_upper_trace(r::MultivariateOptimizationResults) = error("x_upper_trace is not implemented for $(summary(r)).")

function x_trace(r::MultivariateOptimizationResults)
function x_trace(r::Union{MultivariateOptimizationResults, IteratorState})
tr = trace(r)
if isa(r.method, NelderMead)
if isa(AbstractOptimizer(r), NelderMead)
throw(ArgumentError("Nelder Mead does not operate with a single x. Please use either centroid_trace(...) or simplex_trace(...) to extract the relevant points from the trace."))
end
!haskey(tr[1].metadata, "x") && error("Trace does not contain x. To get a trace of x, run optimize() with extended_trace = true")
[ state.metadata["x"] for state in tr ]
end

function centroid_trace(r::MultivariateOptimizationResults)
if !isa(r.method, NelderMead)
throw(ArgumentError("There is no centroid involved in optimization using $(r.method). Please use x_trace(...) to grab the points from the trace."))
function centroid_trace(r::Union{MultivariateOptimizationResults, IteratorState})
tr = trace(r)
@tkf (Contributor, Author) commented on Sep 10, 2019:

I suppose `tr = trace(r)` should be added here? I think it'll throw `UndefVarError` otherwise. There are two more places where I made this change.

I'm including these changes (adding `tr = trace(r)`) even though they are not directly related to this PR.

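For context, a minimal reduction of the failure mode this comment describes (illustrative only, not code from the diff): a name that is never assigned inside the function falls through to global scope and raises `UndefVarError` when the function runs.

```jl
function first_trace_entry()
    # `tr` is never assigned in this scope, so Julia resolves it as a
    # global; if no global `tr` exists, this raises UndefVarError(:tr).
    tr[1]
end

first_trace_entry()  # ERROR: UndefVarError: `tr` not defined
```
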
if !isa(AbstractOptimizer(r), NelderMead)
throw(ArgumentError("There is no centroid involved in optimization using $(AbstractOptimizer(r)). Please use x_trace(...) to grab the points from the trace."))
end
!haskey(tr[1].metadata, "centroid") && error("Trace does not contain centroid. To get a trace of the centroid, run optimize() with extended_trace = true")
[ state.metadata["centroid"] for state in tr ]
end
function simplex_trace(r::MultivariateOptimizationResults)
if !isa(r.method, NelderMead)
throw(ArgumentError("There is no simplex involved in optimization using $(r.method). Please use x_trace(...) to grab the points from the trace."))
function simplex_trace(r::Union{MultivariateOptimizationResults, IteratorState})
tr = trace(r)
if !isa(AbstractOptimizer(r), NelderMead)
throw(ArgumentError("There is no simplex involved in optimization using $(AbstractOptimizer(r)). Please use x_trace(...) to grab the points from the trace."))
end
!haskey(tr[1].metadata, "simplex") && error("Trace does not contain simplex. To get a trace of the simplex, run optimize() with trace_simplex = true")
[ state.metadata["simplex"] for state in tr ]
end
function simplex_value_trace(r::MultivariateOptimizationResults)
if !isa(r.method, NelderMead)
throw(ArgumentError("There are no simplex values involved in optimization using $(r.method). Please use f_trace(...) to grab the objective values from the trace."))
function simplex_value_trace(r::Union{MultivariateOptimizationResults, IteratorState})
tr = trace(r)
if !isa(AbstractOptimizer(r), NelderMead)
throw(ArgumentError("There are no simplex values involved in optimization using $(AbstractOptimizer(r)). Please use f_trace(...) to grab the objective values from the trace."))
end
!haskey(tr[1].metadata, "simplex_values") && error("Trace does not contain objective values at the simplex. To get a trace of the simplex values, run optimize() with trace_simplex = true")
[ state.metadata["simplex_values"] for state in tr ]
end


f_trace(r::OptimizationResults) = [ state.value for state in trace(r) ]
f_trace(r::Union{OptimizationResults, IteratorState}) = [ state.value for state in trace(r) ]
g_norm_trace(r::OptimizationResults) = error("g_norm_trace is not implemented for $(summary(r)).")
g_norm_trace(r::MultivariateOptimizationResults) = [ state.g_norm for state in trace(r) ]
g_norm_trace(r::Union{MultivariateOptimizationResults, IteratorState}) =
[ state.g_norm for state in trace(r) ]

f_calls(r::OptimizationResults) = r.f_calls
f_calls(d) = first(d.f_calls)
54 changes: 32 additions & 22 deletions src/multivariate/optimize/interface.jl
@@ -54,84 +54,94 @@ promote_objtype(method::FirstOrderOptimizer, x, autodiff::Symbol, inplace::Bool
promote_objtype(method::SecondOrderOptimizer, x, autodiff::Symbol, inplace::Bool, td::TwiceDifferentiable) = td

# if no method or options are present
function optimize(f, initial_x::AbstractArray; inplace = true, autodiff = :finite, kwargs...)
function optimizing(f, initial_x::AbstractArray; inplace = true, autodiff = :finite, kwargs...)
method = fallback_method(f)
checked_kwargs, method = check_kwargs(kwargs, method)
d = promote_objtype(method, initial_x, autodiff, inplace, f)
add_default_opts!(checked_kwargs, method)

options = Options(; checked_kwargs...)
optimize(d, initial_x, method, options)
optimizing(d, initial_x, method, options)
end
function optimize(f, g, initial_x::AbstractArray; inplace = true, autodiff = :finite, kwargs...)
function optimizing(f, g, initial_x::AbstractArray; inplace = true, autodiff = :finite, kwargs...)

method = fallback_method(f, g)
checked_kwargs, method = check_kwargs(kwargs, method)
d = promote_objtype(method, initial_x, autodiff, inplace, f, g)
add_default_opts!(checked_kwargs, method)

options = Options(; checked_kwargs...)
optimize(d, initial_x, method, options)
optimizing(d, initial_x, method, options)
end
function optimize(f, g, h, initial_x::AbstractArray; inplace = true, autodiff = :finite, kwargs...)
function optimizing(f, g, h, initial_x::AbstractArray; inplace = true, autodiff = :finite, kwargs...)

method = fallback_method(f, g, h)
checked_kwargs, method = check_kwargs(kwargs, method)
d = promote_objtype(method, initial_x, autodiff, inplace, f, g, h)
add_default_opts!(checked_kwargs, method)

options = Options(; checked_kwargs...)
optimize(d, initial_x, method, options)
optimizing(d, initial_x, method, options)
end

# no method supplied with objective
function optimize(d::T, initial_x::AbstractArray, options::Options) where T<:AbstractObjective
optimize(d, initial_x, fallback_method(d), options)
function optimizing(d::T, initial_x::AbstractArray, options::Options) where T<:AbstractObjective
optimizing(d, initial_x, fallback_method(d), options)
end
# no method supplied with inplace and autodiff keywords because objective is not supplied
function optimize(f, initial_x::AbstractArray, options::Options; inplace = true, autodiff = :finite)
function optimizing(f, initial_x::AbstractArray, options::Options; inplace = true, autodiff = :finite)
method = fallback_method(f)
d = promote_objtype(method, initial_x, autodiff, inplace, f)
optimize(d, initial_x, method, options)
optimizing(d, initial_x, method, options)
end
function optimize(f, g, initial_x::AbstractArray, options::Options; inplace = true, autodiff = :finite)
function optimizing(f, g, initial_x::AbstractArray, options::Options; inplace = true, autodiff = :finite)

method = fallback_method(f, g)
d = promote_objtype(method, initial_x, autodiff, inplace, f, g)
optimize(d, initial_x, method, options)
optimizing(d, initial_x, method, options)
end
function optimize(f, g, h, initial_x::AbstractArray{T}, options::Options; inplace = true, autodiff = :finite) where {T}
function optimizing(f, g, h, initial_x::AbstractArray{T}, options::Options; inplace = true, autodiff = :finite) where {T}

method = fallback_method(f, g, h)
d = promote_objtype(method, initial_x, autodiff, inplace, f, g, h)

optimize(d, initial_x, method, options)
optimizing(d, initial_x, method, options)
end

# potentially everything is supplied (besides caches)
function optimize(f, initial_x::AbstractArray, method::AbstractOptimizer,
function optimizing(f, initial_x::AbstractArray, method::AbstractOptimizer,
options::Options = Options(;default_options(method)...); inplace = true, autodiff = :finite)

d = promote_objtype(method, initial_x, autodiff, inplace, f)
optimize(d, initial_x, method, options)
optimizing(d, initial_x, method, options)
end
function optimize(f, g, initial_x::AbstractArray, method::AbstractOptimizer,
function optimizing(f, g, initial_x::AbstractArray, method::AbstractOptimizer,
options::Options = Options(;default_options(method)...); inplace = true, autodiff = :finite)

d = promote_objtype(method, initial_x, autodiff, inplace, f, g)

optimize(d, initial_x, method, options)
optimizing(d, initial_x, method, options)
end
function optimize(f, g, h, initial_x::AbstractArray{T}, method::AbstractOptimizer,
function optimizing(f, g, h, initial_x::AbstractArray{T}, method::AbstractOptimizer,
options::Options = Options(;default_options(method)...); inplace = true, autodiff = :finite) where T

d = promote_objtype(method, initial_x, autodiff, inplace, f, g, h)

optimize(d, initial_x, method, options)
optimizing(d, initial_x, method, options)
end

function optimize(d::D, initial_x::AbstractArray, method::SecondOrderOptimizer,
function optimizing(d::D, initial_x::AbstractArray, method::SecondOrderOptimizer,
options::Options = Options(;default_options(method)...); autodiff = :finite, inplace = true) where {D <: Union{NonDifferentiable, OnceDifferentiable}}
d = promote_objtype(method, initial_x, autodiff, inplace, d)
optimize(d, initial_x, method, options)
optimizing(d, initial_x, method, options)
end

function optimize(args...; kwargs...)
local istate
for istate′ in optimizing(args...; kwargs...)
istate = istate′
end
# We can safely assume that `istate` is defined at this point. That is to say,
# `OptimIterator` guarantees that `iterate(::OptimIterator) !== nothing`.
A contributor commented on lines +334 to +335:

I think JET won't agree with this comment 😄

Generally, the code above seems a bit unfortunate... Maybe `optimizing` should return the iterator AND the initial state?

I also wonder, is there no utility in Julia for directly obtaining the last state of an iterator?
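
One way to address both points, sketched under the assumption that `optimizing` returns an ordinary Julia iterator (hypothetical code, not part of this PR): drive `iterate` by hand so `istate` is provably assigned on every path, and note that `foldl` without an `init` already returns the last element of a non-empty iterator.

```jl
# Hypothetical variant of `optimize`: seed the loop with an explicit first
# iterate() call so `istate` is defined before it is ever used.
function optimize_checked(args...; kwargs...)
    itr = optimizing(args...; kwargs...)
    y = iterate(itr)
    y === nothing && error("optimizing produced an empty iterator")
    istate, s = y
    while (y = iterate(itr, s)) !== nothing
        istate, s = y
    end
    return OptimizationResults(istate)
end

# A generic "last state of an iterator" utility: `foldl` with no init uses
# the first element as the seed and returns the final accumulator.
last_state(itr) = foldl((_, x) -> x, itr)
```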

return OptimizationResults(istate)
end