Commit 0ac3af5

Fix a few bugs in the existing code.
1 parent dbddd25 commit 0ac3af5

3 files changed: +18 -75 lines

docs/src/optimization_packages/manopt.md

Lines changed: 5 additions & 3 deletions
@@ -1,7 +1,9 @@
 # Manopt.jl
 
-[Manopt.jl](https://github.com/JuliaManifolds/Manopt.jl) is a package with implementations of a variety of optimization solvers on manifolds supported by
-[Manifolds](https://github.com/JuliaManifolds/Manifolds.jl).
+[Manopt.jl](https://github.com/JuliaManifolds/Manopt.jl) is a package providing solvers
+for optimization problems defined on Riemannian manifolds.
+The implementation is based on the [ManifoldsBase.jl](https://github.com/JuliaManifolds/ManifoldsBase.jl) interface and can hence be used for all manifolds defined in
+[Manifolds](https://github.com/JuliaManifolds/Manifolds.jl) or any other manifold implemented using the interface.
 
 ## Installation: OptimizationManopt.jl
 
@@ -29,7 +31,7 @@ The common kwargs `maxiters`, `maxtime` and `abstol` are supported by all the op
 function or `OptimizationProblem`.
 
 !!! note
-
+
     The `OptimizationProblem` has to be passed the manifold as the `manifold` keyword argument.
 
 ## Examples
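
The note above is the user-facing contract this commit relies on: the manifold travels with the `OptimizationProblem`, not with the solver. A minimal usage sketch, pieced together from the test suite further down in this commit (the Rosenbrock objective is assumed here; it is not part of the diff):

    using Optimization, OptimizationManopt, Manifolds

    rosenbrock(x, p) = (p[1] - x[1])^2 + p[2] * (x[2] - x[1]^2)^2  # assumed objective
    x0 = zeros(2)
    p = [1.0, 100.0]

    R2 = Euclidean(2)  # the manifold the solver works on
    optprob = OptimizationFunction(rosenbrock, Optimization.AutoForwardDiff())
    prob = OptimizationProblem(optprob, x0, p; manifold = R2)  # manifold passed as a keyword
    sol = Optimization.solve(prob, OptimizationManopt.GradientDescentOptimizer())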

lib/OptimizationManopt/src/OptimizationManopt.jl

Lines changed: 12 additions & 9 deletions
@@ -65,6 +65,7 @@ function call_manopt_optimizer(
         loss,
         gradF,
         x0;
+        hessF=nothing, # ignore that keyword for this solver
         kwargs...)
     opts = Manopt.gradient_descent(M,
         loss,
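
The same one-line change recurs in every first-order wrapper below. Spelled out as a complete method, the pattern looks roughly like this (signature paraphrased; the real methods dispatch on the concrete optimizer struct, which the hunk header truncates):

    function call_manopt_optimizer(M, opt, loss, gradF, x0;
            hessF = nothing,  # accepted so the caller can always forward it; unused by this solver
            kwargs...)
        opts = Manopt.gradient_descent(M, loss, gradF, x0; return_state = true, kwargs...)
        minimizer = Manopt.get_solver_result(opts)
        return (; minimizer = minimizer, minimum = loss(M, minimizer), options = opts)
    end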
@@ -84,6 +85,7 @@ function call_manopt_optimizer(M::ManifoldsBase.AbstractManifold, opt::NelderMea
         loss,
         gradF,
         x0;
+        hessF=nothing, # ignore that keyword for this solver
         kwargs...)
     opts = NelderMead(M, loss; return_state = true, kwargs...)
     minimizer = Manopt.get_solver_result(opts)
@@ -98,6 +100,7 @@ function call_manopt_optimizer(M::ManifoldsBase.AbstractManifold,
         loss,
         gradF,
         x0;
+        hessF=nothing, # ignore that keyword for this solver
         kwargs...)
     opts = Manopt.conjugate_gradient_descent(M,
         loss,
@@ -106,7 +109,6 @@ function call_manopt_optimizer(M::ManifoldsBase.AbstractManifold,
         return_state = true,
         kwargs...
     )
-    # we unwrap DebugOptions here
     minimizer = Manopt.get_solver_result(opts)
     return (; minimizer = minimizer, minimum = loss(M, minimizer), options = opts)
 end
@@ -119,6 +121,7 @@ function call_manopt_optimizer(M::ManifoldsBase.AbstractManifold,
         loss,
         gradF,
         x0;
+        hessF=nothing, # ignore that keyword for this solver
         population_size::Int = 100,
         kwargs...)
     swarm = [x0, [rand(M) for _ in 1:(population_size - 1)]...]
@@ -136,10 +139,10 @@ function call_manopt_optimizer(M::Manopt.AbstractManifold,
         loss,
         gradF,
         x0;
+        hessF=nothing, # ignore that keyword for this solver
         kwargs...
     )
     opts = quasi_Newton(M, loss, gradF, x0; return_state = true, kwargs...)
-    # we unwrap DebugOptions here
     minimizer = Manopt.get_solver_result(opts)
     return (; minimizer = minimizer, minimum = loss(M, minimizer), options = opts)
 end
@@ -151,9 +154,9 @@ function call_manopt_optimizer(M::ManifoldsBase.AbstractManifold,
         loss,
         gradF,
         x0;
+        hessF=nothing, # ignore that keyword for this solver
         kwargs...)
     opt = cma_es(M, loss, x0; return_state = true, kwargs...)
-    # we unwrap DebugOptions here
     minimizer = Manopt.get_solver_result(opt)
     return (; minimizer = minimizer, minimum = loss(M, minimizer), options = opt)
 end
@@ -165,9 +168,9 @@ function call_manopt_optimizer(M::ManifoldsBase.AbstractManifold,
         loss,
         gradF,
         x0;
+        hessF=nothing, # ignore that keyword for this solver
         kwargs...)
     opt = convex_bundle_method(M, loss, gradF, x0; return_state = true, kwargs...)
-    # we unwrap DebugOptions here
     minimizer = Manopt.get_solver_result(opt)
     return (; minimizer = minimizer, minimum = loss(M, minimizer), options = opt)
 end
@@ -205,7 +208,6 @@ function call_manopt_optimizer(M::ManifoldsBase.AbstractManifold,
     else
         trust_regions(M, loss, gradF, hessF, x0; return_state = true, kwargs...)
     end
-    # we unwrap DebugOptions here
     minimizer = Manopt.get_solver_result(opt)
     return (; minimizer = minimizer, minimum = loss(M, minimizer), options = opt)
 end
@@ -217,9 +219,9 @@ function call_manopt_optimizer(M::ManifoldsBase.AbstractManifold,
         loss,
         gradF,
         x0;
+        hessF=nothing, # ignore that keyword for this solver
         kwargs...)
     opt = Frank_Wolfe_method(M, loss, gradF, x0; return_state = true, kwargs...)
-    # we unwrap DebugOptions here
     minimizer = Manopt.get_solver_result(opt)
     return (; minimizer = minimizer, minimum = loss(M, minimizer), options = opt)
 end
@@ -238,6 +240,9 @@ function SciMLBase.requireshessian(opt::Union{
 end
 
 function build_loss(f::OptimizationFunction, prob, cb)
+    # TODO: I do not understand this. Why is the manifold not used?
+    # Either this is an Euclidean cost, then we should probably still call `embed`,
+    # or it is not, then we need M.
     return function (::AbstractManifold, θ)
         x = f.f(θ, prob.p)
         cb(x, θ)
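
For context on that TODO: Manopt expects objectives in the two-argument form `f(M, p)`, with the manifold first, which is why `build_loss` wraps the SciML cost in such a closure even though the wrapped Euclidean cost never reads `M`. A small, purely illustrative Manopt-style objective (the `Sphere` example is mine, not from this commit):

    using Manifolds, Manopt

    M = Sphere(2)
    q = [1.0, 0.0, 0.0]
    f(M, p) = distance(M, p, q)^2     # cost with the (manifold, point) signature Manopt expects
    grad_f(M, p) = -2 * log(M, p, q)  # Riemannian gradient of the squared distance
    p_min = gradient_descent(M, f, grad_f, rand(M))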
@@ -351,13 +356,11 @@ function SciMLBase.__solve(cache::OptimizationCache{
         stopping_criterion = Manopt.StopAfterIteration(500)
     end
 
-    # TODO: With the new keyword warnings we can not just always pass down hessF!
     opt_res = call_manopt_optimizer(manifold, cache.opt, _loss, gradF, cache.u0;
         solver_kwarg..., stopping_criterion = stopping_criterion, hessF)
 
     asc = get_stopping_criterion(opt_res.options)
-    # TODO: Switch to `has_converged` once that was released.
-    opt_ret = Manopt.indicates_convergence(asc) ? ReturnCode.Success : ReturnCode.Failure
+    opt_ret = Manopt.has_converged(asc) ? ReturnCode.Success : ReturnCode.Failure
 
     return SciMLBase.build_solution(cache,
         cache.opt,
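
From the user's side this change surfaces only through the return code: roughly, `sol.retcode` is `Success` when Manopt reports that the triggered stopping criterion indicates convergence (and `has_converged` requires a Manopt release that provides it, as the removed TODO noted). A minimal check, reusing `prob` and `opt` from the earlier sketch:

    using SciMLBase: ReturnCode

    sol = Optimization.solve(prob, opt)
    sol.retcode == ReturnCode.Success  # true when the Manopt stopping criterion indicated convergence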

lib/OptimizationManopt/test/runtests.jl

Lines changed: 1 addition & 63 deletions
@@ -151,7 +151,7 @@ R2 = Euclidean(2)
 
     opt = OptimizationManopt.AdaptiveRegularizationCubicOptimizer()
 
-    #TODO: This autodiff currently provides a Hessian that seem to not procide a Hessian
+    #TODO: This autodiff currently provides a Hessian that seem to not provide a Hessian
     # ARC Fails but also AD before that warns. So it passes _some_ hessian but a wrong one, even in format
     optprob = OptimizationFunction(rosenbrock, AutoForwardDiff())
     prob = OptimizationProblem(optprob, x0, p; manifold = R2)
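
If the auto-generated Hessian really is the culprit here, one possible workaround (untested; my suggestion, not part of this commit) is to supply an explicit Hessian through `OptimizationFunction`'s `hess` keyword instead of relying on the AD backend, assuming the standard two-parameter Rosenbrock form (p[1] - x[1])^2 + p[2] * (x[2] - x[1]^2)^2:

    # Hand-coded Euclidean Hessian of the Rosenbrock objective assumed above
    function rosenbrock_hess(H, x, p)
        H[1, 1] = 2 - 4 * p[2] * (x[2] - x[1]^2) + 8 * p[2] * x[1]^2
        H[1, 2] = -4 * p[2] * x[1]
        H[2, 1] = -4 * p[2] * x[1]
        H[2, 2] = 2 * p[2]
        return H
    end

    optprob = OptimizationFunction(rosenbrock, AutoForwardDiff(); hess = rosenbrock_hess)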
@@ -175,23 +175,6 @@ R2 = Euclidean(2)
     @test sol.minimum < 0.1
 end
 
-# @testset "Circle example from Manopt" begin
-# Mc = Circle()
-# pc = 0.0
-# data = [-π / 4, 0.0, π / 4]
-# fc(y, _) = 1 / 2 * sum([distance(M, y, x)^2 for x in data])
-# sgrad_fc(G, y, _) = G .= -log(Mc, y, rand(data))
-
-# opt = OptimizationManopt.StochasticGradientDescentOptimizer()
-
-# optprob = OptimizationFunction(fc, grad = sgrad_fc)
-# prob = OptimizationProblem(optprob, pc; manifold = Mc)
-
-# sol = Optimization.solve(prob, opt)
-
-# @test all([is_point(Mc, q, true) for q in [q1, q2, q3, q4, q5]])
-# end
-
 @testset "Custom constraints" begin
     cons(res, x, p) = (res .= [x[1]^2 + x[2]^2, x[1] * x[2]])
 
@@ -204,49 +187,4 @@ R2 = Euclidean(2)
     #TODO: What is this?
     @test_throws SciMLBase.IncompatibleOptimizerError Optimization.solve(prob_cons, opt)
 end
-
-@testset "SPD Manifold" begin
-    M = SymmetricPositiveDefinite(5)
-    m = 100
-    σ = 0.005
-    q = Matrix{Float64}(I, 5, 5) .+ 2.0
-    data2 = [exp(M, q, σ * rand(M; vector_at = q)) for i in 1:m]
-
-    f(x, p = nothing) = sum(distance(M, x, data2[i])^2 for i in 1:m)
-
-    optf = OptimizationFunction(f, Optimization.AutoFiniteDiff())
-    prob = OptimizationProblem(optf, data2[1]; manifold = M, maxiters = 1000)
-
-    opt = OptimizationManopt.GradientDescentOptimizer()
-    @time sol = Optimization.solve(prob, opt)
-
-    @test sol.u≈q rtol=1e-2
-
-    function closed_form_solution(M::SymmetricPositiveDefinite, L, U, p, X)
-        # extract p^1/2 and p^{-1/2}
-        (p_sqrt_inv, p_sqrt) = Manifolds.spd_sqrt_and_sqrt_inv(p)
-        # Compute D & Q
-        e2 = eigen(p_sqrt_inv * X * p_sqrt_inv) # decompose Sk = QDQ'
-        D = Diagonal(1.0 .* (e2.values .< 0))
-        Q = e2.vectors
-        Uprime = Q' * p_sqrt_inv * U * p_sqrt_inv * Q
-        Lprime = Q' * p_sqrt_inv * L * p_sqrt_inv * Q
-        P = cholesky(Hermitian(Uprime - Lprime))
-
-        z = P.U' * D * P.U + Lprime
-        return p_sqrt * Q * z * Q' * p_sqrt
-    end
-    N = m
-    U = mean(data2)
-    L = inv(sum(1 / N * inv(matrix) for matrix in data2))
-
-    opt = OptimizationManopt.FrankWolfeOptimizer()
-    optf = OptimizationFunction(f, Optimization.AutoFiniteDiff())
-    prob = OptimizationProblem(optf, data2[1]; manifold = M)
-
-    @time sol = Optimization.solve(
-        prob, opt, sub_problem = (M, p, X) -> closed_form_solution(M, p, L, U, X),
-        maxiters = 1000)
-    @test sol.u≈q rtol=1e-2
-end
 end
