@@ -1,6 +1,6 @@
 module OptimizationManopt
 
-using Optimization, Manopt, ManifoldsBase
+using Optimization, Manopt, ManifoldsBase, ManifoldDiff
 
 """
     abstract type AbstractManoptOptimizer end
@@ -52,7 +52,7 @@ function call_manopt_optimizer(opt::GradientDescentOptimizer{Teval},
         stepsize = opt.stepsize,
         sckwarg...)
     # we unwrap DebugOptions here
-    minimizer = Manopt.get_solver_result(opts)
+    minimizer = opts
     return (; minimizer = minimizer, minimum = loss(opt.M, minimizer), options = opts),
     :who_knows
 end
@@ -61,15 +61,12 @@
 
 struct NelderMeadOptimizer{
     TM <: AbstractManifold,
-    Tpop <: AbstractVector
 } <: AbstractManoptOptimizer
     M::TM
-    initial_population::Tpop
 end
 
 function NelderMeadOptimizer(M::AbstractManifold)
-    initial_population = [rand(M) for _ in 1:(manifold_dimension(M) + 1)]
-    return NelderMeadOptimizer{typeof(M), typeof(initial_population)}(M, initial_population)
+    return NelderMeadOptimizer{typeof(M)}(M)
 end
 
 function call_manopt_optimizer(opt::NelderMeadOptimizer,
@@ -80,11 +77,10 @@ function call_manopt_optimizer(opt::NelderMeadOptimizer,
     sckwarg = stopping_criterion_to_kwarg(stopping_criterion)
 
     opts = NelderMead(opt.M,
-        loss,
-        opt.initial_population;
+        loss;
         return_options = true,
         sckwarg...)
-    minimizer = Manopt.get_solver_result(opts)
+    minimizer = opts
     return (; minimizer = minimizer, minimum = loss(opt.M, minimizer), options = opts),
     :who_knows
 end
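
The `initial_population` field can be dropped because `NelderMead` builds its own starting simplex when none is passed. A minimal sketch of that assumption (the manifold and cost below are illustrative, not part of this package):

```julia
using Manopt, Manifolds

M = Sphere(2)
f(M, p) = sum(abs2, p .- [0.0, 0.0, 1.0])   # illustrative cost; (M, p) signature as in this package

# With no population argument, Manopt samples a default simplex of
# manifold_dimension(M) + 1 points, which is what the removed constructor did by hand.
p_opt = NelderMead(M, f)

# Passing an explicit population is still possible, mirroring the old behaviour.
population = [rand(M) for _ in 1:(manifold_dimension(M) + 1)]
p_opt = NelderMead(M, f, population)
```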
@@ -123,7 +119,7 @@ function call_manopt_optimizer(opt::ConjugateGradientDescentOptimizer{Teval},
         stepsize = opt.stepsize,
         sckwarg...)
     # we unwrap DebugOptions here
-    minimizer = Manopt.get_solver_result(opts)
+    minimizer = opts
     return (; minimizer = minimizer, minimum = loss(opt.M, minimizer), options = opts),
     :who_knows
 end
@@ -177,7 +173,7 @@ function call_manopt_optimizer(opt::ParticleSwarmOptimizer{Teval},
         vector_transport_method = opt.vector_transport_method,
         sckwarg...)
     # we unwrap DebugOptions here
-    minimizer = Manopt.get_solver_result(opts)
+    minimizer = opts
     return (; minimizer = minimizer, minimum = loss(opt.M, minimizer), options = opts),
     :who_knows
 end
@@ -229,7 +225,7 @@ function call_manopt_optimizer(opt::QuasiNewtonOptimizer{Teval},
         stepsize = opt.stepsize,
         sckwarg...)
     # we unwrap DebugOptions here
-    minimizer = Manopt.get_solver_result(opts)
+    minimizer = opts
     return (; minimizer = minimizer, minimum = loss(opt.M, minimizer), options = opts),
     :who_knows
 end
@@ -245,14 +241,14 @@ function build_loss(f::OptimizationFunction, prob, cur)
 end
 
 function build_gradF(f::OptimizationFunction{true}, prob, cur)
-    function (M::AbstractManifold, G, θ)
+    function g(M::AbstractManifold, G, θ)
         f.grad(G, θ, cur...)
         G .= riemannian_gradient(M, θ, G)
-        if prob.sense === Optimization.MaxSense
-            return -G # TODO: check
-        else
-            return G
-        end
+    end
+    function g(M::AbstractManifold, θ)
+        G = zero(θ)
+        f.grad(G, θ, cur...)
+        return riemannian_gradient(M, θ, G)
     end
 end
 
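
`build_gradF` now returns a callable with two methods because Manopt solvers can consume a gradient either in place, `g(M, G, θ)`, or allocating, `g(M, θ)`, and `riemannian_gradient` from the newly imported ManifoldDiff converts the Euclidean gradient written by `f.grad` into a Riemannian one. A sketch of how both methods would be exercised (the cost, gradient, and the exact name of the evaluation keyword are assumptions that depend on the Manopt version, e.g. `MutatingEvaluation` in older releases versus `InplaceEvaluation` in newer ones):

```julia
using Manopt, Manifolds, ManifoldDiff

M = Sphere(2)
f(M, p) = p[3]                               # illustrative cost: height on the sphere
eucl_grad!(G, p) = (G .= [0.0, 0.0, 1.0])    # its Euclidean gradient, written in place

function g(M::AbstractManifold, G, θ)        # in-place variant, as in build_gradF
    eucl_grad!(G, θ)
    G .= riemannian_gradient(M, θ, G)        # convert to the Riemannian gradient on M
end
function g(M::AbstractManifold, θ)           # allocating variant
    G = zero(θ)
    eucl_grad!(G, θ)
    return riemannian_gradient(M, θ, G)
end

p0 = rand(M)
gradient_descent(M, f, g, p0)                                       # calls g(M, θ)
gradient_descent(M, f, g, p0; evaluation = MutatingEvaluation())    # calls g(M, G, θ)
```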