# using Revise
using Test
using LinearAlgebra
using Random
using IncrementalInference
using ManifoldsBase
using Manifolds, Manopt
import Optim
@@ -189,22 +190,48 @@ end
189
190
190
191
X = hat (M, e0, zeros (6 ))
191
192
g_FD! (X, q)
193
+
194
+ @show X_ = [X. x[1 ][:]; X. x[2 ][:]]
192
195
# gradient at the optimal point should be zero
193
- @test isapprox (0 , sum (abs .(X[:] )); atol= 1e-8 )
196
+ @test isapprox (0 , sum (abs .(X_ )); atol= 1e-8 )
194
197
195
198
# gradient not the optimal point should be non-zero
196
199
g_FD! (X, e0)
197
- @test 0.01 < sum (abs .(X[:]))
200
+ @show X_ = [X. x[1 ][:]; X. x[2 ][:]]
201
+ @test 0.01 < sum (abs .(X_))
198
202
199
203
# # do optimization
200
204
x0 = deepcopy (e0)
201
205
sol = Optim. optimize (f, g_FD!, x0, Optim. ConjugateGradient (; manifold= ManifoldWrapper (M)))
202
- Cq .= 0.5 * randn (6 )
206
+ # Cq .= 0.5*randn(6)
203
207
# Cq[
204
208
@show sol. minimizer
205
209
@test isapprox ( f (sol. minimizer), 0 ; atol= 1e-8 )
206
210
@test isapprox ( 0 , sum (abs .(log (M, e0, compose (M, inv (M,q), sol. minimizer)))); atol= 1e-5 )
207
211
208
212
213
+ # #
214
+ end
215
+
216
+
217
+ @testset " Optim.Manifolds, SpecialEuclidean(3), using IIF.optimizeManifold_FD" begin
218
+ # #
219
+
220
+ M = Manifolds. SpecialEuclidean (3 )
221
+ e0 = ArrayPartition ([0 ,0 ,0. ], Matrix (_Rot. RotXYZ (0 ,0 ,0. )))
222
+
223
+ x0 = deepcopy (e0)
224
+ Cq = 0.5 * randn (6 )
225
+ q = exp (M,e0,hat (M,e0,Cq))
226
+
227
+ f (p) = distance (M, p, q)^ 2
228
+
229
+ sol = IncrementalInference. optimizeManifold_FD (M,f,x0)
230
+
231
+ @show sol. minimizer
232
+ @test isapprox ( f (sol. minimizer), 0 ; atol= 1e-8 )
233
+ @test isapprox ( 0 , sum (abs .(log (M, e0, compose (M, inv (M,q), sol. minimizer)))); atol= 1e-5 )
234
+
235
+
209
236
# #
210
237
end
0 commit comments