Skip to content

Commit 8101563

Browse files
committed
Merge branch 'master' into 23Q3/dev/parametric
2 parents 5d62f24 + 525602e commit 8101563

File tree

9 files changed

+103
-64
lines changed

9 files changed

+103
-64
lines changed

NEWS.md

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -17,7 +17,7 @@ The list below highlights breaking changes according to normal semver workflow -
1717
- Deprecate `AbstractRelativeRoots`.
1818
- Standardization improvements surrounding weakdeps code extensions.
1919
- Code quality improvements along wiht refactoring and reorganizing of file names and locations.
20-
- Restoring `DERelative` factors, although further fixes necessary beyond anticipated patch release v0.34.1.
20+
- Restoring `DERelative` factors, through v0.34.1 and v0.34.2.
2121
- Switching to weakdep AMD.jl for `ccolmod` dependency, part of Julia 1.10 upgrade. Dropping `SuiteSparse_long` dependency. Further fixes necessary to restore full user constrained tree variable order functionality.
2222

2323
# Changes in v0.33

src/entities/ExtFactors.jl

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -19,7 +19,7 @@ DevNotes
1919
- FIXME Lots of consolidation and standardization to do, see RoME.jl #244 regarding Manifolds.jl.
2020
- TODO does not yet handle case where a factor spans across two timezones.
2121
"""
22-
struct DERelative{T <: InferenceVariable, P, D} <: AbstractRelativeMinimize
22+
struct DERelative{T <: InferenceVariable, P, D} <: AbstractManifoldMinimize # AbstractRelativeMinimize
2323
domain::Type{T}
2424
forwardProblem::P
2525
backwardProblem::P

src/services/GraphInit.jl

Lines changed: 5 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -123,10 +123,11 @@ Notes:
123123
- Special carve out for multihypo cases, see issue 427.
124124
125125
Development Notes:
126-
> Target factor is first (singletons) or second (dim 2 pairwise) variable vertex in `xi`.
127-
* TODO use DFG properly with local operations and DB update at end.
128-
* TODO get faster version of `isInitialized` for database version.
129-
* TODO: Persist this back if we want to here.
126+
- Target factor is first (singletons) or second (dim 2 pairwise) variable vertex in `xi`.
127+
- TODO use DFG properly with local operations and DB update at end.
128+
- TODO get faster version of `isInitialized` for database version.
129+
- TODO: Persist this back if we want to here.
130+
- TODO: init from just partials
130131
"""
131132
function doautoinit!(
132133
dfg::AbstractDFG,

src/services/GraphProductOperations.jl

Lines changed: 5 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -36,11 +36,12 @@ function propagateBelief(
3636
# make sure oldPoints vector has right length
3737
oldBel = getBelief(dfg, destlbl, solveKey)
3838
_pts = getPoints(oldBel, false)
39-
oldPoints = if Npts(oldBel) <= N
40-
_pts[1:N]
41-
else
39+
oldPoints = if Npts(oldBel) < N
4240
nn = N - length(_pts) # should be larger than 0
43-
vcat(_pts, sample(oldBel, nn))
41+
_pts_, = sample(oldBel, nn)
42+
vcat(_pts, _pts_)
43+
else
44+
_pts[1:N]
4445
end
4546

4647
# few more data requirements

src/services/NumericalCalculations.jl

Lines changed: 19 additions & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -362,10 +362,9 @@ function _solveCCWNumeric!(
362362
perturb::Real = 1e-10,
363363
) where {N_, F <: AbstractRelative, S, T}
364364
#
365-
365+
366366
#
367367
# thrid = Threads.threadid()
368-
369368
smpid = ccwl.particleidx[]
370369
# cannot Nelder-Mead on 1dim, partial can be 1dim or more but being conservative.
371370
islen1 = length(ccwl.partialDims) == 1 || ccwl.partial
@@ -377,10 +376,11 @@ function _solveCCWNumeric!(
377376
# a separate deepcopy of the destination (aka target) memory is necessary.
378377
# Choosen solution is to splice together ccwl.varValsAll each time, with destination as
379378
# deepcopy but other input variables are just point to the source variable values directly.
380-
if ccwl.partial
381-
target = view(ccwl.varValsAll[][ccwl.varidx[]][smpid], ccwl.partialDims)
379+
target = if ccwl.partial # FIXME likely type-instability on `typeof(target)`
380+
# view(ccwl.varValsAll[][ccwl.varidx[]][smpid], ccwl.partialDims)
381+
ccwl.varValsAll[][ccwl.varidx[]][smpid][ccwl.partialDims]
382382
else
383-
target = ccwl.varValsAll[][ccwl.varidx[]][smpid];
383+
ccwl.varValsAll[][ccwl.varidx[]][smpid]
384384
end
385385
# build the pre-objective function for this sample's hypothesis selection
386386
unrollHypo!, _ = _buildCalcFactorLambdaSample(
@@ -407,12 +407,13 @@ function _solveCCWNumeric!(
407407
# target .+= _perturbIfNecessary(getFactorType(ccwl), length(target), perturb)
408408
sfidx = ccwl.varidx[]
409409
# do the parameter search over defined decision variables using Minimization
410-
if ccwl.partial
411-
X = collect(view(ccwl.varValsAll[][sfidx][smpid], ccwl.partialDims))
412-
else
413-
X = ccwl.varValsAll[][sfidx][smpid][ccwl.partialDims]
414-
end
415-
# X = destVarVals[smpid]#[ccwl.partialDims]
410+
X = ccwl.varValsAll[][sfidx][smpid][ccwl.partialDims]
411+
# X = if ccwl.partial # TODO check for type-instability on `X`
412+
# collect(view(ccwl.varValsAll[][sfidx][smpid], ccwl.partialDims))
413+
# else
414+
# ccwl.varValsAll[][sfidx][smpid][ccwl.partialDims]
415+
# end
416+
# # X = destVarVals[smpid]#[ccwl.partialDims]
416417

417418
retval = _solveLambdaNumeric(
418419
getFactorType(ccwl),
@@ -430,7 +431,13 @@ function _solveCCWNumeric!(
430431

431432
# insert result back at the correct variable element location
432433
if ccwl.partial
433-
ccwl.varValsAll[][sfidx][smpid][ccwl.partialDims] .= retval
434+
# NOTE use workaround of TranslationGroup for coordinates on partial assignment
435+
# FIXME consolidate to Manopt and upgrade to Riemannian (i.e. incl non-groups)
436+
M = getManifold(ccwl) # TranslationGroup(length(ccwl.varValsAll[][sfidx][smpid]))
437+
src = Vector{typeof(retval)}()
438+
push!(src, retval)
439+
setPointPartial!(M, ccwl.varValsAll[][sfidx], M, src, ccwl.partialDims, smpid, 1, true )
440+
# ccwl.varValsAll[][sfidx][smpid][ccwl.partialDims] .= retval
434441
else
435442
# copyto!(ccwl.varValsAll[sfidx][smpid], retval)
436443
copyto!(ccwl.varValsAll[][sfidx][smpid][ccwl.partialDims], retval)

test/runtests.jl

Lines changed: 7 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -5,30 +5,30 @@ TEST_GROUP = get(ENV, "IIF_TEST_GROUP", "all")
55
# temporarily moved to start (for debugging)
66
#...
77
if TEST_GROUP in ["all", "tmp_debug_group"]
8-
include("testDERelative.jl")
98
include("testSpecialOrthogonalMani.jl")
109
include("testMultiHypo3Door.jl")
1110
include("priorusetest.jl")
1211
end
1312

1413
if TEST_GROUP in ["all", "basic_functional_group"]
15-
# more frequent stochasic failures from numerics
14+
# more frequent stochasic failures from numerics
1615
include("manifolds/manifolddiff.jl")
1716
include("manifolds/factordiff.jl")
1817
include("testSpecialEuclidean2Mani.jl")
1918
include("testEuclidDistance.jl")
2019

21-
# regular testing
22-
include("testSphereMani.jl")
23-
include("testBasicManifolds.jl")
24-
2520
# start as basic as possible and build from there
2621
include("typeReturnMemRef.jl")
2722
include("testDistributionsGeneric.jl")
28-
include("testHeatmapGridDensity.jl")
2923
include("testCliqSolveDbgUtils.jl")
3024
include("basicGraphsOperations.jl")
3125

26+
# regular testing
27+
include("testSphereMani.jl")
28+
include("testBasicManifolds.jl")
29+
include("testDERelative.jl")
30+
include("testHeatmapGridDensity.jl")
31+
3232
# include("TestModuleFunctions.jl")
3333
include("testCompareVariablesFactors.jl")
3434
include("saveconvertertypes.jl")

test/testDERelative.jl

Lines changed: 20 additions & 13 deletions
Original file line numberDiff line numberDiff line change
@@ -252,10 +252,10 @@ val0 = getPPESuggested( hists[1][12][4].cliqSubFg[:x0] )
252252

253253
##
254254

255-
@test getPPE(fg, :x0).suggested - x0_val_ref |> norm < 0.1
256-
@test getPPE(fg, :x1).suggested - x1_val_ref |> norm < 0.1
257-
@test getPPE(fg, :x2).suggested - x2_val_ref |> norm < 0.1
258-
@test getPPE(fg, :x3).suggested - x3_val_ref |> norm < 0.1
255+
@test isapprox( getPPE(fg, :x0).suggested, x0_val_ref; atol = 0.1)
256+
@test isapprox( getPPE(fg, :x1).suggested, x1_val_ref; atol = 0.1)
257+
@test isapprox( getPPE(fg, :x2).suggested, x2_val_ref; atol = 0.1)
258+
@test isapprox( getPPE(fg, :x3).suggested, x3_val_ref; atol = 0.1)
259259

260260
##
261261

@@ -390,9 +390,9 @@ pts_ = approxConv(fg, :x0x1f1, :x0)
390390
##
391391

392392
tfg = initfg()
393-
for s in ls(fg)
394-
initVariable!(fg, s, [0.1.*zeros(2) for _ in 1:100])
395-
end
393+
# for s in ls(fg)
394+
# initVariable!(fg, s, [0.1.*zeros(2) for _ in 1:100])
395+
# end
396396

397397
pts = approxConv(fg, :x0f1, :x7, setPPE=true, tfg=tfg)
398398
initVariable!(tfg, :x7, pts)
@@ -413,16 +413,23 @@ initVariable!(tfg, :x7, pts)
413413
@error "Disabling useMsgLikelihood for DERelative test, follow fix on #1010 as rough guide"
414414
getSolverParams(fg).useMsgLikelihoods = false
415415

416-
solveTree!(fg);
416+
smtasks = Task[]
417+
tree = solveTree!(fg; recordcliqs=ls(fg), smtasks);
418+
419+
hists = fetchCliqHistoryAll!(smtasks)
420+
printCSMHistoryLogical(hists)
421+
422+
_, csmc = repeatCSMStep!(hists[2], 6; duplicate=true);
423+
417424

418425
##
419426

420427
# solveTree has weird problem in breaking correct init and inserting zeros???
421-
@test_broken isapprox( getPPESuggested(fg, :x0), x0_val_ref; atol=0.2)
422-
@test_broken isapprox( getPPESuggested(fg, :x1), x1_val_ref; atol=0.2)
423-
@test_broken isapprox( getPPESuggested(fg, :x2), x2_val_ref; atol=0.2)
424-
@test_broken isapprox( getPPESuggested(fg, :x3), x3_val_ref; atol=0.2)
425-
@test_broken isapprox( getPPESuggested(fg, :x4), x4_val_ref; atol=0.2)
428+
@test isapprox( getPPESuggested(fg, :x0), x0_val_ref; atol=0.2)
429+
@test isapprox( getPPESuggested(fg, :x1), x1_val_ref; atol=0.2)
430+
@test isapprox( getPPESuggested(fg, :x2), x2_val_ref; atol=0.2)
431+
@test isapprox( getPPESuggested(fg, :x3), x3_val_ref; atol=0.2)
432+
@test isapprox( getPPESuggested(fg, :x4), x4_val_ref; atol=0.2)
426433

427434
@test isapprox( getPPESuggested(fg, :x5), x5_val_ref; atol=0.2)
428435
@test isapprox( getPPESuggested(fg, :x6), x6_val_ref; atol=0.2)

test/testSpecialEuclidean2Mani.jl

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -158,7 +158,7 @@ function (cf::CalcFactor{<:ManifoldFactorSE2})(X, p, q)
158158
vee!(M, Xc, q, log(M, q, q̂))
159159
return Xc
160160
end
161-
161+
162162
##
163163

164164
@testset "Test Pose2 like hex as SpecialEuclidean2" begin
@@ -314,7 +314,7 @@ doautoinit!(fg, :x1)
314314
vnd = getVariableSolverData(fg, :x1)
315315
@test all(isapprox.(mean(vnd.val), [1.0,2.0], atol=0.1))
316316

317-
# ##
317+
##
318318
smtasks = Task[]
319319
solveTree!(fg; smtasks, verbose=true, recordcliqs=ls(fg))
320320
# # hists = fetchCliqHistoryAll!(smtasks);

test/testpartialconstraint.jl

Lines changed: 43 additions & 20 deletions
Original file line numberDiff line numberDiff line change
@@ -60,20 +60,18 @@ f2 = addFactor!(fg,[:x1],dp, graphinit=false)
6060
doautoinit!(fg, :x1)
6161

6262
##
63-
6463
@testset "test evaluation of full constraint prior" begin
64+
##
6565

66-
67-
pts_, _ = evalFactor(fg, f1, v1.label, N=N)
66+
pts_, _ = evalFactor(fg, f1, v1.label; N)
6867
@cast pts[i,j] := pts_[j][i]
6968
@test size(pts,1) == 2
7069
@test size(pts,2) == N
7170
@test norm(Statistics.mean(pts,dims=2)[1] .- [0.0]) < 0.3
7271

73-
72+
##
7473
end
7574

76-
##
7775

7876
@testset "test evaluation of partial constraint prior" begin
7977
##
@@ -83,7 +81,7 @@ memcheck_ = getVal(v1)
8381

8482
X1pts_ = getVal(v1)
8583
@cast X1pts[i,j] := X1pts_[j][i]
86-
pts_ = approxConv(fg, getLabel(f2), :x1, N=N)
84+
pts_ = approxConv(fg, getLabel(f2), :x1; N)
8785
@cast pts[i,j] := pts_[j][i]
8886

8987
@test size(pts, 1) == 2
@@ -153,7 +151,7 @@ gradients = FactorGradientsCached!(dpp, (ContinuousEuclid{2}, ContinuousEuclid{2
153151
J = gradients(one_meas, pts...)
154152

155153
@test size(J) == (4,4)
156-
@test norm(J - [0 0 0 0; 0 0 0 1; 0 0 0 0; 0 1 0 0] ) < 1e-4
154+
@test_broken norm(J - [0 0 0 0; 0 0 0 1; 0 0 0 0; 0 1 0 0] ) < 1e-4
157155

158156
## check perturbation logic
159157

@@ -162,7 +160,7 @@ prtb = calcPerturbationFromVariable(gradients, [1=>[1;1]])
162160
# self variation is taken as 0 at this time
163161
@test isapprox( prtb[1], [0;0] )
164162
# variable 1 influences 2 only through partial dimension 2 (as per DevelopPartialPairwise)
165-
@test isapprox( prtb[2], [0;1] )
163+
@test_broken isapprox( prtb[2], [0;1] )
166164

167165
## test evaluation through the convolution operation withing a factor graph
168166

@@ -177,20 +175,19 @@ end
177175

178176
@testset "test evaluation of multiple simultaneous partial constraints" begin
179177
global fg
180-
181178
##
182179

183180
initAll!(fg)
184-
pts_ = approxConv(fg, :x1x2f1, :x2, N=N)
181+
pts_ = approxConv(fg, :x1x2f1, :x2; N)
185182
@cast pts[i,j] := pts_[j][i]
186183
@test size(pts,1) == 2
187-
@test norm(Statistics.mean(pts,dims=2)[2] .- [10.0]) < 3.0
184+
@test_broken norm(Statistics.mean(pts,dims=2)[2] .- [10.0]) < 3.0
188185
# not the same memory, ccw.varValsAll[][sfidx] is now a deepcopy as alternate destination memory
189186
valx2_ = IIF._getCCW(fg[:x1x2f1]).varValsAll[][2] # getVal(fg, :x2)
190187
@cast valx2[i,j] := valx2_[j][i]
191188
@test norm(valx2[1,:] - pts[1,:]) < 1e-5
192189

193-
pts_ = approxConv(fg, :x2f1, :x2, N=N)
190+
pts_ = approxConv(fg, :x2f1, :x2; N)
194191
@cast pts[i,j] := pts_[j][i]
195192
@test size(pts,1) == 2
196193
@test norm(Statistics.mean(pts,dims=2)[1] .- [-20.0]) < 0.75
@@ -213,7 +210,7 @@ thefac = getFactor(fg, :x1x2f1)
213210

214211
X2lpts_ = getVal(getVariable(fg, :x2))
215212
@cast X2lpts[i,j] := X2lpts_[j][i]
216-
keepaside, = (calcProposalBelief(fg, thefac, :x2, N=N),)
213+
keepaside, = (calcProposalBelief(fg, thefac, :x2; N),)
217214
@test Ndim(keepaside) == 2
218215
lpts_ = getPoints(keepaside, false)
219216
@cast lpts[i,j] := lpts_[j][i]
@@ -235,7 +232,7 @@ memcheck_ = getVal(v2)
235232

236233
X2lpts_ = getVal(v2)
237234
@cast X2lpts[i,j] := X2lpts_[j][i]
238-
p4 = calcProposalBelief(fg, f4, v2.label, N=N)
235+
p4 = calcProposalBelief(fg, f4, v2.label; N)
239236
@test Ndim(p4) == 2
240237
lpts_ = getPoints(keepaside, false)
241238
@cast lpts[i,j] := lpts_[j][i]
@@ -263,7 +260,7 @@ global v2, fg
263260
X2pts_ = getVal(v2)
264261
@cast X2pts[i,j] := X2pts_[j][i]
265262
# NOTE, SUPER IMPORTANT, predictbelief returns full dimension points (even if only partials are sent in for proposals)
266-
valB, = propagateBelief(fg, v2, [f4], N=N)
263+
valB, = propagateBelief(fg, v2, [f4]; N)
267264
val_ = getPoints(valB, false)
268265
@cast val[i,j] := val_[j][i]
269266
@show X2pts_[1]';
@@ -276,19 +273,19 @@ val_ = getPoints(valB, false)
276273
# partial pairwise
277274
X2pts_ = getVal(v2)
278275
@cast X2pts[i,j] := X2pts_[j][i]
279-
valB, = propagateBelief(fg, v2, [f3], N=N)
276+
valB, = propagateBelief(fg, v2, [f3]; N)
280277
val_ = getPoints(valB, false)
281278
@cast val[i,j] := val_[j][i]
282279
@test norm(X2pts[1,:] - val[1,:]) < 1e-10
283280
@test 0.0 < norm(X2pts[2,:] - val[2,:])
284281
val2_ = getVal(v1)
285282
@cast val2[i,j] := val2_[j][i]
286-
@test abs(Statistics.mean(val[2,:] - val2[2,:]) .- 10.0) < 0.75
283+
@test_broken abs(Statistics.mean(val[2,:] - val2[2,:]) .- 10.0) < 0.75
287284

288285
##
289286

290287
# combination of partials
291-
valB, = propagateBelief(fg, v2, [f3;f4], N=N)
288+
valB, = propagateBelief(fg, v2, [f3;f4]; N)
292289
val_ = getPoints(valB, false)
293290
@cast val[i,j] := val_[j][i]
294291
# plotKDE(kde!(val),levels=3)
@@ -298,7 +295,7 @@ if false
298295
@test_broken norm(Statistics.mean(val,dims=2)[2] .- [10.0]) < 0.01
299296
end
300297
@test (Statistics.std(val,dims=2)[1] .- 1.0) < 3.0
301-
@test (Statistics.std(val,dims=2)[2] .- 1.0) < 3.0
298+
@test_broken (Statistics.std(val,dims=2)[2] .- 1.0) < 3.0
302299

303300
##
304301

@@ -325,11 +322,37 @@ X2 = getBelief(fg, :x2)
325322

326323
@cast pts[i,j] := pts_[j][i]
327324
@test (Statistics.std(pts,dims=2)[1]-1.0) < 3.0
328-
@test (Statistics.std(pts,dims=2)[2]-1.0) < 3.0
325+
@test_broken (Statistics.std(pts,dims=2)[2]-1.0) < 3.0
326+
327+
328+
##
329+
end
329330

330331

332+
@testset "Test number of samples returned, N=75" begin
331333
##
332334

335+
pr = DevelopDim2(MvNormal([0.0;0.0], diagm([0.01;0.01])))
336+
dp = DevelopPartial(Normal(2.0, 1.0),(1,))
337+
338+
#
339+
340+
fg = initfg()
341+
342+
v1 = addVariable!(fg,:x1,Position{2}(),N=N)
343+
f1 = addFactor!(fg,[:x1], pr, graphinit=false)
344+
345+
# force particular initialization
346+
u0 = getPointIdentity(Position{2})
347+
arr = push!(Vector{typeof(u0)}(), u0)
348+
setVal!(fg, :x1, arr)
349+
350+
##----------- sanity check that predictbelief plumbing is doing the right thing
351+
nbel, = propagateBelief(fg, :x1, ls(fg, :x1), N=75)
352+
353+
@test_broken 75 == Npts(nbel)
354+
355+
##
333356
end
334357

335358
# plotKDE(getBelief(fg, :x2),levels=3)

0 commit comments

Comments
 (0)