Skip to content

Commit f56c33e

Browse files
committed
ongoing code layout impr
1 parent 12f8947 commit f56c33e

File tree

4 files changed

+240
-232
lines changed

4 files changed

+240
-232
lines changed

src/ApproxConv.jl

Lines changed: 0 additions & 122 deletions
Original file line numberDiff line numberDiff line change
@@ -55,106 +55,6 @@ end
5555

5656

5757

58-
"""
59-
$(SIGNATURES)
60-
61-
Prepare a common functor computation object `prepareCommonConvWrapper{T}` containing
62-
the user factor functor along with additional variables and information used during
63-
approximate convolution computations.
64-
65-
DevNotes
66-
- TODO consolidate with others, see https://github.com/JuliaRobotics/IncrementalInference.jl/projects/6
67-
"""
68-
function prepareCommonConvWrapper!( F_::Type{<:AbstractRelative},
69-
ccwl::CommonConvWrapper{F},
70-
Xi::AbstractVector{<:DFGVariable},
71-
solvefor::Symbol,
72-
N::Int;
73-
needFreshMeasurements::Bool=true,
74-
solveKey::Symbol=:default ) where {F <: AbstractFactor}
75-
#
76-
77-
# FIXME, order of fmd ccwl cf are a little weird and should be revised.
78-
pttypes = getVariableType.(Xi) .|> getPointType
79-
PointType = 0 < length(pttypes) ? pttypes[1] : Vector{Float64}
80-
81-
#FIXME, see #1321
82-
vecPtsArr = Vector{Vector{Any}}()
83-
84-
#TODO some better consolidate is needed
85-
ccwl.vartypes = typeof.(getVariableType.(Xi))
86-
87-
# FIXME maxlen should parrot N (barring multi-/nullhypo issues)
88-
maxlen, sfidx, mani = prepareparamsarray!(vecPtsArr, Xi, solvefor, N, solveKey=solveKey)
89-
90-
# FIXME ON FIRE, what happens if this is a partial dimension factor? See #1246
91-
ccwl.xDim = getDimension(getVariableType(Xi[sfidx]))
92-
# ccwl.xDim = length(vecPtsArr[sfidx][1])
93-
# TODO should be selecting for the correct multihypothesis mode
94-
95-
# setup the partial or complete decision variable dimensions for this ccwl object
96-
# NOTE perhaps deconv has changed the decision variable list, so placed here during consolidation phase
97-
# TODO, should this not be part of `prepareCommonConvWrapper` -- only here do we look for .partial
98-
_setCCWDecisionDimsConv!(ccwl)
99-
100-
# SHOULD WE SLICE ARR DOWN BY PARTIAL DIMS HERE (OR LATER)?
101-
ccwl.params = vecPtsArr # map( ar->view(ar, ccwl.partialDims, :), vecPtsArr)
102-
103-
# get factor metadata -- TODO, populate, also see #784
104-
fmd = FactorMetadata(Xi, getLabel.(Xi), ccwl.params, solvefor, nothing)
105-
106-
# TODO consolidate with ccwl??
107-
# FIXME do not divert Mixture for sampling
108-
# cf = _buildCalcFactorMixture(ccwl, fmd, 1, ccwl.measurement, ccwl.params) # TODO perhaps 0 is safer
109-
cf = CalcFactor( ccwl.usrfnc!, fmd, 0, length(ccwl.measurement), ccwl.measurement, ccwl.params)
110-
111-
# get variable node data
112-
vnds = Xi
113-
114-
# option to disable fresh samples
115-
if needFreshMeasurements
116-
# TODO refactor
117-
ccwl.measurement = sampleFactor(cf, maxlen)
118-
# sampleFactor!(ccwl, maxlen, fmd, vnds)
119-
end
120-
121-
122-
ccwl.zDim = calcZDim(CalcFactor(ccwl))
123-
# if ccwl.specialzDim
124-
# ccwl.zDim = ccwl.usrfnc!.zDim[sfidx]
125-
# else
126-
# end
127-
ccwl.varidx = sfidx
128-
129-
# set each CPT
130-
for thrid in 1:Threads.nthreads()
131-
cpt_ = ccwl.cpt[thrid]
132-
cpt_.X = ccwl.params[sfidx]
133-
134-
# used in ccw functor for AbstractRelativeMinimize
135-
# TODO JT - Confirm it should be updated here. Testing in prepgenericconvolution
136-
resize!(cpt_.res, ccwl.zDim)
137-
fill!(cpt_.res, 0.0)
138-
end
139-
140-
# calculate new gradients perhaps
141-
# J = ccwl.gradients(measurement..., pts...)
142-
143-
return sfidx, maxlen, mani
144-
end
145-
146-
147-
function prepareCommonConvWrapper!( ccwl::Union{CommonConvWrapper{F},
148-
CommonConvWrapper{Mixture{N_,F,S,T}}},
149-
Xi::AbstractVector{<:DFGVariable},
150-
solvefor::Symbol,
151-
N::Int;
152-
kw... ) where {N_,F<:AbstractRelative,S,T}
153-
#
154-
prepareCommonConvWrapper!(F, ccwl, Xi, solvefor, N; kw...)
155-
end
156-
157-
15858
"""
15959
$SIGNATURES
16060
@@ -331,29 +231,7 @@ end
331231
# approxConvOnElements!(ccwl, allelements[count])
332232

333233

334-
"""
335-
$SIGNATURES
336-
Internal method to set which dimensions should be used as the decision variables for later numerical optimization.
337-
"""
338-
function _setCCWDecisionDimsConv!(ccwl::Union{CommonConvWrapper{F},
339-
CommonConvWrapper{Mixture{N_,F,S,T}}} ) where {N_,F<:Union{AbstractManifoldMinimize, AbstractRelativeMinimize, AbstractRelativeRoots, AbstractPrior},S,T}
340-
#
341-
# return nothing
342234

343-
p = if ccwl.partial
344-
Int32[ccwl.usrfnc!.partial...]
345-
else
346-
Int32[1:ccwl.xDim...]
347-
end
348-
349-
ccwl.partialDims = (p)
350-
# NOTE should only be done in the constructor
351-
for thrid in 1:Threads.nthreads()
352-
length(ccwl.cpt[thrid].p) != length(p) ? resize!(ccwl.cpt[thrid].p, length(p)) : nothing
353-
ccwl.cpt[thrid].p .= p # SVector... , see ccw.partialDims
354-
end
355-
nothing
356-
end
357235

358236
"""
359237
$(SIGNATURES)

src/FactorGraph.jl

Lines changed: 0 additions & 106 deletions
Original file line numberDiff line numberDiff line change
@@ -487,80 +487,8 @@ function addVariable!(dfg::AbstractDFG,
487487
return v
488488
end
489489

490-
function _resizePointsVector!(vecP::AbstractVector{P}, mkd::ManifoldKernelDensity, N::Int) where P
491-
#
492-
pN = length(vecP)
493-
resize!(vecP, N)
494-
for j in pN:N
495-
smp = AMP.sample(mkd, 1)[1]
496-
# @show j, smp, typeof(smp), typeof(vecP[j])
497-
vecP[j] = smp[1]
498-
end
499-
500-
vecP
501-
end
502-
503-
504-
"""
505-
$(SIGNATURES)
506-
507-
Prepare the particle arrays `ARR` to be used for approximate convolution.
508-
This function ensures that ARR has the same dimensions among all the parameters.
509-
Function returns with ARR[sfidx] pointing at newly allocated deepcopy of the
510-
existing values in getVal(Xi[.label==solvefor]).
511-
512-
Notes
513-
- Return values `sfidx` is the element in ARR where `Xi.label==solvefor` and
514-
- `maxlen` is length of all (possibly resampled) `ARR` contained particles.
515-
- `Xi` is order sensitive.
516-
- for initialization, solveFor = Nothing.
517-
- `P = getPointType(<:InferenceVariable)`
518-
"""
519-
function prepareparamsarray!( ARR::AbstractVector{<:AbstractVector{P}},
520-
Xi::Vector{<:DFGVariable},
521-
solvefor::Union{Nothing, Symbol},
522-
N::Int=0;
523-
solveKey::Symbol=:default ) where P
524-
#
525-
LEN = Int[]
526-
maxlen = N # FIXME see #105
527-
count = 0
528-
sfidx = 0
529490

530-
for xi in Xi
531-
vecP = getVal(xi, solveKey=solveKey)
532-
push!(ARR, vecP)
533-
LEN = length.(ARR)
534-
maxlen = maximum([N; LEN])
535-
count += 1
536-
if xi.label == solvefor
537-
sfidx = count #xi.index
538-
end
539-
end
540491

541-
# resample variables with too few kernels (manifolds points)
542-
SAMP = LEN .< maxlen
543-
for i in 1:count
544-
if SAMP[i]
545-
Pr = getBelief(Xi[i], solveKey)
546-
_resizePointsVector!(ARR[i], Pr, maxlen)
547-
end
548-
end
549-
550-
# TODO --rather define reusable memory for the proposal
551-
# we are generating a proposal distribution, not direct replacement for existing memory and hence the deepcopy.
552-
if sfidx > 0
553-
ARR[sfidx] = deepcopy(ARR[sfidx])
554-
end
555-
556-
# get solvefor manifolds
557-
# FIXME deprecate use of (:null,)
558-
mani = length(Xi)==0 || sfidx==0 ? (:null,) : getManifold(Xi[sfidx])
559-
560-
# FIXME, forcing maxlen to N results in errors (see test/testVariousNSolveSize.jl) see #105
561-
# maxlen = N == 0 ? maxlen : N
562-
return maxlen, sfidx, mani
563-
end
564492

565493
function parseusermultihypo(multihypo::Nothing, nullhypo::Float64)
566494
verts = Symbol[]
@@ -584,41 +512,7 @@ function parseusermultihypo(multihypo::Vector{Float64}, nullhypo::Float64)
584512
return mh, nullhypo
585513
end
586514

587-
# import IncrementalInference: prepgenericconvolution, convert
588-
589-
"""
590-
$SIGNATURES
591-
592-
Function to calculate measurement dimension from factor sampling.
593-
594-
Notes
595-
- Will not work in all situations, but good enough so far.
596-
- # TODO standardize via domain or manifold definition...??
597-
"""
598-
function calcZDim(cf::CalcFactor{T}) where {T <: AbstractFactor}
599-
#
600-
try
601-
M = getManifold(T)
602-
return manifold_dimension(M)
603-
catch
604-
try
605-
M = getManifold(cf.factor)
606-
return manifold_dimension(M)
607-
catch
608-
@warn "no method getManifold(::$(string(T))), calcZDim will attempt legacy length(sample) method instead"
609-
end
610-
end
611-
612-
# NOTE try to make sure we get matrix back (not a vector)
613-
smpls = sampleFactor(cf, 2)[1]
614-
return length(smpls[1])
615-
end
616-
617-
calcZDim(ccw::CommonConvWrapper) = calcZDim(CalcFactor(ccw))
618-
619-
calcZDim(cf::CalcFactor{<:GenericMarginal}) = 0
620515

621-
calcZDim(cf::CalcFactor{<:ManifoldPrior}) = manifold_dimension(cf.factor.M)
622516

623517
# return a BitVector masking the fractional portion, assuming converted 0's on 100% confident variables
624518
_getFractionalVars(varList::Union{<:Tuple, <:AbstractVector}, mh::Nothing) = zeros(length(varList)) .== 1

src/IncrementalInference.jl

Lines changed: 5 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -415,17 +415,14 @@ getFactorOperationalMemoryType(dfg::SolverParams) = CommonConvWrapper
415415
getFactorOperationalMemoryType(dfg::NoSolverParams) = CommonConvWrapper
416416

417417
include("AliasScalarSampling.jl")
418+
include("entities/GraphConstraintTypes.jl")
418419
include("entities/OptionalDensities.jl")
419420
include("BeliefTypes.jl")
420-
include("services/CalcFactor.jl")
421421

422422

423423
include("Factors/GenericFunctions.jl")
424-
425-
# Refactoring in progress
426424
include("Factors/MsgLikelihoods.jl")
427425

428-
include("entities/GraphConstraintTypes.jl")
429426
include("CliqueTypes.jl")
430427

431428
include("JunctionTreeTypes.jl")
@@ -459,6 +456,10 @@ include("Factors/PartialPrior.jl")
459456
include("Factors/PartialPriorPassThrough.jl")
460457
include("DefaultNodeTypes.jl") # older file
461458

459+
# Refactoring in progress
460+
include("services/CalcFactor.jl")
461+
462+
462463
# solving graphs
463464
include("SolverUtilities.jl")
464465
include("NumericalCalculations.jl")

0 commit comments

Comments
 (0)