Skip to content

Commit

Permalink
Merge pull request #1693 from JuliaRobotics/23Q1/enh/dropjson12
Browse files Browse the repository at this point in the history
drop JSON1 and 2
  • Loading branch information
dehann authored Mar 11, 2023
2 parents 4ac23b2 + af67e30 commit 78d5f54
Show file tree
Hide file tree
Showing 14 changed files with 184 additions and 208 deletions.
6 changes: 1 addition & 5 deletions Project.toml
Original file line number Diff line number Diff line change
Expand Up @@ -18,8 +18,6 @@ Distributions = "31c24e10-a181-5473-b8eb-7969acd0382f"
DocStringExtensions = "ffbed154-4ef7-542d-bbb7-c09d3a79fcae"
FileIO = "5789e2e9-d7fb-5bc7-8068-2c6fae9b9549"
FunctionalStateMachine = "3e9e306e-7e3c-11e9-12d2-8f8f67a2f951"
JSON = "682c06a0-de6a-54ab-a142-c8b1cf79cde6"
JSON2 = "2535ab7d-5cd8-5a07-80ac-9b1792aadce3"
JSON3 = "0f8b85d8-7281-11e9-16c2-39a750bddbf1"
KernelDensityEstimate = "2472808a-b354-52ea-a80e-1658a3c6056d"
LinearAlgebra = "37e2e46d-f89d-539d-b4ee-838fcccc9c8e"
Expand Down Expand Up @@ -52,13 +50,11 @@ ApproxManifoldProducts = "0.6.3"
BSON = "0.2, 0.3"
Combinatorics = "1.0"
DataStructures = "0.16, 0.17, 0.18"
DistributedFactorGraphs = "0.19.3, 0.20"
DistributedFactorGraphs = "0.20"
Distributions = "0.24, 0.25"
DocStringExtensions = "0.8, 0.9"
FileIO = "1"
FunctionalStateMachine = "0.2.9"
JSON = "0.21"
JSON2 = "0.3"
JSON3 = "1"
KernelDensityEstimate = "0.5.6"
Manifolds = "0.8.15"
Expand Down
209 changes: 121 additions & 88 deletions src/Deprecated.jl
Original file line number Diff line number Diff line change
@@ -1,69 +1,34 @@
## ================================================================================================
## Manifolds.jl Consolidation
## TODO: Still to be completed and tested.
## ================================================================================================
# struct ManifoldsVector <: Optim.Manifold
# manis::Vector{Manifold}
# end

##==============================================================================
## LEGACY, towards Sidecar
##==============================================================================

"""
Converter: Prior -> PackedPrior::Dict{String, Any}
FIXME see DFG #590 for consolidation with Serialization and Marshaling
"""
function convert(::Type{Dict{String, Any}}, prior::IncrementalInference.Prior)
    # Legacy path only — logs and continues, so the conversion below still runs.
    @error("Obsolete, use pack/unpack converters instead")
    # BUGFIX: the target of `convert` must be the type itself (`Dict{String, Any}`),
    # not `Type{Dict{String, Any}}`; the original form raised a MethodError.
    z = convert(Dict{String, Any}, prior.Z)
    return Packed_Factor([z], "Prior")
end

"""
Converter: PackedPrior::Dict{String, Any} -> Prior
FIXME see DFG #590 for consolidation on Serialization and Marshaling
"""
function convert(::Type{<:Prior}, prior::Dict{String, Any})
    @error("Obsolete, use pack/unpack converters instead")
    # Genericize to any packed type next.
    # Resolve the concrete distribution type from its serialized name, then rebuild it.
    packed = prior["measurement"][1]
    distT = DFG.getTypeFromSerializationModule(packed["distType"])
    return Prior(convert(distT, packed))
end

# more legacy, dont delete yet
function Base.getproperty(ccw::CommonConvWrapper, f::Symbol)
    # Shim deprecated property names onto their modern equivalents, warning once per name.
    if f === :threadmodel
        @warn "CommonConvWrapper.threadmodel is obsolete" maxlog=3
        return SingleThreaded
    end
    if f === :params
        @warn "CommonConvWrapper.params is deprecated, use .varValsAll instead" maxlog=3
        return ccw.varValsAll
    end
    if f === :vartypes
        @warn "CommonConvWrapper.vartypes is deprecated, use typeof.(getVariableType.(ccw.fullvariables) instead" maxlog=3
        return typeof.(getVariableType.(ccw.fullvariables))
    end
    # Any other name falls through to the real field.
    return getfield(ccw, f)
end


##==============================================================================
## Deprecate code below before v0.33
##==============================================================================

# export setThreadModel!
# introduced for approximate convolution operations
export SingleThreaded, MultiThreaded

"""
    setThreadModel!(fgl; model=IIF.SingleThreaded)

Obsolete no-op kept for API compatibility; ThreadModel types are no longer in use.
Logs an error and returns `nothing`.
"""
function setThreadModel!(fgl::AbstractDFG; model = IIF.SingleThreaded)
    @error("Obsolete, ThreadModel types are no longer in use.")
    # previously:
    # for (key, id) in fgl.fIDs
    #     _getCCW(fgl, key).threadmodel = model
    # end
    return nothing
end
# Base.getindex(mv::ManifoldsVector, inds...) = getindex(mv.mani, inds...)
# Base.setindex!(mv, X, inds...) = setindex!(mv.mani, X, inds...)

# should have been deleted in v0.31 but no harm in keeping this one a bit longer
@deprecate initManual!(w...; kw...) initVariable!(w...; kw...)
# function ManifoldsVector(fg::AbstractDFG, varIds::Vector{Symbol})
# manis = Bool[]
# for k = varIds
# push!(manis, getVariableType(fg, k) |> getManifold)
# end
# ManifoldsVector(manis)
# end

# function Optim.retract!(manis::ManifoldsVector, x)
# for (i,M) = enumerate(manis)
# x[i] = project(M, x[i])
# end
# return x
# end
# function Optim.project_tangent!(manis::ManifoldsVector, G, x)
# for (i, M) = enumerate(manis)
# G[i] = project(M, x[i], G)
# end
# return G
# end


##==============================================================================
Expand Down Expand Up @@ -146,34 +111,102 @@ function solveGraphParametric2(
return d, result, flatvar.idx, Σ
end

## ================================================================================================
## Manifolds.jl Consolidation
## TODO: Still to be completed and tested.
## ================================================================================================
# struct ManifoldsVector <: Optim.Manifold
# manis::Vector{Manifold}
# end

# Base.getindex(mv::ManifoldsVector, inds...) = getindex(mv.mani, inds...)
# Base.setindex!(mv, X, inds...) = setindex!(mv.mani, X, inds...)
##==============================================================================
## Deprecate code below before v0.34
##==============================================================================

# function ManifoldsVector(fg::AbstractDFG, varIds::Vector{Symbol})
# manis = Bool[]
# for k = varIds
# push!(manis, getVariableType(fg, k) |> getManifold)
# end
# ManifoldsVector(manis)
# function CommonConvWrapper(
# usrfnc::T,
# fullvariables, #::Tuple ::Vector{<:DFGVariable};
# varValsAll::Tuple,
# X::AbstractVector{P}; #TODO remove X completely
# # xDim::Int = size(X, 1),
# userCache::CT = nothing,
# manifold = getManifold(usrfnc),
# partialDims::AbstractVector{<:Integer} = 1:length(X),
# partial::Bool = false,
# nullhypo::Real = 0,
# inflation::Real = 3.0,
# hypotheses::H = nothing,
# certainhypo = nothing,
# activehypo = collect(1:length(varValsAll)),
# measurement::AbstractVector = Vector(Vector{Float64}()),
# varidx::Int = 1,
# particleidx::Int = 1,
# res::AbstractVector{<:Real} = zeros(manifold_dimension(manifold)), # zDim
# gradients = nothing,
# ) where {T <: AbstractFactor, P, H, CT}
# #
# return CommonConvWrapper(
# usrfnc,
# tuple(fullvariables...),
# varValsAll,
# userCache,
# manifold,
# partialDims,
# partial,
# # xDim,
# float(nullhypo),
# float(inflation),
# hypotheses,
# certainhypo,
# activehypo,
# measurement,
# Ref(varidx),
# Ref(particleidx),
# res,
# gradients,
# )
# end

# function Optim.retract!(manis::ManifoldsVector, x)
# for (i,M) = enumerate(manis)
# x[i] = project(M, x[i])
# end
# return x
# function approxConvOnElements!(
# ccwl::Union{CommonConvWrapper{F}, CommonConvWrapper{Mixture{N_, F, S, T}}},
# elements::Union{Vector{Int}, UnitRange{Int}},
# ::Type{<:MultiThreaded},
# _slack = nothing,
# ) where {N_, F <: AbstractRelative, S, T}
# #
# return error(
# "MultiThreaded `approxConvOnElements!` is deprecated and will soon be replaced",
# )
# # Threads.@threads for n in elements
# # # ccwl.thrid_ = Threads.threadid()
# # ccwl.cpt[Threads.threadid()].particleidx = n

# # # ccall(:jl_, Nothing, (Any,), "starting loop, thrid_=$(Threads.threadid()), partidx=$(ccwl.cpt[Threads.threadid()].particleidx)")
# # _solveCCWNumeric!( ccwl, _slack=_slack)
# # end
# # nothing
# end
# function Optim.project_tangent!(manis::ManifoldsVector, G, x)
# for (i, M) = enumerate(manis)
# G[i] = project(M, x[i], G)
# end
# return G

# function approxConvOnElements!(
# ccwl::Union{CommonConvWrapper{F}, CommonConvWrapper{Mixture{N_, F, S, T}}},
# elements::Union{Vector{Int}, UnitRange{Int}},
# _slack = nothing,
# ) where {N_, F <: AbstractRelative, S, T}
# #
# return approxConvOnElements!(ccwl, elements, ccwl.threadmodel, _slack)
# end

# more legacy, dont delete yet
function Base.getproperty(ccw::CommonConvWrapper, f::Symbol)
    # Shim for deprecated property names; fully removed aliases now hard-error.
    if f == :threadmodel
        error("CommonConvWrapper.threadmodel is obsolete")
        # return SingleThreaded
    elseif f == :params
        # BUGFIX: `error` throws, so the old `return ccw.varValsAll` after it was
        # unreachable dead code and has been removed.
        error("CommonConvWrapper.params is deprecated, use .varValsAll instead")
    elseif f == :vartypes
        # BUGFIX: added the missing closing parenthesis in the suggested replacement.
        @warn "CommonConvWrapper.vartypes is deprecated, use typeof.(getVariableType.(ccw.fullvariables)) instead" maxlog=3
        return typeof.(getVariableType.(ccw.fullvariables))
    else
        # Any other name resolves to the real field.
        return getfield(ccw, f)
    end
end

##==============================================================================
## Deprecate code below before v0.35
##==============================================================================

##
1 change: 0 additions & 1 deletion src/IncrementalInference.jl
Original file line number Diff line number Diff line change
Expand Up @@ -41,7 +41,6 @@ using Dates,
ProgressMeter,
DocStringExtensions,
FunctionalStateMachine,
JSON2,
JSON3,
Combinatorics,
UUIDs,
Expand Down
1 change: 1 addition & 0 deletions src/Serialization/services/DispatchPackedConversions.jl
Original file line number Diff line number Diff line change
Expand Up @@ -50,6 +50,7 @@ function reconstFactorData(
certainhypo = packed.certainhypo,
inflation = packed.inflation,
userCache,
attemptGradients = getSolverParams(dfg).attemptGradients,
# Block recursion if NoSolverParams or if set to not attempt gradients.
_blockRecursion=
getSolverParams(dfg) isa NoSolverParams ||
Expand Down
4 changes: 2 additions & 2 deletions src/Serialization/services/SerializationMKD.jl
Original file line number Diff line number Diff line change
Expand Up @@ -75,7 +75,7 @@ end
function Base.convert(::Type{String}, mkd::ManifoldKernelDensity)
    # Serialize via the packed intermediate representation, then emit JSON text.
    return JSON3.write(packDistribution(mkd))
end

# Use general dispatch
Expand All @@ -86,7 +86,7 @@ end
# https://discourse.julialang.org/t/converting-string-to-datatype-with-meta-parse/33024/2
# https://discourse.julialang.org/t/is-there-a-way-to-import-modules-with-a-string/15723/6
function Base.convert(::Type{<:ManifoldKernelDensity}, str::AbstractString)
    # Parse the JSON string directly into the packed type, then unpack to the live object.
    packed = JSON3.read(str, PackedManifoldKernelDensity)
    return unpackDistribution(packed)
end

Expand Down
8 changes: 4 additions & 4 deletions src/Serialization/services/SerializingDistributions.jl
Original file line number Diff line number Diff line change
Expand Up @@ -170,18 +170,18 @@ end
# FIXME ON FIRE, must deprecate nested JSON written fields in all serialization
# TODO is string necessary, because unpacking templated e.g. PackedType{T} has problems, see DFG #668
function convert(::Type{String}, dtr::StringThemSamplableBeliefs)
    # Pack the belief into its serializable form, then write it out as JSON.
    packed = packDistribution(dtr)
    return JSON3.write(packed)
end

function convert(::Type{<:SamplableBelief}, str_obj::AbstractString)
#

# go from stringified to generic packed (no type info)
_pck = JSON2.read(str_obj)
_pck = JSON3.read(str_obj)
# NOTE, get the packed type from strong assumption that field `_type` exists in the
T = DFG.getTypeFromSerializationModule(_pck[:_type])
T = DFG.getTypeFromSerializationModule(_pck._type)
# unpack again to described packedType
pckT = JSON2.read(str_obj, T)
pckT = JSON3.read(str_obj, T)

# unpack to regular <:SamplableBelief
return unpackDistribution(pckT)
Expand Down
30 changes: 15 additions & 15 deletions src/entities/FactorOperationalMemory.jl
Original file line number Diff line number Diff line change
Expand Up @@ -80,7 +80,7 @@ Related
[`CalcFactor`](@ref), [`CalcFactorMahalanobis`](@ref)
"""
struct CommonConvWrapper{
Base.@kwdef struct CommonConvWrapper{
T <: AbstractFactor,
VT <: Tuple,
NTP <: Tuple,
Expand All @@ -101,39 +101,39 @@ struct CommonConvWrapper{
to each hypothesis evaluation event on user function via CalcFactor, #1321 """
varValsAll::NTP
""" dummy cache value to be deep copied later for each of the CalcFactor instances """
dummyCache::CT
dummyCache::CT = nothing
# derived config parameters for this factor
""" Factor manifold definition for frequent use (not the variables manifolds) """
manifold::AM
manifold::AM = getManifold(usrfnc!)
""" Which dimensions does this factor influence. Sensitive (mutable) to both which 'solvefor index' variable and whether the factor is partial dimension """
partialDims::Vector{<:Integer}
partialDims::Vector{<:Integer} = collect(1:manifold_dimension(manifold))
""" is this a partial constraint as defined by the existance of factor field `.partial::Tuple` """
partial::Bool
partial::Bool = false
""" probability that this factor is wholly incorrect and should be ignored during solving """
nullhypo::Float64
nullhypo::Float64 = 0.0
""" inflationSpread particular to this factor (by how much to dispurse the belief initial values before numerical optimization is run). Analogous to stochastic search """
inflation::Float64
inflation::Float64 = 3.0
# multihypo specific field containers for recipe of hypotheses to compute
""" multi hypothesis settings #NOTE no need for a parameter as type is known from `parseusermultihypo` """
hypotheses::HP
hypotheses::HP = nothing
""" categorical to select which hypothesis is being considered during convolution operation """
certainhypo::CH
certainhypo::CH = nothing
""" subsection indices to select which params should be used for this hypothesis evaluation """
activehypo::Vector{Int}
activehypo::Vector{Int} = collect(1:length(varValsAll))
# buffers and indices to point numerical computations to specific memory locations
""" user defined measurement values for each approxConv operation
FIXME make type stable, JT should now be type stable if rest works.
SUPER IMPORTANT, if prior=>point or relative=>tangent, see #1661
can be a Vector{<:Tuple} or more direct Vector{<: pointortangenttype} """
measurement::Vector{MT}
measurement::Vector{MT} = Vector(Vector{Float64}())
""" which index is being solved for in params? """
varidx::Base.RefValue{Int}
varidx::Base.RefValue{Int} = Ref(1)
""" Consolidation from CPT, the actual particle being solved at this moment """
particleidx::Base.RefValue{Int}
particleidx::Base.RefValue{Int} = Ref(1)
""" working memory to store residual for optimization routines """
res::Vector{Float64}
res::Vector{Float64} = zeros(manifold_dimension(manifold))
""" experimental feature to embed gradient calcs with ccw """
_gradients::G
_gradients::G = nothing
end


Expand Down
Loading

0 comments on commit 78d5f54

Please sign in to comment.