Skip to content

Commit

Permalink
Fix tests
Browse files Browse the repository at this point in the history
  • Loading branch information
mroavi committed Jan 31, 2024
1 parent 72656b5 commit 937e63c
Show file tree
Hide file tree
Showing 14 changed files with 1,166 additions and 279 deletions.
998 changes: 873 additions & 125 deletions test/Manifest.toml

Large diffs are not rendered by default.

8 changes: 8 additions & 0 deletions test/Project.toml
Original file line number Diff line number Diff line change
@@ -0,0 +1,8 @@
[deps]
CUDA = "052768ef-5323-5732-b1bb-66c8b64840ba"
Documenter = "e30172f5-a6a5-5a46-863b-614d45cd2de4"
KaHyPar = "2a6221f6-aa48-11e9-3542-2d9e0ef01880"
OMEinsum = "ebe7aa44-baf0-506c-a96f-8464559b3922"
Pkg = "44cfe95a-1eb2-52ea-b672-e2afdf69b78f"
TensorInference = "c2297e78-99bd-40ad-871d-f50e56b81012"
Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40"
40 changes: 20 additions & 20 deletions test/cuda.jl
Original file line number Diff line number Diff line change
Expand Up @@ -4,60 +4,60 @@ using TensorInference, CUDA
CUDA.allowscalar(false)

@testset "gradient-based tensor network solvers" begin
################# Load problem ####################
instance = read_uai_problem("Promedus_14")
problem = problem_from_artifact("uai2014", "MAR", "Promedus", 14)
model, evidence, reference_solution = read_model(problem), read_evidence(problem), read_solution(problem)

# does not optimize over open vertices
tn = TensorNetworkModel(instance; optimizer = TreeSA(ntrials = 1, niters = 2, βs = 1:0.1:40))
@info contraction_complexity(tn)
tn = TensorNetworkModel(model; optimizer = TreeSA(ntrials = 1, niters = 2, βs = 1:0.1:40), evidence)
@debug contraction_complexity(tn)
@time marginals2 = marginals(tn; usecuda = true)
@test all(x -> x isa CuArray, marginals2)
# for dangling vertices, the output size is 1.
npass = 0
for i in 1:(instance.nvars)
npass += (length(marginals2[i]) == 1 && instance.reference_marginals[i] == [0.0, 1]) || isapprox(Array(marginals2[i]), instance.reference_marginals[i]; atol = 1e-6)
for i in 1:(model.nvars)
npass += (length(marginals2[i]) == 1 && reference_solution[i] == [0.0, 1]) || isapprox(Array(marginals2[i]), reference_solution[i]; atol = 1e-6)
end
@test npass == instance.nvars
@test npass == model.nvars
end

@testset "map" begin
################# Load problem ####################
instance = read_uai_problem("Promedus_14")
problem = problem_from_artifact("uai2014", "MAR", "Promedus", 14)
model, evidence = read_model(problem), read_evidence(problem)

# does not optimize over open vertices
tn = TensorNetworkModel(instance; optimizer = TreeSA(ntrials = 1, niters = 2, βs = 1:0.1:40))
@info contraction_complexity(tn)
tn = TensorNetworkModel(model; optimizer = TreeSA(ntrials = 1, niters = 2, βs = 1:0.1:40), evidence)
@debug contraction_complexity(tn)
most_probable_config(tn)
@time logp, config = most_probable_config(tn; usecuda = true)
@test log_probability(tn, config) logp.n
@test log_probability(tn, config) logp
culogp = maximum_logp(tn; usecuda = true)
@test culogp isa CuArray
@test Array(culogp)[] logp
end

@testset "mmap" begin
################# Load problem ####################
instance = read_uai_problem("Promedus_14")
problem = problem_from_artifact("uai2014", "MAR", "Promedus", 14)
model, evidence = read_model(problem), read_evidence(problem)

optimizer = TreeSA(ntrials = 1, niters = 2, βs = 1:0.1:40)
tn_ref = TensorNetworkModel(instance; optimizer)
tn_ref = TensorNetworkModel(model; optimizer, evidence)
# does not marginalize any var
tn = MMAPModel(instance; marginalized = Int[], optimizer)
tn = MMAPModel(model; optimizer, queryvars=collect(1:model.nvars), evidence)
r1, r2 = maximum_logp(tn_ref; usecuda = true), maximum_logp(tn; usecuda = true)
@test r1 isa CuArray
@test r2 isa CuArray
@test r1 r2

# marginalize all vars
tn2 = MMAPModel(instance; marginalized = collect(1:(instance.nvars)), optimizer)
tn2 = MMAPModel(model; optimizer, queryvars=Int[], evidence)
cup = probability(tn_ref; usecuda = true)
culogp = maximum_logp(tn2; usecuda = true)
@test cup isa RescaledArray{T, N, <:CuArray} where {T, N}
@test culogp isa CuArray
@test Array(cup)[] exp(Array(culogp)[].n)
@test Array(cup)[] exp(Array(culogp)[])

# does not optimize over open vertices
tn3 = MMAPModel(instance; marginalized = [2, 4, 6], optimizer)
tn3 = MMAPModel(model; optimizer, queryvars=setdiff(1:model.nvars, [2, 4, 6]), evidence)
logp, config = most_probable_config(tn3; usecuda = true)
@test log_probability(tn3, config) logp.n
@test log_probability(tn3, config) logp
end
91 changes: 0 additions & 91 deletions test/inference.jl

This file was deleted.

25 changes: 25 additions & 0 deletions test/map.jl
Original file line number Diff line number Diff line change
@@ -0,0 +1,25 @@
using Test
using OMEinsum
using TensorInference

@testset "gradient-based tensor network solvers" begin
    model = problem_from_artifact("uai2014", "MAR", "Promedus", 14)

    # does not optimize over open vertices
    tn = TensorNetworkModel(read_model(model);
        evidence = read_evidence(model),
        optimizer = TreeSA(ntrials = 3, niters = 2, βs = 1:0.1:80))
    @debug contraction_complexity(tn)
    most_probable_config(tn)  # warm-up call so @time below excludes compilation
    @time logp, config = most_probable_config(tn)
    # the reported log-probability must match evaluating the returned config,
    # and must agree with the dedicated maximum_logp query
    @test log_probability(tn, config) ≈ logp
    @test maximum_logp(tn)[] ≈ logp
end

@testset "UAI Reference Solution Comparison" begin
    # MAP task: the most probable configuration must reproduce the
    # published reference assignment exactly.
    instance = problem_from_artifact("uai2014", "MAP", "Promedas", 70)
    obs = read_evidence(instance)
    net = TensorNetworkModel(read_model(instance);
        optimizer = TreeSA(ntrials = 1, niters = 5, βs = 0.1:0.1:100),
        evidence = obs)
    _, best_config = most_probable_config(net)
    @test best_config == read_solution(instance)
end
70 changes: 70 additions & 0 deletions test/mar.jl
Original file line number Diff line number Diff line change
@@ -0,0 +1,70 @@
using Test
using OMEinsum
using KaHyPar
using TensorInference

@testset "composite number" begin
    # RescaledArray(f, data) stores data together with a log-scale factor f,
    # i.e. Array(x) reconstructs exp(f) .* data.
    A = RescaledArray(2.0, [2.0 3.0; 5.0 6.0])
    x = RescaledArray(2.0, [2.0, 3.0])
    op = ein"ij, j -> i"
    @test Array(x) ≈ exp(2.0) .* [2.0, 3.0]
    # contracting rescaled arrays must agree with contracting their plain forms
    @test op(Array(A), Array(x)) ≈ Array(op(A, x))
end

@testset "cached, rescaled contract" begin
    problem = problem_from_artifact("uai2014", "MAR", "Promedus", 14)
    ref_sol = read_solution(problem)
    optimizer = TreeSA(ntrials = 1, niters = 5, βs = 0.1:0.1:100)
    evidence = read_evidence(problem)
    tn = TensorNetworkModel(read_model(problem); optimizer, evidence)
    # the rescaled contraction must reproduce the plain contraction
    p1 = probability(tn; usecuda = false, rescale = false)
    p2 = probability(tn; usecuda = false, rescale = true)
    @test p1 ≈ Array(p2)

    # cached contract: intermediate results are cached along the contraction tree
    xs = TensorInference.adapt_tensors(tn; usecuda = false, rescale = true)
    size_dict = OMEinsum.get_size_dict!(getixsv(tn.code), xs, Dict{Int, Int}())
    cache = TensorInference.cached_einsum(tn.code, xs, size_dict)
    @test cache.content isa RescaledArray
    @test Array(cache.content) ≈ p1

    # compute marginals and compare against the published reference
    ti_sol = marginals(tn)
    ref_sol[collect(keys(evidence))] .= fill([1.0], length(evidence)) # imitate dummy vars
    @test isapprox(ti_sol, ref_sol; atol = 1e-5)
end

@testset "UAI Reference Solution Comparison" begin
    # MAR task: computed marginals must match the published reference
    # solutions for each enabled uai2014 problem set.
    benchmarks = dataset_from_artifact("uai2014")["MAR"]
    suites = [
        #("Alchemy", TreeSA(ntrials = 1, niters = 5, βs = 0.1:0.1:100)),
        #("CSP", TreeSA(ntrials = 1, niters = 5, βs = 0.1:0.1:100)),
        #("DBN", KaHyParBipartite(sc_target = 25)),
        #("Grids", TreeSA(ntrials = 1, niters = 5, βs = 0.1:0.1:100)), # greedy also works
        #("linkage", TreeSA(ntrials = 3, niters = 20, βs = 0.1:0.1:40)), # linkage_15 fails
        #("ObjectDetection", TreeSA(ntrials = 1, niters = 5, βs = 1:0.1:100)),
        ("Pedigree", TreeSA(ntrials = 1, niters = 5, βs = 0.1:0.1:100)), # greedy also works
        #("Promedus", TreeSA(ntrials = 1, niters = 5, βs = 0.1:0.1:100)), # greedy also works
        #("relational", TreeSA(ntrials=1, niters=5, βs=0.1:0.1:100)),
        ("Segmentation", TreeSA(ntrials = 1, niters = 5, βs = 0.1:0.1:100)) # greedy also works
    ]

    for (suite_name, optimizer) in suites
        @testset "$(suite_name) problem set" begin
            for (id, problem) in benchmarks[suite_name]
                @info "Testing: $(suite_name)_$id"
                tn = TensorNetworkModel(read_model(problem); optimizer, evidence = read_evidence(problem))
                ref_sol = read_solution(problem)
                evidence = read_evidence(problem)

                # does not optimize over open vertices; guard against blow-ups
                sc = contraction_complexity(tn).sc
                sc > 28 && error("space complexity too large! got $(sc)")
                @debug contraction_complexity(tn)
                ti_sol = marginals(tn)
                ref_sol[collect(keys(evidence))] .= fill([1.0], length(evidence)) # imitate dummy vars
                @test isapprox(ti_sol, ref_sol; atol = 1e-4)
            end
        end
    end
end
16 changes: 0 additions & 16 deletions test/maxprob.jl

This file was deleted.

47 changes: 33 additions & 14 deletions test/mmap.jl
Original file line number Diff line number Diff line change
@@ -1,31 +1,50 @@
using Test
using OMEinsum
# using TensorInference
using TensorInference

@testset "clustering" begin
    # Three overlapping factors; the query vars [2, 3, 6] should split into
    # two connected clusters, each paired with the factors touching it.
    hyperedges = [[1, 2, 3], [2, 3, 4], [4, 5, 6]]
    clusters = TensorInference.connected_clusters(hyperedges, [2, 3, 6])
    @test clusters == [[2, 3] => [1, 2], [6] => [3]]
end

@testset "mmap" begin
################# Load problem ####################
instance = read_uai_problem("Promedus_14")
@testset "gradient-based tensor network solvers" begin
problem = problem_from_artifact("uai2014", "MAR", "Promedus", 14)
model, evidence = read_model(problem), read_evidence(problem)

optimizer = TreeSA(ntrials = 1, niters = 2, βs = 1:0.1:40)
tn_ref = TensorNetworkModel(instance; optimizer)
# does not marginalize any var
mmap = MMAPModel(instance; marginalized = Int[], optimizer)
@info(mmap)
tn_ref = TensorNetworkModel(model; optimizer, evidence)

# Does not marginalize any var
mmap = MMAPModel(model; optimizer, queryvars=collect(1:model.nvars), evidence)
@debug(mmap)
@test maximum_logp(tn_ref) maximum_logp(mmap)

# marginalize all vars
mmap2 = MMAPModel(instance; marginalized = collect(1:(instance.nvars)), optimizer)
@info(mmap2)
# Marginalize all vars
mmap2 = MMAPModel(model; optimizer, queryvars=Int[], evidence)
@debug(mmap2)
@test Array(probability(tn_ref))[] exp(maximum_logp(mmap2)[])

# does not optimize over open vertices
mmap3 = MMAPModel(instance; marginalized = [2, 4, 6], optimizer)
@info(mmap3)
# Does not optimize over open vertices
mmap3 = MMAPModel(model; optimizer, queryvars=setdiff(1:model.nvars, [2, 4, 6]), evidence)
@debug(mmap3)
logp, config = most_probable_config(mmap3)
@test log_probability(mmap3, config) logp
end

@testset "UAI Reference Solution Comparison" begin
    # MMAP task: the most probable query-variable configuration must match
    # the published reference solution for each enabled case.
    suites = dataset_from_artifact("uai2014")["MMAP"]
    cases = [
        ("Segmentation", 12, TreeSA(ntrials = 1, niters = 2, βs = 1:0.1:40)),
        # ("Segmentation", 13, TreeSA(ntrials = 1, niters = 2, βs = 1:0.1:40)), # fails!
        # ("Segmentation", 14, TreeSA(ntrials = 1, niters = 2, βs = 1:0.1:40)) # fails!
    ]
    for (suite_name, id, optimizer) in cases
        @testset "$(suite_name) problem set, id = $id" begin
            problem = suites[suite_name][id]
            @info "Testing: $(suite_name)_$id"
            mmap_model = MMAPModel(read_model(problem); optimizer,
                evidence = read_evidence(problem), queryvars = read_queryvars(problem))
            _, solution = most_probable_config(mmap_model)
            @test solution == read_solution(problem)
        end
    end
end
30 changes: 30 additions & 0 deletions test/pr.jl
Original file line number Diff line number Diff line change
@@ -0,0 +1,30 @@
using Test
using OMEinsum
using KaHyPar
using TensorInference

@testset "UAI Reference Solution Comparison" begin
    # PR task: log10 of the partition function must match the published
    # reference value for each enabled uai2014 problem set.
    benchmarks = dataset_from_artifact("uai2014")["PR"]
    suites = [
        #("Alchemy", TreeSA(ntrials = 1, niters = 5, βs = 0.1:0.1:100)), # fails
        #("CSP", TreeSA(ntrials = 1, niters = 5, βs = 0.1:0.1:100)),
        #("DBN", KaHyParBipartite(sc_target = 25)),
        #("Grids", TreeSA(ntrials = 1, niters = 5, βs = 0.1:0.1:100)), # fails
        #("linkage", TreeSA(ntrials = 3, niters = 20, βs = 0.1:0.1:40)), # fails
        #("ObjectDetection", TreeSA(ntrials = 1, niters = 5, βs = 1:0.1:100)),
        ("Pedigree", TreeSA(ntrials = 1, niters = 5, βs = 0.1:0.1:100)),
        #("Promedus", TreeSA(ntrials = 1, niters = 5, βs = 0.1:0.1:100)),
        #("relational", TreeSA(ntrials=1, niters=5, βs=0.1:0.1:100)), # fails
        ("Segmentation", TreeSA(ntrials = 1, niters = 5, βs = 0.1:0.1:100))
    ]
    for (suite_name, optimizer) in suites
        @testset "$(suite_name) problem set" begin
            for (id, problem) in benchmarks[suite_name]
                @info "Testing: $(suite_name)_$id"
                tn = TensorNetworkModel(read_model(problem); optimizer, evidence = read_evidence(problem))
                # partition-function estimate, compared on a log10 scale
                logz = log10(first(probability(tn)))
                @test isapprox(logz, read_solution(problem); atol = 1e-3)
            end
        end
    end
end
Loading

0 comments on commit 937e63c

Please sign in to comment.