diff --git a/test/bivariate.jl b/test/bivariate.jl
index 216c3880..1006ceef 100644
--- a/test/bivariate.jl
+++ b/test/bivariate.jl
@@ -56,5 +56,11 @@ for X in ([0.0], [0.0,0.0], [0.0,0.5], [-0.5:0.1:0.5;])
         @test all(k5.density .>= 0.0)
         @test sum(k5.density)*step(k5.x)*step(k5.y) ≈ 1.0
 
+        k6 = kde([X X],(r,r);kernel=D, weights=ones(X)/length(X))
+        @test k4.density ≈ k6.density
     end
 end
+
+k1 = kde([0.0 0.0; 1.0 1.0], (r,r), bandwidth=(1,1), weights=[0,1])
+k2 = kde([1.0 1.0], (r,r), bandwidth=(1,1))
+@test k1.density ≈ k2.density
diff --git a/test/univariate.jl b/test/univariate.jl
index 9a07a595..dcefd08d 100644
--- a/test/univariate.jl
+++ b/test/univariate.jl
@@ -2,7 +2,7 @@
 using Base.Test
 using Distributions
 using KernelDensity
-import KernelDensity: kernel_dist, default_bandwidth, kde_boundary, kde_range, tabulate, UniformWeights
+import KernelDensity: kernel_dist, default_bandwidth, kde_boundary, kde_range, tabulate
 
 for D in [Normal,Uniform,Logistic]
     d = kernel_dist(D,0.5)
@@ -54,10 +54,10 @@ for X in ([0.0], [0.0,0.0], [0.0,0.5], [-0.5:0.1:0.5;])
         @test sum(k5.density)*step(k5.x) ≈ 1.0
 
         k6 = kde(X,r;kernel=D, weights=ones(X)/length(X))
-        @test_approx_eq k4.density k6.density
+        @test k4.density ≈ k6.density
     end
 end
 
 k1 = kde([0.0, 1.], r, bandwidth=1, weights=[0,1])
 k2 = kde([1.], r, bandwidth=1)
-@test k1.density == k2.density
+@test k1.density ≈ k2.density