Skip to content
This repository has been archived by the owner on Feb 3, 2020. It is now read-only.

Commit

Permalink
Merge branch 'release/v0.2'
Browse files Browse the repository at this point in the history
  • Loading branch information
KristofferC committed Feb 3, 2015
2 parents 2cc2442 + 8779d7f commit 62ab5b3
Show file tree
Hide file tree
Showing 8 changed files with 81 additions and 24 deletions.
3 changes: 2 additions & 1 deletion .travis.yml
Original file line number Diff line number Diff line change
Expand Up @@ -6,8 +6,9 @@ notifications:
email: false
script:
- julia -e 'Pkg.init(); Pkg.clone(pwd())'
- julia -e 'using KDtree; @assert isdefined(:KDtree); @assert typeof(KDtree) === Module'
#- julia -e 'using KDtree; @assert isdefined(:KDtree); @assert typeof(KDtree) === Module'
- julia -e 'Pkg.add("FactCheck");'
- julia -e 'Pkg.add("ArrayViews");'
- julia -e 'Pkg.test("KDtree", coverage=true)'

after_success:
Expand Down
5 changes: 5 additions & 0 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,11 @@ Since this is a new project there are still some obvious improvements which are
## Author
Kristoffer Carlsson (@KristofferC)

## Requirements

Currently the `ArrayViews` package is required because `sub` in Julia v0.3
performs poorly (it allocates and is slow for the view patterns used here).

## Examples

In the examples, notice that the module is called `KDtree` and the actual tree type is called `KDTree`. This is because modules and types can currently not have the same name in Julia.
Expand Down
10 changes: 9 additions & 1 deletion benchmark/bench_build_tree.jl
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,15 @@ end
println(times)

#=
2015-02-02:
2015-02-03: (with ArrayViews)
[1.3373e-5 4.7584e-5 0.000489836 0.005438274 0.063624952
7.775e-6 5.4737e-5 0.000640676 0.007319247 0.083582606
8.087e-6 8.8015e-5 0.000739576 0.008121957 0.124985771
8.086e-6 6.5001e-5 0.000745174 0.00840684 0.119056416
8.397e-6 6.8733e-5 0.000790892 0.024885876 0.140038217
1.0574e-5 7.5575e-5 0.000862734 0.009941973 0.131831977]
2015-02-02: (with new select!)
[1.6794e-5 6.0335e-5 0.000620148 0.007069198 0.103463751
1.4306e-5 7.8995e-5 0.000901299 0.010201042 0.15665534
1.555e-5 8.5528e-5 0.000956037 0.011532464 0.162782185
Expand Down
8 changes: 8 additions & 0 deletions benchmark/bench_knn.jl
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,14 @@ end
println(times)

#=
2015-02-03: ArrayViews:
[1.3996e-5 2.1771e-5 5.1316e-5 4.4474e-5
2.0837e-5 2.5502e-5 5.9402e-5 6.8732e-5
3.2034e-5 4.1364e-5 8.7393e-5 8.2106e-5
8.5838e-5 8.7394e-5 0.000183183 0.000170121
0.000156437 0.000143063 0.000305409 0.000290792
0.000695723 0.001162545 0.001638387 0.001731067]
2015-02-02:
[2.3015e-5 2.1771e-5 7.0288e-5 6.0958e-5
2.7368e-5 3.5143e-5 8.957e-5 7.7752e-5
Expand Down
7 changes: 7 additions & 0 deletions benchmark/bench_query_ball.jl
Original file line number Diff line number Diff line change
Expand Up @@ -22,6 +22,13 @@ end
println(times)

#=
2015-02-03: ArrayViews + no sqrt
[1.1196e-5 1.9593e-5 7.5885e-5
1.5239e-5 4.1986e-5 0.000167011
1.9905e-5 7.8374e-5 0.00041986
2.6125e-5 0.000124713 0.000603665]
2015-02-03:
[2.1149e-5 3.2966e-5 8.3661e-5
2.146e-5 5.1628e-5 0.000229523
Expand Down
2 changes: 2 additions & 0 deletions src/KDtree.jl
Original file line number Diff line number Diff line change
@@ -1,5 +1,7 @@
module KDtree

using ArrayViews

export KDTree
export k_nearest_neighbour, query_ball_point

Expand Down
55 changes: 39 additions & 16 deletions src/kd_tree.jl
Original file line number Diff line number Diff line change
@@ -1,11 +1,12 @@
# TODO: update to Minkowski distance, p = 1, Inf
function euclidean_distance{T <: FloatingPoint}(point_1::Array{T, 1},
point_2::Array{T, 1})
# Not returning sqrt of ans.
function euclidean_distance{T <: FloatingPoint}(point_1::AbstractVector{T},
point_2::AbstractVector{T})
dist = 0.0
for i in 1:size(point_1, 1)
dist += (point_1[i] - point_2[i]) * (point_1[i] - point_2[i])
end
return sqrt(dist)
return dist
end

# Hyper rectangles are used to bound points in space.
Expand All @@ -29,12 +30,13 @@ function split_hyper_rec{T <: FloatingPoint}(hyper_rec::HyperRectangle,
new_min[dim] = value

return HyperRectangle(hyper_rec.mins, new_max),
HyperRectangle(new_min, hyper_rec.maxes)
HyperRectangle(new_min, hyper_rec.maxes)
end


# From a hyper rectangle we can find the minimum and maximum distance to a point.
# If the point is inside the hyper cube the minimum dist is 0
# We do not return the sqrt here.
function get_min_max_distance{T <: FloatingPoint}(rec::HyperRectangle, point::Vector{T})
min_d = zero(T)
max_d = zero(T)
Expand All @@ -46,7 +48,7 @@ function get_min_max_distance{T <: FloatingPoint}(rec::HyperRectangle, point::Ve
end
max_d += max(d1,d2)
end
return sqrt(min_d), sqrt(max_d)
return min_d, max_d
end


Expand All @@ -68,7 +70,7 @@ get_parent_node(idx::Int) = div(idx, 2)
get_point_index(tree::KDTree, idx::Int) = idx - tree.n_internal_nodes

# From node index -> point in data
get_point(tree::KDTree, idx::Int) = tree.data[: , tree.indices[get_point_index(tree, idx)]]
get_point(tree::KDTree, idx::Int) = view(tree.data, :, tree.indices[get_point_index(tree, idx)])

is_leaf_node(tree::KDTree, idx::Int) = idx > tree.n_internal_nodes

Expand All @@ -77,6 +79,14 @@ is_leaf_node(tree::KDTree, idx::Int) = idx > tree.n_internal_nodes
function KDTree{T <: FloatingPoint}(data::Matrix{T})

n_dim, n_points = size(data)

if n_dim > 20
warn(string("You are sending in data with a large dimension, n_dim = ", n_dim,
". K-d trees are not optimal for high dimensional data.",
" The data matrix should be given in dimensions (n_dim, n_points).",
" Did you accidentally flip them?"))
end

n_internal_nodes = n_points - 1
n_total_nodes = n_internal_nodes + n_points

Expand All @@ -103,7 +113,7 @@ function KDTree{T <: FloatingPoint}(data::Matrix{T})
hyper_recs[1] = HyperRectangle(mins, maxes)

# Call the recursive KDTree builder
build_KDTree(1, data, sub(perm,1:length(perm)), split_vals,
build_KDTree(1, data, view(perm,1:length(perm)), split_vals,
split_dims, hyper_recs, n_internal_nodes, indices)

KDTree(data, split_vals, split_dims,
Expand All @@ -118,7 +128,7 @@ function KDTree{T <: FloatingPoint}(data::Matrix{T})
# with the new cubes and node indices.
function build_KDTree{T <: FloatingPoint}(index::Int,
data::Matrix{T},
perm::SubArray{Int,1},
perm::AbstractVector,
split_vals::Vector{T},
split_dims::Vector{Int8},
hyper_recs::Vector{HyperRectangle},
Expand Down Expand Up @@ -179,20 +189,27 @@ function build_KDTree{T <: FloatingPoint}(index::Int,
hyper_recs[get_left_node(index)] = hyper_rec_1
hyper_recs[get_right_node(index)] = hyper_rec_2

build_KDTree(get_left_node(index), data, sub(perm, 1:mid_idx),
build_KDTree(get_left_node(index), data, view(perm, 1:mid_idx),
split_vals, split_dims, hyper_recs, n_internal_nodes, indices )

build_KDTree(get_right_node(index), data, sub(perm, mid_idx+1:length(perm)),
build_KDTree(get_right_node(index), data, view(perm, mid_idx+1:length(perm)),
split_vals, split_dims, hyper_recs, n_internal_nodes, indices )
end


# Finds the k nearest neighbour to a given point in space.
function k_nearest_neighbour{T <: FloatingPoint}(tree::KDTree, point::Array{T, 1}, k::Int)

if k > size(tree.data, 2) || k < 0
error("k > number of points in tree or < 0")
if k > size(tree.data, 2) || k <= 0
error("k > number of points in tree or <= 0")
end

if size(point,1) != size(tree.data, 1)
error(string("Wrong dimension of input point, points in the tree",
" have dimension ", size(tree.data, 1), " you",
" gave a point with dimension ", size(point,1), "."))
end

best_idxs = [-1 for i in 1:k]
best_dists = [typemax(T) for i in 1:k]

Expand All @@ -201,7 +218,7 @@ function k_nearest_neighbour{T <: FloatingPoint}(tree::KDTree, point::Array{T, 1
# Convert from indices in tree to indices in data
true_indices = [tree.indices[get_point_index(tree, x)] for x in best_idxs]

return true_indices, best_dists
return true_indices, sqrt(best_dists)
end


Expand Down Expand Up @@ -248,9 +265,15 @@ function query_ball_point{T <: FloatingPoint}(tree::KDTree,
point::Vector{T},
radius::T)

if size(point,1) != size(tree.data, 1)
error(string("Wrong dimension of input point, points in the tree",
" have dimension ", size(tree.data, 1), " you",
" gave a point with dimension ", size(point,1), "."))
end

index = 1
idx_in_ball = Int[]
traverse_check(tree, index, point, radius, idx_in_ball)
traverse_check(tree, index, point, radius^2 , idx_in_ball)
return idx_in_ball
end

Expand All @@ -265,7 +288,7 @@ function traverse_check{T <: FloatingPoint}(tree::KDTree,
min_d, max_d = get_min_max_distance(tree.hyper_recs[index], point)
if min_d > r # Hyper sphere does not intersect the hyper rectangle, skip the whole sub tree
return
elseif (max_d < r)
elseif max_d < r
traverse_no_check(tree, index, idx_in_ball)
elseif is_leaf_node(tree, index)
if euclidean_distance(get_point(tree, index), point) < r
Expand All @@ -280,7 +303,7 @@ end


# Adds everything in this subtree since we have determined
#that the hyper rectangle completely encloses the hyper sphere
# that the hyper sphere completely encloses the hyper rectangle
function traverse_no_check(tree::KDTree, index::Int, idx_in_ball::Vector{Int})
if is_leaf_node(tree, index)
push!(idx_in_ball, tree.indices[get_point_index(tree, index)])
Expand Down
15 changes: 9 additions & 6 deletions test/test_kd_tree.jl
Original file line number Diff line number Diff line change
Expand Up @@ -20,7 +20,7 @@ facts("KDtree") do

# 8 node rectangle
data = [0.0 0.0 0.0 0.5 0.5 1.0 1.0 1.0;
0.0 0.5 1.0 0.0 1.0 0.0 0.5 1.0]
0.0 0.5 1.0 0.0 1.0 0.0 0.5 1.0]
tree = KDTree(data)

idxs, dists = k_nearest_neighbour(tree, [0.8, 0.8], 1)
Expand All @@ -30,15 +30,16 @@ facts("KDtree") do
idxs, dists = k_nearest_neighbour(tree, [0.1, 0.8], 3)
@fact idxs => [3, 2, 5]

@fact_throws k_nearest_neighbour(tree, [0.1, 0.8], 10) # k > n_points

@fact_throws k_nearest_neighbour(tree, [0.1, 0.8], 10) # k > n_points
@fact_throws k_nearest_neighbour(tree, [0.1], 10) # n_dim != trees dim
end #context

context("KDtree.ball_query") do

data = [0.0 0.0 0.0 0.0 1.0 1.0 1.0 1.0;
0.0 0.0 1.0 1.0 0.0 0.0 1.0 1.0;
0.0 1.0 0.0 1.0 0.0 1.0 0.0 1.0] # 8 node cube
0.0 0.0 1.0 1.0 0.0 0.0 1.0 1.0;
0.0 1.0 0.0 1.0 0.0 1.0 0.0 1.0] # 8 node cube

tree = KDTree(data)

Expand All @@ -48,11 +49,13 @@ facts("KDtree") do
idxs = query_ball_point(tree, [0.0, 0.0, 0.5], 0.6)
@fact idxs => [1, 2] # Corner 1 and 2 at least 0.6 distance away from [0.0, 0.0, 0.5]

idxs = query_ball_point(tree, [0.5, 0.5, 0.5], 0.2)
idxs = query_ball_point(tree, [0.5, 0.5, 0.5], 0.2)
@fact idxs => [] #

idxs = query_ball_point(tree, [0.5, 0.5, 0.5], 1.0)
idxs = query_ball_point(tree, [0.5, 0.5, 0.5], 1.0)
@fact idxs => [1, 2, 3, 4, 5, 6, 7, 8] #

@fact_throws query_ball_point(tree, [0.1], 1.0) # n_dim != trees dim
end #context

context("KDtree.yolo_testing") do
Expand Down

0 comments on commit 62ab5b3

Please sign in to comment.