
Commit

more tests and fixes
jbrea committed Aug 5, 2019
1 parent c8d3b76 commit 8c29ea7
Showing 2 changed files with 62 additions and 14 deletions.
32 changes: 19 additions & 13 deletions src/MeshAdaptiveDirectSearch.jl
@@ -42,14 +42,13 @@ LogMesh(; τ = 4, Δᵐ = 1) = LogMesh(τ, Int(-log(τ, Δᵐ)))
function update!(m::LogMesh, i)
i == 0 && return m
if i > 0
m.neglogΔᵐ == 0 && return m
m.neglogΔᵐ -= 1
else
m.neglogΔᵐ += 1
end
m
end
Δ(m::LogMesh) = 1/m.τ^m.neglogΔᵐ
Δ(m::LogMesh) = min(1, (1/m.τ)^m.neglogΔᵐ)
ℓ(m::LogMesh) = m.neglogΔᵐ
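
For reference, a minimal usage sketch (not part of the diff) of the mesh update above, with the internals imported as in test/runtests.jl; the expected values of Δ follow from the changed min(1, ...) formula:

    import MeshAdaptiveDirectSearch: LogMesh, update!, Δ

    mesh = LogMesh()     # τ = 4, Δᵐ = 1, so Δ(mesh) == 1
    update!(mesh, -1)    # a failed poll refines the mesh: Δ(mesh) == 1/4
    update!(mesh, 1)     # an improved incumbent coarsens it again: Δ(mesh) == 1
    update!(mesh, 1)     # further successes leave Δ(mesh) capped at 1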

####
@@ -67,7 +66,7 @@ function b(d::LTDirectionGenerator{N}, l::Int) where N
d.b[l] = (i = i,
v = SVector{N}([rand((-1,1)) * (k == i ? 2^l : (2^l - 1)) for k in 1:N]))
end
iterator(g::LTDirectionGenerator, l) = LTDirectionIterator(g, l)
iterator(g::LTDirectionGenerator, l) = LTDirectionIterator(g, max(0, l))

# Implements generation of the positive bias, box on p. 204 from Audet & Dennis 2006
struct LTDirectionIterator{N,Np}
@@ -250,6 +249,7 @@ end

# poll stage
update!(::Any, ::Any) = nothing
init!(::Any, ::Any) = nothing
poll(m, f, constraints, x, fx) = poll(m, iterator(m.poll, ℓ(m.mesh)), Δ(m.mesh), f, constraints, x, fx)
isnewincumbent(m::MADS, x, fx, oldfx) = fx < oldfx ? 1 : -1, x, fx
@inline function poll(m, it, Δᵐ, f, constraints, x, fx)
@@ -314,18 +314,19 @@ end
Implements (suc, neg) reduction of OrthoMADS.
See Audet et al. 2014.
"""
NegReduction(N) = NegReduction(zeros(N), zeros(N))
NegReduction(N) = NegReduction(zeros(N), fill(NaN, N))
mutable struct OrthoDirectionGenerator{N,R}
reduction::R
t₀::Int
ℓmax::Int
tmax::Int
end
function OrthoDirectionGenerator(N; t0 = N,
reduction = NegReduction(zeros(N), zeros(N)))
function OrthoDirectionGenerator(N; t0 = 2*N,
reduction = NegReduction(N))
OrthoDirectionGenerator{N,typeof(reduction)}(reduction, t0, 0, 0)
end
iterator(g::OrthoDirectionGenerator, l) = OrthoDirectionIterator(g, l)
init!(g::OrthoDirectionGenerator{N,NegReduction}, x) where N = g.reduction.oldincumbent .= x
function update!(g::OrthoDirectionGenerator{N,NegReduction}, x) where N
@. g.reduction.w = x - g.reduction.oldincumbent
@. g.reduction.oldincumbent = x
@@ -335,19 +336,22 @@ struct OrthoDirectionIterator{N, R}
reduction::R
H::Matrix{Float64}
end
function OrthoDirectionIterator(g::OrthoDirectionGenerator{N,R}, ℓ) where {N,R}
if ℓ > g.ℓmax
function determine_t!(g::OrthoDirectionGenerator, ℓ)
if ℓ >= g.ℓmax
g.ℓmax = ℓ
ℓ > g.tmax && (g.tmax = ℓ)
t = ℓ + g.t₀
ℓ + g.t₀ > g.tmax && (g.tmax = ℓ + g.t₀)
return ℓ + g.t₀
else
g.tmax += 1
t = g.tmax
return g.tmax
end
end
function OrthoDirectionIterator(g::OrthoDirectionGenerator{N,R}, ℓ) where {N,R}
t = determine_t!(g, ℓ)
u = first(iterate(HaltonIterator{N}(), t))
q = normalized_halton_direction(u, ℓ)
H = scaledhouseholder(q)
if R === NegReduction && sum(abs2, g.reduction.w) == 0
if R === NegReduction && isnan(g.reduction.w[1])
reduction = NoReduction()
else
reduction = g.reduction
@@ -368,7 +372,8 @@ function iterate(it::OrthoDirectionIterator{N,NegReduction}, state = (1, zeros(N
d = @view(it.H[:, i])
sign = dot(d, it.reduction.w) >= 0 ? 1 : -1
d .*= sign
return d, (i + 1, sumd .- d)
sumd .-= d
return d, (i + 1, sumd)
else
return sumd, (i + 1, sumd)
end
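
To illustrate the (suc, neg) reduction implemented by the iterate method above (a sketch, not part of the diff; w, H and D are made-up names): every column of the scaled Householder basis is flipped so that it points into the half-space of the last successful step w, and the extra poll direction is the negative sum of the flipped columns.

    using LinearAlgebra

    w = [1.0, 0.0]                     # last successful step (illustrative)
    H = [3.0 -4.0; -4.0 -3.0]          # scaledhouseholder([-1, -2]), as in the tests
    D = [dot(h, w) >= 0 ? collect(h) : -collect(h) for h in eachcol(H)]
    push!(D, -sum(D))                  # D == [[3.0, -4.0], [4.0, 3.0], [-7.0, 1.0]]
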
@@ -411,6 +416,7 @@ function minimize(m::AbstractMADS, f, x0 = zeros(length(x0));
finternal = x -> f(to(x))
cinternal = [x -> c(to(x)) for c in constraints]
incumbent = from(x0)
init!(m.poll, incumbent)
fincumbent = finternal(incumbent)
for k in 1:max_iterations
incumbent, fincumbent, i = search(m, finternal, cinternal, incumbent, fincumbent)
44 changes: 43 additions & 1 deletion test/runtests.jl
@@ -1,8 +1,9 @@
using Test, MeshAdaptiveDirectSearch
using Test, MeshAdaptiveDirectSearch, Random

@testset "halton" begin
import MeshAdaptiveDirectSearch: haltonnumber,
normalized_halton_direction, scaledhouseholder
# examples from Audet & Dennis 2006
@test 2*haltonnumber(2, 1) - 1 == 0.
@test haltonnumber(5, 6) ≈ 6/25
@test haltonnumber(7, 7) ≈ 1/49
@@ -12,4 +13,45 @@ using Test, MeshAdaptiveDirectSearch
q = normalized_halton_direction(u, 3)
@test q == [-1, -1, -2, 0]
@test scaledhouseholder([-1, -2]) == [3 -4; -4 -3]
# example from Abramson et al. 2009, p. 958
u = first(Iterators.drop(MeshAdaptiveDirectSearch.HaltonIterator{4}(), 12))
q = normalized_halton_direction(u, 6)
@test scaledhouseholder(q) == [36 0 -18 -36;
0 54 0 0;
-18 0 36 -36;
-36 0 -36 -18]
end
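
The matrices checked above are consistent with the scaled Householder construction of OrthoMADS, H = ‖q‖² I − 2 q qᵀ; a quick sketch (not part of the diff) verifying the 2-D case:

    using LinearAlgebra

    q = [-1, -2]
    H = dot(q, q) * I - 2 * q * q'    # ‖q‖² I − 2 q qᵀ
    @assert H == [3 -4; -4 -3]        # the value asserted in the testset above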

@testset "orthomads" begin
import MeshAdaptiveDirectSearch: scaledhouseholder,
normalized_halton_direction, LogMesh, ℓ, Δ, determine_t!, update!
mesh = LogMesh()
g = MeshAdaptiveDirectSearch.OrthoDirectionGenerator(4, reduction = NoReduction(), t0 = 7)
# p. 958 from Abramson et al. 2009
for (success, t, Δᵐ) in zip([1, 1, -1, -1, -1, -1, 1, -1, -1],
[7, 8, 9, 10, 7, 8, 9, 11, 9, 10],
[1, 1, 1, 1, 1, 1/4, 1/16, 1/4, 1/16, 1/64])
l = ℓ(mesh)
τ = determine_t!(g, l)
@test t == τ
@test Δ(mesh) == Δᵐ
update!(mesh, success)
end
end

@testset "readme" begin
Random.seed!(12418)
f(x) = (1 - exp(-sum(abs2, x))) * max(sum(abs2, x .- [30, 40]), sum(abs2, x .+ [30, 40]))
noisyf(x) = f(x) + .1 * randn()

res = minimize(LtMADS(2), f, [-2.1, 1.7], lowerbound = [-10, -10], upperbound = [10, 10])
@test res.f < 1e-9
res = minimize(LtMADS(2), f, [-2.1, 1.7], lowerbound = [-10, -10], upperbound = [10, 10], constraints = [x -> sum(x) < .5])
@test res.f < 1e-9
res = minimize(OrthoMADS(2), f, [-2.1, 1.7], lowerbound = [-10, -10], upperbound = [10, 10])
@test res.f < 1e-9
res = minimize(RobustLtMADS(2), noisyf, [-2.1, 1.7], lowerbound = [-10, -10], upperbound = [10, 10])
@test res.f < 1e-9
res = minimize(RobustOrthoMADS(2), noisyf, [-2.1, 1.7], lowerbound = [-10, -10], upperbound = [10, 10])
@test res.f < 1e-9
end
