Skip to content

Commit

Permalink
get_optimal() test and fixes
Browse files Browse the repository at this point in the history
  • Loading branch information
metelkin committed Oct 18, 2024
1 parent 46106be commit 69cbedf
Show file tree
Hide file tree
Showing 5 changed files with 164 additions and 40 deletions.
10 changes: 5 additions & 5 deletions src/get_optimal.jl
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@
fill((-Inf, Inf), length(theta_init)),
scale
),
scan_tol::Float64 = 1e-3,
scan_tol::Union{Float64,Nothing} = nothing,
loss_tol::Float64 = 1e-3,
local_alg::Symbol = :LN_NELDERMEAD,
silent::Bool = false,
Expand Down Expand Up @@ -50,8 +50,8 @@ function get_optimal(
fill((-Inf, Inf), length(theta_init)),
scale
),
scan_tol::Float64 = 1e-3,
loss_tol::Float64 = 1e-3,
scan_tol::Union{Float64,Nothing} = nothing,
loss_tol::Float64 = 0.,
local_alg::Symbol = :LN_NELDERMEAD,
silent::Bool = false,
max_iter::Int = 10^5,
Expand Down Expand Up @@ -139,8 +139,8 @@ function get_optimal(
end

opt = Opt(local_alg, n_theta)
opt.ftol_abs = loss_tol
opt.xtol_abs = scan_tol
loss_tol !== 0. && (opt.ftol_abs = loss_tol)
scan_tol !== nothing && (opt.xtol_abs = scan_tol)
opt.min_objective = loss_func_g
opt.maxeval = max_iter

Expand Down
124 changes: 93 additions & 31 deletions test/cases_func.jl
Original file line number Diff line number Diff line change
@@ -1,26 +1,26 @@

f_1p(x) = 5.0 + (x[1]-3.0)^2 # [100., missing, missing, missing]
f_1p(x) = 5.0 + (x[1]-3.0)^2 # [3.]

f_2p_1im(x) = 5.0 + (x[1]-3.0)^2 + 0.0*x[2] # [3., missing, missing, missing]
f_2p_1im(x) = 5.0 + (x[1]-3.0)^2 + 0.0*x[2] # [3., missing]

f_2p(x) = 5.0 + (x[1]-3.0)^2 + (x[2]-4.0)^2 # [3., 4., missing, missing]
f_2p(x) = 5.0 + (x[1]-3.0)^2 + (x[2]-4.0)^2 # [3., 4.]

f_3p_1im(x) = 5.0 + (x[1]-3.0)^2 + (x[2]/x[3]-4.0)^2 # [3., missing, missing, missing]
f_3p_1im(x) = 5.0 + (x[1]-3.0)^2 + (x[2]/x[3]-4.0)^2 # [3., missing, missing]

# Paraboloid with a dependent pair (x[1], x[2]); x[3] does not affect the value.
# Optimum: x = [3., 2., missing], minimal loss 5.0.
function f_3p_1im_dep(x)
    return 5.0 + (x[1] - 3.0)^2 + (x[1] - x[2] - 1.0)^2 + 0 * x[3]^2
end

# Paraboloid in x[1], x[2]; x[3] and x[4] are non-identifiable (zero weight).
# Optimum: x = [3., 4., missing, missing], minimal loss 5.0.
function f_4p_2im(x)
    return 5.0 + (x[1] - 3.0)^2 + (x[2] - 4.0)^2 + 0.0 * x[3] + 0.0 * x[4]
end

# Only x[1] is identifiable; x[2] and x[3] enter through their ratio and x[4]
# has zero weight. Optimum: x = [3., missing, missing, missing], minimal loss 5.0.
function f_4p_3im(x)
    return 5.0 + (x[1] - 3.0)^2 + (x[2] / x[3] - 4.0)^2 + 0.0 * x[4]
end

f_1p_ex(x) = 5.0 + (x[1]-1e-8)^2 # [1e-8, missing, missing, missing]
f_1p_ex(x) = 5.0 + (x[1]-1e-8)^2 # [1e-8, missing]

# Identifiable: x[1] and x[2]; x[3]/x[4] identifiable only as a ratio, x[5] has
# zero weight. Optimum: x = [3., 0., missing, missing, missing], minimal loss 5.0.
function f_5p_3im(x)
    return 5.0 + (x[1] - 3.0)^2 + (exp(x[2]) - 1.0)^2 + (x[3] / x[4] - 4.0)^2 + 0.0 * x[5]
end

# Identifiable: x[1] (optimum 3.) and x[2] (optimum 0., via exp); x[3] has zero
# weight. Optimum: x = [3., 0., missing], minimal loss 5.0.
function f_3p_im(x)
    return 5.0 + (x[1] - 3.0)^2 + (exp(x[2]) - 1.0)^2 + 0.0 * x[3]
end

# test each alg on all functions
function test_alg(
# test each algorithm on get_interval()
function test_alg_interval(
alg::NamedTuple,
func_dict::AbstractDict = test_funcs;
bounds::Tuple{Float64,Float64} = (-Inf,Inf),
Expand Down Expand Up @@ -63,63 +63,115 @@ function test_alg(
end
end

"""
    test_alg_optimal(alg::NamedTuple; bounds, scale, scan_tol, loss_tol)

Run `get_optimal()` on every case in the global `test_funcs` dict with the
local algorithm `alg.algorithm` and check the stopping status and the found
optimum inside nested `@testset`s.

# Arguments
- `alg`: NamedTuple with fields `algorithm::Symbol` (NLopt algorithm name) and
  `skip::Vector{Symbol}` (case names whose assertions are marked `skip`).

# Keywords
- `bounds`: box bounds applied to every parameter. Default `(-Inf, Inf)`.
- `scale`: scale applied to every parameter, e.g. `:direct` or `:log`.
- `scan_tol`: when set, `get_optimal` is expected to stop with `:XTOL_REACHED`.
- `loss_tol`: when nonzero, `get_optimal` is expected to stop with
  `:FTOL_REACHED` and the loss should match `f.loss_optim` within `10 * loss_tol`.
"""
function test_alg_optimal(
    alg::NamedTuple;
    bounds::Tuple{Float64,Float64} = (-Inf,Inf),
    scale::Symbol = :direct,
    scan_tol::Union{Float64,Nothing} = nothing,
    loss_tol::Float64 = 0.,
)
    @testset "get_optimal() for $(alg.algorithm)" begin
        for (f_name, f) in test_funcs
            should_skip = f_name in alg.skip
            @testset "Case $f_name" begin
                result = get_optimal(
                    f.x0,
                    f.func;
                    theta_bounds = fill(bounds, length(f.x0)),
                    scale = fill(scale, length(f.x0)),
                    scan_tol = scan_tol,
                    loss_tol = loss_tol,
                    local_alg = alg.algorithm,
                    silent = true
                )

                # loss-based stopping criterion.
                # was `loss_tol !== 0.`: the intent is a value comparison, so use `!=`
                if loss_tol != 0.
                    @test result.ret == :FTOL_REACHED skip = should_skip
                    @test isapprox(result.loss, f.loss_optim, atol = loss_tol * 10.) skip = should_skip
                end

                # parameter-based stopping criterion.
                # checked once per case: the old per-index loop tested the whole
                # `f.x_optim` vector against `nothing` on every iteration, which
                # repeated the same assertion length(x0) times
                if scan_tol !== nothing && f.x_optim !== nothing
                    @test result.ret == :XTOL_REACHED skip = should_skip
                    # per-parameter value check for identified optima only —
                    # kept disabled, as in the original:
                    #for i in eachindex(f.x0)
                    #    f.x_optim[i] === nothing && continue
                    #    @test isapprox(result.params[i], f.x_optim[i], atol = scan_tol * 10) skip = should_skip
                    #end
                end
            end
        end
    end
end

# functions dict
test_funcs = Dict(
:f_1p => (
func = f_1p,
x0 = [3.,1.],
endpoints = [(1.,5.),(nothing,nothing)],
status = [(:BORDER_FOUND_BY_SCAN_TOL,:BORDER_FOUND_BY_SCAN_TOL),(:SCAN_BOUND_REACHED,:SCAN_BOUND_REACHED)],
loss_crit = 9.
x0 = [2.],
endpoints = [(1.,5.)],
status = [(:BORDER_FOUND_BY_SCAN_TOL,:BORDER_FOUND_BY_SCAN_TOL)],
loss_crit = 9.,
loss_optim = 5.,
x_optim = [3.]
),

:f_2p_1im => (
func = f_2p_1im,
x0 = [3.,1.],
endpoints = [(1.,5.),
(nothing,nothing)],
x0 = [4.,1.],
endpoints = [(1.,5.),(nothing,nothing)],
status = [(:BORDER_FOUND_BY_SCAN_TOL,:BORDER_FOUND_BY_SCAN_TOL),
(:SCAN_BOUND_REACHED,:SCAN_BOUND_REACHED)],
loss_crit = 9.
loss_crit = 9.,
loss_optim = 5.,
x_optim = [3., nothing]
),

:f_2p => (
func = f_2p,
x0 = [3.,4.],
x0 = [4.,5.],
endpoints = [(1.,5.),
(2.,6.)],
status = [(:BORDER_FOUND_BY_SCAN_TOL,:BORDER_FOUND_BY_SCAN_TOL),
(:BORDER_FOUND_BY_SCAN_TOL,:BORDER_FOUND_BY_SCAN_TOL)],
loss_crit = 9.
loss_crit = 9.,
loss_optim = 5.,
x_optim = [3., 4.]
),

:f_3p_1im => (
func = f_3p_1im,
x0 = [3.,4.,1.1],
x0 = [4.,4.,1.1],
endpoints = [(1.,5.),
(nothing,nothing),
(nothing,nothing)],
status = [(:BORDER_FOUND_BY_SCAN_TOL,:BORDER_FOUND_BY_SCAN_TOL),
(:SCAN_BOUND_REACHED,:SCAN_BOUND_REACHED),
(:SCAN_BOUND_REACHED,:SCAN_BOUND_REACHED)],
loss_crit = 9.
loss_crit = 9.,
loss_optim = 5.,
x_optim = [3., nothing, nothing]
),

:f_3p_1im_dep => (
func = f_3p_1im_dep,
x0 = [3., 2., 2.1],
x0 = [4., 3., 2.1],
endpoints = [(1.,5.),
(2.0-2.0*sqrt(2.),2.0+2.0*sqrt(2.)),
(nothing,nothing)],
status = [(:BORDER_FOUND_BY_SCAN_TOL,:BORDER_FOUND_BY_SCAN_TOL),
(:BORDER_FOUND_BY_SCAN_TOL,:BORDER_FOUND_BY_SCAN_TOL),
(:SCAN_BOUND_REACHED,:SCAN_BOUND_REACHED)],
loss_crit = 9.
loss_crit = 9.,
loss_optim = 5.,
x_optim = [3., 2., nothing]
),

:f_4p_2im => (
func = f_4p_2im,
x0 = [3.,4.,1.,1.],
x0 = [4.,5.,1.,1.],
endpoints = [(1.,5.),
(2.,6.),
(nothing,nothing),
Expand All @@ -128,12 +180,14 @@ test_funcs = Dict(
(:BORDER_FOUND_BY_SCAN_TOL,:BORDER_FOUND_BY_SCAN_TOL),
(:SCAN_BOUND_REACHED,:SCAN_BOUND_REACHED),
(:SCAN_BOUND_REACHED,:SCAN_BOUND_REACHED)],
loss_crit = 9.
loss_crit = 9.,
loss_optim = 5.,
x_optim = [3., 4., nothing, nothing]
),

:f_4p_3im => (
func = f_4p_3im,
x0 = [3.,4.,1.1,1.1],
x0 = [4.,4.,1.1,1.1],
endpoints = [(1.,5.),
(nothing,nothing),
(nothing,nothing),
Expand All @@ -142,20 +196,24 @@ test_funcs = Dict(
(:SCAN_BOUND_REACHED,:SCAN_BOUND_REACHED),
(:SCAN_BOUND_REACHED,:SCAN_BOUND_REACHED),
(:SCAN_BOUND_REACHED,:SCAN_BOUND_REACHED)],
loss_crit = 9.
loss_crit = 9.,
loss_optim = 5.,
x_optim = [3., nothing, nothing, nothing]
),

:f_1p_ex => (
func = f_1p_ex,
x0 = [1.5, 2.],
endpoints = [(-2+1e-8,2+1e-8), (nothing, nothing)],
status = [(:BORDER_FOUND_BY_SCAN_TOL,:BORDER_FOUND_BY_SCAN_TOL),(:SCAN_BOUND_REACHED,:SCAN_BOUND_REACHED)],
loss_crit = 9.
loss_crit = 9.,
loss_optim = 5.,
x_optim = [1e-8, nothing]
),

:f_5p_3im => (
func = f_5p_3im,
x0 = [3., 0.5, 8., 2., 2.],
x0 = [4., 0.5, 8., 2., 2.],
endpoints = [(1.,5.),
(nothing,log(3)),
(nothing,nothing),
Expand All @@ -166,18 +224,22 @@ test_funcs = Dict(
(:SCAN_BOUND_REACHED,:SCAN_BOUND_REACHED),
(:SCAN_BOUND_REACHED,:SCAN_BOUND_REACHED),
(:SCAN_BOUND_REACHED,:SCAN_BOUND_REACHED)],
loss_crit = 9.
loss_crit = 9.,
loss_optim = 5.,
x_optim = [3., nothing, nothing, nothing, nothing]
),

:f_3p_im => (
func = f_3p_im,
x0 = [3.,1.,1,],
x0 = [4.,0.5,1,],
endpoints = [(1.,5.),
(nothing,log(3)),
(nothing,nothing)],
status = [(:BORDER_FOUND_BY_SCAN_TOL,:BORDER_FOUND_BY_SCAN_TOL),
(:SCAN_BOUND_REACHED,:BORDER_FOUND_BY_SCAN_TOL),
(:SCAN_BOUND_REACHED,:SCAN_BOUND_REACHED)],
loss_crit = 9.
loss_crit = 9.,
loss_optim = 5.,
x_optim = [3., 0., nothing]
)
)
)
6 changes: 4 additions & 2 deletions test/runtests.jl
Original file line number Diff line number Diff line change
Expand Up @@ -39,5 +39,7 @@ println("Starting tests for get_optimal")

# experimental tests

@testset "testing derivative-free algorithms" begin include("test_deriv_free_algs.jl") end
@testset "gradient-based algorithms" begin include("test_grad_algs.jl") end
#@testset "testing derivative-free algorithms" begin include("test_deriv_free_algs.jl") end
#@testset "gradient-based algorithms" begin include("test_grad_algs.jl") end

@testset "get_optimal series" begin include("test_get_optimal_series.jl") end
60 changes: 60 additions & 0 deletions test/test_get_optimal_series.jl
Original file line number Diff line number Diff line change
@@ -0,0 +1,60 @@
# NLopt derivative-free local algorithms exercised against get_optimal() with a
# loss_tol stopping criterion. `skip` lists test-case names known to fail.
all_algorithms_loss = [
    # stable
    (algorithm = :LN_NELDERMEAD, skip = [:f_1p, :f_2p_1im]),  # Nelder-Mead simplex
    # unstable
    (algorithm = :LN_PRAXIS, skip = [:f_1p]),  # principal-axis method, gradient-free
    (algorithm = :LN_SBPLX, skip = []),        # Subplex: Nelder-Mead over subspaces
    # failing on many cases
    (algorithm = :LN_NEWUOA, skip = [:f_1p, :f_4p_2im, :f_3p_im, :f_3p_1im, :f_3p_1im_dep, :f_1p_ex, :f_4p_3im, :f_2p_1im, :f_2p]),
    (algorithm = :LN_BOBYQA, skip = [:f_1p, :f_3p_1im, :f_4p_2im, :f_1p_ex, :f_4p_3im, :f_5p_3im, :f_2p_1im, :f_2p]),  # bound-constrained, quadratic models
    (algorithm = :LN_COBYLA, skip = []),       # linear approximations
]
# Same algorithm set, for the loss_tol suite run in :log scale.
all_algorithms_loss_log = [
    # stable
    (algorithm = :LN_NELDERMEAD, skip = [:f_1p, :f_2p_1im]),  # Nelder-Mead simplex
    # unstable
    (algorithm = :LN_PRAXIS, skip = [:f_1p]),  # principal-axis method, gradient-free
    (algorithm = :LN_SBPLX, skip = []),        # Subplex: Nelder-Mead over subspaces
    # failing on some cases
    (algorithm = :LN_NEWUOA, skip = [:f_1p, :f_4p_2im, :f_3p_im]),     # unconstrained, quadratic models
    (algorithm = :LN_BOBYQA, skip = [:f_1p_ex, :f_3p_im, :f_5p_3im]),  # bound-constrained, quadratic models
    (algorithm = :LN_COBYLA, skip = [:f_1p_ex, :f_3p_im]),             # linear approximations
]
# Algorithm set for the scan_tol (parameter tolerance) suite in :direct scale.
all_algorithms_scan = [
    # stable
    (algorithm = :LN_NELDERMEAD, skip = []),   # Nelder-Mead simplex
    # unstable
    (algorithm = :LN_PRAXIS, skip = [:f_1p]),  # principal-axis method, gradient-free
    (algorithm = :LN_SBPLX, skip = []),        # Subplex: Nelder-Mead over subspaces
    # failing on some cases
    (algorithm = :LN_NEWUOA, skip = [:f_1p, :f_4p_2im, :f_3p_im, :f_3p_1im_dep, :f_1p_ex, :f_2p_1im, :f_2p, :f_5p_3im]),
    (algorithm = :LN_BOBYQA, skip = [:f_3p_im]),  # bound-constrained, quadratic models
    (algorithm = :LN_COBYLA, skip = []),          # linear approximations
]
# Algorithm set for the scan_tol (parameter tolerance) suite in :log scale.
all_algorithms_scan_log = [
    # stable
    (algorithm = :LN_NELDERMEAD, skip = []),   # Nelder-Mead simplex
    # unstable
    (algorithm = :LN_PRAXIS, skip = [:f_1p]),  # principal-axis method, gradient-free
    (algorithm = :LN_SBPLX, skip = []),        # Subplex: Nelder-Mead over subspaces
    # failing on some cases
    (algorithm = :LN_NEWUOA, skip = [:f_1p, :f_3p_1im, :f_3p_1im_dep, :f_4p_2im, :f_1p_ex, :f_2p_1im, :f_2p, :f_3p_im]),
    (algorithm = :LN_BOBYQA, skip = [:f_3p_1im, :f_3p_1im_dep, :f_3p_im, :f_4p_3im, :f_5p_3im]),  # bound-constrained, quadratic models
    (algorithm = :LN_COBYLA, skip = []),       # linear approximations
]

# Drive test_alg_optimal() over each algorithm list. The original used
# side-effect comprehensions (`[f(x) for x in xs]`), allocating a throwaway
# array per suite; plain `for` loops express the intent directly.
# NOTE(review): the second suite was labeled ":log" — renamed to "loss :log"
# for consistency with "scan :log".
@testset "loss" begin
    for alg in all_algorithms_loss
        test_alg_optimal(alg; loss_tol = 1e-3)
    end
end

@testset "loss :log" begin
    for alg in all_algorithms_loss_log
        test_alg_optimal(alg; loss_tol = 1e-3, scale = :log, bounds = (0., Inf))
    end
end

@testset "scan" begin
    for alg in all_algorithms_scan
        test_alg_optimal(alg; scan_tol = 1e-4)
    end
end

@testset "scan :log" begin
    for alg in all_algorithms_scan_log
        test_alg_optimal(alg; scan_tol = 1e-4, scale = :log, bounds = (0., Inf))
    end
end
4 changes: 2 additions & 2 deletions test/test_grad_algs.jl
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,7 @@ grad_algorithms_autodiff = [
(algorithm = :LD_VAR1, skip = [:f_3p_1im, :f_4p_2im, :f_4p_3im, :f_5p_3im, :f_2p]) # Shifted limited-memory variable-metric (rank 1)
]

[test_alg(alg; bounds=(-1e10,1e10), loss_grad=:AUTODIFF) for alg in grad_algorithms_autodiff]
[test_alg_interval(alg; bounds=(-1e10,1e10), loss_grad=:AUTODIFF) for alg in grad_algorithms_autodiff]

grad_algorithms_finite = [
# good
Expand All @@ -34,4 +34,4 @@ grad_algorithms_finite = [
(algorithm = :LD_VAR1, skip = [:f_3p_1im, :f_4p_2im, :f_4p_3im, :f_5p_3im, :f_2p]) # Shifted limited-memory variable-metric (rank 1)
]

[test_alg(alg; bounds=(-1e10,1e10), loss_grad=:FINITE) for alg in grad_algorithms_finite]
[test_alg_interval(alg; bounds=(-1e10,1e10), loss_grad=:FINITE) for alg in grad_algorithms_finite]

0 comments on commit 69cbedf

Please sign in to comment.