-
Notifications
You must be signed in to change notification settings - Fork 41
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
New benchmark #339
New benchmark #339
Changes from all commits
5512168
9d7e479
a23838c
cecc2ca
a4bf77e
99cb471
4ef1dd7
7fcf166
9f26d83
ee551c2
d90fed0
3091c08
546be06
5060c84
0925a2b
7d7acea
2a55909
f7e1435
6e334bc
77893eb
8fa2d42
056b125
7514294
3d94b8e
0cc7148
d32dbc3
3645923
2a51798
30a0b6d
377d242
a0f6396
07529fa
1d8bb8e
4db49ce
649bd55
02764d5
93528a4
96fb14f
fc9b060
dbf0bb2
565a9fb
92db97e
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,280 @@ | ||
using Revise | ||
using Optim, Manopt | ||
using Manifolds | ||
using LineSearches | ||
using LinearAlgebra | ||
|
||
using Profile | ||
using ProfileView | ||
using BenchmarkTools | ||
using Plots | ||
using ManoptExamples | ||
using ImprovedHagerZhangLinesearch | ||
|
||
"""
    norm_inf(M::AbstractManifold, p, X)

Infinity norm of the tangent vector `X`; the manifold `M` and base point `p`
are ignored. Used as the `norm` callback for `StopWhenGradientNormLess`.
"""
function norm_inf(M::AbstractManifold, p, X)
    return norm(X, Inf)
end
|
||
"""
    f_rosenbrock(x)

Sum of coupled 2-D Rosenbrock terms over consecutive pairs of `x`:
`∑ (1 - x[i])^2 + 100 (x[i+1] - x[i]^2)^2` for `i = 1, 3, 5, …`.
The global minimum value is `0` at `x == ones(length(x))`.
`length(x)` must be even (an odd length hits an out-of-bounds access
on the final pair, as in the original implementation).
"""
function f_rosenbrock(x)
    # Type-stable accumulator: a Float32/BigFloat input stays in its own
    # float type instead of being promoted to Float64 by a literal `0.0`.
    result = zero(float(eltype(x)))
    for i in 1:2:length(x)
        result += (one(result) - x[i])^2 + 100 * (x[i + 1] - x[i]^2)^2
    end
    return result
end
# Manopt-compatible cost signature: the manifold argument is accepted and
# discarded, delegating to the plain vector method.
f_rosenbrock(::AbstractManifold, x) = f_rosenbrock(x)
|
||
# Smoke test / warm-up: derivative-free Nelder–Mead via Optim.jl on the 2-D problem.
optimize(f_rosenbrock, [0.0, 0.0], Optim.NelderMead())
|
||
"""
    g_rosenbrock!(storage, x)

Write the Euclidean gradient of `f_rosenbrock` at `x` into `storage`,
pair by pair (matching the cost's 2-D term structure), and return `storage`.
"""
function g_rosenbrock!(storage, x)
    for i in 1:2:length(x)
        # Shared subexpression of both partial derivatives of one 2-D term.
        residual = x[i + 1] - x[i]^2
        storage[i] = -2.0 * (1.0 - x[i]) - 400.0 * residual * x[i]
        storage[i + 1] = 200.0 * residual
    end
    return storage
end
|
||
# Smoke test / warm-up: Optim.jl L-BFGS with the analytic in-place gradient.
optimize(f_rosenbrock, g_rosenbrock!, [0.0, 0.0], LBFGS())
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. This is the There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. Yes; this will be removed or commented when preparing final version. |
||
|
||
"""
    g_rosenbrock!(M::AbstractManifold, storage, x)

In-place Riemannian gradient of `f_rosenbrock` at `x` on `M`: the Euclidean
gradient is first written into `storage`, then converted in place by
`riemannian_gradient!` (e.g. projected to the tangent space on a sphere).
Returns `storage`.
"""
function g_rosenbrock!(M::AbstractManifold, storage, x)
    g_rosenbrock!(storage, x)
    # NOTE(review): debug guard left in the benchmark script — aborts as soon
    # as the iterate has degenerated to NaN; consider removing before final runs.
    if isnan(x[1])
        error("nan")
    end
    riemannian_gradient!(M, storage, x, storage)
    return storage
end
|
||
# Smoke test / warm-up: Manopt.jl Nelder–Mead on the 2-D Euclidean manifold.
M = Euclidean(2)
Manopt.NelderMead(M, f_rosenbrock)
|
||
# Smoke test / warm-up: Manopt.jl quasi-Newton with the in-place gradient;
# `return_state=true` keeps the full solver state for later inspection.
qn_opts = quasi_Newton(
    M,
    f_rosenbrock,
    g_rosenbrock!,
    [0.0, 0.0];
    evaluation=InplaceEvaluation(),
    return_state=true,
)
|
||
"""
    test_f(f, g!, x0, N::Int)

Run Manopt's quasi-Newton solver for cost `f` and in-place gradient `g!`
from `x0` on the `N`-dimensional Euclidean manifold and return the solver
state (used as the profiling workload in `prof`).
"""
function test_f(f, g!, x0, N::Int)
    manifold = Euclidean(N)
    state = quasi_Newton(
        manifold, f, g!, x0; evaluation=InplaceEvaluation(), return_state=true
    )
    return state
end
|
||
"""
    prof()

Profile 100 000 quasi-Newton solves of the 32-dimensional Rosenbrock problem
and open a ProfileView flame graph. One warm-up solve runs first so that
compilation does not pollute the profile.
"""
function prof()
    dim = 32
    start = zeros(dim)
    # Warm-up call: compile the whole solver path before profiling.
    test_f(f_rosenbrock, g_rosenbrock!, start, dim)

    Profile.clear()
    @profile for _ in 1:100000
        test_f(f_rosenbrock, g_rosenbrock!, start, dim)
    end
    return ProfileView.view()
end
|
||
# Map a Manifolds.jl manifold to its Optim.jl counterpart so both libraries
# solve the same (possibly constrained) problem in the comparison benchmarks.
to_optim_manifold(::Manifolds.Sphere) = Optim.Sphere()
to_optim_manifold(::Euclidean) = Optim.Flat()
|
||
# Tag-type hierarchy selecting which optimization backend a benchmark run
# uses; instances are dispatch targets for `benchmark_time_state`.
abstract type AbstractOptimConfig end

# Benchmark configuration: Manopt.jl quasi-Newton (L-BFGS).
struct ManoptQN <: AbstractOptimConfig end
|
||
"""
    benchmark_time_state(::ManoptQN, M::AbstractManifold, N, f, g!, x0,
                         stepsize::Manopt.Stepsize, mem_len::Int, gtol::Real;
                         kwargs...)

Time Manopt.jl's quasi-Newton (L-BFGS) solver on manifold `M` for cost `f`
and in-place gradient `g!` starting at `x0`, stopping when the ∞-norm of the
gradient drops below `gtol` or after 1000 iterations.

Returns `(time, iters, final_val)`: the median `@benchmark` sample divided by
1000 — BenchmarkTools reports nanoseconds, so this value is in microseconds
(NOTE(review): call sites print it labeled "ms"; confirm the intended unit) —
the iteration count of a separate instrumented run, and the cost at the
returned point. `N` is unused here; it is part of the shared backend signature.
"""
function benchmark_time_state(
    ::ManoptQN,
    M::AbstractManifold,
    N,
    f,
    g!,
    x0,
    stepsize::Manopt.Stepsize,
    mem_len::Int,
    gtol::Real;
    kwargs...,
)
    # Stop on small gradient ∞-norm, or after at most 1000 iterations.
    manopt_sc = StopWhenGradientNormLess(gtol; norm=norm_inf) | StopAfterIteration(1000)
    # The L-BFGS memory cannot usefully exceed the manifold dimension.
    mem_len = min(mem_len, manifold_dimension(M))
    # `$`-interpolation avoids benchmarking global-variable access.
    # NOTE(review): splatting `$kwargs...` inside `@benchmark` — verify this
    # interpolates as intended for keyword arguments.
    bench_manopt = @benchmark quasi_Newton(
        $M,
        $f,
        $g!,
        $x0;
        stepsize=$(stepsize),
        evaluation=$(InplaceEvaluation()),
        memory_size=$mem_len,
        stopping_criterion=$(manopt_sc),
        debug=[],
        $kwargs...,
    )
    # Separate, non-benchmarked run to read off iteration count and objective.
    manopt_state = quasi_Newton(
        M,
        f,
        g!,
        x0;
        stepsize=stepsize,
        evaluation=InplaceEvaluation(),
        return_state=true,
        memory_size=mem_len,
        stopping_criterion=manopt_sc,
        debug=[],
        kwargs...,
    )
    iters = get_count(manopt_state, :Iterations)
    final_val = f(M, manopt_state.p)
    return median(bench_manopt.times) / 1000, iters, final_val
end
|
||
# Benchmark configuration: Optim.jl L-BFGS (with optional Optim manifold).
struct OptimQN <: AbstractOptimConfig end
|
||
"""
    benchmark_time_state(::OptimQN, M::AbstractManifold, N, f, g!, x0,
                         stepsize, mem_len::Int, gtol::Real; kwargs...)

Time Optim.jl's L-BFGS on cost `f` with in-place gradient `g!` from `x0`,
constraining iterates via the Optim manifold matching `M`
(`to_optim_manifold`). `stepsize` is a LineSearches.jl line search.

Returns `(time, iters, final_val)` analogous to the `ManoptQN` method: median
`@benchmark` sample / 1000 (ns → µs; NOTE(review): callers label this "ms"),
iteration count, and final objective value. NOTE(review): `N` and `kwargs`
are accepted for signature parity with the Manopt backend but unused here.
"""
function benchmark_time_state(
    ::OptimQN,
    M::AbstractManifold,
    N,
    f,
    g!,
    x0,
    stepsize,
    mem_len::Int,
    gtol::Real;
    kwargs...,
)
    # Match the Manopt backend: memory capped at the manifold dimension.
    mem_len = min(mem_len, manifold_dimension(M))
    options_optim = Optim.Options(; g_tol=gtol)
    method_optim = LBFGS(; m=mem_len, linesearch=stepsize, manifold=to_optim_manifold(M))

    bench_optim = @benchmark optimize($f, $g!, $x0, $method_optim, $options_optim)

    # Separate, non-benchmarked run to read off iteration count and objective.
    optim_state = optimize(f, g!, x0, method_optim, options_optim)
    iters = optim_state.iterations
    final_val = optim_state.minimum
    return median(bench_optim.times) / 1000, iters, final_val
end
|
||
"""
    generate_cmp(problem_for_N; mem_len=2, manifold_constructors=..., gtol=1e-5, N_vals=...)

Benchmark Manopt.jl against Optim.jl over a range of problem dimensions and
return a log–log Plots.jl plot of median solve time versus dimension.

# Arguments
- `problem_for_N`: callable `N -> (f, g!)` producing cost and in-place
  gradient for dimension `N`.

# Keywords
- `mem_len`: L-BFGS memory length (capped at manifold dimension downstream).
- `manifold_constructors`: `(name, N -> manifold)` pairs; each gets its own
  pair of curves.
- `gtol`: gradient-norm stopping tolerance for both libraries.
- `N_vals`: dimensions to benchmark (powers of two by default).

NOTE(review): the y-axis is labeled "time [ms]" but `benchmark_time_state`
returns ns/1000 (= µs); confirm the intended unit. Also note the two
backends use different line-search objects (Manopt `HagerZhangLinesearch`
with `sigma=0.5` vs LineSearches `HagerZhang` defaults), so the comparison
is not line-search-identical.
"""
function generate_cmp(
    problem_for_N;
    mem_len::Int=2,
    manifold_constructors=[
        ("Euclidean", N -> Euclidean(N)), ("Sphere", N -> Manifolds.Sphere(N - 1))
    ],
    gtol::Real=1e-5,
    N_vals=[2^n for n in 1:3:16],
)
    plt = plot()
    xlabel!(plt, "dimension")
    ylabel!(plt, "time [ms]")
    title!(plt, "Optimization times")

    # Line search used for the Optim.jl runs.
    ls_hz = LineSearches.HagerZhang()

    for (manifold_name, manifold_constructor) in manifold_constructors
        times_manopt = Float64[]
        times_optim = Float64[]

        println("Benchmarking for gtol=$gtol on $manifold_name")
        for N in N_vals
            f, g! = problem_for_N(N)
            println("Benchmarking for N=$N, f=$(typeof(f))")
            M = manifold_constructor(N)
            # Start at the first canonical basis vector (valid on the sphere too).
            x0 = zeros(N)
            x0[1] = 1
            manopt_time, manopt_iters, manopt_obj = benchmark_time_state(
                ManoptQN(),
                M,
                N,
                f,
                g!,
                x0,
                HagerZhangLinesearch(M; sigma=0.5),
                mem_len,
                gtol;
                retraction_method=ProjectionRetraction(),
                vector_transport_method=ProjectionTransport(),
            )

            push!(times_manopt, manopt_time)
            println("Manopt.jl time: $(manopt_time) ms")
            println("Manopt.jl iterations: $(manopt_iters)")
            println("Manopt.jl objective: $(manopt_obj)")

            optim_time, optim_iters, optim_obj = benchmark_time_state(
                OptimQN(), M, N, f, g!, x0, ls_hz, mem_len, gtol
            )
            println("Optim.jl time: $(optim_time) ms")
            push!(times_optim, optim_time)
            println("Optim.jl iterations: $(optim_iters)")
            println("Optim.jl objective: $(optim_obj)")
        end
        plot!(
            plt,
            N_vals,
            times_manopt;
            label="Manopt.jl ($manifold_name)",
            xaxis=:log,
            yaxis=:log,
        )
        plot!(
            plt,
            N_vals,
            times_optim;
            label="Optim.jl ($manifold_name)",
            xaxis=:log,
            yaxis=:log,
        )
    end
    xticks!(plt, N_vals, string.(N_vals))

    return plt
end
|
||
# generate_cmp(N -> (f_rosenbrock, g_rosenbrock!), mem_len=4) | ||
|
||
"""
    generate_rayleigh_problem(N::Int)

Build a random `N`-dimensional Rayleigh-quotient minimization problem on the
sphere `S^{N-1}` and return a `(cost, grad!)` pair. Each returned function
offers both a plain method (`cost(x)`, `grad!(storage, x)`, bound to the
sphere created here) and a Manopt-style method taking a manifold first.
Uses `randn`, so results differ between calls unless the global RNG is seeded.
"""
function generate_rayleigh_problem(N::Int)
    mat = Symmetric(randn(N, N) / N)
    cost_m = ManoptExamples.RayleighQuotientCost(mat)
    grad_m! = ManoptExamples.RayleighQuotientGrad!!(mat)
    sphere = Manifolds.Sphere(N - 1)
    cost(x) = cost_m(sphere, x)
    cost(M::AbstractManifold, x) = cost_m(M, x)
    grad!(storage, x) = grad_m!(sphere, storage, x)
    grad!(M::AbstractManifold, storage, x) = grad_m!(M, storage, x)
    return (cost, grad!)
end
# generate_cmp(generate_rayleigh_problem, manifold_names=[:Sphere], mem_len=4, N_vals=[10, 100, 1000]) | ||
|
||
"""
    test_case_manopt()

Single large Manopt.jl test run: quasi-Newton (L-BFGS, memory 4) on the
4000-dimensional Rosenbrock cost over the sphere `S^3999`, with projection
vector transport and a Hager–Zhang line search; returns the solver state.
"""
function test_case_manopt()
    dim = 4000
    memory = 4
    M = Manifolds.Sphere(dim - 1; parameter=:field)
    ls_hz = LineSearches.HagerZhang()

    # Start at the first canonical basis vector (a valid sphere point).
    p0 = zeros(dim)
    p0[1] = 1
    stop = StopWhenGradientNormLess(1e-6; norm=norm_inf) | StopAfterIteration(1000)

    return quasi_Newton(
        M,
        f_rosenbrock,
        g_rosenbrock!,
        p0;
        # Alternative under comparison: stepsize=Manopt.LineSearchesStepsize(ls_hz),
        stepsize=HagerZhangLinesearch(M),
        evaluation=InplaceEvaluation(),
        vector_transport_method=ProjectionTransport(),
        return_state=true,
        memory_size=memory,
        stopping_criterion=stop,
    )
end
|
||
"""
    test_case_optim()

Small Optim.jl reference run: flat-manifold L-BFGS (memory 2) with the
Hager–Zhang line search on the 4-dimensional Rosenbrock problem, gradient
tolerance `1e-6`; returns the Optim result object.
"""
function test_case_optim()
    dim = 4
    memory = 2
    ls_hz = LineSearches.HagerZhang()
    solver = LBFGS(; m=memory, linesearch=ls_hz, manifold=Optim.Flat())
    opts = Optim.Options(; g_tol=1e-6)

    p0 = zeros(dim)
    p0[1] = 0  # no-op (already zero); mirrors the structure of test_case_manopt
    return optimize(f_rosenbrock, g_rosenbrock!, p0, solver, opts)
end
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
This number has to be adapted.