diff --git a/Changelog.md b/Changelog.md
index c00a6f9d48..762bdb9656 100644
--- a/Changelog.md
+++ b/Changelog.md
@@ -5,6 +5,12 @@ All notable Changes to the Julia package `Manopt.jl` will be documented in this
 The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
 and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
 
+## [Unreleased]
+
+### Added
+
+* add `Manopt.JuMP_Optimizer` implementing JuMP's solver interface
+
 ## [0.4.41] - 02/11/2023
 
 ### Changed
@@ -370,4 +376,4 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
 * the problem now contains a
 * `AbstractManoptSolverState` replaces `Options`
 * `random_point(M)` is replaced by `rand(M)` from `ManifoldsBase.jl
-* `random_tangent(M, p)` is replaced by `rand(M; vector_at=p)`
\ No newline at end of file
+* `random_tangent(M, p)` is replaced by `rand(M; vector_at=p)`
diff --git a/Project.toml b/Project.toml
index a7b784d23e..23aa3674b4 100644
--- a/Project.toml
+++ b/Project.toml
@@ -1,7 +1,7 @@
 name = "Manopt"
 uuid = "0fc0a36d-df90-57f3-8f93-d78a9fc72bb5"
 authors = ["Ronny Bergmann "]
-version = "0.4.41"
+version = "0.4.42"
 
 [deps]
 ColorSchemes = "35d6a980-a343-548e-a6ea-1d62b119f2f4"
@@ -22,12 +22,14 @@ Statistics = "10745b16-79ce-11e8-11f9-7d13ad32a3b2"
 Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40"
 
 [weakdeps]
+JuMP = "4076af6c-e467-56ae-b986-b466b2749572"
 LRUCache = "8ac3fa9e-de4c-5943-b1dc-09c6b5f20637"
 LineSearches = "d3d80556-e9d4-5f37-9878-2ab0fcc64255"
 Manifolds = "1cead3c2-87b3-11e9-0ccd-23c62b72b94e"
 Plots = "91a5bcdd-55d7-5caf-9e0b-520d859cae80"
 
 [extensions]
+ManoptJuMPExt = ["JuMP"]
 ManoptLRUCacheExt = "LRUCache"
 ManoptLineSearchesExt = "LineSearches"
 ManoptManifoldsExt = ["Manifolds"]
@@ -39,6 +41,7 @@ ColorTypes = "0.9.1, 0.10, 0.11"
 Colors = "0.11.2, 0.12"
 DataStructures = "0.17, 0.18"
 Dates = "1.6"
+JuMP = "1.15"
 LinearAlgebra = "1.6"
 LRUCache = "1.4"
 ManifoldDiff = "0.3.8"
@@ -56,6 +59,7 @@ julia = "1.6"
 
 [extras]
 ForwardDiff = "f6369f11-7733-5829-9624-2563aa707210"
+JuMP = "4076af6c-e467-56ae-b986-b466b2749572"
 LRUCache = "8ac3fa9e-de4c-5943-b1dc-09c6b5f20637"
 LineSearches = "d3d80556-e9d4-5f37-9878-2ab0fcc64255"
 Manifolds = "1cead3c2-87b3-11e9-0ccd-23c62b72b94e"
@@ -63,4 +67,4 @@ Plots = "91a5bcdd-55d7-5caf-9e0b-520d859cae80"
 Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40"
 
 [targets]
-test = ["Test", "ForwardDiff", "Manifolds", "Plots", "LineSearches", "LRUCache"]
+test = ["Test", "ForwardDiff", "JuMP", "Manifolds", "Plots", "LineSearches", "LRUCache"]
diff --git a/Readme.md b/Readme.md
index fb1ff0c82b..7692ed5b44 100644
--- a/Readme.md
+++ b/Readme.md
@@ -41,6 +41,7 @@ The following packages are related to `Manopt.jl`
 * [`Manifolds.jl`](https://juliamanifolds.github.io/Manifolds.jl/stable/) – a library of manifolds implemented using [`ManifoldsBase.jl`](https://juliamanifolds.github.io/ManifoldsBase.jl/stable/) :octocat: [GitHub repository](https://github.com/JuliaManifolds/Manifolds.jl)
 * [`ManifoldsDiff.jl`](https://juliamanifolds.github.io/ManifoldDiff.jl/stable/) – a package to use (Euclidean) AD tools on manifolds, that also provides several differentials and gradients. :octocat: [GitHub repository](https://github.com/JuliaManifolds/ManifoldDiff.jl)
+* [`JuMP.jl`](https://jump.dev/) can be used as an interface to solve an optimization problem with Manopt. See [usage examples](https://manoptjl.org/stable/extensions/). :octocat: [GitHub repository](https://github.com/jump-dev/JuMP.jl)
 
 ## Citation
diff --git a/docs/Project.toml b/docs/Project.toml
index 0152f6625a..bb30cdda67 100644
--- a/docs/Project.toml
+++ b/docs/Project.toml
@@ -9,6 +9,7 @@ DocumenterCitations = "daee34ce-89f3-4625-b898-19384cb65244"
 FiniteDifferences = "26cc04aa-876d-5657-8c51-4c34ba976000"
 Images = "916415d5-f1e6-5110-898d-aaa5f9f070e0"
 JLD2 = "033835bb-8acc-5ee8-8aae-3f567f8a3819"
+JuMP = "4076af6c-e467-56ae-b986-b466b2749572"
 LRUCache = "8ac3fa9e-de4c-5943-b1dc-09c6b5f20637"
 LineSearches = "d3d80556-e9d4-5f37-9878-2ab0fcc64255"
 LinearAlgebra = "37e2e46d-f89d-539d-b4ee-838fcccc9c8e"
diff --git a/docs/make.jl b/docs/make.jl
index 234b23dbdc..922bb75995 100755
--- a/docs/make.jl
+++ b/docs/make.jl
@@ -65,7 +65,7 @@ end
 # (c) load necessary packages for the docs
 using Documenter
 using DocumenterCitations
-using LineSearches, LRUCache, Manopt, Manifolds, Plots
+using JuMP, LineSearches, LRUCache, Manopt, Manifolds, Plots
 
 # (d) add contributing.md to docs
 generated_path = joinpath(@__DIR__, "src")
@@ -112,6 +112,11 @@ makedocs(;
     ),
     modules=[
         Manopt,
+        if isdefined(Base, :get_extension)
+            Base.get_extension(Manopt, :ManoptJuMPExt)
+        else
+            Manopt.ManoptJuMPExt
+        end,
         if isdefined(Base, :get_extension)
             Base.get_extension(Manopt, :ManoptLineSearchesExt)
         else
diff --git a/docs/src/extensions.md b/docs/src/extensions.md
index 7906b64c5f..ad5755d396 100644
--- a/docs/src/extensions.md
+++ b/docs/src/extensions.md
@@ -53,3 +53,54 @@ mid_point
 Manopt.max_stepsize(::TangentBundle, ::Any)
 Manopt.max_stepsize(::FixedRankMatrices, ::Any)
 ```
+
+## JuMP.jl
+
+Manopt can be used via the [JuMP.jl](https://github.com/jump-dev/JuMP.jl) interface.
+The manifold is specified in the `@variable` macro.
+Note that currently only variables (points on manifolds) that are represented as arrays are supported; points whose representation is a struct do not work yet.
+The algebraic expression of the objective function is specified in the `@objective` macro.
+The `descent_state_type` attribute specifies the solver.
+
+```julia
+using JuMP, Manopt, Manifolds
+A = [1.0 -1.0; -1.0 1.0]
+model = Model(Manopt.JuMP_Optimizer)
+# Change the solver with this option, `GradientDescentState` is the default
+set_attribute(model, "descent_state_type", GradientDescentState)
+@variable(model, U[1:2, 1:2] in Stiefel(2, 2), start = 1.0)
+@objective(model, Min, sum((A - U) .^ 2))
+optimize!(model)
+solution_summary(model)
+```
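+
+As a sketch of how results can be queried afterwards (continuing the example above with standard JuMP functions, all of which are supported by this wrapper):
+
+```julia
+termination_status(model) # MOI.LOCALLY_SOLVED once `optimize!` has run
+objective_value(model)    # cost of the returned point
+value.(U)                 # the point on the Stiefel manifold
+raw_status(model)         # Manopt's stopping reason as a string
+```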
+
+```@docs
+Manopt.JuMP_ArrayShape
+Manopt.JuMP_VectorizedManifold
+MOI.dimension(::Manopt.JuMP_VectorizedManifold)
+Manopt.JuMP_Optimizer
+MOI.empty!(::Manopt.JuMP_Optimizer)
+MOI.supports(::Manopt.JuMP_Optimizer, ::MOI.RawOptimizerAttribute)
+MOI.get(::Manopt.JuMP_Optimizer, ::MOI.RawOptimizerAttribute)
+MOI.set(::Manopt.JuMP_Optimizer, ::MOI.RawOptimizerAttribute, ::Any)
+MOI.supports_incremental_interface(::Manopt.JuMP_Optimizer)
+MOI.copy_to(::Manopt.JuMP_Optimizer, ::MOI.ModelLike)
+MOI.supports_add_constrained_variables(::Manopt.JuMP_Optimizer, ::Type{<:Manopt.JuMP_VectorizedManifold})
+MOI.add_constrained_variables(::Manopt.JuMP_Optimizer, ::Manopt.JuMP_VectorizedManifold)
+MOI.is_valid(model::Manopt.JuMP_Optimizer, ::MOI.VariableIndex)
+MOI.get(model::Manopt.JuMP_Optimizer, ::MOI.NumberOfVariables)
+MOI.supports(::Manopt.JuMP_Optimizer, ::MOI.VariablePrimalStart, ::Type{MOI.VariableIndex})
+MOI.set(::Manopt.JuMP_Optimizer, ::MOI.VariablePrimalStart, ::MOI.VariableIndex, ::Union{Real,Nothing})
+MOI.set(::Manopt.JuMP_Optimizer, ::MOI.ObjectiveSense, ::MOI.OptimizationSense)
+MOI.set(::Manopt.JuMP_Optimizer, ::MOI.ObjectiveFunction{F}, ::F) where {F}
+MOI.supports(::Manopt.JuMP_Optimizer, ::Union{MOI.ObjectiveSense,MOI.ObjectiveFunction})
+JuMP.build_variable(::Function, ::Any, ::Manopt.AbstractManifold)
+MOI.get(::Manopt.JuMP_Optimizer, ::MOI.ResultCount)
+MOI.get(::Manopt.JuMP_Optimizer, ::MOI.SolverName)
+MOI.get(::Manopt.JuMP_Optimizer, ::MOI.ObjectiveValue)
+MOI.get(::Manopt.JuMP_Optimizer, ::MOI.PrimalStatus)
+MOI.get(::Manopt.JuMP_Optimizer, ::MOI.DualStatus)
+MOI.get(::Manopt.JuMP_Optimizer, ::MOI.TerminationStatus)
+MOI.get(::Manopt.JuMP_Optimizer, ::MOI.SolverVersion)
+MOI.get(::Manopt.JuMP_Optimizer, ::MOI.ObjectiveSense)
+MOI.get(::Manopt.JuMP_Optimizer, ::MOI.VariablePrimal, ::MOI.VariableIndex)
+MOI.get(::Manopt.JuMP_Optimizer, ::MOI.RawStatusString)
+```
diff --git a/ext/ManoptJuMPExt.jl b/ext/ManoptJuMPExt.jl
new file mode 100644
index 0000000000..d18d6cb39b
--- /dev/null
+++ b/ext/ManoptJuMPExt.jl
@@ -0,0 +1,481 @@
+module ManoptJuMPExt
+
+using Manopt
+using LinearAlgebra
+if isdefined(Base, :get_extension)
+    using JuMP: JuMP
+else
+    # imports need to be relative for Requires.jl-based workflows:
+    # https://github.com/JuliaArrays/ArrayInterface.jl/pull/387
+    using ..JuMP: JuMP
+end
+const MOI = JuMP.MOI
+using ManifoldsBase
+using ManifoldDiff
+
+function __init__()
+    # So that the user can use the convenient `Manopt.JuMP_Optimizer`
+    if isdefined(Base, :setglobal!)
+        setglobal!(Manopt, :JuMP_Optimizer, Optimizer)
+        setglobal!(Manopt, :JuMP_VectorizedManifold, VectorizedManifold)
+        setglobal!(Manopt, :JuMP_ArrayShape, ArrayShape)
+    else
+        Manopt.eval(:(const JuMP_Optimizer = $Optimizer))
+        Manopt.eval(:(const JuMP_VectorizedManifold = $VectorizedManifold))
+        Manopt.eval(:(const JuMP_ArrayShape = $ArrayShape))
+    end
+    return nothing
+end
+
+struct VectorizedManifold{M<:ManifoldsBase.AbstractManifold} <: MOI.AbstractVectorSet
+    manifold::M
+end
+
+"""
+    MOI.dimension(set::VectorizedManifold)
+
+Return the dimension of the vectorized representation of points on the manifold.
+Since the MOI variables are real-valued, if the [`representation_size`](https://juliamanifolds.github.io/ManifoldsBase.jl/stable/functions/#ManifoldsBase.representation_size-Tuple{AbstractManifold}) has product `n`, a point (or tangent vector) is vectorized to an element of (a subset of) ``\\mathbb R^n``.
+"""
+function MOI.dimension(set::VectorizedManifold)
+    return prod(ManifoldsBase.representation_size(set.manifold))
+end
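+
+# Illustration (not part of the interface): for the Stiefel manifold
+# `Stiefel(2, 2)` of orthonormal 2x2 matrices, `representation_size` returns
+# `(2, 2)`, so `MOI.dimension(VectorizedManifold(Stiefel(2, 2)))` is
+# `prod((2, 2)) == 4`, that is, one point is represented by four real-valued
+# MOI variables.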
+
+mutable struct Optimizer <: MOI.AbstractOptimizer
+    # Manifold on which all the decision variables live
+    manifold::Union{Nothing,ManifoldsBase.AbstractManifold}
+    # Description of the problem in Manopt
+    problem::Union{Nothing,Manopt.AbstractManoptProblem}
+    # State of the optimizer
+    state::Union{Nothing,Manopt.AbstractManoptSolverState}
+    # Starting value for each variable
+    variable_primal_start::Vector{Union{Nothing,Float64}}
+    # Sense of the optimization, e.g., min, max or no objective
+    sense::MOI.OptimizationSense
+    # Model used to compute the gradient of the objective function with AD
+    nlp_model::MOI.Nonlinear.Model
+    # Solver parameters set with `MOI.RawOptimizerAttribute`
+    options::Dict{String,Any}
+    function Optimizer()
+        return new(
+            nothing,
+            nothing,
+            nothing,
+            Union{Nothing,Float64}[],
+            MOI.FEASIBILITY_SENSE,
+            MOI.Nonlinear.Model(),
+            Dict{String,Any}(DESCENT_STATE_TYPE => Manopt.GradientDescentState),
+        )
+    end
+end
+
+"""
+    MOI.get(::Optimizer, ::MOI.SolverVersion)
+
+Return the version of the Manopt solver; it corresponds to the version of
+Manopt.jl.
+"""
+MOI.get(::Optimizer, ::MOI.SolverVersion) = "0.4.42"
+
+function MOI.is_empty(model::Optimizer)
+    return isnothing(model.manifold) &&
+           isempty(model.variable_primal_start) &&
+           MOI.is_empty(model.nlp_model) &&
+           model.sense == MOI.FEASIBILITY_SENSE
+end
+
+"""
+    MOI.empty!(model::ManoptJuMPExt.Optimizer)
+
+Clear all model data from `model` but keep the `options` set.
+"""
+function MOI.empty!(model::Optimizer)
+    model.manifold = nothing
+    model.problem = nothing
+    model.state = nothing
+    empty!(model.variable_primal_start)
+    model.sense = MOI.FEASIBILITY_SENSE
+    MOI.empty!(model.nlp_model)
+    return nothing
+end
+
+"""
+    MOI.supports(::Optimizer, attr::MOI.RawOptimizerAttribute)
+
+Return a `Bool` indicating whether `attr.name` is a valid option name
+for `Manopt`.
+"""
+function MOI.supports(::Optimizer, ::MOI.RawOptimizerAttribute)
+    # FIXME Ideally, this should only return `true` if it is a valid keyword argument for
+    # one of the `...DescentState()` constructors. Is there an easy way to check this?
+    # Does it depend on the different solvers?
+    return true
+end
+
+"""
+    MOI.get(model::Optimizer, attr::MOI.RawOptimizerAttribute)
+
+Return the last `value` set by `MOI.set(model, attr, value)`.
+"""
+function MOI.get(model::Optimizer, attr::MOI.RawOptimizerAttribute)
+    return model.options[attr.name]
+end
+
+"""
+    MOI.set(model::Optimizer, attr::MOI.RawOptimizerAttribute, value)
+
+Set the value of the keyword argument `attr.name` that is passed to the
+constructor selected with `model.options[DESCENT_STATE_TYPE]`.
+"""
+function MOI.set(model::Optimizer, attr::MOI.RawOptimizerAttribute, value)
+    model.options[attr.name] = value
+    return nothing
+end
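+
+# Sketch: every raw attribute other than "descent_state_type" is forwarded in
+# `MOI.optimize!` as a keyword argument to the chosen state constructor, so for
+# the default `GradientDescentState` one could, for instance, set
+#     set_attribute(model, "stopping_criterion", StopAfterIteration(100))
+# (assuming the selected state constructor accepts that keyword).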
+""" +function MOI.get(model::Optimizer, ::MOI.SolverName) + return "Manopt with $(model.options[DESCENT_STATE_TYPE])" +end + +""" + MOI.supports_incremental_interface(::JuMP_Optimizer) + +Return `true` indicating that `Manopt.JuMP_Optimizer` implements +`MOI.add_constrained_variables` and `MOI.set` for +`MOI.ObjectiveFunction` so it can be used with [`JuMP.direct_model`](https://jump.dev/JuMP.jl/stable/api/JuMP/#JuMP.direct_model) +and does not require a `MOI.Utilities.CachingOptimizer`. +See [`MOI.supports_incremental_interface`](https://jump.dev/JuMP.jl/stable/moi/reference/models/#MathOptInterface.supports_incremental_interface). +""" +MOI.supports_incremental_interface(::Optimizer) = true + +""" + MOI.copy_to(dest::Optimizer, src::MOI.ModelLike) + +Because `supports_incremental_interface(dest)` is `true` we can simply +use `MOI.Utilities.default_copy_to`. This copies the variables with +`MOI.add_constrained_variables` and the objective sense with `MOI.set`. +` +""" +function MOI.copy_to(dest::Optimizer, src::MOI.ModelLike) + return MOI.Utilities.default_copy_to(dest, src) +end + +""" + MOI.supports_add_constrained_variables(::JuMP_Optimizer, ::Type{<:VectorizedManifold}) + +Return `true` indicating that `Manopt.JuMP_Optimizer` support optimization on +variables constrained to belong in a vectorized manifold [`Manopt.JuMP_VectorizedManifold`](@ref). +""" +function MOI.supports_add_constrained_variables(::Optimizer, ::Type{<:VectorizedManifold}) + return true +end + +""" + MOI.add_constrained_variables(model::Optimizer, set::VectorizedManifold) + +Add `MOI.dimension(set)` variables constrained in `set` and return the list +of variable indices that can be used to reference them as well a constraint +index for the constraint enforcing the membership of the variables in the +[`Manopt.JuMP_VectorizedManifold`](@ref) `set`. +""" +function MOI.add_constrained_variables(model::Optimizer, set::VectorizedManifold) + F = MOI.VectorOfVariables + if !isnothing(model.manifold) + throw( + MOI.AddConstraintNotAllowed{F,typeof(set)}( + "Only one manifold allowed, variables in `$(model.manifold)` have already been added.", + ), + ) + end + model.manifold = set.manifold + model.problem = nothing + model.state = nothing + n = MOI.dimension(set) + v = MOI.VariableIndex.(1:n) + for _ in 1:n + push!(model.variable_primal_start, nothing) + end + return v, MOI.ConstraintIndex{F,typeof(set)}(1) +end + +""" + MOI.is_valid(model::Optimizer, vi::MOI.VariableIndex) + +Return whether `vi` is a valid variable index. +""" +function MOI.is_valid(model::Optimizer, vi::MOI.VariableIndex) + return !isnothing(model.manifold) && + 1 <= vi.value <= MOI.dimension(VectorizedManifold(model.manifold)) +end + +""" + MOI.get(model::Optimizer, ::MOI.NumberOfVariables) + +Return the number of variables added in the model, this corresponds +to the [`MOI.dimension`](@ref) of the [`Manopt.JuMP_VectorizedManifold`](@ref). +""" +function MOI.get(model::Optimizer, ::MOI.NumberOfVariables) + if isnothing(model.manifold) + return 0 + else + return MOI.dimension(VectorizedManifold(model.manifold)) + end +end + +""" + MOI.supports(::Manopt.JuMP_Optimizer, attr::MOI.RawOptimizerAttribute) + +Return `true` indicating that `Manopt.JuMP_Optimizer` supports starting values +for the variables. 
+""" +function MOI.supports(::Optimizer, ::MOI.VariablePrimalStart, ::Type{MOI.VariableIndex}) + return true +end + +""" + function MOI.set( + model::Optimizer, + ::MOI.VariablePrimalStart, + vi::MOI.VariableIndex, + value::Union{Real,Nothing}, + ) + +Set the starting value of the variable of index `vi` to `value`. Note that if +`value` is `nothing` then it essentially unset any previous starting values set +and hence `MOI.optimize!` unless another starting value is set. +""" +function MOI.set( + model::Optimizer, + ::MOI.VariablePrimalStart, + vi::MOI.VariableIndex, + value::Union{Real,Nothing}, +) + MOI.throw_if_not_valid(model, vi) + model.variable_primal_start[vi.value] = value + model.state = nothing + return nothing +end + +""" + MOI.supports(::Optimizer, ::Union{MOI.ObjectiveSense,MOI.ObjectiveFunction}) + +Return `true` indicating that `Optimizer` supports being set the objective +sense (that is, min, max or feasibility) and the objective function. +""" +function MOI.supports(::Optimizer, ::Union{MOI.ObjectiveSense,MOI.ObjectiveFunction}) + return true +end + +""" + MOI.set(model::Optimizer, ::MOI.ObjectiveSense, sense::MOI.OptimizationSense) + +Modify the objective sense to either `MOI.MAX_SENSE`, `MOI.MIN_SENSE` or +`MOI.FEASIBILITY_SENSE`. +""" +function MOI.set(model::Optimizer, ::MOI.ObjectiveSense, sense::MOI.OptimizationSense) + model.sense = sense + return nothing +end + +""" + MOI.set(model::Optimizer, ::MOI.ObjectiveSense, sense::MOI.OptimizationSense) + +Return the objective sense, defaults to `MOI.FEASIBILITY_SENSE` if no sense has +already been set. +""" +MOI.get(model::Optimizer, ::MOI.ObjectiveSense) = model.sense + +""" + MOI.set(model::Optimizer, ::MOI.ObjectiveFunction{F}, func::F) where {F} + +Set the objective function as `func` for `model`. 
+""" +function MOI.set(model::Optimizer, ::MOI.ObjectiveFunction{F}, func::F) where {F} + nl = convert(MOI.ScalarNonlinearFunction, func) + MOI.Nonlinear.set_objective(model.nlp_model, nl) + model.problem = nothing + model.state = nothing + return nothing +end + +# Name of the attribute for the type of the descent state to be used as follows: +# ```julia +# set_attribute(model, "descent_state_type", Manopt.TrustRegionsState) +# ``` +const DESCENT_STATE_TYPE = "descent_state_type" + +function MOI.optimize!(model::Optimizer) + start = Float64[ + if isnothing(model.variable_primal_start[i]) + error("No starting value specified for `$i`th variable.") + else + model.variable_primal_start[i] + end for i in eachindex(model.variable_primal_start) + ] + backend = MOI.Nonlinear.SparseReverseMode() + vars = [MOI.VariableIndex(i) for i in eachindex(model.variable_primal_start)] + evaluator = MOI.Nonlinear.Evaluator(model.nlp_model, backend, vars) + MOI.initialize(evaluator, [:Grad]) + function eval_f_cb(M, x) + if model.sense == MOI.FEASIBILITY_SENSE + return 0.0 + end + obj = MOI.eval_objective(evaluator, JuMP.vectorize(x, _shape(model.manifold))) + if model.sense == MOI.MAX_SENSE + obj = -obj + end + return obj + end + function eval_grad_f_cb(M, X) + x = JuMP.vectorize(X, _shape(model.manifold)) + grad_f = zeros(length(x)) + if model.sense == MOI.FEASIBILITY_SENSE + grad_f .= zero(eltype(grad_f)) + else + MOI.eval_objective_gradient(evaluator, grad_f, x) + end + if model.sense == MOI.MAX_SENSE + LinearAlgebra.rmul!(grad_f, -1) + end + reshaped_grad_f = JuMP.reshape_vector(grad_f, _shape(model.manifold)) + return ManifoldDiff.riemannian_gradient(model.manifold, X, reshaped_grad_f) + end + mgo = Manopt.ManifoldGradientObjective(eval_f_cb, eval_grad_f_cb) + dmgo = decorate_objective!(model.manifold, mgo) + model.problem = DefaultManoptProblem(model.manifold, dmgo) + reshaped_start = JuMP.reshape_vector(start, _shape(model.manifold)) + descent_state_type = model.options[DESCENT_STATE_TYPE] + kws = Dict{Symbol,Any}( + Symbol(key) => value for (key, value) in model.options if key != DESCENT_STATE_TYPE + ) + s = descent_state_type(model.manifold, reshaped_start; kws...) + model.state = decorate_state!(s) + solve!(model.problem, model.state) + return nothing +end + +struct ArrayShape{N} <: JuMP.AbstractShape + size::NTuple{N,Int} +end + +function JuMP.vectorize(array::Array{T,N}, ::ArrayShape{M}) where {T,N,M} + return vec(array) +end + +function JuMP.reshape_vector(vector::Vector, shape::ArrayShape) + return reshape(vector, shape.size) +end + +function _shape(m::ManifoldsBase.AbstractManifold) + return ArrayShape(ManifoldsBase.representation_size(m)) +end + +""" + JuMP.build_variable(::Function, func, m::ManifoldsBase.AbstractManifold) + +Build a `JuMP.VariablesConstrainedOnCreation` object containing variables +and the [`Manopt.JuMP_VectorizedManifold`](@ref) in which they should belong as well as the +`shape` that can be used to go from the vectorized MOI representation to the +shape of the manifold, e.g., [`Manopt.JuMP_ArrayShape`](@ref). 
+""" +function JuMP.build_variable(::Function, func, m::ManifoldsBase.AbstractManifold) + shape = _shape(m) + return JuMP.VariablesConstrainedOnCreation( + JuMP.vectorize(func, shape), VectorizedManifold(m), shape + ) +end + +""" + MOI.get(model::Optimizer, ::MOI.ResultCount) + +Return `MOI.OPTIMIZE_NOT_CALLED` if `optimize!` hasn't been called yet and +`MOI.LOCALLY_SOLVED` otherwise indicating that the solver has solved the +problem to local optimality the the value of `MOI.RawStatusString` for more +details on why the solver stopped. +""" +function MOI.get(model::Optimizer, ::MOI.TerminationStatus) + if isnothing(model.state) + return MOI.OPTIMIZE_NOT_CALLED + else + return MOI.LOCALLY_SOLVED + end +end + +""" + MOI.get(model::Optimizer, ::MOI.ResultCount) + +Return `0` if `optimize!` hasn't been called yet and +`1` otherwise indicating that one solution is available. +""" +function MOI.get(model::Optimizer, ::MOI.ResultCount) + if isnothing(model.state) + return 0 + else + return 1 + end +end + +""" + MOI.get(model::Optimizer, ::MOI.PrimalStatus) + +Return `MOI.NO_SOLUTION` if `optimize!` hasn't been called yet and +`MOI.FEASIBLE_POINT` otherwise indicating that a solution is available +to query with `MOI.VariablePrimalStart`. +""" +function MOI.get(model::Optimizer, ::MOI.PrimalStatus) + if isnothing(model.state) + return MOI.NO_SOLUTION + else + return MOI.FEASIBLE_POINT + end +end + +""" + MOI.get(::Optimizer, ::MOI.DualStatus) + +Returns `MOI.NO_SOLUTION` indicating that there is no dual solution +available. +""" +MOI.get(::Optimizer, ::MOI.DualStatus) = MOI.NO_SOLUTION + +""" + MOI.get(model::Optimizer, ::MOI.RawStatusString) + +Return a `String` containing `Manopt.get_reason` without the ending newline +character. +""" +function MOI.get(model::Optimizer, ::MOI.RawStatusString) + # `strip` removes the `\n` at the end and returns an `AbstractString` + # Since MOI wants a `String`, we pass it through `string` + return string(strip(get_reason(model.state))) +end + +""" + MOI.get(model::Optimizer, attr::MOI.ObjectiveValue) + +Return the value of the objective function evaluated at the solution. +""" +function MOI.get(model::Optimizer, attr::MOI.ObjectiveValue) + MOI.check_result_index_bounds(model, attr) + solution = Manopt.get_solver_return(model.state) + return get_cost(model.problem, solution) +end + +""" + MOI.get(model::Optimizer, attr::MOI.VariablePrimal, vi::MOI.VariableIndex) + +Return the value of the solution for the variable of index `vi`. +""" +function MOI.get(model::Optimizer, attr::MOI.VariablePrimal, vi::MOI.VariableIndex) + MOI.check_result_index_bounds(model, attr) + MOI.throw_if_not_valid(model, vi) + solution = Manopt.get_solver_return(get_objective(model.problem), model.state) + return solution[vi.value] +end + +end # module diff --git a/src/Manopt.jl b/src/Manopt.jl index 83a99a2dc3..71cd5e2f74 100644 --- a/src/Manopt.jl +++ b/src/Manopt.jl @@ -183,11 +183,50 @@ include("helpers/LineSearchesTypes.jl") include("data/artificialDataFunctions.jl") include("deprecated.jl") +""" + Manopt.JuMP_Optimizer() + +Creates a new optimizer object for the [MathOptInterface](https://jump.dev/MathOptInterface.jl/) (MOI). +An alias `Manopt.JuMP_Optimizer` is defined for convenience. + +The minimization of a function `f(X)` of an an array `X[1:n1,1:n2,...]` +over a manifold `M` starting at `X0`, can be modeled as follows: +```julia +using JuMP +model = Model(Manopt.JuMP_Optimizer) +@variable(model, X[i1=1:n1,i2=1:n2,...] 
+
+end # module
diff --git a/src/Manopt.jl b/src/Manopt.jl
index 83a99a2dc3..71cd5e2f74 100644
--- a/src/Manopt.jl
+++ b/src/Manopt.jl
@@ -183,11 +183,50 @@ include("helpers/LineSearchesTypes.jl")
 include("data/artificialDataFunctions.jl")
 include("deprecated.jl")
 
+"""
+    Manopt.JuMP_Optimizer()
+
+Create a new optimizer object for the [MathOptInterface](https://jump.dev/MathOptInterface.jl/) (MOI).
+The alias `Manopt.JuMP_Optimizer` is defined for convenience.
+
+The minimization of a function `f(X)` of an array `X[1:n1,1:n2,...]`
+over a manifold `M`, starting at `X0`, can be modeled as follows:
+```julia
+using JuMP
+model = Model(Manopt.JuMP_Optimizer)
+@variable(model, X[i1=1:n1,i2=1:n2,...] in M, start = X0[i1,i2,...])
+@objective(model, Min, f(X))
+```
+The optimizer assumes that `M` has an `Array` shape described
+by `ManifoldsBase.representation_size`.
+"""
+global JuMP_Optimizer
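+
+# A concrete sketch of the pattern above (assuming JuMP.jl and Manifolds.jl are
+# loaded; `Sphere(2)` is the unit sphere in `R^3`):
+#
+#     using JuMP, Manopt, Manifolds
+#     model = Model(Manopt.JuMP_Optimizer)
+#     @variable(model, x[i = 1:3] in Sphere(2), start = 1 / sqrt(3))
+#     @objective(model, Min, sum(x))
+#     optimize!(model)
+#     value.(x)  # ≈ -ones(3) / sqrt(3)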
+
+"""
+    struct VectorizedManifold{M} <: MOI.AbstractVectorSet
+        manifold::M
+    end
+
+Representation of points of `manifold` as a vector in ``\\mathbb R^n`` where `n` is
+`MOI.dimension(VectorizedManifold(manifold))`.
+"""
+global JuMP_VectorizedManifold
+
+"""
+    struct ArrayShape{N} <: JuMP.AbstractShape
+        size::NTuple{N,Int}
+    end
+
+Shape of an `Array{T,N}` of size `size`.
+"""
+global JuMP_ArrayShape
+
 function __init__()
     #
     # Requires fallback for Julia < 1.9
     #
     @static if !isdefined(Base, :get_extension)
+        @require JuMP = "4076af6c-e467-56ae-b986-b466b2749572" begin
+            include("../ext/ManoptJuMPExt.jl")
+        end
         @require Manifolds = "1cead3c2-87b3-11e9-0ccd-23c62b72b94e" begin
             include("../ext/ManoptManifoldsExt/ManoptManifoldsExt.jl")
         end
diff --git a/test/MOI_wrapper.jl b/test/MOI_wrapper.jl
new file mode 100644
index 0000000000..3a76c4dd2f
--- /dev/null
+++ b/test/MOI_wrapper.jl
@@ -0,0 +1,106 @@
+using Test
+using LinearAlgebra
+using Manifolds
+using Manopt
+using JuMP
+
+function test_sphere()
+    model = Model(Manopt.JuMP_Optimizer)
+    start = normalize(1:3)
+    @variable(model, x[i=1:3] in Sphere(2), start = start[i])
+
+    @objective(model, Min, sum(x))
+    @test MOI.get(unsafe_backend(model), MOI.ResultCount()) == 0
+    optimize!(model)
+    @test MOI.get(unsafe_backend(model), MOI.NumberOfVariables()) == 3
+    @test termination_status(model) == MOI.LOCALLY_SOLVED
+    @test primal_status(model) == MOI.FEASIBLE_POINT
+    @test dual_status(model) == MOI.NO_SOLUTION
+    @test objective_value(model) ≈ -√3
+    @test value.(x) ≈ -inv(√3) * ones(3) rtol = 1e-2
+    @test raw_status(model) isa String
+    @test raw_status(model)[end] != '\n'
+
+    @objective(model, Max, sum(x))
+    set_start_value.(x, start)
+    optimize!(model)
+    # For Max sense the wrapper reports the internally minimized cost, i.e. `-f`
+    @test objective_value(model) ≈ -√3
+    @test value.(x) ≈ inv(√3) * ones(3) rtol = 1e-2
+    @test raw_status(model) isa String
+    @test raw_status(model)[end] != '\n'
+
+    @objective(model, Min, sum(xi^4 for xi in x))
+    set_start_value.(x, start)
+    optimize!(model)
+    @test objective_value(model) ≈ 1 / 3 rtol = 1e-4
+    @test value.(x) ≈ inv(√3) * ones(3) rtol = 1e-2
+    @test raw_status(model) isa String
+    @test raw_status(model)[end] != '\n'
+
+    set_objective_sense(model, MOI.FEASIBILITY_SENSE)
+    optimize!(model)
+    @test sum(value.(x) .^ 2) ≈ 1
+
+    set_start_value(x[3], nothing)
+    err = ErrorException("No starting value specified for variable `3`.")
+    @test_throws err optimize!(model)
+    set_start_value(x[3], 1.0)
+
+    @variable(model, [1:2, 1:2] in Stiefel(2, 2))
+    @test_throws MOI.AddConstraintNotAllowed optimize!(model)
+end
+
+function _test_stiefel(solver)
+    A = [
+        1 -1
+        -1 1
+    ]
+    # Use `add_bridges = false` in order to test `copy_to`
+    model = Model(Manopt.JuMP_Optimizer; add_bridges=false)
+    dst = "descent_state_type"
+    @test MOI.supports(unsafe_backend(model), MOI.RawOptimizerAttribute(dst))
+    set_attribute(model, dst, solver)
+    @test get_attribute(model, dst) == solver
+    @variable(model, U[1:2, 1:2] in Stiefel(2, 2), start = 1.0)
+
+    @objective(model, Min, sum((A - U) .^ 2))
+    optimize!(model)
+    @test objective_value(model) ≈ 2
+    @test value.(U) ≈ [1 0; 0 1]
+    return nothing
+end
+
+function test_stiefel()
+    return _test_stiefel(Manopt.GradientDescentState)
+end
+
+@testset "JuMP tests" begin
+    test_sphere()
+    test_stiefel()
+end
+
+function test_runtests()
+    optimizer = Manopt.JuMP_Optimizer()
+    config = MOI.Test.Config(; exclude=Any[MOI.ListOfModelAttributesSet])
+    return MOI.Test.runtests(
+        optimizer,
+        config;
+        exclude=String[
+            # See https://github.com/jump-dev/MathOptInterface.jl/pull/2195
+            "test_model_copy_to_UnsupportedConstraint",
+            "test_model_copy_to_UnsupportedAttribute",
+            "test_model_ScalarFunctionConstantNotZero",
+            # See https://github.com/jump-dev/MathOptInterface.jl/pull/2196/
+            "test_objective_ScalarQuadraticFunction_in_ListOfModelAttributesSet",
+            "test_objective_ScalarAffineFunction_in_ListOfModelAttributesSet",
+            "test_objective_VariableIndex_in_ListOfModelAttributesSet",
+            "test_objective_set_via_modify",
+            "test_objective_ObjectiveSense_in_ListOfModelAttributesSet",
+        ],
+    )
+end
+
+@testset "MOI tests" begin
+    test_runtests()
+end
diff --git a/test/runtests.jl b/test/runtests.jl
index 97045ac724..3d1f361186 100644
--- a/test/runtests.jl
+++ b/test/runtests.jl
@@ -64,5 +64,6 @@ include("utils/example_tasks.jl")
         include("solvers/test_truncated_cg.jl")
         include("solvers/test_trust_regions.jl")
     end
+    include("MOI_wrapper.jl")
     include("test_deprecated.jl")
 end