✨ Add support for solver at optimize with bridges #1626

Merged · 9 commits · Nov 20, 2018

Changes from 1 commit
2 changes: 1 addition & 1 deletion REQUIRE
@@ -1,5 +1,5 @@
 julia 0.6
-MathOptInterface 0.6 0.7
+MathOptInterface 0.6.3 0.7
 ForwardDiff 0.5 0.11
 Calculus
 DataStructures
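(For readers unfamiliar with the pre-Project.toml `REQUIRE` format: each line is `Package lowerbound upperbound`, with the upper bound exclusive, so this change pins MathOptInterface to versions in `[0.6.3, 0.7)`.)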
77 changes: 36 additions & 41 deletions src/JuMP.jl
@@ -134,8 +134,8 @@ end
 # Model

 # Model has three modes:
-# 1) Automatic: moi_backend field holds a LazyBridgeOptimizer{CachingOptimizer} in Automatic mode.
-# 2) Manual: moi_backend field holds a LazyBridgeOptimizer{CachingOptimizer} in Manual mode.
+# 1) Automatic: moi_backend field holds a CachingOptimizer in Automatic mode.
+# 2) Manual: moi_backend field holds a CachingOptimizer in Manual mode.
 # 3) Direct: moi_backend field holds an AbstractOptimizer. No extra copy of the model is stored. The moi_backend must support add_constraint etc.
 # Methods to interact with the CachingOptimizer are defined in solverinterface.jl.
 @enum ModelMode Automatic Manual Direct
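For orientation, here is a minimal sketch of how each mode is obtained after this change. It is illustrative only: GLPK stands in for any solver with an MOI wrapper, and `MOIU` abbreviates `MathOptInterface.Utilities` as in the JuMP sources.

```julia
using JuMP, MathOptInterface, GLPK
const MOIU = MathOptInterface.Utilities

m1 = Model()                              # Automatic mode: CachingOptimizer backend
m2 = Model(caching_mode = MOIU.Manual)    # Manual mode: caller drives attach/copy
m3 = JuMP.direct_model(GLPK.Optimizer())  # Direct mode: no cache, solver held directly
```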
@@ -157,7 +157,7 @@ mutable struct Model <: AbstractModel
     variable_to_fix::Dict{MOIVAR, MOIFIX}
     variable_to_integrality::Dict{MOIVAR, MOIINT}
     variable_to_zero_one::Dict{MOIVAR, MOIBIN}
-    # In Manual and Automatic modes, LazyBridgeOptimizer{CachingOptimizer}.
+    # In Manual and Automatic modes, CachingOptimizer.
     # In Direct mode, will hold an AbstractOptimizer.
     moi_backend::MOI.AbstractOptimizer
     # Hook into a solve call...function of the form f(m::Model; kwargs...),
@@ -184,11 +184,10 @@ a cache. The mode of the `CachingOptimizer` storing this cache is
 `caching_mode`. The optimizer can be set later in the [`JuMP.optimize!`](@ref)
 call. If `bridge_constraints` is true, constraints that are not supported by the
 optimizer are automatically bridged to equivalent supported constraints when
-an appropriate is defined in the `MathOptInterface.Bridges` module or is
-defined in another module and is explicitely added.
+an appropriate transformation is defined in the `MathOptInterface.Bridges`
+module or is defined in another module and is explicitly added.
 """
 function Model(; caching_mode::MOIU.CachingOptimizerMode=MOIU.Automatic,
-               bridge_constraints::Bool=true,
                solver=nothing)
     if solver !== nothing
         error("The solver= keyword is no longer available in JuMP 0.19 and " *
@@ -198,13 +197,7 @@ function Model(; caching_mode::MOIU.CachingOptimizerMode=MOIU.Automatic,
     universal_fallback = MOIU.UniversalFallback(JuMPMOIModel{Float64}())
     caching_opt = MOIU.CachingOptimizer(universal_fallback,
                                         caching_mode)
-    if bridge_constraints
-        backend = MOI.Bridges.fullbridgeoptimizer(caching_opt,
-                                                  Float64)
-    else
-        backend = caching_opt
-    end
-    return direct_model(backend)
+    return direct_model(caching_opt)
 end

"""
@@ -224,10 +217,11 @@ The following creates a model using the optimizer
 model = JuMP.Model(with_optimizer(IpoptOptimizer, print_level=0))
 ```
 """
-function Model(optimizer_factory::OptimizerFactory; kwargs...)
+function Model(optimizer_factory::OptimizerFactory;
+               bridge_constraints::Bool=true, kwargs...)
     model = Model(; kwargs...)
-    optimizer = optimizer_factory()
-    MOIU.resetoptimizer!(model, optimizer)
+    set_optimizer(model, optimizer_factory,
+                  bridge_constraints=bridge_constraints)
     return model
 end
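
A hedged usage sketch of the new keyword (GLPK again purely illustrative):

```julia
using JuMP, GLPK

# Bridging is on by default: unsupported constraints are reformulated when a
# bridge exists.
model = Model(with_optimizer(GLPK.Optimizer))

# Opting out restricts the model to the solver's native constraint support.
raw = Model(with_optimizer(GLPK.Optimizer), bridge_constraints = false)
```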

@@ -269,22 +263,6 @@ if VERSION >= v"0.7-"
 end


-# In Automatic and Manual mode, `backend(model)` is either directly the
-# `CachingOptimizer` if `bridge_constraints=false` was passed in the constructor
-# or it is a `LazyBridgeOptimizer` and the `CachingOptimizer` is stored in the
-# `model` field
-function caching_optimizer(model::Model)
-    if backend(model) isa MOIU.CachingOptimizer
-        return backend(model)
-    elseif (backend(model) isa
-            MOI.Bridges.LazyBridgeOptimizer{<:MOIU.CachingOptimizer})
-        return backend(model).model
-    else
-        error("The function `caching_optimizer` cannot be called on a model " *
-              "in `Direct` mode.")
-    end
-end
-
 """
     backend(model::Model)

@@ -294,8 +272,7 @@ and whether there are any bridges in the model.

 If JuMP is in direct mode (i.e., the model was created using [`JuMP.direct_model`](@ref)),
 the backend will be the optimizer passed to `direct_model`. If JuMP is in manual
-or automatic mode, the backend will either be a `MOI.Utilities.CachingOptimizer`
-or a `MOI.Bridges.LazyBridgeOptimizer`.
+or automatic mode, the backend is a `MOI.Utilities.CachingOptimizer`.

 This function should only be used by advanced users looking to access low-level
 MathOptInterface or solver-specific functionality.
@@ -308,16 +285,34 @@ backend(model::Model) = model.moi_backend
 Return mode (Direct, Automatic, Manual) of model.
 """
 function mode(model::Model)
-    if !(backend(model) isa MOI.Bridges.LazyBridgeOptimizer{<:MOIU.CachingOptimizer} ||
-         backend(model) isa MOIU.CachingOptimizer)
+    if !(backend(model) isa MOIU.CachingOptimizer)
         return Direct
-    elseif caching_optimizer(model).mode == MOIU.Automatic
+    elseif backend(model).mode == MOIU.Automatic
        return Automatic
    else
        return Manual
    end
 end

+"""
+    bridge_constraints(model::Model)
+
+Return a `Bool` indicating whether the model `model` is in manual or automatic
+mode, the optimizer is set and unsupported constraints are automatically bridged
+to equivalent supported constraints when an appropriate transformation is
+available.
+"""
+function bridge_constraints(model::Model)
+    caching_optimizer = backend(model)
+    if caching_optimizer isa MOIU.CachingOptimizer
+        return caching_optimizer.optimizer isa MOI.Bridges.LazyBridgeOptimizer
+    else
+        # Direct mode
+        return false
+    end
+end
+

 """
     num_variables(model::Model)

Review comments on this hunk:

Member (on the `bridge_constraints` docstring): The wording is a bit confusing. Maybe split into cases. When in manual or automatic mode, returns X. When in direct mode, returns false.

Member (on the `caching_optimizer` local variable): Nit: `if backend(model) isa MOIU.CachingOptimizer`. It's weird to call something `caching_optimizer` if it might not be one.

Member (author): I renamed it `moi_backend`. The advantage of storing it in a local variable is that inference can use the fact that it is in the first clause of the `if` to infer that the variable is a `CachingOptimizer`, and we lose a bit of the disadvantage that `moi_backend` is not concretely typed.
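
A sketch of what the two queries return under each configuration, assuming an optimizer has been set so the bridge layer exists (solver illustrative):

```julia
using JuMP, GLPK

model = Model(with_optimizer(GLPK.Optimizer))
JuMP.mode(model)                 # Automatic
JuMP.bridge_constraints(model)   # true: backend(model).optimizer is a LazyBridgeOptimizer

direct = JuMP.direct_model(GLPK.Optimizer())
JuMP.mode(direct)                # Direct
JuMP.bridge_constraints(direct)  # false: direct mode never bridges (the case the reviewer asked to spell out)
```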

@@ -439,17 +434,17 @@ function optimizer_index(v::VariableRef)
     if mode(model) == Direct
         return index(v)
     else
-        @assert caching_optimizer(model).state == MOIU.AttachedOptimizer
-        return caching_optimizer(model).model_to_optimizer_map[index(v)]
+        @assert backend(model).state == MOIU.AttachedOptimizer
+        return backend(model).model_to_optimizer_map[index(v)]
     end
 end

 function optimizer_index(cr::ConstraintRef{Model})
     if mode(cr.model) == Direct
         return index(cr)
     else
-        @assert caching_optimizer(cr.model).state == MOIU.AttachedOptimizer
-        return caching_optimizer(cr.model).model_to_optimizer_map[index(cr)]
+        @assert backend(cr.model).state == MOIU.AttachedOptimizer
+        return backend(cr.model).model_to_optimizer_map[index(cr)]
     end
 end

16 changes: 9 additions & 7 deletions src/constraints.jl
@@ -218,23 +218,25 @@ function constraint_object(ref::ConstraintRef{Model, MOICON{FuncType, SetType}})
 end

 """
-    add_constraint(m::Model, c::AbstractConstraint, name::String="")
+    add_constraint(model::Model, c::AbstractConstraint, name::String="")

-Add a constraint `c` to `Model m` and sets its name.
+Add a constraint `c` to `Model model` and set its name.
 """
-function add_constraint(m::Model, c::AbstractConstraint, name::String="")
+function add_constraint(model::Model, c::AbstractConstraint, name::String="")
     f = moi_function(c)
     s = moi_set(c)
-    if !MOI.supports_constraint(backend(m), typeof(f), typeof(s))
-        if backend(m) isa MOI.Bridges.LazyBridgeOptimizer
+    if !MOI.supports_constraint(backend(model), typeof(f), typeof(s))
+        if mode(model) == Direct
+            bridge_message = "."
+        elseif bridge_constraints(model)
             bridge_message = " and there are no bridges that can reformulate it into supported constraints."
         else
             bridge_message = ", try using `bridge_constraints=true` in the `JuMP.Model` constructor if you believe the constraint can be reformulated to constraints supported by the solver."
         end
         error("Constraints of type $(typeof(f))-in-$(typeof(s)) are not supported by the solver" * bridge_message)
     end
-    cindex = MOI.add_constraint(backend(m), f, s)
-    cref = ConstraintRef(m, cindex, shape(c))
+    cindex = MOI.add_constraint(backend(model), f, s)
+    cref = ConstraintRef(model, cindex, shape(c))
     if !isempty(name)
         set_name(cref, name)
     end
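In plain terms, the error message now has three endings: in direct mode it simply ends with a period; if bridging is enabled but no bridge applies, it says so; and if bridging was disabled, it suggests passing `bridge_constraints=true` to the `JuMP.Model` constructor.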
8 changes: 2 additions & 6 deletions src/copy.jl
@@ -97,12 +97,8 @@ function copy_model(model::Model)
              " instead of the `direct_model` constructor to be able to copy",
              " the constructed model.")
     end
-    caching_mode = caching_optimizer(model).mode
-    # TODO add bridges added to the bridge optimizer that are not part of the
-    # fullbridgeoptimizer
-    bridge_constraints = backend(model) isa MOI.Bridges.LazyBridgeOptimizer{<:MOIU.CachingOptimizer}
-    new_model = Model(caching_mode = caching_mode,
-                      bridge_constraints = bridge_constraints)
+    caching_mode = backend(model).mode
+    new_model = Model(caching_mode = caching_mode)

     # Copy the MOI backend, note that variable and constraint indices may have
     # changed, the `index_map` gives the map between the indices of
10 changes: 5 additions & 5 deletions src/objective.jl
@@ -115,12 +115,12 @@ However, it is not convertible to a variable.
 julia> JuMP.objective_function(model, JuMP.VariableRef)
 ERROR: InexactError: convert(MathOptInterface.SingleVariable, MathOptInterface.ScalarAffineFunction{Float64}(MathOptInterface.ScalarAffineTerm{Float64}[ScalarAffineTerm{Float64}(2.0, VariableIndex(1))], 1.0))
 Stacktrace:
- [1] convert at /home/blegat/.julia/dev/MathOptInterface/src/functions.jl:393 [inlined]
- [2] get(::JuMP.JuMPMOIModel{Float64}, ::MathOptInterface.ObjectiveFunction{MathOptInterface.SingleVariable}) at /home/blegat/.julia/dev/MathOptInterface/src/Utilities/model.jl:259
- [3] get at /home/blegat/.julia/dev/MathOptInterface/src/Utilities/universalfallback.jl:105 [inlined]
- [4] get at /home/blegat/.julia/dev/MathOptInterface/src/Utilities/cachingoptimizer.jl:436 [inlined]
+ [1] convert at /home/blegat/.julia/dev/MathOptInterface/src/functions.jl:398 [inlined]
+ [2] get(::JuMP.JuMPMOIModel{Float64}, ::MathOptInterface.ObjectiveFunction{MathOptInterface.SingleVariable}) at /home/blegat/.julia/dev/MathOptInterface/src/Utilities/model.jl:290
+ [3] get at /home/blegat/.julia/dev/MathOptInterface/src/Utilities/universalfallback.jl:114 [inlined]
+ [4] get at /home/blegat/.julia/dev/MathOptInterface/src/Utilities/cachingoptimizer.jl:439 [inlined]
  [5] get(::MathOptInterface.Bridges.LazyBridgeOptimizer{MathOptInterface.Utilities.CachingOptimizer{MathOptInterface.AbstractOptimizer,MathOptInterface.Utilities.UniversalFallback{JuMP.JuMPMOIModel{Float64}}},MathOptInterface.Bridges.AllBridgedConstraints{Float64}}, ::MathOptInterface.ObjectiveFunction{MathOptInterface.SingleVariable}) at /home/blegat/.julia/dev/MathOptInterface/src/Bridges/bridgeoptimizer.jl:172
- [6] objective_function(::Model, ::Type{VariableRef}) at /home/blegat/.julia/dev/JuMP/src/objective.jl:121
+ [6] objective_function(::Model, ::Type{VariableRef}) at /home/blegat/.julia/dev/JuMP/src/objective.jl:129
 [7] top-level scope at none:0
 ```
 """

Review thread on the updated stack-trace lines:

Member: Surely this fails because we're not filtering out the path as well? Or does that happen somewhere else?

Member (author): We are filtering out this output so I was not required to update it.

Member: Oh, I missed the `.*` at the end of the regex. `r"^Stacktrace:.*"s` might be more clear?

Member (author): If I'm not mistaken, `.*` doesn't catch newlines.

Member: The modifier `s` on the end makes it. I guess that's just as opaque... nm.
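The regex point in the thread is easy to check in plain Julia (independent of this PR):

```julia
trace = "Stacktrace:\n [1] convert at functions.jl:398\n [2] get at model.jl:290"

# Without the `s` flag, `.` does not match newlines, so the match stops at the header.
match(r"^Stacktrace:.*", trace).match   # == "Stacktrace:"

# With the `s` (DOTALL) flag, `.` matches newlines too, so the whole trace matches.
match(r"^Stacktrace:.*"s, trace).match  # == trace
```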
50 changes: 40 additions & 10 deletions src/optimizer_interface.jl
@@ -3,28 +3,57 @@
 # License, v. 2.0. If a copy of the MPL was not distributed with this
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.

+function error_if_direct_mode(model::Model, func::Symbol)
+    if mode(model) == Direct
+        error("The `$func` function is not supported in Direct mode.")
+    end
+end
+
 # These methods directly map to CachingOptimizer methods.
 # They cannot be called in Direct mode.
-function MOIU.resetoptimizer!(model::Model, optimizer::MOI.AbstractOptimizer)
-    @assert mode(model) != Direct
-    MOIU.resetoptimizer!(caching_optimizer(model), optimizer)
+function MOIU.resetoptimizer!(model::Model, optimizer::MOI.AbstractOptimizer,
+                              bridge_constraints::Bool=true)
+    error_if_direct_mode(model, :resetoptimizer!)
+    MOIU.resetoptimizer!(backend(model), optimizer)
 end

 function MOIU.resetoptimizer!(model::Model)
-    @assert mode(model) != Direct
-    MOIU.resetoptimizer!(caching_optimizer(model))
+    error_if_direct_mode(model, :resetoptimizer!)
+    MOIU.resetoptimizer!(backend(model))
 end

 function MOIU.dropoptimizer!(model::Model)
+    error_if_direct_mode(model, :dropoptimizer!)
     @assert mode(model) != Direct
-    MOIU.dropoptimizer!(caching_optimizer(model))
+    MOIU.dropoptimizer!(backend(model))
 end

 function MOIU.attachoptimizer!(model::Model)
+    error_if_direct_mode(model, :attachoptimizer!)
     @assert mode(model) != Direct
-    MOIU.attachoptimizer!(caching_optimizer(model))
+    MOIU.attachoptimizer!(backend(model))
 end

+function set_optimizer(model::Model, optimizer_factory::OptimizerFactory;
+                       bridge_constraints::Bool=true)
+    error_if_direct_mode(model, :set_optimizer)
+    optimizer = optimizer_factory()
+    if bridge_constraints
+        # The names are handled by the first caching optimizer.
+        # If default_copy_to without names is supported, no need for a second
+        # cache.
+        if !MOIU.supports_default_copy_to(optimizer, false)
+            if mode(model) == Manual
+                # TODO figure out what to do in manual mode with the two caches
+                error("Bridges in Manual mode with an optimizer not supporting `default_copy_to` is not supported yet")
+            end
+            universal_fallback = MOIU.UniversalFallback(JuMPMOIModel{Float64}())
+            optimizer = MOIU.CachingOptimizer(universal_fallback, optimizer)
+        end
+        optimizer = MOI.Bridges.fullbridgeoptimizer(optimizer, Float64)
+    end
+    MOIU.resetoptimizer!(model, optimizer)
+end

Review comments on this hunk:

Member (on the `@assert` retained in `dropoptimizer!`): The assert can be dropped now.

Member (on the `@assert` retained in `attachoptimizer!`): Same here.
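
A sketch of the layering `set_optimizer` builds when `bridge_constraints=true`, assuming a solver that supports `default_copy_to` without names so the second cache is skipped (GLPK illustrative, `MOIU = MathOptInterface.Utilities`):

```julia
using JuMP, GLPK

model = Model(with_optimizer(GLPK.Optimizer))
JuMP.backend(model)                  # MOIU.CachingOptimizer: the model cache
JuMP.backend(model).optimizer        # MOI.Bridges.LazyBridgeOptimizer: the bridge layer
JuMP.backend(model).optimizer.model  # GLPK.Optimizer (or a second cache around it)
```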

"""
optimize!(model::Model,
@@ -55,6 +84,7 @@ JuMP.optimize!(model, with_optimizer(GLPK.Optimizer))
 """
 function optimize!(model::Model,
                    optimizer_factory::Union{Nothing, OptimizerFactory}=nothing;
+                   bridge_constraints::Bool=true,
                    ignore_optimize_hook=(model.optimize_hook === nothing))
     # The nlp_data is not kept in sync, so re-set it here.
     # TODO: Consider how to handle incremental solves.
@@ -67,11 +97,11 @@ function optimize!(model::Model,
         if mode(model) == Direct
             error("An optimizer factory cannot be provided at the `optimize` call in Direct mode.")
         end
-        if MOIU.state(caching_optimizer(model)) != MOIU.NoOptimizer
+        if MOIU.state(backend(model)) != MOIU.NoOptimizer
             error("An optimizer factory cannot both be provided in the `Model` constructor and at the `optimize` call.")
         end
-        optimizer = optimizer_factory()
-        MOIU.resetoptimizer!(model, optimizer)
+        set_optimizer(model, optimizer_factory,
+                      bridge_constraints=bridge_constraints)
         MOIU.attachoptimizer!(model)
     end

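End to end, the workflow this PR enables looks as follows (a sketch; GLPK stands in for any solver factory):

```julia
using JuMP, GLPK

model = Model()
@variable(model, x >= 1)
@objective(model, Min, x)

# The factory is supplied at solve time; `optimize!` routes it through
# `set_optimizer`, so bridging is configured here too and can be turned off
# with `bridge_constraints=false`.
JuMP.optimize!(model, with_optimizer(GLPK.Optimizer))
```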
4 changes: 2 additions & 2 deletions test/constraint.jl
@@ -353,13 +353,13 @@ function test_shadow_price(model_string, constraint_dual, constraint_shadow)
                                          JuMP.JuMPMOIModel{Float64}(),
                                          eval_objective_value=false,
                                          eval_variable_constraint_dual=false))
-    mock_optimizer = JuMP.caching_optimizer(model).optimizer
+    mock_optimizer = JuMP.backend(model).optimizer.model
     MOI.set(mock_optimizer, MOI.TerminationStatus(), MOI.Success)
     MOI.set(mock_optimizer, MOI.DualStatus(), MOI.FeasiblePoint)
     JuMP.optimize!(model)

     @testset "shadow price of $constraint_name" for constraint_name in keys(constraint_dual)
-        ci = MOI.get(JuMP.caching_optimizer(model), MOI.ConstraintIndex,
+        ci = MOI.get(JuMP.backend(model), MOI.ConstraintIndex,
                      constraint_name)
         constraint_ref = JuMP.ConstraintRef(model, ci, JuMP.ScalarShape())
         MOI.set(mock_optimizer, MOI.ConstraintDual(),
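With the `caching_optimizer` helper gone, the tests reach the mock through two explicit layers: `JuMP.backend(m)` is the cache, `.optimizer` is the `LazyBridgeOptimizer`, and `.optimizer.model` is the `MockOptimizer` being configured.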
16 changes: 8 additions & 8 deletions test/generate_and_solve.jl
@@ -37,13 +37,13 @@

         model = JuMP.JuMPMOIModel{Float64}()
         MOIU.loadfromstring!(model, modelstring)
-        MOIU.test_models_equal(JuMP.caching_optimizer(m).model_cache, model, ["x","y"], ["c", "xub", "ylb"])
+        MOIU.test_models_equal(JuMP.backend(m).model_cache, model, ["x","y"], ["c", "xub", "ylb"])

         JuMP.optimize!(m, with_optimizer(MOIU.MockOptimizer,
                                          JuMP.JuMPMOIModel{Float64}(),
                                          eval_objective_value=false))

-        mockoptimizer = JuMP.caching_optimizer(m).optimizer
+        mockoptimizer = JuMP.backend(m).optimizer.model
         MOI.set(mockoptimizer, MOI.TerminationStatus(), MOI.Success)
         MOI.set(mockoptimizer, MOI.ObjectiveValue(), -1.0)
         MOI.set(mockoptimizer, MOI.ResultCount(), 1)
@@ -138,11 +138,11 @@

         model = JuMP.JuMPMOIModel{Float64}()
         MOIU.loadfromstring!(model, modelstring)
-        MOIU.test_models_equal(JuMP.caching_optimizer(m).model_cache, model, ["x","y"], ["xfix", "xint", "ybin"])
+        MOIU.test_models_equal(JuMP.backend(m).model_cache, model, ["x","y"], ["xfix", "xint", "ybin"])

         MOIU.attachoptimizer!(m)

-        mockoptimizer = JuMP.caching_optimizer(m).optimizer
+        mockoptimizer = JuMP.backend(m).optimizer.model
         MOI.set(mockoptimizer, MOI.TerminationStatus(), MOI.Success)
         MOI.set(mockoptimizer, MOI.ObjectiveValue(), 1.0)
         MOI.set(mockoptimizer, MOI.ResultCount(), 1)
@@ -186,13 +186,13 @@

         model = JuMP.JuMPMOIModel{Float64}()
         MOIU.loadfromstring!(model, modelstring)
-        MOIU.test_models_equal(JuMP.caching_optimizer(m).model_cache, model, ["x","y"], ["c1", "c2", "c3"])
+        MOIU.test_models_equal(JuMP.backend(m).model_cache, model, ["x","y"], ["c1", "c2", "c3"])

         JuMP.optimize!(m, with_optimizer(MOIU.MockOptimizer,
                                          JuMP.JuMPMOIModel{Float64}(),
                                          eval_objective_value=false))

-        mockoptimizer = JuMP.caching_optimizer(m).optimizer
+        mockoptimizer = JuMP.backend(m).optimizer.model
         MOI.set(mockoptimizer, MOI.TerminationStatus(), MOI.Success)
         MOI.set(mockoptimizer, MOI.ObjectiveValue(), -1.0)
         MOI.set(mockoptimizer, MOI.ResultCount(), 1)
@@ -245,7 +245,7 @@

         model = JuMP.JuMPMOIModel{Float64}()
         MOIU.loadfromstring!(model, modelstring)
-        MOIU.test_models_equal(JuMP.caching_optimizer(m).model_cache, model, ["x","y","z"], ["varsoc", "affsoc", "rotsoc"])
+        MOIU.test_models_equal(JuMP.backend(m).model_cache, model, ["x","y","z"], ["varsoc", "affsoc", "rotsoc"])

         mockoptimizer = MOIU.MockOptimizer(JuMP.JuMPMOIModel{Float64}(),
                                            eval_objective_value=false,
@@ -308,7 +308,7 @@

         model = JuMP.JuMPMOIModel{Float64}()
         MOIU.loadfromstring!(model, modelstring)
-        MOIU.test_models_equal(JuMP.caching_optimizer(m).model_cache, model,
+        MOIU.test_models_equal(JuMP.backend(m).model_cache, model,
                                ["x11","x12","x22"],
                                ["var_psd", "sym_psd", "con_psd"])