MathOptInterface.jl
Add example to use MatrixOfConstraints in the documentation
the documentation of the MatrixOfConstraints section is API-oriented with each function presented separately.
It could help to add an example that builds a MatrixOfConstraints from matrices and a RHS, à la linprog, and another that extracts the matrices and RHS from an existing model.
Hmm. It isn't really intended as an input-output layer, it's really just to help internal solver APIs. We should finish tidying up MatrixOptInterface.jl?
I guess a tutorial explaining how to get the LP standard form would be useful: https://github.com/jump-dev/MatrixOptInterface.jl
We can transfer the issue to MatrixOpt and then point to it in the MOIU documentation of MatrixOfConstraints yes
Yes, MatrixOptInterface could be rewritten with MatrixOfConstraints and provide these linprog, quadprog, etc...
I started looking into MatrixOptInterface for this, but it really has the opposite goal: converting from matrix formulations into MOI. I think we really need something in MOI that converts from MOI into matrix form. (We often get asked how to get the matrices from a JuMP model.)
One option is to add something like the following to MOI.Utilities:
module MatrixOptInterface
import MathOptInterface
import SparseArrays
const MOI = MathOptInterface
# Define `_LPProductOfSets{T}`, which tells `MatrixOfConstraints` to group the
# constraint rows by set type, in the order listed below (EqualTo rows first,
# then LessThan, GreaterThan, and Interval).
MOI.Utilities.@product_of_sets(
_LPProductOfSets,
MOI.EqualTo{T},
MOI.LessThan{T},
MOI.GreaterThan{T},
MOI.Interval{T},
)
# A `GenericModel` that stores the affine constraints in a
# `MatrixOfConstraints`: the coefficients go into a one-based sparse CSC
# matrix, and the row bounds into a `Hyperrectangle` (lower/upper vectors).
# Copying a model into this cache is what produces the matrix form.
const _CachedModel = MOI.Utilities.GenericModel{
Float64,
MOI.Utilities.ObjectiveContainer{Float64},
MOI.Utilities.VariablesContainer{Float64},
MOI.Utilities.MatrixOfConstraints{
Float64,
MOI.Utilities.MutableSparseMatrixCSC{
Float64,
Int,
MOI.Utilities.OneBasedIndexing,
},
MOI.Utilities.Hyperrectangle{Float64},
_LPProductOfSets{Float64},
},
}
"""
    LinearStandardForm{T}

A matrix ("standard form") representation of a linear program:

    sense: obj' x + obj_constant
    s.t.   row_lower <= A * x <= row_upper
           col_lower <=   x   <= col_upper

`binaries` and `integers` list the variables with `ZeroOne` and `Integer`
constraints. `column_map` and `row_map` translate the indices of the source
model into columns of `A` and rows of `A` respectively (the cache may reorder
both).
"""
struct LinearStandardForm{T}
# MIN_SENSE, MAX_SENSE, or FEASIBILITY_SENSE.
sense::MOI.OptimizationSense
# Objective coefficients, one entry per column of `A`.
obj::Vector{T}
obj_constant::T
A::SparseArrays.SparseMatrixCSC{T,Int}
# Row bounds; equality rows have row_lower[i] == row_upper[i].
row_lower::Vector{T}
row_upper::Vector{T}
# Variable bounds; unbounded entries are ±Inf.
col_lower::Vector{T}
col_upper::Vector{T}
binaries::Vector{MOI.VariableIndex}
integers::Vector{MOI.VariableIndex}
# Source-model variable index => column of `A`.
column_map::Dict{MOI.VariableIndex,Int}
# Source-model constraint index => row of `A`.
row_map::Dict{MOI.ConstraintIndex,Int}
end
# Map each variable index of `model` to its column in the cached matrix.
#
# `index_map` (from `MOI.copy_to`) translates `model`'s variables into
# `cache`'s variables; the position of a cache variable in
# `ListOfVariableIndices` is its column.
function _column_map(model, cache, index_map)
    column_of = Dict{MOI.VariableIndex,Int}()
    for (col, v) in enumerate(MOI.get(cache, MOI.ListOfVariableIndices()))
        column_of[v] = col
    end
    columns = Dict{MOI.VariableIndex,Int}()
    for v in MOI.get(model, MOI.ListOfVariableIndices())
        columns[v] = column_of[index_map[v]]
    end
    return columns
end
# Map each affine constraint index of `model` to its row in the cached matrix.
#
# `VariableIndex`-in-set constraints are skipped: they are stored as variable
# bounds, not as rows of the matrix.
function _row_map(model, cache, index_map)
    # First record, for each cache constraint, its row in `cache.constraints`.
    cache_rows = Dict{MOI.ConstraintIndex,Int}()
    for (F, S) in MOI.get(cache, MOI.ListOfConstraintTypesPresent())
        F == MOI.VariableIndex && continue
        for ci in MOI.get(cache, MOI.ListOfConstraintIndices{F,S}())
            cache_rows[ci] = MOI.Utilities.rows(cache.constraints, ci)
        end
    end
    # Then translate `model`'s constraints through `index_map`.
    rows = Dict{MOI.ConstraintIndex,Int}()
    for (F, S) in MOI.get(model, MOI.ListOfConstraintTypesPresent())
        F == MOI.VariableIndex && continue
        for ci in MOI.get(model, MOI.ListOfConstraintIndices{F,S}())
            rows[ci] = cache_rows[index_map[ci]]
        end
    end
    return rows
end
# Return the variables of `model` that appear in a `VariableIndex`-in-`S`
# constraint (e.g. `S = MOI.ZeroOne` for binaries, `S = MOI.Integer` for
# integers). The constraint function of such a constraint is the variable.
function _get_variables_in_set(model, ::Type{S}) where {S}
    F = MOI.VariableIndex
    return [
        MOI.get(model, MOI.ConstraintFunction(), ci) for
        ci in MOI.get(model, MOI.ListOfConstraintIndices{F,S}())
    ]
end
"""
    LinearStandardForm(model::MOI.ModelLike)

Build a [`LinearStandardForm`](@ref) from `model` by copying it into a
`_CachedModel`, whose `MatrixOfConstraints` lays the affine constraints out
as a sparse matrix plus row bounds.
"""
function LinearStandardForm(model::MOI.ModelLike)
    cache = _CachedModel()
    # `index_map` records how `copy_to` renumbered variables and constraints.
    index_map = MOI.copy_to(cache, model)
    F = MOI.ScalarAffineFunction{Float64}
    objective = MOI.get(cache, MOI.ObjectiveFunction{F}())
    A = cache.constraints.coefficients
    # Densify the objective; `+=` is used in case the function has duplicate
    # terms for the same variable.
    c = zeros(A.n)
    for term in objective.terms
        c[term.variable.value] += term.coefficient
    end
    return LinearStandardForm{Float64}(
        MOI.get(model, MOI.ObjectiveSense()),
        c,
        objective.constant,
        convert(SparseArrays.SparseMatrixCSC{Float64,Int}, A),
        cache.constraints.constants.lower,
        cache.constraints.constants.upper,
        cache.variables.lower,
        cache.variables.upper,
        _get_variables_in_set(model, MOI.ZeroOne),
        _get_variables_in_set(model, MOI.Integer),
        _column_map(model, cache, index_map),
        _row_map(model, cache, index_map),
    )
end
end
# Example: build a small mixed-integer LP and extract its matrix data.
import MathOptInterface
const MOI = MathOptInterface
model = MOI.Utilities.Model{Float64}()
# NOTE(review): the bound and integrality lines (`x in Interval(...)`,
# `y <= 10.0`, `z == 5.0`, `z in Integer()`, `x in ZeroOne()`) are
# `VariableIndex` constraints, so they populate `col_lower`/`col_upper`/
# `binaries`/`integers` rather than rows of `A` — see the output below.
MOI.Utilities.loadfromstring!(model, """
variables: x, y, z
maxobjective: x + 2.0 * y + -3.1 * z
x + y <= 1.0
2.0 * y >= 3.0
-4.0 * x + z == 5.0
x in Interval(0.0, 1.0)
y <= 10.0
z == 5.0
z in Integer()
x in ZeroOne()
""")
lp = MatrixOptInterface.LinearStandardForm(model)
julia> lp.A
3×3 SparseArrays.SparseMatrixCSC{Float64, Int64} with 5 stored entries:
1.0 ⋅ -4.0
⋅ 1.0 1.0
⋅ 2.0 ⋅
julia> lp.obj
3-element Vector{Float64}:
-3.1
2.0
1.0
julia> lp.obj_constant
0.0
julia> lp.row_lower
3-element Vector{Float64}:
5.0
-Inf
3.0
julia> lp.row_upper
3-element Vector{Float64}:
5.0
1.0
Inf
julia> lp.col_lower
3-element Vector{Float64}:
5.0
-Inf
0.0
julia> lp.col_upper
3-element Vector{Float64}:
5.0
10.0
1.0
julia> lp.binaries
1-element Vector{MathOptInterface.VariableIndex}:
MathOptInterface.VariableIndex(1)
julia> lp.integers
1-element Vector{MathOptInterface.VariableIndex}:
MathOptInterface.VariableIndex(3)
julia> lp.column_map
Dict{MathOptInterface.VariableIndex, Int64} with 3 entries:
VariableIndex(2) => 2
VariableIndex(3) => 1
VariableIndex(1) => 3
julia> lp.row_map
Dict{MathOptInterface.ConstraintIndex, Int64} with 3 entries:
ConstraintIndex{ScalarAffineFunction{Float64}, EqualTo{Float64}}(1) => 1
ConstraintIndex{ScalarAffineFunction{Float64}, GreaterThan{Float64}}(1) => 3
ConstraintIndex{ScalarAffineFunction{Float64}, LessThan{Float64}}(1) => 2
julia> lp.sense
MAX_SENSE::OptimizationSense = 1
I think that'd satisfy most people. It's probably too complicated to add just as some documentation.
Trying to handle all combinations that people might ask for just seems like more trouble than it's worth. The same goes for supporting conic and quadratic matrix problems.