Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

DNM: add hooks for JuMP extensions #101

Closed
wants to merge 1 commit into from
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
6 changes: 3 additions & 3 deletions src/predictors/Affine.jl
Original file line number Diff line number Diff line change
Expand Up @@ -61,8 +61,8 @@ end

function add_predictor(model::JuMP.AbstractModel, predictor::Affine, x::Vector)
m = size(predictor.A, 1)
y = JuMP.@variable(model, [1:m], base_name = "moai_Affine")
bounds = _get_variable_bounds.(x)
y = add_variables(model, predictor, x, m; base_name = "moai_Affine")
bounds = get_bounds.(x)
for i in 1:size(predictor.A, 1)
y_lb, y_ub = predictor.b[i], predictor.b[i]
for j in 1:size(predictor.A, 2)
Expand All @@ -71,7 +71,7 @@ function add_predictor(model::JuMP.AbstractModel, predictor::Affine, x::Vector)
y_ub += a_ij * ifelse(a_ij >= 0, ub, lb)
y_lb += a_ij * ifelse(a_ij >= 0, lb, ub)
end
_set_bounds_if_finite(y[i], y_lb, y_ub)
set_bounds(y[i], y_lb, y_ub)
end
JuMP.@constraint(model, predictor.A * x .+ predictor.b .== y)
return y
Expand Down
11 changes: 7 additions & 4 deletions src/predictors/BinaryDecisionTree.jl
Original file line number Diff line number Diff line change
Expand Up @@ -75,14 +75,17 @@ function add_predictor(
atol::Float64 = 0.0,
)
paths = _tree_to_paths(predictor)
z = JuMP.@variable(
vars = add_variables(
model,
[1:length(paths)],
binary = true,
predictor,
x,
1 + length(paths);
base_name = "moai_BinaryDecisionTree_z",
)
y, z = vars[1], vars[2:end]
JuMP.set_name(y, "moai_BinaryDecisionTree_value")
JuMP.set_binary.(z)
JuMP.@constraint(model, sum(z) == 1)
y = JuMP.@variable(model, base_name = "moai_BinaryDecisionTree_value")
y_expr = JuMP.AffExpr(0.0)
for (zi, (leaf, path)) in zip(z, paths)
JuMP.add_to_expression!(y_expr, leaf, zi)
Expand Down
8 changes: 7 additions & 1 deletion src/predictors/GrayBox.jl
Original file line number Diff line number Diff line change
Expand Up @@ -73,7 +73,13 @@ end

function add_predictor(model::JuMP.AbstractModel, predictor::GrayBox, x::Vector)
    # Build the reduced-space expressions for the gray box first, then bind
    # them to explicit output variables via equality constraints.
    op = add_predictor(model, ReducedSpace(predictor), x)
    # Use the `add_variables` hook (instead of `JuMP.@variable` directly) so
    # that JuMP extensions can intercept variable creation.
    y = add_variables(
        model,
        predictor,
        x,
        length(op);
        base_name = "moai_GrayBox",
    )
    JuMP.@constraint(model, op .== y)
    return y
end
Expand Down
2 changes: 1 addition & 1 deletion src/predictors/Quantile.jl
Original file line number Diff line number Diff line change
Expand Up @@ -44,7 +44,7 @@ function add_predictor(
x::Vector,
)
M, N = length(x), length(predictor.quantiles)
y = JuMP.@variable(model, [1:N], base_name = "moai_quantile")
y = add_variables(model, predictor, x, N; base_name = "moai_quantile")
quantile(q, x...) = Distributions.quantile(predictor.distribution(x...), q)
for (qi, yi) in zip(predictor.quantiles, y)
op_i = JuMP.add_nonlinear_operator(
Expand Down
51 changes: 29 additions & 22 deletions src/predictors/ReLU.jl
Original file line number Diff line number Diff line change
Expand Up @@ -43,10 +43,10 @@ julia> y = MathOptAI.add_predictor(model, MathOptAI.ReducedSpace(f), x)
"""
struct ReLU <: AbstractPredictor end

function add_predictor(model::JuMP.AbstractModel, predictor::ReLU, x::Vector)
    # y = max(0, x): lower bound is 0; the upper bound of each y[i] is the
    # upper bound of the corresponding x[i] (if finite).
    ub = last.(get_bounds.(x))
    y = add_variables(model, predictor, x, length(x); base_name = "moai_ReLU")
    set_bounds.(y, 0, ub)
    JuMP.@constraint(model, y .== max.(0, x))
    return y
end
Expand Down Expand Up @@ -109,17 +109,25 @@ function add_predictor(
x::Vector,
)
m = length(x)
bounds = _get_variable_bounds.(x)
y = JuMP.@variable(model, [1:m], base_name = "moai_ReLU")
_set_bounds_if_finite.(y, 0, last.(bounds))
bounds = get_bounds.(x)
vars = add_variables(
model,
predictor,
x,
2 * length(x);
base_name = "moai_ReLU",
)
y, z = vars[1:m], vars[m+1:end]
set_bounds.(y, 0, last.(bounds))
JuMP.set_binary.(z)
JuMP.set_name.(z, "")
for i in 1:m
lb, ub = bounds[i]
z = JuMP.@variable(model, binary = true)
JuMP.@constraint(model, y[i] >= x[i])
U = min(ub, predictor.M)
JuMP.@constraint(model, y[i] <= U * z)
JuMP.@constraint(model, y[i] <= U * z[i])
L = min(max(0, -lb), predictor.M)
JuMP.@constraint(model, y[i] <= x[i] + L * (1 - z))
JuMP.@constraint(model, y[i] <= x[i] + L * (1 - z[i]))
end
return y
end
Expand Down Expand Up @@ -178,14 +186,13 @@ function add_predictor(
predictor::ReLUSOS1,
x::Vector,
)
m = length(x)
bounds = _get_variable_bounds.(x)
y = JuMP.@variable(model, [i in 1:m], base_name = "moai_ReLU")
_set_bounds_if_finite.(y, 0, last.(bounds))
z = JuMP.@variable(model, [1:m], lower_bound = 0, base_name = "_z")
_set_bounds_if_finite.(z, nothing, -first.(bounds))
bounds = get_bounds.(x)
y = add_variables(model, predictor, x, length(x); base_name = "moai_ReLU")
set_bounds.(y, 0, last.(bounds))
z = add_variables(model, predictor, x, length(x); base_name = "_z")
set_bounds.(z, 0, -first.(bounds))
JuMP.@constraint(model, x .== y - z)
for i in 1:m
for i in 1:length(x)
JuMP.@constraint(model, [y[i], z[i]] in MOI.SOS1([1.0, 2.0]))
end
return y
Expand Down Expand Up @@ -246,11 +253,11 @@ function add_predictor(
x::Vector,
)
m = length(x)
bounds = _get_variable_bounds.(x)
y = JuMP.@variable(model, [1:m], base_name = "moai_ReLU")
_set_bounds_if_finite.(y, 0, last.(bounds))
z = JuMP.@variable(model, [1:m], base_name = "_z")
_set_bounds_if_finite.(z, 0, -first.(bounds))
bounds = get_bounds.(x)
y = add_variables(model, predictor, x, length(x); base_name = "moai_ReLU")
set_bounds.(y, 0, last.(bounds))
z = add_variables(model, predictor, x, length(x); base_name = "_z")
set_bounds.(z, 0, -first.(bounds))
JuMP.@constraint(model, x .== y - z)
JuMP.@constraint(model, y .* z .== 0)
return y
Expand Down
7 changes: 3 additions & 4 deletions src/predictors/Scale.jl
Original file line number Diff line number Diff line change
Expand Up @@ -54,15 +54,14 @@ function Base.show(io::IO, ::Scale)
end

function add_predictor(model::JuMP.AbstractModel, predictor::Scale, x::Vector)
m = length(predictor.scale)
y = JuMP.@variable(model, [1:m], base_name = "moai_Scale")
bounds = _get_variable_bounds.(x)
y = add_variables(model, predictor, x, length(x); base_name = "moai_Scale")
bounds = get_bounds.(x)
for (i, scale) in enumerate(predictor.scale)
y_lb = y_ub = predictor.bias[i]
lb, ub = bounds[i]
y_ub += scale * ifelse(scale >= 0, ub, lb)
y_lb += scale * ifelse(scale >= 0, lb, ub)
_set_bounds_if_finite(y[i], y_lb, y_ub)
set_bounds(y[i], y_lb, y_ub)
end
JuMP.@constraint(model, predictor.scale .* x .+ predictor.bias .== y)
return y
Expand Down
12 changes: 9 additions & 3 deletions src/predictors/Sigmoid.jl
Original file line number Diff line number Diff line change
Expand Up @@ -45,9 +45,15 @@ julia> y = MathOptAI.add_predictor(model, MathOptAI.ReducedSpace(f), x)
"""
struct Sigmoid <: AbstractPredictor end

function add_predictor(model::JuMP.AbstractModel, predictor::Sigmoid, x::Vector)
    # Create outputs through the extension hook rather than `JuMP.@variable`.
    y = add_variables(
        model,
        predictor,
        x,
        length(x);
        base_name = "moai_Sigmoid",
    )
    # The sigmoid function maps into (0, 1).
    set_bounds.(y, 0, 1)
    JuMP.@constraint(model, [i in 1:length(x)], y[i] == 1 / (1 + exp(-x[i])))
    return y
end
Expand Down
25 changes: 17 additions & 8 deletions src/predictors/SoftMax.jl
Original file line number Diff line number Diff line change
Expand Up @@ -47,23 +47,32 @@ julia> y = MathOptAI.add_predictor(model, MathOptAI.ReducedSpace(f), x)
"""
struct SoftMax <: AbstractPredictor end

function add_predictor(model::JuMP.AbstractModel, predictor::SoftMax, x::Vector)
    # Allocate the shared denominator together with the outputs in a single
    # hook call so extensions see all variable creation at once.
    vars = add_variables(
        model,
        predictor,
        x,
        1 + length(x);
        base_name = "moai_SoftMax",
    )
    # First variable is the denominator; the rest are the softmax outputs.
    denom, y = vars[1], vars[2:end]
    set_bounds.(y, 0, 1)
    JuMP.set_name(denom, "moai_SoftMax_denom")
    set_bounds(denom, 0, nothing)
    JuMP.@constraint(model, denom == sum(exp.(x)))
    JuMP.@constraint(model, y .== exp.(x) ./ denom)
    return y
end

function add_predictor(
    model::JuMP.AbstractModel,
    predictor::ReducedSpace{SoftMax},
    x::Vector,
)
    # Only the denominator needs an explicit decision variable; the outputs
    # are returned as reduced-space expressions.
    vars =
        add_variables(model, predictor, x, 1; base_name = "moai_SoftMax_denom")
    denom = only(vars)
    set_bounds(denom, 0, nothing)
    JuMP.@constraint(model, denom == sum(exp.(x)))
    return exp.(x) ./ denom
end
16 changes: 13 additions & 3 deletions src/predictors/SoftPlus.jl
Original file line number Diff line number Diff line change
Expand Up @@ -43,9 +43,19 @@ julia> y = MathOptAI.add_predictor(model, MathOptAI.ReducedSpace(f), x)
"""
struct SoftPlus <: AbstractPredictor end

function add_predictor(
    model::JuMP.AbstractModel,
    predictor::SoftPlus,
    x::Vector,
)
    y = add_variables(
        model,
        predictor,
        x,
        length(x);
        base_name = "moai_SoftPlus",
    )
    # log(1 + exp(x)) is strictly positive with no finite upper bound.
    set_bounds.(y, 0, nothing)
    JuMP.@constraint(model, y .== log.(1 .+ exp.(x)))
    return y
end
Expand Down
6 changes: 3 additions & 3 deletions src/predictors/Tanh.jl
Original file line number Diff line number Diff line change
Expand Up @@ -45,9 +45,9 @@ julia> y = MathOptAI.add_predictor(model, MathOptAI.ReducedSpace(f), x)
"""
struct Tanh <: AbstractPredictor end

function add_predictor(model::JuMP.AbstractModel, predictor::Tanh, x::Vector)
    y = add_variables(model, predictor, x, length(x); base_name = "moai_Tanh")
    # tanh maps into (-1, 1).
    set_bounds.(y, -1, 1)
    JuMP.@constraint(model, y .== tanh.(x))
    return y
end
Expand Down
54 changes: 46 additions & 8 deletions src/utilities.jl
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,40 @@
# Use of this source code is governed by a BSD-style license that can be found
# in the LICENSE.md file.

function _get_variable_bounds(x::JuMP.GenericVariableRef{T}) where {T}
"""
    add_variables(
        model::JuMP.AbstractModel,
        predictor::AbstractPredictor,
        x::Vector,
        n::Int;
        base_name::String,
    )

Add and return a vector of `n` new decision variables in `model`, created on
behalf of `predictor` with input vector `x`, using `base_name` to name the
variables.

Extensions may specialize this method on the type of `model` and/or
`predictor` to customize how variables are created.

!!! note
    This method is for JuMP extensions. It should not be called in regular usage
    of MathOptAI.
"""
function add_variables(
    model::JuMP.AbstractModel,
    predictor::AbstractPredictor,
    x::Vector,
    n::Int;
    base_name::String,
)
    # Default implementation: `predictor` and `x` are unused, but they are part
    # of the signature so extensions can dispatch on them.
    return JuMP.@variable(model, [1:n], base_name = base_name)
end

"""
    get_bounds(x::JuMP.AbstractVariable)::Tuple

Return a tuple of the `(lower, upper)` bounds associated with variable `x`.

The fallback method for unrecognized variable types returns `(-Inf, Inf)`,
that is, no bound information.

!!! note
    This method is for JuMP extensions. It should not be called in regular usage
    of MathOptAI.
"""
get_bounds(::Any) = -Inf, Inf

function get_bounds(x::JuMP.GenericVariableRef{T}) where {T}
lb, ub = typemin(T), typemax(T)
if JuMP.has_upper_bound(x)
ub = JuMP.upper_bound(x)
Expand All @@ -21,7 +54,18 @@ function _get_variable_bounds(x::JuMP.GenericVariableRef{T}) where {T}
return lb, ub
end

function _set_bounds_if_finite(
"""
    set_bounds(x::JuMP.AbstractVariable, lower, upper)::Nothing

Set the bounds of `x` to `lower` and `upper` respectively.

The fallback method for unrecognized variable types is a no-op that skips
setting any bounds.

!!! note
    This method is for JuMP extensions. It should not be called in regular usage
    of MathOptAI.
"""
set_bounds(::Any, ::Any, ::Any) = nothing

function set_bounds(
x::JuMP.GenericVariableRef{T},
l::Union{Nothing,Real},
u::Union{Nothing,Real},
Expand All @@ -34,9 +78,3 @@ function _set_bounds_if_finite(
end
return
end

# Default fallback: provide no detail on the bounds
_get_variable_bounds(::Any) = -Inf, Inf

# Default fallback: skip setting variable bound
_set_bounds_if_finite(::Any, ::Any, ::Any) = nothing