Commit b16e8d5: Update tests

odow committed Oct 25, 2024
1 parent d8fee23
Showing 2 changed files with 55 additions and 9 deletions.
src/predictors/SoftPlus.jl: 11 changes (3 additions, 8 deletions)
@@ -61,20 +61,15 @@ struct SoftPlus <: AbstractPredictor
     SoftPlus(; beta::Float64 = 1.0) = new(beta)
 end
 
-_softplus(f::SoftPlus, x::Real) = log(1 + exp(f.beta * x)) / f.beta
-
 function add_predictor(
     model::JuMP.AbstractModel,
     predictor::SoftPlus,
     x::Vector,
 )
-    beta = predictor.beta
+    β = predictor.beta
     y = JuMP.@variable(model, [1:length(x)], base_name = "moai_SoftPlus")
-    cons = _set_direct_bounds(x -> _softplus(predictor, x), 0, nothing, x, y)
-    append!(
-        cons,
-        JuMP.@constraint(model, y .== log.(1 .+ exp.(beta .* x)) ./ beta),
-    )
+    cons = _set_direct_bounds(xi -> log(1 + exp(β * xi)) / β, 0, nothing, x, y)
+    append!(cons, JuMP.@constraint(model, y .== log.(1 .+ exp.(β .* x)) ./ β))
     return y, Formulation(predictor, y, cons)
 end

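The rewritten add_predictor propagates the input variable's bounds through the
softplus function via _set_direct_bounds before adding the nonlinear equality
constraint. A minimal sketch of that bound-propagation idea, assuming a
hypothetical helper with the same call shape (the real _set_direct_bounds is
internal to MathOptAI.jl and may differ in signature and return type):

import JuMP

# Hypothetical sketch: for a monotone increasing activation `f`, the image of
# the interval [lb, ub] is [f(lb), f(ub)], so each output yi = f(xi) can be
# bounded directly. `l` and `u` are fallbacks used when an input bound is
# missing; `nothing` means "leave that side unbounded".
function set_direct_bounds_sketch(f::Function, l, u, x::Vector, y::Vector)
    cons = Any[]
    for (xi, yi) in zip(x, y)
        lb = JuMP.has_lower_bound(xi) ? f(JuMP.lower_bound(xi)) : l
        ub = JuMP.has_upper_bound(xi) ? f(JuMP.upper_bound(xi)) : u
        if lb !== nothing
            JuMP.set_lower_bound(yi, lb)
            push!(cons, JuMP.LowerBoundRef(yi))
        end
        if ub !== nothing
            JuMP.set_upper_bound(yi, ub)
            push!(cons, JuMP.UpperBoundRef(yi))
        end
    end
    return cons
end

Because softplus is strictly increasing and positive, the fallback pair
(0, nothing) passed at the call site is the natural choice: an input with no
lower bound still yields the valid output lower bound 0, while an input with
no upper bound leaves the output unbounded above.
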
test/test_predictors.jl: 53 changes (52 additions, 1 deletion)
@@ -147,7 +147,7 @@ function test_ReLU_bounds()
         model = Model()
         @variable(model, lb <= x <= ub)
         y, _ = MathOptAI.add_predictor(model, f, [x])
-        @test lower_bound.(y) == [0.0]
+        @test lower_bound.(y) == [max(0.0, lb)]
         @test upper_bound.(y) == [max(0.0, ub)]
     end
 end
@@ -254,6 +254,22 @@ function test_Sigmoid()
     return
 end
 
+function test_Sigmoid_bounds()
+    f(x) = 1 / (1 + exp(-x))
+    values = [-Inf, -2, 0, 2, Inf]
+    for lb in values, ub in values
+        if lb == Inf || ub == -Inf || lb > ub
+            continue
+        end
+        model = Model()
+        @variable(model, lb <= x <= ub)
+        y, _ = MathOptAI.add_predictor(model, MathOptAI.Sigmoid(), [x])
+        @test lower_bound(y[1]) == f(lb)
+        @test upper_bound(y[1]) == f(ub)
+    end
+    return
+end
+
 function test_ReducedSpace_Sigmoid()
     model = Model(Ipopt.Optimizer)
     set_silent(model)
@@ -324,6 +340,26 @@ function test_SoftPlus()
     return
 end
 
+function test_SoftPlus_bounds()
+    f(x, beta) = log(1 + exp(beta * x)) / beta
+    values = [-Inf, -2, 0, 2, Inf]
+    for beta in [1.0, 1.5, 2.0], lb in values, ub in values
+        if lb == Inf || ub == -Inf || lb > ub
+            continue
+        end
+        model = Model()
+        @variable(model, lb <= x <= ub)
+        y, _ = MathOptAI.add_predictor(model, MathOptAI.SoftPlus(; beta), [x])
+        @test lower_bound(y[1]) == f(lb, beta)
+        if isfinite(ub)
+            @test upper_bound(y[1]) == f(ub, beta)
+        else
+            @test !has_upper_bound(y[1])
+        end
+    end
+    return
+end
+
 function test_ReducedSpace_SoftPlus()
     model = Model(Ipopt.Optimizer)
     set_silent(model)
@@ -359,6 +395,21 @@ function test_Tanh()
     return
 end
 
+function test_Tanh_bounds()
+    values = [-Inf, -2, 0, 2, Inf]
+    for lb in values, ub in values
+        if lb == Inf || ub == -Inf || lb > ub
+            continue
+        end
+        model = Model()
+        @variable(model, lb <= x <= ub)
+        y, _ = MathOptAI.add_predictor(model, MathOptAI.Tanh(), [x])
+        @test lower_bound.(y) == [tanh(lb)]
+        @test upper_bound.(y) == [tanh(ub)]
+    end
+    return
+end
+
 function test_ReducedSpace_Tanh()
     model = Model(Ipopt.Optimizer)
     set_silent(model)
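Each of the new bounds tests exercises the same pattern: for a monotone
increasing activation, the output variable's bounds should equal the
activation applied to the input variable's bounds, with the predictor's
fallbacks used when an input bound is infinite. A usage sketch of what the
tests assert (assuming JuMP and MathOptAI are available):

using JuMP, MathOptAI

model = Model()
@variable(model, -2 <= x <= 2)
y, _ = MathOptAI.add_predictor(model, MathOptAI.SoftPlus(; beta = 2.0), [x])
lower_bound(y[1])  # log(1 + exp(2 * -2)) / 2 ≈ 0.0091
upper_bound(y[1])  # log(1 + exp(2 * 2)) / 2 ≈ 2.0091

The SoftPlus case is the only asymmetric one: softplus(-Inf) = 0 gives a
finite fallback lower bound, but softplus(Inf) = Inf, so when ub is infinite
the test checks !has_upper_bound(y[1]) instead of comparing values.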
