From b16e8d5fd49ab818520adc2b6fd102310cc5b363 Mon Sep 17 00:00:00 2001
From: odow
Date: Fri, 25 Oct 2024 13:42:12 +1300
Subject: [PATCH] Update tests

---
 src/predictors/SoftPlus.jl | 11 +++-----
 test/test_predictors.jl    | 53 ++++++++++++++++++++++++++++++++++++++++++++++++++++-
 2 files changed, 55 insertions(+), 9 deletions(-)

diff --git a/src/predictors/SoftPlus.jl b/src/predictors/SoftPlus.jl
index 5fc1bfb..b48d99d 100644
--- a/src/predictors/SoftPlus.jl
+++ b/src/predictors/SoftPlus.jl
@@ -61,20 +61,15 @@ struct SoftPlus <: AbstractPredictor
     SoftPlus(; beta::Float64 = 1.0) = new(beta)
 end
 
-_softplus(f::SoftPlus, x::Real) = log(1 + exp(f.beta * x)) / f.beta
-
 function add_predictor(
     model::JuMP.AbstractModel,
     predictor::SoftPlus,
     x::Vector,
 )
-    beta = predictor.beta
+    β = predictor.beta
     y = JuMP.@variable(model, [1:length(x)], base_name = "moai_SoftPlus")
-    cons = _set_direct_bounds(x -> _softplus(beta), 0, nothing, x, y)
-    append!(
-        cons,
-        JuMP.@constraint(model, y .== log.(1 .+ exp.(beta .* x)) ./ beta),
-    )
+    cons = _set_direct_bounds(xi -> log(1 + exp(β * xi)) / β, 0, nothing, x, y)
+    append!(cons, JuMP.@constraint(model, y .== log.(1 .+ exp.(β .* x)) ./ β))
     return y, Formulation(predictor, y, cons)
 end
 
diff --git a/test/test_predictors.jl b/test/test_predictors.jl
index d8ee18c..f94f152 100644
--- a/test/test_predictors.jl
+++ b/test/test_predictors.jl
@@ -147,7 +147,7 @@ function test_ReLU_bounds()
         model = Model()
         @variable(model, lb <= x <= ub)
         y, _ = MathOptAI.add_predictor(model, f, [x])
-        @test lower_bound.(y) == [0.0]
+        @test lower_bound.(y) == [max(0.0, lb)]
         @test upper_bound.(y) == [max(0.0, ub)]
     end
 end
@@ -254,6 +254,22 @@ function test_Sigmoid()
     return
 end
 
+function test_Sigmoid_bounds()
+    f(x) = 1 / (1 + exp(-x))
+    values = [-Inf, -2, 0, 2, Inf]
+    for lb in values, ub in values
+        if lb == Inf || ub == -Inf || lb > ub
+            continue
+        end
+        model = Model()
+        @variable(model, lb <= x <= ub)
+        y, _ = MathOptAI.add_predictor(model, MathOptAI.Sigmoid(), [x])
+        @test lower_bound(y[1]) == f(lb)
+        @test upper_bound(y[1]) == f(ub)
+    end
+    return
+end
+
 function test_ReducedSpace_Sigmoid()
     model = Model(Ipopt.Optimizer)
     set_silent(model)
@@ -324,6 +340,26 @@ function test_SoftPlus()
     return
 end
 
+function test_SoftPlus_bounds()
+    f(x, beta) = log(1 + exp(beta * x)) / beta
+    values = [-Inf, -2, 0, 2, Inf]
+    for beta in [1.0, 1.5, 2.0], lb in values, ub in values
+        if lb == Inf || ub == -Inf || lb > ub
+            continue
+        end
+        model = Model()
+        @variable(model, lb <= x <= ub)
+        y, _ = MathOptAI.add_predictor(model, MathOptAI.SoftPlus(; beta), [x])
+        @test lower_bound(y[1]) == f(lb, beta)
+        if isfinite(ub)
+            @test upper_bound(y[1]) == f(ub, beta)
+        else
+            @test !has_upper_bound(y[1])
+        end
+    end
+    return
+end
+
 function test_ReducedSpace_SoftPlus()
     model = Model(Ipopt.Optimizer)
     set_silent(model)
@@ -359,6 +395,21 @@ function test_Tanh()
     return
 end
 
+function test_Tanh_bounds()
+    values = [-Inf, -2, 0, 2, Inf]
+    for lb in values, ub in values
+        if lb == Inf || ub == -Inf || lb > ub
+            continue
+        end
+        model = Model()
+        @variable(model, lb <= x <= ub)
+        y, _ = MathOptAI.add_predictor(model, MathOptAI.Tanh(), [x])
+        @test lower_bound.(y) == [tanh(lb)]
+        @test upper_bound.(y) == [tanh(ub)]
+    end
+    return
+end
+
 function test_ReducedSpace_Tanh()
     model = Model(Ipopt.Optimizer)
     set_silent(model)
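
A note on `_set_direct_bounds`, which the patched `add_predictor` calls but
this patch does not define: the new bound tests rely on softplus, sigmoid,
and tanh being monotonically increasing, so each finite bound of x[i] maps
through the activation to a valid bound on y[i], while the second and third
arguments (`0` and `nothing` for SoftPlus) act as fallbacks when no finite
bound is available. That is why test_SoftPlus_bounds expects
`!has_upper_bound(y[1])` when `ub == Inf`. Below is a minimal sketch of a
helper with this contract, assuming the call signature used above; the name
`set_monotone_bounds` and the exact fallback logic are illustrative, not
MathOptAI's actual implementation:

using JuMP

# Hypothetical sketch (not MathOptAI source): propagate the bounds of each
# x[i] through an increasing scalar function `f` onto y[i].
function set_monotone_bounds(f::Function, l, u, x::Vector, y::Vector)
    for (xi, yi) in zip(x, y)
        # Map a finite bound of x[i] through `f`; otherwise fall back to the
        # defaults `l`/`u` (for SoftPlus these are 0 and nothing).
        lo = l
        if has_lower_bound(xi) && isfinite(lower_bound(xi))
            lo = f(lower_bound(xi))
        end
        hi = u
        if has_upper_bound(xi) && isfinite(upper_bound(xi))
            hi = f(upper_bound(xi))
        end
        lo === nothing || set_lower_bound(yi, lo)
        hi === nothing || set_upper_bound(yi, hi)
    end
    # Return an empty constraint list so the caller can `append!` to it, as
    # the patched `add_predictor` does.
    return Any[]
end

Under this reading, the defaults must agree with the activation's limits at
±Inf for the tests to pass: the SoftPlus default of 0 equals softplus(-Inf),
and the Tanh defaults would have to be tanh(-Inf) == -1.0 and
tanh(Inf) == 1.0, which is what test_Tanh_bounds asserts for unbounded
inputs.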
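
A quick usage sketch of the behaviour the new tests pin down (this assumes
the package behaves exactly as the tests assert; the numeric values follow
from the softplus formula in the patch):

using JuMP, MathOptAI

model = Model()
@variable(model, -2 <= x <= 2)
# Both bounds of x are finite, so both map through the softplus function:
y, _ = MathOptAI.add_predictor(model, MathOptAI.SoftPlus(; beta = 2.0), [x])
lower_bound(y[1])  # log(1 + exp(2 * -2)) / 2 ≈ 0.0091
upper_bound(y[1])  # log(1 + exp(2 * 2)) / 2 ≈ 2.0091

# An unbounded input exercises the defaults instead: softplus is
# nonnegative, and there is no finite upper bound to propagate.
@variable(model, z)
w, _ = MathOptAI.add_predictor(model, MathOptAI.SoftPlus(; beta = 2.0), [z])
lower_bound(w[1])      # 0.0
has_upper_bound(w[1])  # false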