diff --git a/src/predictors/ReLU.jl b/src/predictors/ReLU.jl index cad6667..5ec3598 100644 --- a/src/predictors/ReLU.jl +++ b/src/predictors/ReLU.jl @@ -59,7 +59,7 @@ struct ReLU <: AbstractPredictor end function add_predictor(model::JuMP.AbstractModel, predictor::ReLU, x::Vector) ub = last.(_get_variable_bounds.(x)) y = JuMP.@variable(model, [1:length(x)], base_name = "moai_ReLU") - _set_bounds_if_finite.(y, 0, ub) + _set_bounds_if_finite.(y, 0, max.(0, ub)) cons = JuMP.@constraint(model, y .== max.(0, x)) constraints = Any[JuMP.LowerBoundRef.(y); cons] return y, Formulation(predictor, y, constraints) @@ -128,7 +128,7 @@ function add_predictor( m = length(x) bounds = _get_variable_bounds.(x) y = JuMP.@variable(model, [1:m], base_name = "moai_ReLU") - _set_bounds_if_finite.(y, 0, last.(bounds)) + _set_bounds_if_finite.(y, 0, max.(0, last.(bounds))) formulation = Formulation(predictor) append!(formulation.variables, y) for i in 1:m @@ -203,7 +203,7 @@ function add_predictor( m = length(x) bounds = _get_variable_bounds.(x) y = JuMP.@variable(model, [i in 1:m], base_name = "moai_ReLU") - _set_bounds_if_finite.(y, 0, last.(bounds)) + _set_bounds_if_finite.(y, 0, max.(0, last.(bounds))) z = JuMP.@variable(model, [1:m], lower_bound = 0, base_name = "moai_z") _set_bounds_if_finite.(z, nothing, -first.(bounds)) cons = JuMP.@constraint(model, x .== y - z) @@ -271,9 +271,9 @@ function add_predictor( m = length(x) bounds = _get_variable_bounds.(x) y = JuMP.@variable(model, [1:m], base_name = "moai_ReLU") - _set_bounds_if_finite.(y, 0, last.(bounds)) + _set_bounds_if_finite.(y, 0, max.(0, last.(bounds))) z = JuMP.@variable(model, [1:m], base_name = "moai_z") - _set_bounds_if_finite.(z, 0, -first.(bounds)) + _set_bounds_if_finite.(z, 0, max.(0, -first.(bounds))) c1 = JuMP.@constraint(model, x .== y - z) c2 = JuMP.@constraint(model, y .* z .== 0) return y, Formulation(predictor, Any[y; z], Any[c1; c2]) diff --git a/test/test_Flux.jl b/test/test_Flux.jl index 6d0cd09..79c4ffc 
100644 --- a/test/test_Flux.jl +++ b/test/test_Flux.jl @@ -51,15 +51,13 @@ function test_end_to_end_with_scale() ) model = Model(HiGHS.Optimizer) set_silent(model) - @variable(model, x) + @variable(model, x == -1.2) y, _ = MathOptAI.add_predictor( model, chain, [x]; config = Dict(Flux.relu => MathOptAI.ReLUBigM(100.0)), ) - @constraint(model, only(y) <= 4) - @objective(model, Min, x) optimize!(model) @test is_solved_and_feasible(model) @test isapprox(value.(y), chain(Float32[value(x)]); atol = 1e-2) @@ -72,15 +70,13 @@ function test_end_to_end_ReLUBigM() ) model = Model(HiGHS.Optimizer) set_silent(model) - @variable(model, x) - y, formulation = MathOptAI.add_predictor( + @variable(model, x == -1.2) + y, _ = MathOptAI.add_predictor( model, chain, [x]; config = Dict(Flux.relu => MathOptAI.ReLUBigM(100.0)), ) - @constraint(model, only(y) <= 4) - @objective(model, Min, x) optimize!(model) @test is_solved_and_feasible(model) @test isapprox(value.(y), chain(Float32[value(x)]); atol = 1e-2) @@ -93,8 +89,8 @@ function test_end_to_end_ReLUQuadratic() ) model = Model(Ipopt.Optimizer) set_silent(model) - @variable(model, x) - y, formulation = MathOptAI.add_predictor( + @variable(model, x == -1.2) + y, _ = MathOptAI.add_predictor( model, chain, [x]; @@ -102,8 +98,6 @@ function test_end_to_end_ReLUQuadratic() ) # Ipopt needs a starting point to avoid the local minima. 
set_start_value(only(y), 4.0) - @constraint(model, only(y) <= 4) - @objective(model, Min, x) optimize!(model) @test is_solved_and_feasible(model) @test isapprox(value.(y), chain(Float32[value(x)]); atol = 1e-2) @@ -115,11 +109,9 @@ function test_end_to_end_ReLU() Flux.Chain(Flux.Dense(1 => 16, Flux.relu), Flux.Dense(16 => 1)), ) model = Model(Ipopt.Optimizer) set_silent(model) - @variable(model, x) - y, formulation = MathOptAI.add_predictor(model, chain, [x]) - @constraint(model, only(y) <= 4) - @objective(model, Min, x) + @variable(model, x == -1.2) + y, _ = MathOptAI.add_predictor(model, chain, [x]) optimize!(model) @test is_solved_and_feasible(model) @test isapprox(value.(y), chain(Float32[value(x)]); atol = 1e-2) @@ -132,11 +124,8 @@ function test_end_to_end_ReLU_reduced_space() ) model = Model(Ipopt.Optimizer) set_silent(model) - @variable(model, x) - y, formulation = - MathOptAI.add_predictor(model, chain, [x]; reduced_space = true) - @constraint(model, only(y) <= 4) - @objective(model, Min, x) + @variable(model, x == -1.2) + y, _ = MathOptAI.add_predictor(model, chain, [x]; reduced_space = true) optimize!(model) @test is_solved_and_feasible(model) @test isapprox(value.(y), chain(Float32[value(x)]); atol = 1e-2) @@ -149,10 +138,8 @@ function test_end_to_end_SoftPlus() ) model = Model(Ipopt.Optimizer) set_silent(model) - @variable(model, x) - y, formulation = MathOptAI.add_predictor(model, chain, [x]) - @constraint(model, only(y) <= 4) - @objective(model, Min, x) + @variable(model, x == -1.2) + y, _ = MathOptAI.add_predictor(model, chain, [x]) optimize!(model) @test is_solved_and_feasible(model) @test isapprox(value.(y), chain(Float32[value(x)]); atol = 1e-2) @@ -165,10 +152,8 @@ function test_end_to_end_Sigmoid() ) model = Model(Ipopt.Optimizer) set_silent(model) - @variable(model, x) - y, formulation = MathOptAI.add_predictor(model, chain, [x]) - @constraint(model, only(y) <= 4) - @objective(model, Min, x) + 
@variable(model, x == -1.2) + y, _ = MathOptAI.add_predictor(model, chain, [x]) optimize!(model) @test is_solved_and_feasible(model) @test isapprox(value.(y), chain(Float32[value(x)]); atol = 1e-2) @@ -181,10 +167,8 @@ function test_end_to_end_Tanh() ) model = Model(Ipopt.Optimizer) set_silent(model) - @variable(model, x) - y, formulation = MathOptAI.add_predictor(model, chain, [x]) - @constraint(model, only(y) <= 4) - @objective(model, Min, x) + @variable(model, x == -1.2) + y, _ = MathOptAI.add_predictor(model, chain, [x]) optimize!(model) @test is_solved_and_feasible(model) @test isapprox(value.(y), chain(Float32[value(x)]); atol = 1e-2) @@ -299,10 +283,8 @@ function test_end_to_end_Softmax() chain = Flux.Chain(Flux.Dense(2 => 3), Flux.softmax) model = Model(Ipopt.Optimizer) set_silent(model) - @variable(model, x[1:2]) + @variable(model, x[i in 1:2] == i) y, _ = MathOptAI.add_predictor(model, chain, x) - @constraint(model, x[1] == 1.0) - @constraint(model, x[2] == 2.0) optimize!(model) @test is_solved_and_feasible(model) y_val = chain(Float32.(value.(x))) diff --git a/test/test_predictors.jl b/test/test_predictors.jl index 0657d64..022c365 100644 --- a/test/test_predictors.jl +++ b/test/test_predictors.jl @@ -132,6 +132,28 @@ function test_ReLU_direct() return end +function test_ReLU_bounds() + values = [-2, 0, 2] + for f in ( + MathOptAI.ReLU(), + MathOptAI.ReLUBigM(100.0), + MathOptAI.ReLUQuadratic(), + MathOptAI.ReLUSOS1(), + ) + for lb in values, ub in values + if lb > ub + continue + end + model = Model() + @variable(model, lb <= x <= ub) + y, _ = MathOptAI.add_predictor(model, f, [x]) + @test lower_bound.(y) == [0.0] + @test upper_bound.(y) == [max(0.0, ub)] + end + end + return +end + function test_ReducedSpace_ReLU_direct() model = Model(Ipopt.Optimizer) set_silent(model)