diff --git a/ext/OmeletteLuxExt.jl b/ext/OmeletteLuxExt.jl
index 8b52b8b..fc10655 100644
--- a/ext/OmeletteLuxExt.jl
+++ b/ext/OmeletteLuxExt.jl
@@ -13,7 +13,7 @@ function _add_predictor(predictor::Omelette.Pipeline, layer::Lux.Dense, p)
     if layer.activation === identity
         # Do nothing
     elseif layer.activation === Lux.NNlib.relu
-        push!(predictor.layers, Omelette.ReLUBigM(layer.out_dims, 1e6))
+        push!(predictor.layers, Omelette.ReLUBigM(1e6))
     else
         error("Unsupported activation function: $x")
     end
diff --git a/src/models/LinearLayer.jl b/src/models/LinearLayer.jl
deleted file mode 100644
index 2cd2b2c..0000000
--- a/src/models/LinearLayer.jl
+++ /dev/null
@@ -1,52 +0,0 @@
-# Copyright (c) 2024: Oscar Dowson and contributors
-#
-# Use of this source code is governed by an MIT-style license that can be found
-# in the LICENSE.md file or at https://opensource.org/licenses/MIT.
-
-"""
-    LinearRegression(parameters::Matrix)
-
-Represents the linear relationship:
-```math
-f(x) = A x
-```
-where \$A\$ is the \$m \\times n\$ matrix `parameters`.
-
-## Example
-
-```jldoctest
-julia> using JuMP, Omelette
-
-julia> model = Model();
-
-julia> @variable(model, x[1:2]);
-
-julia> f = Omelette.LinearRegression([2.0, 3.0])
-Omelette.LinearRegression([2.0 3.0])
-
-julia> y = Omelette.add_predictor(model, f, x)
-1-element Vector{VariableRef}:
- omelette_y[1]
-
-julia> print(model)
-Feasibility
-Subject to
- 2 x[1] + 3 x[2] - omelette_y[1] = 0
-```
-"""
-struct LinearLayer <: AbstractPredictor
-    weights::Matrix{Float64}
-    bias::Vector{Float64}
-end
-
-Base.size(x::LinearLayer) = size(x.weights)
-
-function _add_predictor_inner(
-    model::JuMP.Model,
-    predictor::LinearLayer,
-    x::Vector{JuMP.VariableRef},
-    y::Vector{JuMP.VariableRef},
-)
-    JuMP.@constraint(model, y .== predictor.weights * x .+ predictor.bias)
-    return
-end
diff --git a/src/models/LinearRegression.jl b/src/models/LinearRegression.jl
index d03a1a9..8a86179 100644
--- a/src/models/LinearRegression.jl
+++ b/src/models/LinearRegression.jl
@@ -55,7 +55,7 @@ function add_predictor(
     predictor::LinearRegression,
     x::Vector{JuMP.VariableRef},
 )
-    m = size(predictor.parameters, 1)
+    m = size(predictor.A, 1)
     y = JuMP.@variable(model, [1:m], base_name = "omelette_y")
     JuMP.@constraint(model, predictor.A * x .+ predictor.b .== y)
     return y
diff --git a/src/models/LogisticRegression.jl b/src/models/LogisticRegression.jl
index 9e8d86f..b8a53d3 100644
--- a/src/models/LogisticRegression.jl
+++ b/src/models/LogisticRegression.jl
@@ -42,8 +42,6 @@ function LogisticRegression(parameters::Vector{Float64})
     return LogisticRegression(reshape(parameters, 1, length(parameters)))
 end
 
-Base.size(f::LogisticRegression) = size(f.parameters)
-
 function add_predictor(
     model::JuMP.Model,
     predictor::LogisticRegression,
diff --git a/src/models/Pipeline.jl b/src/models/Pipeline.jl
index d46103b..403a27c 100644
--- a/src/models/Pipeline.jl
+++ b/src/models/Pipeline.jl
@@ -21,10 +21,10 @@ julia> model = Model();
 julia> @variable(model, x[1:2]);
 
 julia> f = Omelette.Pipeline([
-           Omelette.LinearLayer([1.0 2.0], [0.0]),
-           Omelette.ReLUQuadratic(1),
+           Omelette.LinearRegression([1.0 2.0], [0.0]),
+           Omelette.ReLUQuadratic(),
        ])
-Omelette.Pipeline(Omelette.AbstractPredictor[Omelette.LinearLayer([1.0 2.0], [0.0]), Omelette.ReLUQuadratic(1)])
+Omelette.Pipeline(Omelette.AbstractPredictor[Omelette.LinearRegression([1.0 2.0], [0.0]), Omelette.ReLUQuadratic()])
 
 julia> y = Omelette.add_predictor(model, f, x)
 1-element Vector{VariableRef}:
@@ -33,31 +33,24 @@ julia> y = Omelette.add_predictor(model, f, x)
 julia> print(model)
 Feasibility
 Subject to
- -x[1] - 2 x[2] + omelette_y[1] = 0
- omelette_y[1] - _z[1]+ + _z[1]- = 0
- _z[1]+*_z[1]- = 0
- _z[1]+ ≥ 0
- _z[1]- ≥ 0
+ x[1] + 2 x[2] - omelette_y[1] = 0
+ omelette_y[1] - omelette_y[1] + _z[1] = 0
+ omelette_y[1]*_z[1] = 0
+ omelette_y[1] ≥ 0
+ _z[1] ≥ 0
 ```
 """
 struct Pipeline <: AbstractPredictor
     layers::Vector{AbstractPredictor}
 end
 
-Base.size(x::Pipeline) = (size(last(x.layers), 1), size(first(x.layers), 2))
-
-function _add_predictor_inner(
+function add_predictor(
     model::JuMP.Model,
     predictor::Pipeline,
     x::Vector{JuMP.VariableRef},
-    y::Vector{JuMP.VariableRef},
 )
-    for (i, layer) in enumerate(predictor.layers)
-        if i == length(predictor.layers)
-            add_predictor!(model, layer, x, y)
-        else
-            x = add_predictor(model, layer, x)
-        end
+    for layer in predictor.layers
+        x = add_predictor(model, layer, x)
     end
-    return
+    return x
 end
diff --git a/src/models/ReLU.jl b/src/models/ReLU.jl
index 2817f95..6c642fb 100644
--- a/src/models/ReLU.jl
+++ b/src/models/ReLU.jl
@@ -4,7 +4,7 @@
 # in the LICENSE.md file or at https://opensource.org/licenses/MIT.
 
 """
-    ReLUBigM(dimension::Int, M::Float64)
+    ReLUBigM(M::Float64)
 
 Represents the rectified linear unit relationship:
 ```math
@@ -20,48 +20,42 @@ julia> model = Model();
 
 julia> @variable(model, x[1:2]);
 
-julia> f = Omelette.ReLUBigM(2, 100.0)
-Omelette.ReLUBigM(2, 100.0)
+julia> f = Omelette.ReLUBigM(100.0)
+Omelette.ReLUBigM(100.0)
 
 julia> y = Omelette.add_predictor(model, f, x)
-2-element Vector{VariableRef}:
- omelette_y[1]
- omelette_y[2]
 
 julia> print(model)
 Feasibility
 Subject to
- omelette_y[1] ≥ 0
- omelette_y[2] ≥ 0
  -x[1] + omelette_y[1] ≥ 0
  -x[2] + omelette_y[2] ≥ 0
  omelette_y[1] - 100 _[5] ≤ 0
  omelette_y[2] - 100 _[6] ≤ 0
  -x[1] + omelette_y[1] + 100 _[5] ≤ 100
  -x[2] + omelette_y[2] + 100 _[6] ≤ 100
+ omelette_y[1] ≥ 0
+ omelette_y[2] ≥ 0
  _[5] binary
  _[6] binary
 ```
 """
 struct ReLUBigM <: AbstractPredictor
-    dimension::Int
     M::Float64
 end
 
-Base.size(x::ReLUBigM) = (x.dimension, x.dimension)
-
-function _add_predictor_inner(
+function add_predictor(
     model::JuMP.Model,
     predictor::ReLUBigM,
     x::Vector{JuMP.VariableRef},
-    y::Vector{JuMP.VariableRef},
 )
-    z = JuMP.@variable(model, [1:length(x)], Bin)
-    JuMP.@constraint(model, y .>= 0)
+    m = length(x)
+    y = JuMP.@variable(model, [1:m], lower_bound = 0, base_name = "omelette_y")
+    z = JuMP.@variable(model, [1:m], Bin)
     JuMP.@constraint(model, y .>= x)
     JuMP.@constraint(model, y .<= predictor.M * z)
     JuMP.@constraint(model, y .<= x .+ predictor.M * (1 .- z))
-    return
+    return y
 end
 
 """
@@ -70,8 +64,9 @@ end
 Implements the ReLU constraint \$y = max(0, x)\$ by the reformulation:
 ```math
 \\begin{aligned}
-x = x^+ - x^- \\\\
-[x^+ , x^-] \\in SOS1
+x = y - z \\\\
+[y, z] \\in SOS1 \\\\
+y, z \\ge 0
 \\end{aligned}
 ```
 
@@ -84,8 +79,8 @@ julia> model = Model();
 
 julia> @variable(model, x[1:2]);
 
-julia> f = Omelette.ReLUSOS1(2)
-Omelette.ReLUSOS1(2)
+julia> f = Omelette.ReLUSOS1()
+Omelette.ReLUSOS1()
 
 julia> y = Omelette.add_predictor(model, f, x)
 2-element Vector{VariableRef}:
@@ -95,35 +90,31 @@ julia> y = Omelette.add_predictor(model, f, x)
 julia> print(model)
 Feasibility
 Subject to
- x[1] - omelette_y[1] + _[5] = 0
- x[2] - omelette_y[2] + _[6] = 0
+ x[1] - omelette_y[1] + _z[1] = 0
+ x[2] - omelette_y[2] + _z[2] = 0
+ [omelette_y[1], _z[1]] ∈ MathOptInterface.SOS1{Float64}([1.0, 2.0])
+ [omelette_y[2], _z[2]] ∈ MathOptInterface.SOS1{Float64}([1.0, 2.0])
  omelette_y[1] ≥ 0
  omelette_y[2] ≥ 0
- [omelette_y[1], _[5]] ∈ MathOptInterface.SOS1{Float64}([1.0, 2.0])
- [omelette_y[2], _[6]] ∈ MathOptInterface.SOS1{Float64}([1.0, 2.0])
- _[5] ≥ 0
- _[6] ≥ 0
+ _z[1] ≥ 0
+ _z[2] ≥ 0
 ```
 """
-struct ReLUSOS1 <: AbstractPredictor
-    dimension::Int
-end
-
-Base.size(x::ReLUSOS1) = (x.dimension, x.dimension)
+struct ReLUSOS1 <: AbstractPredictor end
 
-function _add_predictor_inner(
+function add_predictor(
     model::JuMP.Model,
     predictor::ReLUSOS1,
     x::Vector{JuMP.VariableRef},
-    y::Vector{JuMP.VariableRef},
 )
-    for i in 1:length(x)
-        z = JuMP.@variable(model, lower_bound = 0)
-        JuMP.@constraint(model, y[i] >= 0)
-        JuMP.@constraint(model, x[i] == y[i] - z)
-        JuMP.@constraint(model, [y[i], z] in MOI.SOS1([1.0, 2.0]))
+    m = length(x)
+    y = JuMP.@variable(model, [1:m], lower_bound = 0, base_name = "omelette_y")
+    z = JuMP.@variable(model, [1:m], lower_bound = 0, base_name = "_z")
+    JuMP.@constraint(model, x .== y - z)
+    for i in 1:m
+        JuMP.@constraint(model, [y[i], z[i]] in MOI.SOS1([1.0, 2.0]))
     end
-    return
+    return y
 end
 
 """
@@ -141,14 +132,12 @@ y, z \\ge 0
 ## Example
 
 ```jldoctest
-julia> using JuMP, Omelette
-
 julia> model = Model();
 
 julia> @variable(model, x[1:2]);
 
-julia> f = Omelette.ReLUQuadratic(2)
-Omelette.ReLUQuadratic(2)
+julia> f = Omelette.ReLUQuadratic()
+Omelette.ReLUQuadratic()
 
 julia> y = Omelette.add_predictor(model, f, x)
 2-element Vector{VariableRef}:
@@ -160,31 +149,25 @@ Feasibility
 Subject to
  x[1] - omelette_y[1] + _z[1] = 0
  x[2] - omelette_y[2] + _z[2] = 0
- omelette_y[1] ≥ 0
- omelette_y[2] ≥ 0
  omelette_y[1]*_z[1] = 0
  omelette_y[2]*_z[2] = 0
+ omelette_y[1] ≥ 0
+ omelette_y[2] ≥ 0
  _z[1] ≥ 0
  _z[2] ≥ 0
 ```
 """
-struct ReLUQuadratic <: AbstractPredictor
-    dimension::Int
-end
-
-Base.size(x::ReLUQuadratic) = (x.dimension, x.dimension)
+struct ReLUQuadratic <: AbstractPredictor end
 
-function _add_predictor_inner(
+function add_predictor(
     model::JuMP.Model,
     predictor::ReLUQuadratic,
     x::Vector{JuMP.VariableRef},
-    y::Vector{JuMP.VariableRef},
 )
-    for i in 1:length(x)
-        z = JuMP.@variable(model, lower_bound = 0, base_name = "_z[$i]")
-        JuMP.@constraint(model, y[i] >= 0)
-        JuMP.@constraint(model, x[i] == y[i] - z)
-        JuMP.@constraint(model, y[i] * z == 0)
-    end
-    return
+    m = length(x)
+    y = JuMP.@variable(model, [1:m], lower_bound = 0, base_name = "omelette_y")
+    z = JuMP.@variable(model, [1:m], lower_bound = 0, base_name = "_z")
+    JuMP.@constraint(model, x .== y - z)
+    JuMP.@constraint(model, y .* z .== 0)
+    return y
 end
diff --git a/test/test_ReLU.jl b/test/test_ReLU.jl
index d38f542..1622e67 100644
--- a/test/test_ReLU.jl
+++ b/test/test_ReLU.jl
@@ -25,13 +25,12 @@ function test_ReLU_BigM()
     model = Model(HiGHS.Optimizer)
     set_silent(model)
     @variable(model, x[1:2])
-    f = Omelette.ReLUBigM(2, 100.0)
-    @test size(f) == (2, 2)
+    f = Omelette.ReLUBigM(100.0)
     y = Omelette.add_predictor(model, f, x)
     @test length(y) == 2
     @test num_variables(model) == 6
     @test num_constraints(model, AffExpr, MOI.LessThan{Float64}) == 4
-    @test num_constraints(model, AffExpr, MOI.GreaterThan{Float64}) == 4
+    @test num_constraints(model, AffExpr, MOI.GreaterThan{Float64}) == 2
     @objective(model, Min, sum(y))
     fix.(x, [-1, 2])
     optimize!(model)
@@ -43,8 +42,7 @@ end
 function test_ReLU_SOS1()
     model = Model()
     @variable(model, x[1:2])
-    f = Omelette.ReLUSOS1(2)
-    @test size(f) == (2, 2)
+    f = Omelette.ReLUSOS1()
     y = Omelette.add_predictor(model, f, x)
     @test length(y) == 2
     @test num_variables(model) == 6
@@ -57,8 +55,7 @@ function test_ReLU_Quadratic()
     model = Model(Ipopt.Optimizer)
     set_silent(model)
     @variable(model, x[1:2])
-    f = Omelette.ReLUQuadratic(2)
-    @test size(f) == (2, 2)
+    f = Omelette.ReLUQuadratic()
     y = Omelette.add_predictor(model, f, x)
     @test length(y) == 2
     @test num_variables(model) == 6
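For reviewers, a short usage sketch of the API after this diff: predictors no longer store their own dimension, and `add_predictor` now creates and returns the output vector `y` itself instead of writing into a preallocated one. The pieces below (`LinearRegression`, `ReLUBigM`, `Pipeline`, HiGHS) are all taken from the docstrings and tests in this diff; combining them in a single pipeline is an illustrative assumption, not a doctest from the source.

```julia
# Sketch only: assumes JuMP, HiGHS, and Omelette are installed.
using JuMP, HiGHS, Omelette

model = Model(HiGHS.Optimizer)
@variable(model, x[1:2])

# After this change, ReLUBigM takes only the big-M constant; the output
# dimension is inferred from length(x) when the predictor is added.
f = Omelette.Pipeline([
    Omelette.LinearRegression([1.0 2.0], [0.0]),
    Omelette.ReLUBigM(100.0),
])

# add_predictor creates the `omelette_y` variables and returns them, so a
# Pipeline chains layers by rebinding x to each layer's output in turn.
y = Omelette.add_predictor(model, f, x)
```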