diff --git a/docs/src/manual/nonlinear.md b/docs/src/manual/nonlinear.md
index bbe13e82fda..0c87243b58d 100644
--- a/docs/src/manual/nonlinear.md
+++ b/docs/src/manual/nonlinear.md
@@ -258,18 +258,15 @@ julia> expr = @expression(model, ifelse(x < -1 || x >= 1, x^2, 0.0))
 ifelse((x < -1) || (x >= 1), x², 0.0)
 ```

-As an alternative, use the `JuMP.nonlinear_` functions, which fallback to the
+As an alternative, use the `JuMP.op_` functions, which fall back to the
 various comparison and logical operators:
 ```jldoctest
 julia> model = Model();

 julia> @variable(model, x);

-julia> expr = nonlinear_ifelse(
-           nonlinear_or(
-               nonlinear_less_than(x, -1),
-               nonlinear_greater_equal(x, 1)
-           ),
+julia> expr = op_ifelse(
+           op_or(op_less_than(x, -1), op_greater_equal(x, 1)),
            x^2,
            0.0,
        )
@@ -280,14 +277,14 @@ The available functions are:

 | JuMP function                     | Julia function |
 | :-------------------------------- | :------------- |
-| [`nonlinear_ifelse`](@ref)        | `ifelse`       |
-| [`nonlinear_and`](@ref)           | `&&`           |
-| [`nonlinear_or`](@ref)            | `\|\|`         |
-| [`nonlinear_greater_than`](@ref)  | `>`            |
-| [`nonlinear_greater_equal`](@ref) | `>=`           |
-| [`nonlinear_less_than`](@ref)     | `<`            |
-| [`nonlinear_less_equal`](@ref)    | `<=`           |
-| [`nonlinear_equal_to`](@ref)      | `==`           |
+| [`op_ifelse`](@ref)               | `ifelse`       |
+| [`op_and`](@ref)                  | `&&`           |
+| [`op_or`](@ref)                   | `\|\|`         |
+| [`op_greater_than`](@ref)         | `>`            |
+| [`op_greater_equal`](@ref)        | `>=`           |
+| [`op_less_than`](@ref)            | `<`            |
+| [`op_less_equal`](@ref)           | `<=`           |
+| [`op_equal_to`](@ref)             | `==`           |

 ### Fields

@@ -327,10 +324,10 @@ using JuMP
 square(x) = x^2
 f(x, y) = (x - 1)^2 + (y - 2)^2
 model = Model();
-@register(model, udf_square, 1, square)
-@register(model, udf_f, 2, f)
+@register(model, op_square, 1, square)
+@register(model, op_f, 2, f)
 @variable(model, x[1:2]);
-@objective(model, Min, udf_f(x[1], udf_square(x[2])))
+@objective(model, Min, op_f(x[1], op_square(x[2])))
 ```

 The arguments to [`@register`](@ref) are:
@@ -349,7 +346,7 @@ The arguments to [`@register`](@ref) are:
 ### Registered functions without macros

 The [`@register`](@ref) macro is syntactic sugar for the
-[`add_user_defined_function`](@ref) method. Thus, the non-macro version of the
+[`register_nonlinear_operator`](@ref) method. Thus, the non-macro version of the
 preceding example is:

 ```@repl
@@ -357,12 +354,12 @@ using JuMP
 square(x) = x^2
 f(x, y) = (x - 1)^2 + (y - 2)^2
 model = Model();
-udf_square = add_user_defined_function(model, 1, square; name = :udf_square)
-model[:udf_square] = udf_square
-udf_f = add_user_defined_function(model, 2, f; name = :udf_f)
-model[:udf_f] = udf_f
+op_square = register_nonlinear_operator(model, 1, square; name = :op_square)
+model[:op_square] = op_square
+op_f = register_nonlinear_operator(model, 2, f; name = :op_f)
+model[:op_f] = op_f
 @variable(model, x[1:2]);
-@objective(model, Min, udf_f(x[1], udf_square(x[2])))
+@objective(model, Min, op_f(x[1], op_square(x[2])))
 ```

 This has two important consequences.
@@ -374,7 +371,7 @@ julia> @register(model, square, 1, square)
 ```
 will error because it is equivalent to:
 ```julia
-julia> square = add_user_defined_function(model, 1, square; name = :square)
+julia> square = register_nonlinear_operator(model, 1, square; name = :square)
 ERROR: invalid redefinition of constant square
 Stacktrace:
 [...]
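+
+julia> # One way to avoid the clash is to register `square` under a new name:
+
+julia> op_square = register_nonlinear_operator(model, 1, square; name = :op_square)
+NonlinearOperator(:op_square, square)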
@@ -388,8 +385,8 @@ Second, you can obtain a reference to the user-defined function using the
 using JuMP
 square(x) = x^2
 model = Model();
-@register(model, udf_square, 1, square)
-udf_square_2 = model[:udf_square]
+@register(model, op_square, 1, square)
+op_square_2 = model[:op_square]
 ```

 ### Invalid redefinition of constant
@@ -419,11 +416,11 @@ x²
 ```

 To force JuMP to treat `f` as a user-defined function and not trace it, register
-the function using [`add_user_defined_function`](@ref) and define a new method
+the function using [`register_nonlinear_operator`](@ref) and define a new method
 which manually creates a [`NonlinearExpr`](@ref):
 ```jldoctest nonlinear_invalid_redefinition
-julia> _ = add_user_defined_function(model, 1, f; name = :f)
-UserDefinedFunction{typeof(f)}(:f, f)
+julia> _ = register_nonlinear_operator(model, 1, f; name = :f)
+NonlinearOperator(:f, f)

 julia> f(x::AbstractJuMPScalar) = NonlinearExpr(:f, Any[x])
 f (generic function with 2 methods)
@@ -451,9 +448,9 @@ f(x) = x^2
 ∇f(x) = 2x
 ∇²f(x) = 2
 model = Model();
-@register(model, udf_f, 1, f, ∇f, ∇²f)  # Providing ∇²f is optional
+@register(model, op_f, 1, f, ∇f, ∇²f)  # Providing ∇²f is optional
 @variable(model, x)
-@objective(model, Min, udf_f(x))
+@objective(model, Min, op_f(x))
 ```

 #### Multivariate functions
@@ -511,8 +508,8 @@ using JuMP
 model = Model();
 @variable(model, x[1:5])
 f(x::Vector) = sum(x[i]^i for i in 1:length(x))
-@register(model, udf_f, 5, (x...) -> f(collect(x)))
-@objective(model, Min, udf_f(x...))
+@register(model, op_f, 5, (x...) -> f(collect(x)))
+@objective(model, Min, op_f(x...))
 ```

 ### Automatic differentiation
diff --git a/docs/src/tutorials/applications/power_systems.jl b/docs/src/tutorials/applications/power_systems.jl
index 28b75c37d32..ddf11753127 100644
--- a/docs/src/tutorials/applications/power_systems.jl
+++ b/docs/src/tutorials/applications/power_systems.jl
@@ -513,14 +513,14 @@ function solve_nonlinear_economic_dispatch(
     if silent
         set_silent(model)
     end
-    @register(model, udf_tcf, 1, thermal_cost_function)
+    @register(model, op_tcf, 1, thermal_cost_function)
     N = length(generators)
     @variable(model, generators[i].min <= g[i = 1:N] <= generators[i].max)
     @variable(model, 0 <= w <= scenario.wind)
     @objective(
         model,
         Min,
-        sum(generators[i].variable_cost * udf_tcf(g[i]) for i in 1:N) +
+        sum(generators[i].variable_cost * op_tcf(g[i]) for i in 1:N) +
         wind.variable_cost * w,
     )
     @constraint(model, sum(g[i] for i in 1:N) + sqrt(w) == scenario.demand)
diff --git a/docs/src/tutorials/nonlinear/nested_problems.jl b/docs/src/tutorials/nonlinear/nested_problems.jl
index 421a0b19039..cf20ac1b0a5 100644
--- a/docs/src/tutorials/nonlinear/nested_problems.jl
+++ b/docs/src/tutorials/nonlinear/nested_problems.jl
@@ -141,8 +141,8 @@ end

 model = Model(Ipopt.Optimizer)
 @variable(model, x[1:2] >= 0)
-@register(model, udf_V, 2, V, ∇V, ∇²V)
-@objective(model, Min, x[1]^2 + x[2]^2 + udf_V(x[1], x[2]))
+@register(model, op_V, 2, V, ∇V, ∇²V)
+@objective(model, Min, x[1]^2 + x[2]^2 + op_V(x[1], x[2]))
 optimize!(model)
 solution_summary(model)
@@ -215,13 +215,13 @@ model = Model(Ipopt.Optimizer)
 cache = Cache(Float64[], NaN, Float64[])
 @register(
     model,
-    udf_V,
+    op_V,
     2,
     (x...) -> cached_f(cache, x...),
     (g, x...) -> cached_∇f(cache, g, x...),
     (H, x...) -> cached_∇²f(cache, H, x...),
 )
-@objective(model, Min, x[1]^2 + x[2]^2 + udf_V(x[1], x[2]))
+@objective(model, Min, x[1]^2 + x[2]^2 + op_V(x[1], x[2]))
 optimize!(model)
 solution_summary(model)
diff --git a/docs/src/tutorials/nonlinear/tips_and_tricks.jl b/docs/src/tutorials/nonlinear/tips_and_tricks.jl
index a3daf79b270..48e7fe6d30a 100644
--- a/docs/src/tutorials/nonlinear/tips_and_tricks.jl
+++ b/docs/src/tutorials/nonlinear/tips_and_tricks.jl
@@ -46,10 +46,10 @@ foo_2(x, y) = foo(x, y)[2]
 model = Model(Ipopt.Optimizer)
 set_silent(model)
 @variable(model, x[1:2] >= 0, start = 0.1)
-@register(model, udf_foo_1, 2, foo_1)
-@register(model, udf_foo_2, 2, foo_2)
-@objective(model, Max, udf_foo_1(x[1], x[2]))
-@constraint(model, udf_foo_2(x[1], x[2]) <= 2)
+@register(model, op_foo_1, 2, foo_1)
+@register(model, op_foo_2, 2, foo_2)
+@objective(model, Max, op_foo_1(x[1], x[2]))
+@constraint(model, op_foo_2(x[1], x[2]) <= 2)
 function_calls = 0
 optimize!(model)
 Test.@test objective_value(model) ≈ √3 atol = 1e-4
@@ -114,10 +114,10 @@ println("function_calls = ", function_calls)
 model = Model(Ipopt.Optimizer)
 set_silent(model)
 @variable(model, x[1:2] >= 0, start = 0.1)
-@register(model, udf_foo_1, 2, memoized_foo[1])
-@register(model, udf_foo_2, 2, memoized_foo[2])
-@objective(model, Max, udf_foo_1(x[1], x[2]))
-@constraint(model, udf_foo_2(x[1], x[2]) <= 2)
+@register(model, op_foo_1, 2, memoized_foo[1])
+@register(model, op_foo_2, 2, memoized_foo[2])
+@objective(model, Max, op_foo_1(x[1], x[2]))
+@constraint(model, op_foo_2(x[1], x[2]) <= 2)
 function_calls = 0
 optimize!(model)
 Test.@test objective_value(model) ≈ √3 atol = 1e-4
diff --git a/docs/src/tutorials/nonlinear/user_defined_hessians.jl b/docs/src/tutorials/nonlinear/user_defined_hessians.jl
index 75aa67ce8dc..9f49f33dc9b 100644
--- a/docs/src/tutorials/nonlinear/user_defined_hessians.jl
+++ b/docs/src/tutorials/nonlinear/user_defined_hessians.jl
@@ -69,7 +69,7 @@ end

 model = Model(Ipopt.Optimizer)
 @variable(model, x[1:2])
-@register(model, udf_rosenbrock, 2, rosenbrock, ∇rosenbrock, ∇²rosenbrock)
-@objective(model, Min, udf_rosenbrock(x[1], x[2]))
+@register(model, op_rosenbrock, 2, rosenbrock, ∇rosenbrock, ∇²rosenbrock)
+@objective(model, Min, op_rosenbrock(x[1], x[2]))
 optimize!(model)
 solution_summary(model; verbose = true)
diff --git a/src/macros.jl b/src/macros.jl
index 92689525c77..69e558f6110 100644
--- a/src/macros.jl
+++ b/src/macros.jl
@@ -475,10 +475,11 @@ function parse_constraint_head(
 end

 """
-    nonlinear_ifelse(a, x, y)
+    op_ifelse(a, x, y)

-A function that falls back to `ifelse(a, x, y)`, but when called with JuMP
-variables or expressions, returns a [`GenericNonlinearExpr`](@ref).
+A function that falls back to `ifelse(a, x, y)`, but when called with a JuMP
+variable or expression in the first argument, returns a
+[`GenericNonlinearExpr`](@ref).

 ## Example
@@ -487,19 +488,27 @@ julia> model = Model();

 julia> @variable(model, x);

-julia> nonlinear_ifelse(true, 1.0, 2.0)
+julia> op_ifelse(true, 1.0, 2.0)
 1.0

-julia> nonlinear_ifelse(x, 1.0, 2.0)
+julia> op_ifelse(x, 1.0, 2.0)
 ifelse(x, 1.0, 2.0)
+
+julia> op_ifelse(true, x, 2.0)
+x
 ```
 """
-nonlinear_ifelse(a, x, y) = ifelse(a, x, y)
+op_ifelse(a, x, y) = ifelse(a, x, y)
+
+# We can't make this a generic `NonlinearOperator` because we only want to
+# intercept `ifelse` if the first argument is an `AbstractJuMPScalar` (if it's a
+# `Bool`, we want to return the correct branch).
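+# For example, `op_ifelse(true, x, 2.0)` must return `x` itself, as the
+# docstring above shows, rather than wrapping all three arguments in a
+# `NonlinearExpr`.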
+op_ifelse(a::AbstractJuMPScalar, x, y) = NonlinearExpr(:ifelse, Any[a, x, y])

 """
-    nonlinear_and(x, y)
+    op_and(x, y)

-A function that falls back to `x && y`, but when called with JuMP variables or
+A function that falls back to `x & y`, but when called with JuMP variables or
 expressions, returns a [`GenericNonlinearExpr`](@ref).

 ## Example
@@ -509,19 +518,21 @@ julia> model = Model();

 julia> @variable(model, x);

-julia> nonlinear_and(true, false)
+julia> op_and(true, false)
 false

-julia> nonlinear_and(true, x)
+julia> op_and(true, x)
 true && x
 ```
 """
-nonlinear_and(x, y) = x && y
+const op_and = NonlinearOperator(:&&, &)
+# Note that the function is `&` instead of `&&` because `&&` is special lowering
+# syntax and is not a regular Julia function, but the MOI operator is `:&&`.

 """
-    nonlinear_or(x, y)
+    op_or(x, y)

-A function that falls back to `x || y`, but when called with JuMP variables or
+A function that falls back to `x | y`, but when called with JuMP variables or
 expressions, returns a [`GenericNonlinearExpr`](@ref).

 ## Example
@@ -531,17 +542,19 @@ julia> model = Model();

 julia> @variable(model, x);

-julia> nonlinear_or(true, false)
+julia> op_or(true, false)
 true

-julia> nonlinear_or(true, x)
+julia> op_or(true, x)
 true || x
 ```
 """
-nonlinear_or(x, y) = x || y
+const op_or = NonlinearOperator(:||, |)
+# Note that the function is `|` instead of `||` because `||` is special lowering
+# syntax and is not a regular Julia function, but the MOI operator is `:||`.

 """
-    nonlinear_less_than(x, y)
+    op_less_than(x, y)

 A function that falls back to `x < y`, but when called with JuMP variables or
 expressions, returns a [`GenericNonlinearExpr`](@ref).

 ## Example
@@ -553,17 +566,17 @@ julia> model = Model();

 julia> @variable(model, x);

-julia> nonlinear_less_than(1, 2)
+julia> op_less_than(1, 2)
 true

-julia> nonlinear_less_than(x, 2)
+julia> op_less_than(x, 2)
 x < 2
 ```
 """
-nonlinear_less_than(x, y) = x < y
+const op_less_than = NonlinearOperator(:<, <)

 """
-    nonlinear_greater_than(x, y)
+    op_greater_than(x, y)

 A function that falls back to `x > y`, but when called with JuMP variables or
 expressions, returns a [`GenericNonlinearExpr`](@ref).

 ## Example
@@ -575,17 +588,17 @@ julia> model = Model();

 julia> @variable(model, x);

-julia> nonlinear_greater_than(1, 2)
+julia> op_greater_than(1, 2)
 false

-julia> nonlinear_greater_than(x, 2)
+julia> op_greater_than(x, 2)
 x > 2
 ```
 """
-nonlinear_greater_than(x, y) = x > y
+const op_greater_than = NonlinearOperator(:>, >)

 """
-    nonlinear_less_equal(x, y)
+    op_less_equal(x, y)

 A function that falls back to `x <= y`, but when called with JuMP variables or
 expressions, returns a [`GenericNonlinearExpr`](@ref).

 ## Example
@@ -597,17 +610,17 @@ julia> model = Model();

 julia> @variable(model, x);

-julia> nonlinear_less_equal(2, 2)
+julia> op_less_equal(2, 2)
 true

-julia> nonlinear_less_equal(x, 2)
+julia> op_less_equal(x, 2)
 x <= 2
 ```
 """
-nonlinear_less_equal(x, y) = x <= y
+const op_less_equal = NonlinearOperator(:<=, <=)

 """
-    nonlinear_greater_equal(x, y)
+    op_greater_equal(x, y)

 A function that falls back to `x >= y`, but when called with JuMP variables or
 expressions, returns a [`GenericNonlinearExpr`](@ref).
@@ -619,17 +632,17 @@ julia> model = Model();

 julia> @variable(model, x);

-julia> nonlinear_greater_equal(2, 2)
+julia> op_greater_equal(2, 2)
 true

-julia> nonlinear_greater_equal(x, 2)
+julia> op_greater_equal(x, 2)
 x >= 2
 ```
 """
-nonlinear_greater_equal(x, y) = x >= y
+const op_greater_equal = NonlinearOperator(:>=, >=)

 """
-    nonlinear_equal_to(x, y)
+    op_equal_to(x, y)

 A function that falls back to `x == y`, but when called with JuMP variables or
 expressions, returns a [`GenericNonlinearExpr`](@ref).

 ## Example
@@ -641,40 +654,40 @@ julia> model = Model();

 julia> @variable(model, x);

-julia> nonlinear_equal_to(2, 2)
+julia> op_equal_to(2, 2)
 true

-julia> nonlinear_equal_to(x, 2)
+julia> op_equal_to(x, 2)
 x == 2
 ```
 """
-nonlinear_equal_to(x, y) = x == y
+const op_equal_to = NonlinearOperator(:(==), ==)

 function _rewrite_to_jump_logic(x)
     if Meta.isexpr(x, :call)
         op = if x.args[1] == :ifelse
-            return Expr(:call, nonlinear_ifelse, x.args[2:end]...)
+            return Expr(:call, op_ifelse, x.args[2:end]...)
         elseif x.args[1] == :<
-            return Expr(:call, nonlinear_less_than, x.args[2:end]...)
+            return Expr(:call, op_less_than, x.args[2:end]...)
         elseif x.args[1] == :>
-            return Expr(:call, nonlinear_greater_than, x.args[2:end]...)
+            return Expr(:call, op_greater_than, x.args[2:end]...)
         elseif x.args[1] == :<=
-            return Expr(:call, nonlinear_less_equal, x.args[2:end]...)
+            return Expr(:call, op_less_equal, x.args[2:end]...)
         elseif x.args[1] == :>=
-            return Expr(:call, nonlinear_greater_equal, x.args[2:end]...)
+            return Expr(:call, op_greater_equal, x.args[2:end]...)
         elseif x.args[1] == :(==)
-            return Expr(:call, nonlinear_equal_to, x.args[2:end]...)
+            return Expr(:call, op_equal_to, x.args[2:end]...)
         end
     elseif Meta.isexpr(x, :||)
-        return Expr(:call, nonlinear_or, x.args...)
+        return Expr(:call, op_or, x.args...)
     elseif Meta.isexpr(x, :&&)
-        return Expr(:call, nonlinear_and, x.args...)
+        return Expr(:call, op_and, x.args...)
     elseif Meta.isexpr(x, :comparison)
         lhs = Expr(:call, x.args[2], x.args[1], x.args[3])
         rhs = Expr(:call, x.args[4], x.args[3], x.args[5])
         return Expr(
             :call,
-            nonlinear_and,
+            op_and,
             _rewrite_to_jump_logic(lhs),
             _rewrite_to_jump_logic(rhs),
         )
diff --git a/src/nlp_expr.jl b/src/nlp_expr.jl
index 381f9b46123..de80e868297 100644
--- a/src/nlp_expr.jl
+++ b/src/nlp_expr.jl
@@ -25,8 +25,8 @@ and the default list of supported multivariate operators is given by:

 * [`MOI.Nonlinear.DEFAULT_MULTIVARIATE_OPERATORS`](@ref)

-Additional operators can be registered by setting a [`MOI.UserDefinedFunction`](@ref)
-attribute.
+Additional operators can be registered by setting a
+[`MOI.UserDefinedFunction`](@ref) attribute.

 See the full list of operators supported by a [`MOI.ModelLike`](@ref) by
 querying [`MOI.ListOfSupportedNonlinearOperators`](@ref).
@@ -234,8 +234,8 @@ function MOI.Nonlinear.parse_expression(
     if arg isa GenericNonlinearExpr
         _parse_without_recursion_inner(stack, data, expr, arg, parent_node)
     else
-        # We can use recursion here, because GenericNonlinearExpr only occur in
-        # other GenericNonlinearExpr.
+        # We can use recursion here, because a GenericNonlinearExpr can only
+        # occur inside another GenericNonlinearExpr.
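+        # Since `arg` is not a GenericNonlinearExpr, nothing nested below it
+        # can be one either, so this recursion has bounded depth.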
         MOI.Nonlinear.parse_expression(data, expr, arg, parent_node)
     end
 end

@@ -470,33 +470,6 @@ function _needs_flatten(parent::GenericNonlinearExpr, arg::GenericNonlinearExpr)
     end
 end

-function nonlinear_ifelse(a::AbstractJuMPScalar, x, y)
-    return GenericNonlinearExpr{variable_ref_type(a)}(:ifelse, Any[a, x, y])
-end
-
-for (f, op) in (
-    :nonlinear_and => :&&,
-    :nonlinear_or => :||,
-    :nonlinear_less_than => :(<),
-    :nonlinear_greater_than => :(>),
-    :nonlinear_less_equal => :(<=),
-    :nonlinear_greater_equal => :(>=),
-    :nonlinear_equal_to => :(==),
-)
-    op = Meta.quot(op)
-    @eval begin
-        function $(f)(x::AbstractJuMPScalar, y)
-            return GenericNonlinearExpr{variable_ref_type(x)}($op, x, y)
-        end
-        function $(f)(x, y::AbstractJuMPScalar)
-            return GenericNonlinearExpr{variable_ref_type(y)}($op, x, y)
-        end
-        function $(f)(x::AbstractJuMPScalar, y::AbstractJuMPScalar)
-            return GenericNonlinearExpr{variable_ref_type(x)}($op, x, y)
-        end
-    end
-end
-
 # JuMP interop

 function owner_model(expr::GenericNonlinearExpr)
@@ -789,12 +762,19 @@ function _MA.promote_operation(
 end

 """
-    UserDefinedFunction(head::Symbol, func::Function)
+    NonlinearOperator(head::Symbol, func::Function)

-A struct representing a user-defined function named `head`.
+A callable struct (functor) representing a function named `head`.

-This function must have already been added to the model using
-[`add_user_defined_function`](@ref) or [`@register`](@ref).
+When called with [`AbstractJuMPScalar`](@ref)s, the struct returns a
+[`GenericNonlinearExpr`](@ref).
+
+When called with non-JuMP types, the struct returns the evaluation of
+`func(args...)`.
+
+Unless `head` is special-cased by the optimizer, the operator must have already
+been added to the model using [`register_nonlinear_operator`](@ref) or
+[`@register`](@ref).

 ## Example

@@ -813,25 +793,54 @@ julia> model = Model();

 julia> @variable(model, x)
 x

 julia> f(x::Float64) = x^2
 f (generic function with 1 method)

 julia> ∇f(x::Float64) = 2 * x
 ∇f (generic function with 1 method)

 julia> ∇²f(x::Float64) = 2.0
 ∇²f (generic function with 1 method)

-julia> @register(model, udf_f, 1, f, ∇f, ∇²f)
-UserDefinedFunction{typeof(f)}(:udf_f, f)
+julia> @register(model, op_f, 1, f, ∇f, ∇²f)
+NonlinearOperator(:op_f, f)

-julia> bar = UserDefinedFunction(:udf_f, f)
-UserDefinedFunction{typeof(f)}(:udf_f, f)
+julia> bar = NonlinearOperator(:op_f, f)
+NonlinearOperator(:op_f, f)

 julia> @objective(model, Min, bar(x))
-udf_f(x)
+op_f(x)

 julia> bar(2.0)
 4.0
 ```
 """
-struct UserDefinedFunction{F}
+struct NonlinearOperator{F}
     head::Symbol
     func::F
 end

-function (f::UserDefinedFunction)(args...)
+# Make it so that we don't print the complicated type parameter
+function Base.show(io::IO, f::NonlinearOperator)
+    return print(io, "NonlinearOperator(:$(f.head), $(f.func))")
+end
+
+# Fast overload for unary calls
+
+(f::NonlinearOperator)(x) = f.func(x)
+
+(f::NonlinearOperator)(x::AbstractJuMPScalar) = NonlinearExpr(f.head, Any[x])
+
+# Fast overload for binary calls
+
+(f::NonlinearOperator)(x, y) = f.func(x, y)
+
+function (f::NonlinearOperator)(x::AbstractJuMPScalar, y)
+    return GenericNonlinearExpr(f.head, Any[x, y])
+end
+
+function (f::NonlinearOperator)(x, y::AbstractJuMPScalar)
+    return GenericNonlinearExpr(f.head, Any[x, y])
+end
+
+function (f::NonlinearOperator)(x::AbstractJuMPScalar, y::AbstractJuMPScalar)
+    return GenericNonlinearExpr(f.head, Any[x, y])
+end
+
+# Fallback for more arguments
+function (f::NonlinearOperator)(x, y, z...)
+    args = (x, y, z...)
     if any(Base.Fix2(isa, AbstractJuMPScalar), args)
         return GenericNonlinearExpr(f.head, Any[a for a in args])
     end
@@ -839,7 +848,7 @@
 end

 """
-    add_user_defined_function(
+    register_nonlinear_operator(
         model::Model,
         dim::Int,
         f::Function,
@@ -848,12 +857,15 @@
         [name::Symbol = Symbol(f),]
     )

-Add a user-defined function with `dim` input arguments to `model` and associate
-it with the operator `name`.
+Register a new nonlinear operator with `dim` input arguments in `model` and
+associate it with the name `name`.
+
+The function `f` evaluates the operator and must return a scalar.
+
+The optional function `∇f` evaluates the first derivative, and the optional
+function `∇²f` evaluates the second derivative.

-The function `f` evaluates the function. The optional function `∇f` evaluates
-the first derivative, and the optional function `∇²f` evaluates the second
-derivative. `∇²f` may be provided only if `∇f` is also provided.
+`∇²f` may be provided only if `∇f` is also provided.

 ## Example

 ```jldoctest
 julia> model = Model();

 julia> @variable(model, x)
 x

 julia> f(x::Float64) = x^2
 f (generic function with 1 method)

@@ -872,17 +884,17 @@ julia> ∇f(x::Float64) = 2 * x
 ∇f (generic function with 1 method)

 julia> ∇²f(x::Float64) = 2.0
 ∇²f (generic function with 1 method)

-julia> udf_f = add_user_defined_function(model, 1, f, ∇f, ∇²f)
-UserDefinedFunction{typeof(f)}(:f, f)
+julia> op_f = register_nonlinear_operator(model, 1, f, ∇f, ∇²f)
+NonlinearOperator(:f, f)

-julia> @objective(model, Min, udf_f(x))
+julia> @objective(model, Min, op_f(x))
 f(x)

-julia> udf_f(2.0)
+julia> op_f(2.0)
 4.0
 ```
 """
-function add_user_defined_function(
+function register_nonlinear_operator(
     model::GenericModel,
     dim::Int,
     f::Function,
@@ -892,10 +904,10 @@
     nargs = 1 + N
     if !(1 <= nargs <= 3)
         error(
-            "Unable to register user-defined function $name: invalid number " *
-            "of functions provided. Got $nargs, but expected 1 (if function " *
-            "only), 2 (if function and gradient), or 3 (if function, " *
-            "gradient, and hesssian provided)",
+            "Unable to register operator $name: invalid number of functions " *
+            "provided. Got $nargs, but expected 1 (if function only), 2 (if " *
+            "function and gradient), or 3 (if function, gradient, and " *
+            "hessian provided)",
         )
     end
     # TODO(odow): we could add other checks here, but we won't for now because
     # MOI.Nonlinear will automatically check for autodiff and common mistakes
     # and throw a nice informative error.
     MOI.set(model, MOI.UserDefinedFunction(name, dim), tuple(f, args...))
-    return UserDefinedFunction(name, f)
+    return NonlinearOperator(name, f)
 end

-function add_user_defined_function(::GenericModel, ::Int; kwargs...)
+function register_nonlinear_operator(::GenericModel, ::Int; kwargs...)
     return error(
-        "Unable to register user-defined function because no functions were " *
-        "provided. Expected 1 (if function only), 2 (if function and " *
-        "gradient), or 3 (if function, gradient, and hesssian provided)",
+        "Unable to register operator because no functions were provided. " *
+        "Expected 1 (if function only), 2 (if function and gradient), or 3 " *
+        "(if function, gradient, and hessian provided)",
     )
 end

 """
-    @register(model, operator, dim, args...)
+    @register(model, operator, dim, f[, ∇f[, ∇²f]])

-Register a user-defined function in `model`, and create a new variable
-[`UserDefinedFunction`](@ref) called `operator` in the current scope.
+Register the nonlinear operator `operator` in `model`, and create a new
+[`NonlinearOperator`](@ref) object called `operator` in the current scope.
+
+The arguments `f`, `∇f`, and `∇²f` follow the same rules as in
+[`register_nonlinear_operator`](@ref).

 ## Example

 ```jldoctest
 julia> model = Model();

 julia> @variable(model, x)
 x

 julia> f(x::Float64) = x^2
 f (generic function with 1 method)

@@ -937,27 +949,27 @@ julia> ∇f(x::Float64) = 2 * x
 ∇f (generic function with 1 method)

 julia> ∇²f(x::Float64) = 2.0
 ∇²f (generic function with 1 method)

-julia> @register(model, udf_f, 1, f, ∇f, ∇²f)
-UserDefinedFunction{typeof(f)}(:udf_f, f)
+julia> @register(model, op_f, 1, f, ∇f, ∇²f)
+NonlinearOperator(:op_f, f)

-julia> @objective(model, Min, udf_f(x))
-udf_f(x)
+julia> @objective(model, Min, op_f(x))
+op_f(x)

-julia> udf_f(2.0)
+julia> op_f(2.0)
 4.0

-julia> model[:udf_f]
-UserDefinedFunction{typeof(f)}(:udf_f, f)
+julia> model[:op_f]
+NonlinearOperator(:op_f, f)

-julia> model[:udf_f](x)
-udf_f(x)
+julia> model[:op_f](x)
+op_f(x)
 ```

 ## Non-macro version

 This macro is provided as helpful syntax that matches the style of the rest of
-the JuMP macros. However, you may also create user-defined functions outside the
-macros using [`add_user_defined_function`](@ref). For example:
+the JuMP macros. However, you may also register operators outside the macro
+using [`register_nonlinear_operator`](@ref). For example:

 ```jldoctest
@@ -965,8 +977,8 @@ julia> model = Model();

 julia> f(x) = x^2
 f (generic function with 1 method)

-julia> @register(model, udf_f, 1, f)
-UserDefinedFunction{typeof(f)}(:udf_f, f)
+julia> @register(model, op_f, 1, f)
+NonlinearOperator(:op_f, f)
 ```
 is equivalent to
 ```jldoctest
@@ -975,14 +987,14 @@ julia> model = Model();

 julia> f(x) = x^2
 f (generic function with 1 method)

-julia> udf_f = model[:udf_f] = add_user_defined_function(model, 1, f; name = :udf_f)
-UserDefinedFunction{typeof(f)}(:udf_f, f)
+julia> op_f = model[:op_f] = register_nonlinear_operator(model, 1, f; name = :op_f)
+NonlinearOperator(:op_f, f)
 ```
 """
 macro register(model, op, args...)
     return _macro_assign_and_return(
         quote
-            add_user_defined_function(
+            register_nonlinear_operator(
                 $(esc(model)),
                 $(esc.(args)...);
                 name = $(Meta.quot(op)),
diff --git a/src/variables.jl b/src/variables.jl
index c66feaf0642..46e63ff2702 100644
--- a/src/variables.jl
+++ b/src/variables.jl
@@ -2043,9 +2043,9 @@ There are three common mistakes that lead to this.
    ```julia
    foo(x) = x $(sym) 1 ? 0 : 1 - x
    model = Model()
-   @register(model, udf_f, 1, foo)
+   @register(model, op_f, 1, foo)
    @variable(model, x)
-   @expression(model, udf_f(x))
+   @expression(model, op_f(x))
    ```

 3. You tried to create a logical nonlinear expression outside a macro, for
diff --git a/test/test_nlp_expr.jl b/test/test_nlp_expr.jl
index 2d661ea8637..aa4a2dddc0a 100644
--- a/test/test_nlp_expr.jl
+++ b/test/test_nlp_expr.jl
@@ -469,6 +469,7 @@ function test_register_univariate()
     model = Model()
     @variable(model, x)
     @register(model, f, 1, x -> x^2)
+    @test f isa NonlinearOperator
     @test isequal_canonical(@expression(model, f(x)), f(x))
     @test isequal_canonical(f(x), GenericNonlinearExpr(:f, Any[x]))
     attrs = MOI.get(model, MOI.ListOfModelAttributesSet())
@@ -555,7 +556,7 @@ function test_register_errors()
     model = Model()
     @test_throws(
         ErrorException(
-            "Unable to register user-defined function because no functions were " *
+            "Unable to register operator because no functions were " *
             "provided. Expected 1 (if function only), 2 (if function and " *
-            "gradient), or 3 (if function, gradient, and hesssian provided)",
+            "gradient), or 3 (if function, gradient, and hessian provided)",
         ),
         @register(model, foo, 1),
     )
     f = x -> x^2
     @test_throws(
         ErrorException(
-            "Unable to register user-defined function foo: invalid number of " *
+            "Unable to register operator foo: invalid number of " *
             "functions provided. Got 4, but expected 1 (if function only), " *
             "2 (if function and gradient), or 3 (if function, gradient, and " *
-            "hesssian provided)",
+            "hessian provided)",
         ),
         @register(model, foo, 2, f, f, f, f),
     )
 end
@@ -610,7 +611,7 @@ function test_value_expression()
     y = QuadExpr(x + 1)
     @test value(f, my_foo(y)) ≈ (value(f, y) - 1)^2
     @test value(f, my_bar(2.2, x)) ≈ sqrt(2.2 - 1.1)
-    bad_udf = UserDefinedFunction(:bad_udf, f)
+    bad_udf = NonlinearOperator(:bad_udf, f)
     @test_throws(
         ErrorException(
             "Unable to evaluate nonlinear operator bad_udf because it is not " *