diff --git a/src/nlp_expr.jl b/src/nlp_expr.jl
index 7d3f87c141f..e77e9cd09da 100644
--- a/src/nlp_expr.jl
+++ b/src/nlp_expr.jl
@@ -383,6 +383,11 @@ for f in (:+, :-, :*, :^, :/, :atan, :min, :max)
     end
 end
 
+# Base has unary methods `min(x::Real) = x` and `max(x::Real) = x`, so we
+# replicate them here for JuMP scalars.
+Base.min(x::AbstractJuMPScalar) = x
+Base.max(x::AbstractJuMPScalar) = x
+
 function _MA.operate!!(
     ::typeof(_MA.add_mul),
     x::GenericNonlinearExpr,
diff --git a/test/test_nlp_expr.jl b/test/test_nlp_expr.jl
index 1c28c9ed6d0..e07dc1ed036 100644
--- a/test/test_nlp_expr.jl
+++ b/test/test_nlp_expr.jl
@@ -954,6 +954,9 @@ function test_operator_min()
     @variable(model, x)
     @test isequal_canonical(min(x, 1), NonlinearExpr(:min, Any[x, 1.0]))
     @test isequal_canonical(min(1, x, x^2), min(min(1.0, x), x^2))
+    for f in (x, 1.0 * x + 2.0, x^2, sin(x))
+        @test min(f) === f
+    end
     return
 end
 
@@ -962,6 +965,9 @@ function test_operator_max()
    @variable(model, x)
     @test isequal_canonical(max(x, 1), NonlinearExpr(:max, Any[x, 1.0]))
     @test isequal_canonical(max(1, x, x^2), max(max(1.0, x), x^2))
+    for f in (x, 1.0 * x + 2.0, x^2, sin(x))
+        @test max(f) === f
+    end
     return
 end
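
For context: Base defines `min(x::Real) = x` and `max(x::Real) = x`, presumably so
that splatted calls such as `min(xs...)` still work when the collection has a
single element; without the two methods added above, the same pattern throws a
`MethodError` for JuMP scalars. Below is a minimal usage sketch of the behavior
the patch enables. It is not part of the patch, and the `model`/`x` setup is an
assumption mirroring the tests:

    using JuMP

    model = Model()
    @variable(model, x)

    # Unary min/max on a JuMP scalar are the identity, as they are for Real.
    f = sin(x)
    @assert min(f) === f
    @assert max(f) === f

    # One payoff: splatting a single-element collection no longer errors.
    exprs = [x^2]
    @assert min(exprs...) === exprs[1]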