From f8e78d46c183953d7b92986b69e302326123fe4f Mon Sep 17 00:00:00 2001 From: odow Date: Tue, 6 Feb 2024 10:28:30 +1300 Subject: [PATCH 01/19] [docs] check termination and primal status after optimize! --- .../tutorials/getting_started/debugging.jl | 1 + .../design_patterns_for_larger_models.jl | 18 ++++++++ .../getting_started_with_JuMP.jl | 8 ++++ .../getting_started_with_data_and_plotting.jl | 5 +++ docs/src/tutorials/linear/callbacks.jl | 42 +++++++++---------- docs/src/tutorials/linear/cannery.jl | 15 ++++--- .../linear/constraint_programming.jl | 16 +++++++ docs/src/tutorials/linear/diet.jl | 9 ++-- .../src/tutorials/linear/facility_location.jl | 4 ++ docs/src/tutorials/linear/factory_schedule.jl | 2 + docs/src/tutorials/linear/finance.jl | 7 ++-- .../tutorials/linear/geographic_clustering.jl | 2 + docs/src/tutorials/linear/knapsack.jl | 2 + docs/src/tutorials/linear/lp_sensitivity.jl | 2 + docs/src/tutorials/linear/mip_duality.jl | 8 ++++ docs/src/tutorials/linear/multi.jl | 6 +-- .../linear/multi_commodity_network.jl | 6 +-- .../linear/multi_objective_examples.jl | 6 +++ .../linear/multi_objective_knapsack.jl | 5 +++ docs/src/tutorials/linear/n-queens.jl | 2 + docs/src/tutorials/linear/network_flows.jl | 6 +++ docs/src/tutorials/linear/piecewise_linear.jl | 10 +++++ docs/src/tutorials/linear/sudoku.jl | 4 ++ docs/src/tutorials/linear/transp.jl | 2 + 24 files changed, 146 insertions(+), 42 deletions(-) diff --git a/docs/src/tutorials/getting_started/debugging.jl b/docs/src/tutorials/getting_started/debugging.jl index a86a89d1d3f..659810c622c 100644 --- a/docs/src/tutorials/getting_started/debugging.jl +++ b/docs/src/tutorials/getting_started/debugging.jl @@ -346,6 +346,7 @@ set_silent(model) # for variables with large positive or negative values in the optimal solution. 
optimize!(model) +@assert termination_status(model) == OPTIMAL for var in all_variables(model) if var == objective continue diff --git a/docs/src/tutorials/getting_started/design_patterns_for_larger_models.jl b/docs/src/tutorials/getting_started/design_patterns_for_larger_models.jl index ea9530beccb..3de330b0a39 100644 --- a/docs/src/tutorials/getting_started/design_patterns_for_larger_models.jl +++ b/docs/src/tutorials/getting_started/design_patterns_for_larger_models.jl @@ -55,6 +55,8 @@ model = Model(HiGHS.Optimizer) @objective(model, Max, sum(profit[i] * x[i] for i in 1:N)) @constraint(model, sum(weight[i] * x[i] for i in 1:N) <= capacity) optimize!(model) +@assert termination_status(model) == OPTIMAL +@assert primal_status(model) == FEASIBLE_POINT value.(x) # The benefits of this approach are: @@ -87,6 +89,8 @@ function solve_knapsack_1(profit::Vector, weight::Vector, capacity::Real) @objective(model, Max, sum(profit[i] * x[i] for i in 1:N)) @constraint(model, sum(weight[i] * x[i] for i in 1:N) <= capacity) optimize!(model) + @assert termination_status(model) == OPTIMAL + @assert primal_status(model) == FEASIBLE_POINT return value.(x) end @@ -159,6 +163,8 @@ function solve_knapsack_2(data::KnapsackData) sum(v.weight * x[k] for (k, v) in data.objects) <= data.capacity, ) optimize!(model) + @assert termination_status(model) == OPTIMAL + @assert primal_status(model) == FEASIBLE_POINT return value.(x) end @@ -230,6 +236,8 @@ function solve_knapsack_3(data::KnapsackData; binary_knapsack::Bool) sum(v.weight * x[k] for (k, v) in data.objects) <= data.capacity, ) optimize!(model) + @assert termination_status(model) == OPTIMAL + @assert primal_status(model) == FEASIBLE_POINT return value.(x) end @@ -272,6 +280,8 @@ function solve_knapsack_4(data::KnapsackData, config::AbstractConfiguration) sum(v.weight * x[k] for (k, v) in data.objects) <= data.capacity, ) optimize!(model) + @assert termination_status(model) == OPTIMAL + @assert primal_status(model) == FEASIBLE_POINT 
return value.(x) end @@ -359,6 +369,8 @@ function solve_knapsack_5(data::KnapsackData, config::AbstractConfiguration) add_knapsack_constraints(model, data, config) add_knapsack_objective(model, data, config) optimize!(model) + @assert termination_status(model) == OPTIMAL + @assert primal_status(model) == FEASIBLE_POINT return value.(model[:x]) end @@ -387,6 +399,9 @@ function solve_knapsack_6( if termination_status(model) != OPTIMAL @warn("Model not solved to optimality") return nothing + elseif primal_status(model) != FEASIBLE_POINT + @warn("No feasible point to return") + return nothing end return value.(model[:x]) end @@ -522,6 +537,9 @@ function _solve_knapsack( if JuMP.termination_status(model) != JuMP.OPTIMAL @warn("Model not solved to optimality") return nothing + elseif JuMP.primal_status(model) != JuMP.FEASIBLE_POINT + @warn("No feasible point to return") + return nothing end return JuMP.value.(model[:x]) end diff --git a/docs/src/tutorials/getting_started/getting_started_with_JuMP.jl b/docs/src/tutorials/getting_started/getting_started_with_JuMP.jl index 8d04bfebab1..bd0dbcdcae9 100644 --- a/docs/src/tutorials/getting_started/getting_started_with_JuMP.jl +++ b/docs/src/tutorials/getting_started/getting_started_with_JuMP.jl @@ -198,6 +198,12 @@ dual_status(model) # Now we know that our solver found an optimal solution, and that it has a # primal and a dual solution to query. 
+# It is good practice in your code to include statements like this so that an +# error will be thrown if the solver did not find an optimal solution: + +@assert termination_status(model) == OPTIMAL +@assert primal_status(model) == FEASIBLE_POINT + # Query the objective value using [`objective_value`](@ref): objective_value(model) @@ -506,4 +512,6 @@ c = [1, 3, 5, 2] @constraint(vector_model, A * x .== b) @objective(vector_model, Min, c' * x) optimize!(vector_model) +@assert termination_status(vector_model) == OPTIMAL +@assert primal_status(vector_model) == FEASIBLE_POINT objective_value(vector_model) diff --git a/docs/src/tutorials/getting_started/getting_started_with_data_and_plotting.jl b/docs/src/tutorials/getting_started/getting_started_with_data_and_plotting.jl index c977d57c5b9..fb543ea8844 100644 --- a/docs/src/tutorials/getting_started/getting_started_with_data_and_plotting.jl +++ b/docs/src/tutorials/getting_started/getting_started_with_data_and_plotting.jl @@ -364,6 +364,11 @@ optimize!(model) solution_summary(model) +# Just to be sure, check that the solver found an optimal solution: + +@assert termination_status(model) == OPTIMAL +@assert primal_status(model) == FEASIBLE_POINT + # ### Solution # Let's have a look at the solution in more detail: diff --git a/docs/src/tutorials/linear/callbacks.jl b/docs/src/tutorials/linear/callbacks.jl index f4517c8f374..d73fd111b8d 100644 --- a/docs/src/tutorials/linear/callbacks.jl +++ b/docs/src/tutorials/linear/callbacks.jl @@ -13,7 +13,7 @@ using JuMP import GLPK import Random -import Test #src +import Test # !!! info # This tutorial uses the [MathOptInterface](@ref moi_documentation) API. 
@@ -60,11 +60,11 @@ function example_lazy_constraint() end set_attribute(model, MOI.LazyConstraintCallback(), my_callback_function) optimize!(model) - Test.@test termination_status(model) == OPTIMAL #src - Test.@test primal_status(model) == FEASIBLE_POINT #src - Test.@test lazy_called #src - Test.@test value(x) == 1 #src - Test.@test value(y) == 2 #src + Test.@test termination_status(model) == OPTIMAL + Test.@test primal_status(model) == FEASIBLE_POINT + Test.@test lazy_called + Test.@test value(x) == 1 + Test.@test value(y) == 2 println("Optimal solution (x, y) = ($(value(x)), $(value(y)))") return end @@ -100,9 +100,9 @@ function example_user_cut_constraint() end set_attribute(model, MOI.UserCutCallback(), my_callback_function) optimize!(model) - Test.@test termination_status(model) == OPTIMAL #src - Test.@test primal_status(model) == FEASIBLE_POINT #src - Test.@test callback_called #src + Test.@test termination_status(model) == OPTIMAL + Test.@test primal_status(model) == FEASIBLE_POINT + Test.@test callback_called @show callback_called return end @@ -128,16 +128,16 @@ function example_heuristic_solution() ret = MOI.submit(model, MOI.HeuristicSolution(cb_data), x, floor.(x_vals)) println("Heuristic solution status = $(ret)") - Test.@test ret in ( #src - MOI.HEURISTIC_SOLUTION_ACCEPTED, #src - MOI.HEURISTIC_SOLUTION_REJECTED, #src - ) #src + Test.@test ret in ( + MOI.HEURISTIC_SOLUTION_ACCEPTED, + MOI.HEURISTIC_SOLUTION_REJECTED, + ) end set_attribute(model, MOI.HeuristicCallback(), my_callback_function) optimize!(model) - Test.@test termination_status(model) == OPTIMAL #src - Test.@test primal_status(model) == FEASIBLE_POINT #src - Test.@test callback_called #src + Test.@test termination_status(model) == OPTIMAL + Test.@test primal_status(model) == FEASIBLE_POINT + Test.@test callback_called return end @@ -174,11 +174,11 @@ function example_solver_dependent_callback() end set_attribute(model, GLPK.CallbackFunction(), my_callback_function) optimize!(model) - 
Test.@test termination_status(model) == OPTIMAL #src - Test.@test primal_status(model) == FEASIBLE_POINT #src - Test.@test lazy_called #src - Test.@test value(x) == 1 #src - Test.@test value(y) == 2 #src + Test.@test termination_status(model) == OPTIMAL + Test.@test primal_status(model) == FEASIBLE_POINT + Test.@test lazy_called + Test.@test value(x) == 1 + Test.@test value(y) == 2 return end diff --git a/docs/src/tutorials/linear/cannery.jl b/docs/src/tutorials/linear/cannery.jl index aaf3c3b344e..fd0993e9390 100644 --- a/docs/src/tutorials/linear/cannery.jl +++ b/docs/src/tutorials/linear/cannery.jl @@ -21,19 +21,19 @@ using JuMP import HiGHS import JSON -import Test #src +import Test # ## Formulation -# The cannery problem assumes we are optimizing the shipment of cases of +# The cannery problem assumes we are optimizing the shipment of cases of # cans from production plants ``p \in P`` to markets ``m \in M``. # Each production plant ``p`` has a capacity ``c_p``, and each market ``m`` -# has a demand ``d_m``. The shipping cost per case of cans from plant ``p`` +# has a demand ``d_m``. The shipping cost per case of cans from plant ``p`` # to market ``m`` is ``d_{p,m}``. # We wish to find the distribution plan ``x_{p,m}``, the number of cases of cans -# to ship from plant ``p`` to market ``m``, for ``p \in P`` and ``m \in M`` +# to ship from plant ``p`` to market ``m``, for ``p \in P`` and ``m \in M`` # that minimizes the shipping costs. We can formulate our problem as the # following linear program: # ```math @@ -121,10 +121,9 @@ solution_summary(model) # What's the optimal shipment? 
+Test.@test termination_status(model) == OPTIMAL +Test.@test primal_status(model) == FEASIBLE_POINT +Test.@test objective_value(model) == 1_680.0 #src for p in P, m in M println(p, " => ", m, ": ", value(x[p, m])) end - -Test.@test termination_status(model) == OPTIMAL #src -Test.@test primal_status(model) == FEASIBLE_POINT #src -Test.@test objective_value(model) == 1_680.0 #src diff --git a/docs/src/tutorials/linear/constraint_programming.jl b/docs/src/tutorials/linear/constraint_programming.jl index ed47265c03f..5039c56ce12 100644 --- a/docs/src/tutorials/linear/constraint_programming.jl +++ b/docs/src/tutorials/linear/constraint_programming.jl @@ -29,6 +29,8 @@ set_silent(model) @variable(model, 1 <= x[1:4] <= 4, Int) @constraint(model, x in MOI.AllDifferent(4)) optimize!(model) +@assert termination_status(model) == OPTIMAL +@assert primal_status(model) == FEASIBLE_POINT value.(x) # ## BinPacking @@ -44,6 +46,8 @@ set_silent(model) @variable(model, 1 <= x[1:length(weights)] <= number_of_bins, Int) @constraint(model, x in MOI.BinPacking(capacity, weights)) optimize!(model) +@assert termination_status(model) == OPTIMAL +@assert primal_status(model) == FEASIBLE_POINT value.(x) # Here, the value of `x[i]` is the bin that item `i` was placed into. 
@@ -59,6 +63,8 @@ set_silent(model) @variable(model, x[1:4], Int) @constraint(model, x in MOI.Circuit(4)) optimize!(model) +@assert termination_status(model) == OPTIMAL +@assert primal_status(model) == FEASIBLE_POINT # Let's see what tour was found, starting at node number `1`: y = round.(Int, value.(x)) @@ -112,6 +118,8 @@ n = 1 # Let's check that we found a valid solution: optimize!(model) +@assert termination_status(model) == OPTIMAL +@assert primal_status(model) == FEASIBLE_POINT value.(x) # ## CountBelongs @@ -130,6 +138,8 @@ set_silent(model) set = Set([2, 3]) @constraint(model, [n; x] in MOI.CountBelongs(1 + length(x), set)) optimize!(model) +@assert termination_status(model) == OPTIMAL +@assert primal_status(model) == FEASIBLE_POINT value(n), value.(x) # ## CountDistinct @@ -144,6 +154,8 @@ set_silent(model) @objective(model, Max, sum(x)) @constraint(model, [n; x] in MOI.CountDistinct(1 + length(x))) optimize!(model) +@assert termination_status(model) == OPTIMAL +@assert primal_status(model) == FEASIBLE_POINT value(n), value.(x) # ## CountGreaterThan @@ -163,6 +175,8 @@ set_silent(model) @objective(model, Max, sum(x)) @constraint(model, [n; y; x] in MOI.CountGreaterThan(1 + 1 + length(x))) optimize!(model) +@assert termination_status(model) == OPTIMAL +@assert primal_status(model) == FEASIBLE_POINT value(n), value(y), value.(x) # Here `n` is strictly greater than the count, and there is no limit on how @@ -187,4 +201,6 @@ set_silent(model) @variable(model, x[i = 1:3], Int) @constraint(model, x in MOI.Table(table)) optimize!(model) +@assert termination_status(model) == OPTIMAL +@assert primal_status(model) == FEASIBLE_POINT value.(x) diff --git a/docs/src/tutorials/linear/diet.jl b/docs/src/tutorials/linear/diet.jl index 6e9ef81915a..3789e9a0de0 100644 --- a/docs/src/tutorials/linear/diet.jl +++ b/docs/src/tutorials/linear/diet.jl @@ -16,7 +16,7 @@ using JuMP import CSV import DataFrames import HiGHS -import Test #hide +import Test # ## Formulation @@ -145,7 
+145,8 @@ print(model) # Let's optimize and take a look at the solution: optimize!(model) -Test.@test primal_status(model) == FEASIBLE_POINT #hide +@assert termination_status(model) == OPTIMAL +@assert primal_status(model) == FEASIBLE_POINT Test.@test objective_value(model) ≈ 11.8288 atol = 1e-4 #hide solution_summary(model) @@ -178,8 +179,8 @@ dairy_foods = ["milk", "ice cream"] is_dairy = map(name -> name in dairy_foods, foods.name) dairy_constraint = @constraint(model, sum(foods[is_dairy, :x]) <= 6) optimize!(model) -Test.@test termination_status(model) == INFEASIBLE #hide -Test.@test primal_status(model) == NO_SOLUTION #hide +Test.@test termination_status(model) == INFEASIBLE +Test.@test primal_status(model) == NO_SOLUTION solution_summary(model) # There exists no feasible solution to our problem. Looks like we're stuck diff --git a/docs/src/tutorials/linear/facility_location.jl b/docs/src/tutorials/linear/facility_location.jl index b435f45f0fc..495229d8b9e 100644 --- a/docs/src/tutorials/linear/facility_location.jl +++ b/docs/src/tutorials/linear/facility_location.jl @@ -130,6 +130,8 @@ set_silent(model) # Solve the uncapacitated facility location problem with HiGHS optimize!(model) +@assert termination_status(model) == OPTIMAL +@assert primal_status(model) == FEASIBLE_POINT println("Optimal value: ", objective_value(model)) # ### Visualizing the solution @@ -256,6 +258,8 @@ set_silent(model) # Solve the problem optimize!(model) +@assert termination_status(model) == OPTIMAL +@assert primal_status(model) == FEASIBLE_POINT println("Optimal value: ", objective_value(model)) # ### Visualizing the solution diff --git a/docs/src/tutorials/linear/factory_schedule.jl b/docs/src/tutorials/linear/factory_schedule.jl index f034a93532b..d94d5c9f582 100644 --- a/docs/src/tutorials/linear/factory_schedule.jl +++ b/docs/src/tutorials/linear/factory_schedule.jl @@ -186,6 +186,8 @@ function solve_factory_scheduling( ) ) optimize!(model) + @assert termination_status(model) == 
OPTIMAL + @assert primal_status(model) == FEASIBLE_POINT schedules = Dict{Symbol,Vector{Float64}}( Symbol(f) => value.(production[:, f]) for f in factories ) diff --git a/docs/src/tutorials/linear/finance.jl b/docs/src/tutorials/linear/finance.jl index 2e376dfd308..1e717d95e26 100644 --- a/docs/src/tutorials/linear/finance.jl +++ b/docs/src/tutorials/linear/finance.jl @@ -92,7 +92,8 @@ end) ) optimize!(financing) - +@assert termination_status(financing) == OPTIMAL +@assert primal_status(financing) == FEASIBLE_POINT objective_value(financing) # ## Combinatorial auctions @@ -136,9 +137,9 @@ auction = Model(HiGHS.Optimizer) for i in 1:6 @constraint(auction, sum(y[j] for j in 1:6 if i in bid_items[j]) <= 1) end - optimize!(auction) - +@assert termination_status(auction) == OPTIMAL +@assert primal_status(auction) == FEASIBLE_POINT objective_value(auction) #- diff --git a/docs/src/tutorials/linear/geographic_clustering.jl b/docs/src/tutorials/linear/geographic_clustering.jl index 448ae2e53df..0ea500bfaba 100644 --- a/docs/src/tutorials/linear/geographic_clustering.jl +++ b/docs/src/tutorials/linear/geographic_clustering.jl @@ -151,6 +151,8 @@ end # We can then call `optimize!` and review the results. optimize!(model) +@assert termination_status(model) == OPTIMAL +@assert primal_status(model) == FEASIBLE_POINT # ### Reviewing the Results diff --git a/docs/src/tutorials/linear/knapsack.jl b/docs/src/tutorials/linear/knapsack.jl index 6e85075a29a..e7eca7ce265 100644 --- a/docs/src/tutorials/linear/knapsack.jl +++ b/docs/src/tutorials/linear/knapsack.jl @@ -96,6 +96,8 @@ print(model) # We can now solve the optimization problem and inspect the results. 
optimize!(model) +@assert termination_status(model) == OPTIMAL +@assert primal_status(model) == FEASIBLE_POINT solution_summary(model) # The items chosen are diff --git a/docs/src/tutorials/linear/lp_sensitivity.jl b/docs/src/tutorials/linear/lp_sensitivity.jl index 5d9e3deecf1..cdd545a5897 100644 --- a/docs/src/tutorials/linear/lp_sensitivity.jl +++ b/docs/src/tutorials/linear/lp_sensitivity.jl @@ -39,6 +39,8 @@ model = Model(HiGHS.Optimizer) @constraint(model, c2, 7x + 12y >= 120) @constraint(model, c3, x + y <= 20) optimize!(model) +@assert termination_status(model) == OPTIMAL +@assert primal_status(model) == FEASIBLE_POINT solution_summary(model; verbose = true) # Can you identify: diff --git a/docs/src/tutorials/linear/mip_duality.jl b/docs/src/tutorials/linear/mip_duality.jl index cb03cacf41b..3c0e5dee0ea 100644 --- a/docs/src/tutorials/linear/mip_duality.jl +++ b/docs/src/tutorials/linear/mip_duality.jl @@ -58,6 +58,8 @@ print(model) # If we optimize this model, we obtain a [`dual_status`](@ref) of [`NO_SOLUTION`](@ref): optimize!(model) +@assert termination_status(model) == OPTIMAL +@assert primal_status(model) == FEASIBLE_POINT dual_status(model) # This is because HiGHS cannot compute the duals of a mixed-integer program. 
We @@ -73,6 +75,8 @@ print(model) # dual: optimize!(model) +@assert termination_status(model) == OPTIMAL +@assert primal_status(model) == FEASIBLE_POINT dual_status(model) # and a marginal price of electricity of \$100/MWh: @@ -94,6 +98,8 @@ print(model) # the [`fix_discrete_variables`](@ref) function: optimize!(model) +@assert termination_status(model) == OPTIMAL +@assert primal_status(model) == FEASIBLE_POINT dual_status(model) #- @@ -113,6 +119,8 @@ print(model) #- optimize!(model) +@assert termination_status(model) == OPTIMAL +@assert primal_status(model) == FEASIBLE_POINT dual_status(model) #- diff --git a/docs/src/tutorials/linear/multi.jl b/docs/src/tutorials/linear/multi.jl index 91cef976613..9cd6d8fcfa3 100644 --- a/docs/src/tutorials/linear/multi.jl +++ b/docs/src/tutorials/linear/multi.jl @@ -24,7 +24,7 @@ import DataFrames import HiGHS import SQLite import Tables -import Test #src +import Test const DBInterface = SQLite.DBInterface @@ -177,8 +177,8 @@ end # Finally, we can optimize the model: optimize!(model) -Test.@test termination_status(model) == OPTIMAL #src -Test.@test primal_status(model) == FEASIBLE_POINT #src +Test.@test termination_status(model) == OPTIMAL +Test.@test primal_status(model) == FEASIBLE_POINT Test.@test objective_value(model) == 225_700.0 #src solution_summary(model) diff --git a/docs/src/tutorials/linear/multi_commodity_network.jl b/docs/src/tutorials/linear/multi_commodity_network.jl index 3f6f234a76b..5623d94107a 100644 --- a/docs/src/tutorials/linear/multi_commodity_network.jl +++ b/docs/src/tutorials/linear/multi_commodity_network.jl @@ -20,7 +20,7 @@ import DataFrames import HiGHS import SQLite import SQLite.DBInterface -import Test #src +import Test # ## Formulation @@ -201,8 +201,8 @@ df = DataFrames.leftjoin( # Finally, we can optimize the model: optimize!(model) -Test.@test termination_status(model) == OPTIMAL #src -Test.@test primal_status(model) == FEASIBLE_POINT #src +Test.@test termination_status(model) == OPTIMAL 
+Test.@test primal_status(model) == FEASIBLE_POINT solution_summary(model) # update the solution in the DataFrames: diff --git a/docs/src/tutorials/linear/multi_objective_examples.jl b/docs/src/tutorials/linear/multi_objective_examples.jl index b4d9c6fa170..87e4fdc412f 100644 --- a/docs/src/tutorials/linear/multi_objective_examples.jl +++ b/docs/src/tutorials/linear/multi_objective_examples.jl @@ -32,11 +32,13 @@ set_silent(model) set_optimizer(model, () -> MOA.Optimizer(HiGHS.Optimizer)) set_attribute(model, MOA.Algorithm(), MOA.Lexicographic()) optimize!(model) +@assert termination_status(model) == OPTIMAL solution_summary(model) #- for i in 1:result_count(model) + @assert primal_status(model; result = i) == FEASIBLE_POINT print(i, ": z = ", round.(Int, objective_value(model; result = i)), " | ") println("x = ", value.([x1, x2]; result = i)) end @@ -60,11 +62,13 @@ set_silent(model) set_optimizer(model, () -> MOA.Optimizer(HiGHS.Optimizer)) set_attribute(model, MOA.Algorithm(), MOA.EpsilonConstraint()) optimize!(model) +@assert termination_status(model) == OPTIMAL solution_summary(model) #- for i in 1:result_count(model) + @assert primal_status(model; result = i) == FEASIBLE_POINT print(i, ": z = ", round.(Int, objective_value(model; result = i)), " | ") println("x = ", round.(Int, value.(x; result = i))) end @@ -104,11 +108,13 @@ set_silent(model) set_optimizer(model, () -> MOA.Optimizer(HiGHS.Optimizer)) set_attribute(model, MOA.Algorithm(), MOA.EpsilonConstraint()) optimize!(model) +@assert termination_status(model) == OPTIMAL solution_summary(model) #- for i in 1:result_count(model) + @assert primal_status(model; result = i) == FEASIBLE_POINT print(i, ": z = ", round.(Int, objective_value(model; result = i)), " | ") X = round.(Int, value.(x; result = i)) print("Path:") diff --git a/docs/src/tutorials/linear/multi_objective_knapsack.jl b/docs/src/tutorials/linear/multi_objective_knapsack.jl index e8f81f92528..5235b114fdb 100644 --- 
a/docs/src/tutorials/linear/multi_objective_knapsack.jl +++ b/docs/src/tutorials/linear/multi_objective_knapsack.jl @@ -121,6 +121,7 @@ set_attribute(model, MOA.Algorithm(), MOA.EpsilonConstraint()) # Let's solve the problem and see the solution optimize!(model) +@assert termination_status(model) == OPTIMAL solution_summary(model) # There are 9 solutions available. We can also use [`result_count`](@ref) to see @@ -137,6 +138,10 @@ solution_summary(model; result = 5) #- +@assert primal_status(model; result = 5) == FEASIBLE_POINT + +#- + objective_value(model; result = 5) # Note that because we set a vector of two objective functions, the objective diff --git a/docs/src/tutorials/linear/n-queens.jl b/docs/src/tutorials/linear/n-queens.jl index 36fba85372f..44e795a38e6 100644 --- a/docs/src/tutorials/linear/n-queens.jl +++ b/docs/src/tutorials/linear/n-queens.jl @@ -66,6 +66,8 @@ end # a feasible solution: optimize!(model) +@assert termination_status(model) == OPTIMAL +@assert primal_status(model) == FEASIBLE_POINT # We can now review the solution that our model found: diff --git a/docs/src/tutorials/linear/network_flows.jl b/docs/src/tutorials/linear/network_flows.jl index b87030a028e..12c87e44c14 100644 --- a/docs/src/tutorials/linear/network_flows.jl +++ b/docs/src/tutorials/linear/network_flows.jl @@ -79,6 +79,8 @@ set_silent(shortest_path) @constraint(shortest_path, [i = 1:n], sum(x[i, :]) - sum(x[:, i]) == b[i],) @objective(shortest_path, Min, sum(G .* x)) optimize!(shortest_path) +@assert termination_status(shortest_path) == OPTIMAL +@assert primal_status(shortest_path) == FEASIBLE_POINT objective_value(shortest_path) #- value.(x) @@ -123,6 +125,8 @@ set_silent(assignment) @constraint(assignment, [j = 1:n], sum(y[j, :]) == 1) @objective(assignment, Max, sum(G .* y)) optimize!(assignment) +@assert termination_status(assignment) == OPTIMAL +@assert primal_status(assignment) == FEASIBLE_POINT objective_value(assignment) #- value.(y) @@ -163,6 +167,8 @@ max_flow = 
Model(HiGHS.Optimizer) @constraint(max_flow, [i = 1:n; i != 1 && i != 8], sum(f[i, :]) == sum(f[:, i])) @objective(max_flow, Max, sum(f[1, :])) optimize!(max_flow) +@assert termination_status(max_flow) == OPTIMAL +@assert primal_status(max_flow) == FEASIBLE_POINT objective_value(max_flow) #- value.(f) diff --git a/docs/src/tutorials/linear/piecewise_linear.jl b/docs/src/tutorials/linear/piecewise_linear.jl index 493604437c0..a6fbfdb05fa 100644 --- a/docs/src/tutorials/linear/piecewise_linear.jl +++ b/docs/src/tutorials/linear/piecewise_linear.jl @@ -52,6 +52,8 @@ function outer_approximate_x_squared(x̄) @objective(model, Min, y) @constraint(model, x == x̄) # <-- a trivial constraint just for testing. optimize!(model) + @assert termination_status(model) == OPTIMAL + @assert primal_status(model) == FEASIBLE_POINT return value(y) end @@ -102,6 +104,8 @@ function outer_approximate_log(x̄) @objective(model, Max, y) @constraint(model, x == x̄) # <-- a trivial constraint just for testing. optimize!(model) + @assert termination_status(model) == OPTIMAL + @assert primal_status(model) == FEASIBLE_POINT return value(y) end @@ -167,6 +171,8 @@ function inner_approximate_x_squared(x̄) @objective(model, Min, y) @constraint(model, x == x̄) # <-- a trivial constraint just for testing. optimize!(model) + @assert termination_status(model) == OPTIMAL + @assert primal_status(model) == FEASIBLE_POINT return value(y) end @@ -209,6 +215,8 @@ function inner_approximate_log(x̄) @objective(model, Max, y) @constraint(model, x == x̄) # <-- a trivial constraint just for testing. optimize!(model) + @assert termination_status(model) == OPTIMAL + @assert primal_status(model) == FEASIBLE_POINT return value(y) end @@ -262,6 +270,8 @@ function piecewise_linear_sin(x̄) end) @constraint(model, x == x̄) # <-- a trivial constraint just for testing. 
optimize!(model) + @assert termination_status(model) == OPTIMAL + @assert primal_status(model) == FEASIBLE_POINT return value(y) end diff --git a/docs/src/tutorials/linear/sudoku.jl b/docs/src/tutorials/linear/sudoku.jl index 8e2c207188c..27b3af247ea 100644 --- a/docs/src/tutorials/linear/sudoku.jl +++ b/docs/src/tutorials/linear/sudoku.jl @@ -134,6 +134,8 @@ end # solve problem optimize!(sudoku) +@assert termination_status(sudoku) == OPTIMAL +@assert primal_status(sudoku) == FEASIBLE_POINT # Extract the values of x x_val = value.(x); @@ -202,6 +204,8 @@ for i in 1:9, j in 1:9 end optimize!(model) +@assert termination_status(model) == OPTIMAL +@assert primal_status(model) == FEASIBLE_POINT # Display the solution diff --git a/docs/src/tutorials/linear/transp.jl b/docs/src/tutorials/linear/transp.jl index dd5ab0e253e..9b16de51c42 100644 --- a/docs/src/tutorials/linear/transp.jl +++ b/docs/src/tutorials/linear/transp.jl @@ -120,6 +120,8 @@ function solve_transportation_problem(data::Containers.DenseAxisArray) @constraint(model, [o in O], sum(x[o, :]) <= data[o, "SUPPLY"]) @constraint(model, [d in D], sum(x[:, d]) == data["DEMAND", d]) optimize!(model) + @assert termination_status(model) == OPTIMAL + @assert primal_status(model) == FEASIBLE_POINT ## Pretty print the solution in the format of the input print(" ", join(lpad.(D, 7, ' '))) for o in O From dcc77d84ed00ffb4c66f6b39de6d14a8b2e76d5c Mon Sep 17 00:00:00 2001 From: odow Date: Tue, 6 Feb 2024 10:41:43 +1300 Subject: [PATCH 02/19] More status checks --- .../tutorials/algorithms/benders_decomposition.jl | 14 ++++++++++++++ .../algorithms/cutting_stock_column_generation.jl | 12 ++++++++++++ docs/src/tutorials/algorithms/parallelism.md | 6 ++++++ .../tutorials/algorithms/tsp_lazy_constraints.jl | 6 ++++++ .../tutorials/applications/optimal_power_flow.jl | 10 +++++++--- docs/src/tutorials/applications/power_systems.jl | 7 +++++++ .../tutorials/applications/two_stage_stochastic.jl | 6 ++++++ 
docs/src/tutorials/applications/web_app.jl | 2 +- 8 files changed, 59 insertions(+), 4 deletions(-) diff --git a/docs/src/tutorials/algorithms/benders_decomposition.jl b/docs/src/tutorials/algorithms/benders_decomposition.jl index 7ca882bedee..4ce1670e174 100644 --- a/docs/src/tutorials/algorithms/benders_decomposition.jl +++ b/docs/src/tutorials/algorithms/benders_decomposition.jl @@ -165,6 +165,8 @@ function solve_subproblem(x) @objective(model, Min, c_2' * y) optimize!(model) @assert termination_status(model) == OPTIMAL + @assert primal_status(model) == FEASIBLE_POINT + @assert dual_status(model) == FEASIBLE_POINT return (obj = objective_value(model), y = value.(y), π = dual.(con)) end @@ -194,6 +196,8 @@ ABSOLUTE_OPTIMALITY_GAP = 1e-6 println("Iteration Lower Bound Upper Bound Gap") for k in 1:MAXIMUM_ITERATIONS optimize!(model) + @assert termination_status(model) == OPTIMAL + @assert primal_status(model) == FEASIBLE_POINT lower_bound = objective_value(model) x_k = value.(x) ret = solve_subproblem(x_k) @@ -211,6 +215,8 @@ end # Finally, we can obtain the optimal solution optimize!(model) +@assert termination_status(model) == OPTIMAL +@assert primal_status(model) == FEASIBLE_POINT Test.@test value.(x) == [0.0, 1.0] #src x_optimal = value.(x) @@ -268,6 +274,8 @@ set_attribute(lazy_model, MOI.LazyConstraintCallback(), my_callback) # Now when we optimize!, our callback is run: optimize!(lazy_model) +@assert termination_status(lazy_model) == OPTIMAL +@assert primal_status(lazy_model) == FEASIBLE_POINT # For this model, the callback algorithm required more solves of the subproblem: @@ -324,6 +332,8 @@ function solve_subproblem(model, x) fix.(model[:x_copy], x) optimize!(model) @assert termination_status(model) == OPTIMAL + @assert primal_status(model) == FEASIBLE_POINT + @assert dual_status(model) == FEASIBLE_POINT return ( obj = objective_value(model), y = value.(model[:y]), @@ -341,6 +351,8 @@ end println("Iteration Lower Bound Upper Bound Gap") for k in
1:MAXIMUM_ITERATIONS optimize!(model) + @assert termination_status(model) == OPTIMAL + @assert primal_status(model) == FEASIBLE_POINT lower_bound = objective_value(model) x_k = value.(x) ret = solve_subproblem(subproblem, x_k) @@ -358,6 +370,8 @@ end # Finally, we can obtain the optimal solution: optimize!(model) +@assert termination_status(model) == OPTIMAL +@assert primal_status(model) == FEASIBLE_POINT Test.@test value.(x) == [0.0, 1.0] #src x_optimal = value.(x) diff --git a/docs/src/tutorials/algorithms/cutting_stock_column_generation.jl b/docs/src/tutorials/algorithms/cutting_stock_column_generation.jl index 93e2623e2d1..1ab7b2e6030 100644 --- a/docs/src/tutorials/algorithms/cutting_stock_column_generation.jl +++ b/docs/src/tutorials/algorithms/cutting_stock_column_generation.jl @@ -235,6 +235,8 @@ set_silent(model) @objective(model, Min, sum(x)) @constraint(model, demand[i in 1:I], patterns[i]' * x >= data.pieces[i].d) optimize!(model) +@assert termination_status(model) == OPTIMAL +@assert primal_status(model) == FEASIBLE_POINT solution_summary(model) # This solution requires 421 rolls. 
This solution is sub-optimal because the @@ -252,6 +254,9 @@ solution_summary(model) unset_integer.(x) optimize!(model) +@assert termination_status(model) == OPTIMAL +@assert primal_status(model) == FEASIBLE_POINT +@assert dual_status(model) == FEASIBLE_POINT π_13 = dual(demand[13]) # Using the economic interpretation of the dual variable, we can say that a one @@ -282,6 +287,8 @@ function solve_pricing(data::Data, π::Vector{Float64}) @constraint(model, sum(data.pieces[i].w * y[i] for i in 1:I) <= data.W) @objective(model, Max, sum(π[i] * y[i] for i in 1:I)) optimize!(model) + @assert termination_status(model) == OPTIMAL + @assert primal_status(model) == FEASIBLE_POINT number_of_rolls_saved = objective_value(model) if number_of_rolls_saved > 1 + 1e-8 ## Benefit of pattern is more than the cost of a new roll plus some @@ -312,6 +319,9 @@ solve_pricing(data, zeros(I)) while true ## Solve the linear relaxation optimize!(model) + @assert termination_status(model) == OPTIMAL + @assert primal_status(model) == FEASIBLE_POINT + @assert dual_status(model) == FEASIBLE_POINT ## Obtain a new dual vector π = dual.(demand) ## Solve the pricing problem @@ -362,6 +372,8 @@ sum(ceil.(Int, solution.rolls)) set_integer.(x) optimize!(model) +@assert termination_status(model) == OPTIMAL +@assert primal_status(model) == FEASIBLE_POINT solution = DataFrames.DataFrame([ (pattern = p, rolls = value(x_p)) for (p, x_p) in enumerate(x) ]) diff --git a/docs/src/tutorials/algorithms/parallelism.md b/docs/src/tutorials/algorithms/parallelism.md index 2af638522cf..919eded26bb 100644 --- a/docs/src/tutorials/algorithms/parallelism.md +++ b/docs/src/tutorials/algorithms/parallelism.md @@ -215,6 +215,8 @@ my_lock = Threads.ReentrantLock() Threads.@threads for i in 1:10 set_lower_bound(x, i) optimize!(model) + @assert termination_status(model) == OPTIMAL + @assert primal_status(model) == FEASIBLE_POINT Threads.lock(my_lock) do push!(solutions, i => objective_value(model)) end @@ -251,6 +253,8 @@ 
julia> Threads.@threads for i in 1:10 @objective(model, Min, x) set_lower_bound(x, i) optimize!(model) + @assert termination_status(model) == OPTIMAL + @assert primal_status(model) == FEASIBLE_POINT Threads.lock(my_lock) do push!(solutions, i => objective_value(model)) end @@ -295,6 +299,8 @@ julia> Distributed.@everywhere begin @objective(model, Min, x) set_lower_bound(x, i) optimize!(model) + @assert termination_status(model) == OPTIMAL + @assert primal_status(model) == FEASIBLE_POINT return objective_value(model) end end diff --git a/docs/src/tutorials/algorithms/tsp_lazy_constraints.jl b/docs/src/tutorials/algorithms/tsp_lazy_constraints.jl index 62eb3d94978..de7e6dcc2c9 100644 --- a/docs/src/tutorials/algorithms/tsp_lazy_constraints.jl +++ b/docs/src/tutorials/algorithms/tsp_lazy_constraints.jl @@ -199,6 +199,8 @@ subtour(x::AbstractMatrix{VariableRef}) = subtour(value.(x)) iterative_model = build_tsp_model(d, n) optimize!(iterative_model) +@assert termination_status(iterative_model) == OPTIMAL +@assert primal_status(iterative_model) == FEASIBLE_POINT time_iterated = solve_time(iterative_model) cycle = subtour(iterative_model[:x]) while 1 < length(cycle) < n @@ -209,6 +211,8 @@ while 1 < length(cycle) < n sum(iterative_model[:x][i, j] for (i, j) in S) <= length(cycle) - 1, ) optimize!(iterative_model) + @assert termination_status(iterative_model) == OPTIMAL + @assert primal_status(iterative_model) == FEASIBLE_POINT global time_iterated += solve_time(iterative_model) global cycle = subtour(iterative_model[:x]) end @@ -262,6 +266,8 @@ set_attribute( subtour_elimination_callback, ) optimize!(lazy_model) +@assert termination_status(lazy_model) == OPTIMAL +@assert primal_status(lazy_model) == FEASIBLE_POINT objective_value(lazy_model) # This finds the same optimal tour: diff --git a/docs/src/tutorials/applications/optimal_power_flow.jl b/docs/src/tutorials/applications/optimal_power_flow.jl index d23b09034ee..e08f72d737f 100644 --- 
a/docs/src/tutorials/applications/optimal_power_flow.jl +++ b/docs/src/tutorials/applications/optimal_power_flow.jl @@ -41,7 +41,7 @@ import DataFrames import Ipopt import LinearAlgebra import SparseArrays -import Test #src +import Test # ## Initial formulation @@ -137,6 +137,8 @@ println("Objective value (basic lower bound) : $basic_lower_bound") @constraint(model, sum(P_G) >= sum(P_Demand)) optimize!(model) +@assert termination_status(model) == LOCALLY_SOLVED +@assert primal_status(model) == FEASIBLE_POINT better_lower_bound = round(objective_value(model); digits = 2) println("Objective value (better lower bound): $better_lower_bound") @@ -280,6 +282,8 @@ P_G = real(S_G) # We're finally ready to solve our nonlinear AC-OPF problem: optimize!(model) +@assert termination_status(model) == LOCALLY_SOLVED +@assert primal_status(model) == FEASIBLE_POINT Test.@test isapprox(objective_value(model), 3087.84; atol = 1e-2) #src solution_summary(model) @@ -419,8 +423,8 @@ optimize!(model) #- sdp_relaxation_lower_bound = round(objective_value(model); digits = 2) -Test.@test termination_status(model) in (OPTIMAL, ALMOST_OPTIMAL) #src -Test.@test primal_status(model) in (FEASIBLE_POINT, NEARLY_FEASIBLE_POINT) #src +Test.@test termination_status(model) in (OPTIMAL, ALMOST_OPTIMAL) +Test.@test primal_status(model) in (FEASIBLE_POINT, NEARLY_FEASIBLE_POINT) Test.@test isapprox(sdp_relaxation_lower_bound, 2753.04; rtol = 1e-3) #src println( "Objective value (W & V relax. 
lower bound): $sdp_relaxation_lower_bound", diff --git a/docs/src/tutorials/applications/power_systems.jl b/docs/src/tutorials/applications/power_systems.jl index b5e9a5d289a..307335418aa 100644 --- a/docs/src/tutorials/applications/power_systems.jl +++ b/docs/src/tutorials/applications/power_systems.jl @@ -115,6 +115,8 @@ function solve_economic_dispatch(generators::Vector, wind, scenario) @constraint(model, sum(g[i] for i in 1:N) + w == scenario.demand) ## Solve statement optimize!(model) + @assert termination_status(model) == OPTIMAL + @assert primal_status(model) == FEASIBLE_POINT ## return the optimal value of the objective function and its minimizers return ( g = value.(g), @@ -216,6 +218,8 @@ function solve_economic_dispatch_inplace( wind.variable_cost * w, ) optimize!(model) + @assert termination_status(model) == OPTIMAL + @assert primal_status(model) == FEASIBLE_POINT push!(obj_out, objective_value(model)) push!(w_out, value(w)) push!(g1_out, value(g[1])) @@ -382,6 +386,7 @@ function solve_unit_commitment(generators::Vector, wind, scenario) if status != OPTIMAL return (status = status,) end + @assert primal_status(model) == FEASIBLE_POINT return ( status = status, g = value.(g), @@ -525,6 +530,8 @@ function solve_nonlinear_economic_dispatch( ) @constraint(model, sum(g[i] for i in 1:N) + sqrt(w) == scenario.demand) optimize!(model) + @assert termination_status(model) == LOCALLY_SOLVED + @assert primal_status(model) == FEASIBLE_POINT return ( g = value.(g), w = value(w), diff --git a/docs/src/tutorials/applications/two_stage_stochastic.jl b/docs/src/tutorials/applications/two_stage_stochastic.jl index 8b87bd745fb..91b2169c31d 100644 --- a/docs/src/tutorials/applications/two_stage_stochastic.jl +++ b/docs/src/tutorials/applications/two_stage_stochastic.jl @@ -86,6 +86,8 @@ set_silent(model) @expression(model, z[ω in Ω], 5y[ω] - 0.1 * (x - y[ω])) @objective(model, Max, -2x + sum(P[ω] * z[ω] for ω in Ω)) optimize!(model) +@assert termination_status(model) == 
OPTIMAL +@assert primal_status(model) == FEASIBLE_POINT solution_summary(model) # The optimal number of pies to make is: @@ -158,6 +160,8 @@ function CVaR(Z::Vector{Float64}, P::Vector{Float64}; γ::Float64) @constraint(model, [i in 1:N], z[i] >= ξ - Z[i]) @objective(model, Max, ξ - 1 / γ * sum(P[i] * z[i] for i in 1:N)) optimize!(model) + @assert termination_status(model) == OPTIMAL + @assert primal_status(model) == FEASIBLE_POINT return objective_value(model) end @@ -216,6 +220,8 @@ set_silent(model) @constraint(model, [ω in Ω], z[ω] >= ξ - Z[ω]) @objective(model, Max, -2x + ξ - 1 / γ * sum(P[ω] * z[ω] for ω in Ω)) optimize!(model) +@assert termination_status(model) == OPTIMAL +@assert primal_status(model) == FEASIBLE_POINT # When ``\gamma = 0.4``, the optimal number of pies to bake is: diff --git a/docs/src/tutorials/applications/web_app.jl b/docs/src/tutorials/applications/web_app.jl index 7481899871e..4d90fe619e1 100644 --- a/docs/src/tutorials/applications/web_app.jl +++ b/docs/src/tutorials/applications/web_app.jl @@ -57,7 +57,7 @@ function endpoint_solve(params::Dict{String,Any}) "primal_status" => primal_status(model), ) ## Only include the `x` key if it has a value. 
- if has_values(model) + if primal_status(model) == FEASIBLE_POINT ret["x"] = value(x) end return ret From e9cfb57f0b9ace11e318f37fe0c60f10ade6d56e Mon Sep 17 00:00:00 2001 From: odow Date: Tue, 6 Feb 2024 10:46:38 +1300 Subject: [PATCH 03/19] Add more --- docs/src/tutorials/nonlinear/classifiers.jl | 14 +++++++------- docs/src/tutorials/nonlinear/complementarity.jl | 10 ++++++++++ docs/src/tutorials/nonlinear/nested_problems.jl | 5 +++++ docs/src/tutorials/nonlinear/portfolio.jl | 3 +++ docs/src/tutorials/nonlinear/querying_hessians.jl | 2 ++ docs/src/tutorials/nonlinear/rocket_control.jl | 2 ++ docs/src/tutorials/nonlinear/simple_examples.jl | 12 ++++++++---- .../nonlinear/space_shuttle_reentry_trajectory.jl | 2 ++ docs/src/tutorials/nonlinear/tips_and_tricks.jl | 4 ++++ .../tutorials/nonlinear/user_defined_hessians.jl | 2 ++ 10 files changed, 45 insertions(+), 11 deletions(-) diff --git a/docs/src/tutorials/nonlinear/classifiers.jl b/docs/src/tutorials/nonlinear/classifiers.jl index 73a47342e1a..9d2a051c2fe 100644 --- a/docs/src/tutorials/nonlinear/classifiers.jl +++ b/docs/src/tutorials/nonlinear/classifiers.jl @@ -25,7 +25,7 @@ import Ipopt import LinearAlgebra import Plots import Random -import Test #src +import Test # ## Data and visualisation @@ -127,8 +127,8 @@ function solve_SVM_classifier(P::Matrix, labels::Vector; C::Float64 = C_0) D = LinearAlgebra.Diagonal(labels) @constraint(model, D * (P * w .- g) .+ y .>= 1) optimize!(model) - Test.@test termination_status(model) == LOCALLY_SOLVED #src - Test.@test primal_status(model) == FEASIBLE_POINT #src + Test.@test termination_status(model) == LOCALLY_SOLVED + Test.@test primal_status(model) == FEASIBLE_POINT slack = extrema(value.(y)) println("Minimum slack: ", slack[1], "\nMaximum slack: ", slack[2]) classifier(x) = line(x; w = value.(w), g = value(g)) @@ -234,8 +234,8 @@ function solve_dual_SVM_classifier(P::Matrix, labels::Vector; C::Float64 = C_0) @objective(model, Min, 1 / 2 * u' * D * P * P' * D * u - 
sum(u)) @constraint(model, con, sum(D * u) == 0) optimize!(model) - Test.@test termination_status(model) == LOCALLY_SOLVED #src - Test.@test primal_status(model) == FEASIBLE_POINT #src + Test.@test termination_status(model) == LOCALLY_SOLVED + Test.@test primal_status(model) == FEASIBLE_POINT w = P' * D * value.(u) g = dual(con) classifier(x) = line(x; w = w, g = g) @@ -322,8 +322,8 @@ function solve_kernel_SVM_classifier( con = @constraint(model, sum(D * u) == 0) @objective(model, Min, 1 / 2 * u' * D * K * D * u - sum(u)) optimize!(model) - Test.@test termination_status(model) == LOCALLY_SOLVED #src - Test.@test primal_status(model) == FEASIBLE_POINT #src + Test.@test termination_status(model) == LOCALLY_SOLVED + Test.@test primal_status(model) == FEASIBLE_POINT u_sol, g_sol = value.(u), dual(con) function classifier(v::Vector) return sum( diff --git a/docs/src/tutorials/nonlinear/complementarity.jl b/docs/src/tutorials/nonlinear/complementarity.jl index 3b5024a018a..702c86406a9 100644 --- a/docs/src/tutorials/nonlinear/complementarity.jl +++ b/docs/src/tutorials/nonlinear/complementarity.jl @@ -47,6 +47,8 @@ set_silent(model) @variable(model, 0 <= x[1:4] <= 10, start = 0) @constraint(model, M * x + q ⟂ x) optimize!(model) +@assert termination_status(model) == LOCALLY_SOLVED +@assert primal_status(model) == FEASIBLE_POINT Test.@test value.(x) ≈ [2.8, 0.0, 0.8, 1.2] #src value.(x) @@ -67,6 +69,8 @@ set_silent(model) @constraint(model, w + 2x - 2y + 4z - 6 ⟂ z) @constraint(model, w - x + 2y - 2z - 2 ⟂ y) optimize!(model) +@assert termination_status(model) == LOCALLY_SOLVED +@assert primal_status(model) == FEASIBLE_POINT Test.@test value.([w, x, y, z]) ≈ [2.8, 0.0, 0.8, 1.2] #src value.([w, x, y, z]) @@ -102,6 +106,8 @@ set_silent(model) end ) optimize!(model) +@assert termination_status(model) == LOCALLY_SOLVED +@assert primal_status(model) == FEASIBLE_POINT Test.@test isapprox(value(p["new-york"]), 0.225; atol = 1e-3) #src value.(p) @@ -137,6 +143,8 @@ 
set_silent(model) end ) optimize!(model) +@assert termination_status(model) == LOCALLY_SOLVED +@assert primal_status(model) == FEASIBLE_POINT Test.@test isapprox(value(C_G), 0.996; atol = 1e-3) #src value(K) @@ -189,6 +197,8 @@ set_silent(model) ## Production does not exceed capacity @constraint(model, [ω = 1:5], x - Y[ω] ⟂ μ[ω]) optimize!(model) +@assert termination_status(model) == LOCALLY_SOLVED +@assert primal_status(model) == FEASIBLE_POINT solution_summary(model) # An equilibrium solution is to build 389 MW: diff --git a/docs/src/tutorials/nonlinear/nested_problems.jl b/docs/src/tutorials/nonlinear/nested_problems.jl index 5ee010472e3..9a6972e93d5 100644 --- a/docs/src/tutorials/nonlinear/nested_problems.jl +++ b/docs/src/tutorials/nonlinear/nested_problems.jl @@ -87,6 +87,7 @@ function solve_lower_level(x...) @constraint(model, (y[1] - 10)^2 + (y[2] - 10)^2 <= 25) optimize!(model) @assert termination_status(model) == LOCALLY_SOLVED + @assert primal_status(model) == FEASIBLE_POINT return objective_value(model), value.(y) end @@ -149,6 +150,8 @@ model = Model(Ipopt.Optimizer) @operator(model, op_V, 2, V, ∇V, ∇²V) @objective(model, Min, x[1]^2 + x[2]^2 + op_V(x[1], x[2])) optimize!(model) +@assert termination_status(model) == LOCALLY_SOLVED +@assert primal_status(model) == FEASIBLE_POINT solution_summary(model) # The optimal objective value is: @@ -228,6 +231,8 @@ cache = Cache(Float64[], NaN, Float64[]) ) @objective(model, Min, x[1]^2 + x[2]^2 + op_cached_f(x[1], x[2])) optimize!(model) +@assert termination_status(model) == LOCALLY_SOLVED +@assert primal_status(model) == FEASIBLE_POINT solution_summary(model) # an we can check we get the same objective value: diff --git a/docs/src/tutorials/nonlinear/portfolio.jl b/docs/src/tutorials/nonlinear/portfolio.jl index 666d92b8ab9..4fc9dd66283 100644 --- a/docs/src/tutorials/nonlinear/portfolio.jl +++ b/docs/src/tutorials/nonlinear/portfolio.jl @@ -158,6 +158,8 @@ set_silent(model) @constraint(model, sum(x) <= 1000) 
@constraint(model, r' * x >= 50) optimize!(model) +@assert termination_status(model) == LOCALLY_SOLVED +@assert primal_status(model) == FEASIBLE_POINT solution_summary(model) # The optimal allocation of our assets is: @@ -209,6 +211,7 @@ set_optimizer_attribute(model, MOA.SolutionLimit(), 50) ## a single objective sense `Min`, and negate any `Max` objectives: @objective(model, Min, [variance, -expected_return]) optimize!(model) +@assert termination_status(model) == OPTIMAL solution_summary(model) # The algorithm found 50 different solutions. Let's plot them to see how they diff --git a/docs/src/tutorials/nonlinear/querying_hessians.jl b/docs/src/tutorials/nonlinear/querying_hessians.jl index d4a44ee72cc..f488416e459 100644 --- a/docs/src/tutorials/nonlinear/querying_hessians.jl +++ b/docs/src/tutorials/nonlinear/querying_hessians.jl @@ -71,6 +71,8 @@ set_silent(model) @constraint(model, g_2, (x[1] + x[2])^2 <= 2) @objective(model, Min, (1 - x[1])^2 + 100 * (x[2] - x[1]^2)^2) optimize!(model) +@assert termination_status(model) == LOCALLY_SOLVED +@assert primal_status(model) == FEASIBLE_POINT # ## The analytic solution diff --git a/docs/src/tutorials/nonlinear/rocket_control.jl b/docs/src/tutorials/nonlinear/rocket_control.jl index 33b3895b834..ceb4710e53a 100644 --- a/docs/src/tutorials/nonlinear/rocket_control.jl +++ b/docs/src/tutorials/nonlinear/rocket_control.jl @@ -127,6 +127,8 @@ ddt(x::Vector, t::Int) = (x[t] - x[t-1]) / Δt # Now we optimize the model and check that we found a solution: optimize!(model) +@assert termination_status(model) == LOCALLY_SOLVED +@assert primal_status(model) == FEASIBLE_POINT solution_summary(model) # Finally, we plot the solution: diff --git a/docs/src/tutorials/nonlinear/simple_examples.jl b/docs/src/tutorials/nonlinear/simple_examples.jl index 3a8ae666cba..000857d44d3 100644 --- a/docs/src/tutorials/nonlinear/simple_examples.jl +++ b/docs/src/tutorials/nonlinear/simple_examples.jl @@ -87,8 +87,8 @@ function example_clnlbeam() 
primal_status = $(primal_status(model)) objective_value = $(objective_value(model)) """) - Test.@test termination_status(model) == LOCALLY_SOLVED #src - Test.@test primal_status(model) == FEASIBLE_POINT #src + Test.@test termination_status(model) == LOCALLY_SOLVED + Test.@test primal_status(model) == FEASIBLE_POINT Test.@test objective_value(model) ≈ 350.0 #src return end @@ -116,6 +116,8 @@ function example_mle() sum((data[i] - μ)^2 for i in 1:n) / (2 * σ^2) ) optimize!(model) + @assert termination_status(model) == LOCALLY_SOLVED + @assert primal_status(model) == FEASIBLE_POINT println("μ = ", value(μ)) println("mean(data) = ", Statistics.mean(data)) println("σ^2 = ", value(σ)^2) @@ -126,6 +128,8 @@ function example_mle() ## You can even do constrained MLE! @constraint(model, μ == σ^2) optimize!(model) + @assert termination_status(model) == LOCALLY_SOLVED + @assert primal_status(model) == FEASIBLE_POINT Test.@test value(μ) ≈ value(σ)^2 println() println("With constraint μ == σ^2:") @@ -153,12 +157,12 @@ function example_qcp() @constraint(model, x * x + y * y - z * z <= 0) @constraint(model, x * x - y * z <= 0) optimize!(model) + Test.@test termination_status(model) == LOCALLY_SOLVED + Test.@test primal_status(model) == FEASIBLE_POINT print(model) println("Objective value: ", objective_value(model)) println("x = ", value(x)) println("y = ", value(y)) - Test.@test termination_status(model) == LOCALLY_SOLVED - Test.@test primal_status(model) == FEASIBLE_POINT Test.@test objective_value(model) ≈ 0.32699 atol = 1e-5 Test.@test value(x) ≈ 0.32699 atol = 1e-5 Test.@test value(y) ≈ 0.25707 atol = 1e-5 diff --git a/docs/src/tutorials/nonlinear/space_shuttle_reentry_trajectory.jl b/docs/src/tutorials/nonlinear/space_shuttle_reentry_trajectory.jl index a775ee2c509..ed69515e8e7 100644 --- a/docs/src/tutorials/nonlinear/space_shuttle_reentry_trajectory.jl +++ b/docs/src/tutorials/nonlinear/space_shuttle_reentry_trajectory.jl @@ -304,6 +304,8 @@ end set_silent(model) # Hide 
solver's verbose output optimize!(model) # Solve for the control and state @assert termination_status(model) == LOCALLY_SOLVED +@assert primal_status(model) == FEASIBLE_POINT + ## Show final cross-range of the solution println( diff --git a/docs/src/tutorials/nonlinear/tips_and_tricks.jl b/docs/src/tutorials/nonlinear/tips_and_tricks.jl index 26f96d0d97f..4720373de7b 100644 --- a/docs/src/tutorials/nonlinear/tips_and_tricks.jl +++ b/docs/src/tutorials/nonlinear/tips_and_tricks.jl @@ -52,6 +52,8 @@ set_silent(model) @constraint(model, op_foo_2(x[1], x[2]) <= 2) function_calls = 0 optimize!(model) +@assert termination_status(model) == LOCALLY_SOLVED +@assert primal_status(model) == FEASIBLE_POINT Test.@test objective_value(model) ≈ √3 atol = 1e-4 Test.@test value.(x) ≈ [1.0, 1.0] atol = 1e-4 println("Naive approach: function calls = $(function_calls)") @@ -120,6 +122,8 @@ set_silent(model) @constraint(model, op_foo_2(x[1], x[2]) <= 2) function_calls = 0 optimize!(model) +@assert termination_status(model) == LOCALLY_SOLVED +@assert primal_status(model) == FEASIBLE_POINT Test.@test objective_value(model) ≈ √3 atol = 1e-4 Test.@test value.(x) ≈ [1.0, 1.0] atol = 1e-4 println("Memoized approach: function_calls = $(function_calls)") diff --git a/docs/src/tutorials/nonlinear/user_defined_hessians.jl b/docs/src/tutorials/nonlinear/user_defined_hessians.jl index b7f75e42d20..9fc8eb2b9d4 100644 --- a/docs/src/tutorials/nonlinear/user_defined_hessians.jl +++ b/docs/src/tutorials/nonlinear/user_defined_hessians.jl @@ -72,4 +72,6 @@ model = Model(Ipopt.Optimizer) @operator(model, op_rosenbrock, 2, rosenbrock, ∇rosenbrock, ∇²rosenbrock) @objective(model, Min, op_rosenbrock(x[1], x[2])) optimize!(model) +@assert termination_status(model) == LOCALLY_SOLVED +@assert primal_status(model) == FEASIBLE_POINT solution_summary(model; verbose = true) From 5334fbd4153dac46afd71c709d16a46ed5ed8252 Mon Sep 17 00:00:00 2001 From: odow Date: Tue, 6 Feb 2024 10:52:01 +1300 Subject: [PATCH 04/19] 
Update --- docs/src/tutorials/algorithms/benders_decomposition.jl | 4 ++-- .../tutorials/nonlinear/space_shuttle_reentry_trajectory.jl | 1 - 2 files changed, 2 insertions(+), 3 deletions(-) diff --git a/docs/src/tutorials/algorithms/benders_decomposition.jl b/docs/src/tutorials/algorithms/benders_decomposition.jl index 4ce1670e174..5492265d40e 100644 --- a/docs/src/tutorials/algorithms/benders_decomposition.jl +++ b/docs/src/tutorials/algorithms/benders_decomposition.jl @@ -166,7 +166,7 @@ function solve_subproblem(x) optimize!(model) @assert termination_status(model) == OPTIMAL @assert primal_status(model) == FEASIBLE_POINT - @assert dual_solution(model) == FEASIBLE_POINT + @assert dual_status(model) == FEASIBLE_POINT return (obj = objective_value(model), y = value.(y), π = dual.(con)) end @@ -333,7 +333,7 @@ function solve_subproblem(model, x) optimize!(model) @assert termination_status(model) == OPTIMAL @assert primal_status(model) == FEASIBLE_POINT - @assert dual_solution(model) == FEASIBLE_POINT + @assert dual_status(model) == FEASIBLE_POINT return ( obj = objective_value(model), y = value.(model[:y]), diff --git a/docs/src/tutorials/nonlinear/space_shuttle_reentry_trajectory.jl b/docs/src/tutorials/nonlinear/space_shuttle_reentry_trajectory.jl index ed69515e8e7..6f9e4da41b6 100644 --- a/docs/src/tutorials/nonlinear/space_shuttle_reentry_trajectory.jl +++ b/docs/src/tutorials/nonlinear/space_shuttle_reentry_trajectory.jl @@ -306,7 +306,6 @@ optimize!(model) # Solve for the control and state @assert termination_status(model) == LOCALLY_SOLVED @assert primal_status(model) == FEASIBLE_POINT - ## Show final cross-range of the solution println( "Final latitude θ = ", From 736c7455fe38a5cf0e3f43ef364420fe1c74e740 Mon Sep 17 00:00:00 2001 From: odow Date: Wed, 7 Feb 2024 09:22:04 +1300 Subject: [PATCH 05/19] Add has_optimal_solution --- docs/src/manual/solutions.md | 28 +++++++++++++ .../algorithms/benders_decomposition.jl | 23 ++++------- 
.../cutting_stock_column_generation.jl | 17 +++----- docs/src/tutorials/algorithms/parallelism.md | 9 ++--- .../algorithms/tsp_lazy_constraints.jl | 9 ++--- .../applications/optimal_power_flow.jl | 6 +-- .../tutorials/applications/power_systems.jl | 9 ++--- .../applications/two_stage_stochastic.jl | 9 ++--- docs/src/tutorials/conic/ellipse_approx.jl | 3 +- docs/src/tutorials/conic/min_ellipse.jl | 5 +-- docs/src/tutorials/conic/simple_examples.jl | 6 +-- .../tutorials/getting_started/debugging.jl | 2 +- .../design_patterns_for_larger_models.jl | 23 ++++------- .../getting_started_with_JuMP.jl | 8 ++-- .../getting_started_with_data_and_plotting.jl | 3 +- docs/src/tutorials/linear/callbacks.jl | 12 ++---- docs/src/tutorials/linear/cannery.jl | 3 +- .../linear/constraint_programming.jl | 24 ++++-------- docs/src/tutorials/linear/diet.jl | 4 +- .../src/tutorials/linear/facility_location.jl | 6 +-- docs/src/tutorials/linear/factory_schedule.jl | 3 +- docs/src/tutorials/linear/finance.jl | 6 +-- .../tutorials/linear/geographic_clustering.jl | 3 +- docs/src/tutorials/linear/knapsack.jl | 6 +-- docs/src/tutorials/linear/lp_sensitivity.jl | 3 +- docs/src/tutorials/linear/mip_duality.jl | 12 ++---- docs/src/tutorials/linear/multi.jl | 3 +- .../linear/multi_commodity_network.jl | 3 +- .../linear/multi_objective_examples.jl | 9 ++--- .../linear/multi_objective_knapsack.jl | 4 ++ docs/src/tutorials/linear/n-queens.jl | 3 +- docs/src/tutorials/linear/network_flows.jl | 9 ++--- docs/src/tutorials/linear/piecewise_linear.jl | 15 +++---- docs/src/tutorials/linear/sudoku.jl | 6 +-- docs/src/tutorials/linear/transp.jl | 3 +- docs/src/tutorials/nonlinear/classifiers.jl | 9 ++--- .../tutorials/nonlinear/complementarity.jl | 15 +++---- .../tutorials/nonlinear/nested_problems.jl | 9 ++--- docs/src/tutorials/nonlinear/portfolio.jl | 3 +- .../tutorials/nonlinear/querying_hessians.jl | 3 +- .../src/tutorials/nonlinear/rocket_control.jl | 3 +- .../tutorials/nonlinear/simple_examples.jl | 15 
+++---- .../space_shuttle_reentry_trajectory.jl | 3 +- .../tutorials/nonlinear/tips_and_tricks.jl | 6 +-- .../nonlinear/user_defined_hessians.jl | 3 +- src/optimizer_interface.jl | 39 +++++++++++++++++++ 46 files changed, 182 insertions(+), 223 deletions(-) diff --git a/docs/src/manual/solutions.md b/docs/src/manual/solutions.md index 823f9c22734..ef814441d73 100644 --- a/docs/src/manual/solutions.md +++ b/docs/src/manual/solutions.md @@ -32,6 +32,34 @@ Subject to y[b] ≤ 1 ``` +## Check if optimal solution exists + +Use [`has_optimal_solution`](@ref) to check if the solver found an optimal +solution: +```julia solutions +julia> has_optimal_solution(model) +true +``` + +By default [`has_optimal_solution`](@ref) returns `true` for both global and +local optima. Pass `allow_local = false` to check if the solver found a globally +optimal solution: +```julia solutions +julia> has_optimal_solution(model; allow_local = false) +true +``` + +Pass `dual = true` to check if the solver found an optimal dual solution in +addition to an optimal primal solution: +```julia solutions +julia> has_optimal_solution(model; dual = true) +true +``` + +If this function returns `false`, use the functions mentioned below like +[`solution_summary`](@ref), [`termination_status`](@ref), [`primal_status`](@ref), +and [`dual_status`](@ref) to understand what solution (if any) the solver found.
+ ## Solutions summary [`solution_summary`](@ref) can be used for checking the summary of the diff --git a/docs/src/tutorials/algorithms/benders_decomposition.jl b/docs/src/tutorials/algorithms/benders_decomposition.jl index 5492265d40e..f96905fb2aa 100644 --- a/docs/src/tutorials/algorithms/benders_decomposition.jl +++ b/docs/src/tutorials/algorithms/benders_decomposition.jl @@ -164,9 +164,7 @@ function solve_subproblem(x) con = @constraint(model, A_2 * y .<= b - A_1 * x) @objective(model, Min, c_2' * y) optimize!(model) - @assert termination_status(model) == OPTIMAL - @assert primal_status(model) == FEASIBLE_POINT - @assert dual_status(model) == FEASIBLE_POINT + @assert has_optimal_solution(model; dual = true) return (obj = objective_value(model), y = value.(y), π = dual.(con)) end @@ -196,8 +194,7 @@ ABSOLUTE_OPTIMALITY_GAP = 1e-6 println("Iteration Lower Bound Upper Bound Gap") for k in 1:MAXIMUM_ITERATIONS optimize!(model) - @assert termination_status(model) == OPTIMAL - @assert primal_status(model) == FEASIBLE_POINT + @assert has_optimal_solution(model) lower_bound = objective_value(model) x_k = value.(x) ret = solve_subproblem(x_k) @@ -215,8 +212,7 @@ end # Finally, we can obtain the optimal solution optimize!(model) -@assert termination_status(model) == OPTIMAL -@assert primal_status(model) == FEASIBLE_POINT +@assert has_optimal_solution(model) Test.@test value.(x) == [0.0, 1.0] #src x_optimal = value.(x) @@ -274,8 +270,7 @@ set_attribute(lazy_model, MOI.LazyConstraintCallback(), my_callback) # Now when we optimize!, our callback is run: optimize!(lazy_model) -@assert termination_status(lazy_model) == OPTIMAL -@assert primal_status(lazy_model) == FEASIBLE_POINT +@assert has_optimal_solution(lazy_model) # For this model, the callback algorithm required more solves of the subproblem: @@ -331,9 +326,7 @@ print(subproblem) function solve_subproblem(model, x) fix.(model[:x_copy], x) optimize!(model) - @assert termination_status(model) == OPTIMAL - @assert 
primal_status(model) == FEASIBLE_POINT - @assert dual_status(model) == FEASIBLE_POINT + @assert has_optimal_solution(model; dual = true) return ( obj = objective_value(model), y = value.(model[:y]), @@ -351,8 +344,7 @@ end println("Iteration Lower Bound Upper Bound Gap") for k in 1:MAXIMUM_ITERATIONS optimize!(model) - @assert termination_status(model) == OPTIMAL - @assert primal_status(model) == FEASIBLE_POINT + @assert has_optimal_solution(model) lower_bound = objective_value(model) x_k = value.(x) ret = solve_subproblem(subproblem, x_k) @@ -370,8 +362,7 @@ end # Finally, we can obtain the optimal solution: optimize!(model) -@assert termination_status(model) == OPTIMAL -@assert primal_status(model) == FEASIBLE_POINT +@assert has_optimal_solution(model) Test.@test value.(x) == [0.0, 1.0] #src x_optimal = value.(x) diff --git a/docs/src/tutorials/algorithms/cutting_stock_column_generation.jl b/docs/src/tutorials/algorithms/cutting_stock_column_generation.jl index 1ab7b2e6030..1a2ef3ff6d7 100644 --- a/docs/src/tutorials/algorithms/cutting_stock_column_generation.jl +++ b/docs/src/tutorials/algorithms/cutting_stock_column_generation.jl @@ -235,8 +235,7 @@ set_silent(model) @objective(model, Min, sum(x)) @constraint(model, demand[i in 1:I], patterns[i]' * x >= data.pieces[i].d) optimize!(model) -@assert termination_status(model) == OPTIMAL -@assert primal_status(model) == FEASIBLE_POINT +@assert has_optimal_solution(model) solution_summary(model) # This solution requires 421 rolls. 
This solution is sub-optimal because the @@ -254,9 +253,7 @@ solution_summary(model) unset_integer.(x) optimize!(model) -@assert termination_status(model) == OPTIMAL -@assert primal_status(model) == FEASIBLE_POINT -@assert dual_status(model) == FEASIBLE_POINT +@assert has_optimal_solution(model; dual = true) π_13 = dual(demand[13]) # Using the economic interpretation of the dual variable, we can say that a one @@ -287,8 +284,7 @@ function solve_pricing(data::Data, π::Vector{Float64}) @constraint(model, sum(data.pieces[i].w * y[i] for i in 1:I) <= data.W) @objective(model, Max, sum(π[i] * y[i] for i in 1:I)) optimize!(model) - @assert termination_status(model) == OPTIMAL - @assert primal_status(model) == FEASIBLE_POINT + @assert has_optimal_solution(model) number_of_rolls_saved = objective_value(model) if number_of_rolls_saved > 1 + 1e-8 ## Benefit of pattern is more than the cost of a new roll plus some @@ -319,9 +315,7 @@ solve_pricing(data, zeros(I)) while true ## Solve the linear relaxation optimize!(model) - @assert termination_status(model) == OPTIMAL - @assert primal_status(model) == FEASIBLE_POINT - @assert dual_status(model) == FEASIBLE_POINT + @assert has_optimal_solution(model; dual = true) ## Obtain a new dual vector π = dual.(demand) ## Solve the pricing problem @@ -372,8 +366,7 @@ sum(ceil.(Int, solution.rolls)) set_integer.(x) optimize!(model) -@assert termination_status(model) == OPTIMAL -@assert primal_status(model) == FEASIBLE_POINT +@assert has_optimal_solution(model) solution = DataFrames.DataFrame([ (pattern = p, rolls = value(x_p)) for (p, x_p) in enumerate(x) ]) diff --git a/docs/src/tutorials/algorithms/parallelism.md b/docs/src/tutorials/algorithms/parallelism.md index 919eded26bb..97f10b8d3d2 100644 --- a/docs/src/tutorials/algorithms/parallelism.md +++ b/docs/src/tutorials/algorithms/parallelism.md @@ -215,8 +215,7 @@ my_lock = Threads.ReentrantLock() Threads.@threads for i in 1:10 set_lower_bound(x, i) optimize!(model) - @assert 
termination_status(model) == OPTIMAL - @assert primal_status(model) == FEASIBLE_POINT + @assert has_optimal_solution(model) Threads.lock(my_lock) do push!(solutions, i => objective_value(model)) end @@ -253,8 +252,7 @@ julia> Threads.@threads for i in 1:10 @objective(model, Min, x) set_lower_bound(x, i) optimize!(model) - @assert termination_status(model) == OPTIMAL - @assert primal_status(model) == FEASIBLE_POINT + @assert has_optimal_solution(model) Threads.lock(my_lock) do push!(solutions, i => objective_value(model)) end @@ -299,8 +297,7 @@ julia> Distributed.@everywhere begin @objective(model, Min, x) set_lower_bound(x, i) optimize!(model) - @assert termination_status(model) == OPTIMAL - @assert primal_status(model) == FEASIBLE_POINT + @assert has_optimal_solution(model) return objective_value(model) end end diff --git a/docs/src/tutorials/algorithms/tsp_lazy_constraints.jl b/docs/src/tutorials/algorithms/tsp_lazy_constraints.jl index de7e6dcc2c9..735b3c8bc9f 100644 --- a/docs/src/tutorials/algorithms/tsp_lazy_constraints.jl +++ b/docs/src/tutorials/algorithms/tsp_lazy_constraints.jl @@ -199,8 +199,7 @@ subtour(x::AbstractMatrix{VariableRef}) = subtour(value.(x)) iterative_model = build_tsp_model(d, n) optimize!(iterative_model) -@assert termination_status(iterative_model) == OPTIMAL -@assert primal_status(iterative_model) == FEASIBLE_POINT +@assert has_optimal_solution(iterative_model) time_iterated = solve_time(iterative_model) cycle = subtour(iterative_model[:x]) while 1 < length(cycle) < n @@ -211,8 +210,7 @@ while 1 < length(cycle) < n sum(iterative_model[:x][i, j] for (i, j) in S) <= length(cycle) - 1, ) optimize!(iterative_model) - @assert termination_status(iterative_model) == OPTIMAL - @assert primal_status(iterative_model) == FEASIBLE_POINT + @assert has_optimal_solution(iterative_model) global time_iterated += solve_time(iterative_model) global cycle = subtour(iterative_model[:x]) end @@ -266,8 +264,7 @@ set_attribute(
subtour_elimination_callback, ) optimize!(lazy_model) -@assert termination_status(lazy_model) == OPTIMAL -@assert primal_status(lazy_model) == FEASIBLE_POINT +@assert has_optimal_solution(lazy_model) objective_value(lazy_model) # This finds the same optimal tour: diff --git a/docs/src/tutorials/applications/optimal_power_flow.jl b/docs/src/tutorials/applications/optimal_power_flow.jl index e08f72d737f..94934ff74a4 100644 --- a/docs/src/tutorials/applications/optimal_power_flow.jl +++ b/docs/src/tutorials/applications/optimal_power_flow.jl @@ -137,8 +137,7 @@ println("Objective value (basic lower bound) : $basic_lower_bound") @constraint(model, sum(P_G) >= sum(P_Demand)) optimize!(model) -@assert termination_status(model) == LOCALLY_SOLVED -@assert primal_status(model) == FEASIBLE_POINT +@assert has_optimal_solution(model) better_lower_bound = round(objective_value(model); digits = 2) println("Objective value (better lower bound): $better_lower_bound") @@ -282,8 +281,7 @@ P_G = real(S_G) # We're finally ready to solve our nonlinear AC-OPF problem: optimize!(model) -@assert termination_status(model) == LOCALLY_SOLVED -@assert primal_status(model) == FEASIBLE_POINT +@assert has_optimal_solution(model) Test.@test isapprox(objective_value(model), 3087.84; atol = 1e-2) #src solution_summary(model) diff --git a/docs/src/tutorials/applications/power_systems.jl b/docs/src/tutorials/applications/power_systems.jl index 307335418aa..7079a9c7253 100644 --- a/docs/src/tutorials/applications/power_systems.jl +++ b/docs/src/tutorials/applications/power_systems.jl @@ -115,8 +115,7 @@ function solve_economic_dispatch(generators::Vector, wind, scenario) @constraint(model, sum(g[i] for i in 1:N) + w == scenario.demand) ## Solve statement optimize!(model) - @assert termination_status(model) == OPTIMAL - @assert primal_status(model) == FEASIBLE_POINT + @assert has_optimal_solution(model) ## return the optimal value of the objective function and its minimizers return ( g = value.(g), @@ 
-218,8 +217,7 @@ function solve_economic_dispatch_inplace( wind.variable_cost * w, ) optimize!(model) - @assert termination_status(model) == OPTIMAL - @assert primal_status(model) == FEASIBLE_POINT + @assert has_optimal_solution(model) push!(obj_out, objective_value(model)) push!(w_out, value(w)) push!(g1_out, value(g[1])) @@ -530,8 +528,7 @@ function solve_nonlinear_economic_dispatch( ) @constraint(model, sum(g[i] for i in 1:N) + sqrt(w) == scenario.demand) optimize!(model) - @assert termination_status(model) == LOCALLY_SOLVED - @assert primal_status(model) == FEASIBLE_POINT + @assert has_optimal_solution(model) return ( g = value.(g), w = value(w), diff --git a/docs/src/tutorials/applications/two_stage_stochastic.jl b/docs/src/tutorials/applications/two_stage_stochastic.jl index 91b2169c31d..e62835ee8b8 100644 --- a/docs/src/tutorials/applications/two_stage_stochastic.jl +++ b/docs/src/tutorials/applications/two_stage_stochastic.jl @@ -86,8 +86,7 @@ set_silent(model) @expression(model, z[ω in Ω], 5y[ω] - 0.1 * (x - y[ω])) @objective(model, Max, -2x + sum(P[ω] * z[ω] for ω in Ω)) optimize!(model) -@assert termination_status(model) == OPTIMAL -@assert primal_status(model) == FEASIBLE_POINT +@assert has_optimal_solution(model) solution_summary(model) # The optimal number of pies to make is: @@ -160,8 +159,7 @@ function CVaR(Z::Vector{Float64}, P::Vector{Float64}; γ::Float64) @constraint(model, [i in 1:N], z[i] >= ξ - Z[i]) @objective(model, Max, ξ - 1 / γ * sum(P[i] * z[i] for i in 1:N)) optimize!(model) - @assert termination_status(model) == OPTIMAL - @assert primal_status(model) == FEASIBLE_POINT + @assert has_optimal_solution(model) return objective_value(model) end @@ -220,8 +218,7 @@ set_silent(model) @constraint(model, [ω in Ω], z[ω] >= ξ - Z[ω]) @objective(model, Max, -2x + ξ - 1 / γ * sum(P[ω] * z[ω] for ω in Ω)) optimize!(model) -@assert termination_status(model) == OPTIMAL -@assert primal_status(model) == FEASIBLE_POINT +@assert 
has_optimal_solution(model) # When ``\gamma = 0.4``, the optimal number of pies to bake is: diff --git a/docs/src/tutorials/conic/ellipse_approx.jl b/docs/src/tutorials/conic/ellipse_approx.jl index 6ebff779540..632928f5fa2 100644 --- a/docs/src/tutorials/conic/ellipse_approx.jl +++ b/docs/src/tutorials/conic/ellipse_approx.jl @@ -110,8 +110,7 @@ m, n = size(S) @constraint(model, [t; vec(Z)] in MOI.RootDetConeSquare(n)) @objective(model, Max, t) optimize!(model) -Test.@test termination_status(model) == OPTIMAL #src -Test.@test primal_status(model) == FEASIBLE_POINT #src +Test.@test has_optimal_solution(model) solution_summary(model) # ## Results diff --git a/docs/src/tutorials/conic/min_ellipse.jl b/docs/src/tutorials/conic/min_ellipse.jl index 1fc33027a85..be82f69e1c0 100644 --- a/docs/src/tutorials/conic/min_ellipse.jl +++ b/docs/src/tutorials/conic/min_ellipse.jl @@ -45,7 +45,7 @@ using JuMP import LinearAlgebra import Plots import SCS -import Test #src +import Test # ## Data @@ -125,8 +125,7 @@ end # Now, solve the program: optimize!(model) -Test.@test termination_status(model) == OPTIMAL #src -Test.@test primal_status(model) == FEASIBLE_POINT #src +Test.@test has_optimal_solution(model) solution_summary(model) # ## Results diff --git a/docs/src/tutorials/conic/simple_examples.jl b/docs/src/tutorials/conic/simple_examples.jl index a89e8de8c79..7aad8d02f81 100644 --- a/docs/src/tutorials/conic/simple_examples.jl +++ b/docs/src/tutorials/conic/simple_examples.jl @@ -265,8 +265,7 @@ function example_minimum_distortion() fix(Q[1, 1], 0) @objective(model, Min, c²) optimize!(model) - Test.@test termination_status(model) == OPTIMAL - Test.@test primal_status(model) == FEASIBLE_POINT + Test.@test has_optimal_solution(model) Test.@test objective_value(model) ≈ 4 / 3 atol = 1e-4 ## Recover the minimal distorted embedding: X = [zeros(3) sqrt(value.(Q)[2:end, 2:end])] @@ -350,8 +349,7 @@ function example_theta_problem() J = ones(Int, 5, 5) @objective(model, Max, 
LinearAlgebra.dot(J, X)) optimize!(model) - Test.@test termination_status(model) == OPTIMAL - Test.@test primal_status(model) == FEASIBLE_POINT + Test.@test has_optimal_solution(model) Test.@test objective_value(model) ≈ sqrt(5) rtol = 1e-4 println("The Lovász number is: $(objective_value(model))") return diff --git a/docs/src/tutorials/getting_started/debugging.jl b/docs/src/tutorials/getting_started/debugging.jl index 659810c622c..694a16c8b26 100644 --- a/docs/src/tutorials/getting_started/debugging.jl +++ b/docs/src/tutorials/getting_started/debugging.jl @@ -346,7 +346,7 @@ set_silent(model) # for variables with large positive or negative values in the optimal solution. optimize!(model) -@assert termination_status(model) == OPTIMAL +@assert has_optimal_solution(model) for var in all_variables(model) if var == objective continue diff --git a/docs/src/tutorials/getting_started/design_patterns_for_larger_models.jl b/docs/src/tutorials/getting_started/design_patterns_for_larger_models.jl index 3de330b0a39..77dbcadfa3c 100644 --- a/docs/src/tutorials/getting_started/design_patterns_for_larger_models.jl +++ b/docs/src/tutorials/getting_started/design_patterns_for_larger_models.jl @@ -55,8 +55,7 @@ model = Model(HiGHS.Optimizer) @objective(model, Max, sum(profit[i] * x[i] for i in 1:N)) @constraint(model, sum(weight[i] * x[i] for i in 1:N) <= capacity) optimize!(model) -@assert termination_status(model) == OPTIMAL -@assert primal_status(model) == FEASIBLE_POINT +@assert has_optimal_solution(model) value.(x) # The benefits of this approach are: @@ -89,8 +88,7 @@ function solve_knapsack_1(profit::Vector, weight::Vector, capacity::Real) @objective(model, Max, sum(profit[i] * x[i] for i in 1:N)) @constraint(model, sum(weight[i] * x[i] for i in 1:N) <= capacity) optimize!(model) - @assert termination_status(model) == OPTIMAL - @assert primal_status(model) == FEASIBLE_POINT + @assert has_optimal_solution(model) return value.(x) end @@ -163,8 +161,7 @@ function 
solve_knapsack_2(data::KnapsackData) sum(v.weight * x[k] for (k, v) in data.objects) <= data.capacity, ) optimize!(model) - @assert termination_status(model) == OPTIMAL - @assert primal_status(model) == FEASIBLE_POINT + @assert has_optimal_solution(model) return value.(x) end @@ -236,8 +233,7 @@ function solve_knapsack_3(data::KnapsackData; binary_knapsack::Bool) sum(v.weight * x[k] for (k, v) in data.objects) <= data.capacity, ) optimize!(model) - @assert termination_status(model) == OPTIMAL - @assert primal_status(model) == FEASIBLE_POINT + @assert has_optimal_solution(model) return value.(x) end @@ -280,8 +276,7 @@ function solve_knapsack_4(data::KnapsackData, config::AbstractConfiguration) sum(v.weight * x[k] for (k, v) in data.objects) <= data.capacity, ) optimize!(model) - @assert termination_status(model) == OPTIMAL - @assert primal_status(model) == FEASIBLE_POINT + @assert has_optimal_solution(model) return value.(x) end @@ -369,8 +364,7 @@ function solve_knapsack_5(data::KnapsackData, config::AbstractConfiguration) add_knapsack_constraints(model, data, config) add_knapsack_objective(model, data, config) optimize!(model) - @assert termination_status(model) == OPTIMAL - @assert primal_status(model) == FEASIBLE_POINT + @assert has_optimal_solution(model) return value.(model[:x]) end @@ -396,12 +390,9 @@ function solve_knapsack_6( add_knapsack_constraints(model, data, config) add_knapsack_objective(model, data, config) optimize!(model) - if termination_status(model) != OPTIMAL + if !has_optimal_solution(model) @warn("Model not solved to optimality") return nothing - elseif primal_status(model) != FEASIBLE_POINT - @warn("No feasible point to return") - return nothing end return value.(model[:x]) end diff --git a/docs/src/tutorials/getting_started/getting_started_with_JuMP.jl b/docs/src/tutorials/getting_started/getting_started_with_JuMP.jl index bd0dbcdcae9..8cb2432d92b 100644 --- a/docs/src/tutorials/getting_started/getting_started_with_JuMP.jl +++ 
b/docs/src/tutorials/getting_started/getting_started_with_JuMP.jl @@ -201,8 +201,7 @@ dual_status(model) # It is good practice in your code to include statements like this so that an # error will be thrown if the solver did not find an optimal solution: -@assert termination_status(model) == OPTIMAL -@assert primal_status(model) == FEASIBLE_POINT +@assert has_optimal_solution(model) # Query the objective value using [`objective_value`](@ref): @@ -275,7 +274,7 @@ function solve_infeasible() @constraint(model, x + y >= 3) @objective(model, Max, x + 2y) optimize!(model) - if termination_status(model) != OPTIMAL + if !has_optimal_solution(model) @warn("The model was not solved correctly.") return end @@ -512,6 +511,5 @@ c = [1, 3, 5, 2] @constraint(vector_model, A * x .== b) @objective(vector_model, Min, c' * x) optimize!(vector_model) -@assert termination_status(vector_model) == OPTIMAL -@assert primal_status(vector_model) == FEASIBLE_POINT +@assert has_optimal_solution(vector_model) objective_value(vector_model) diff --git a/docs/src/tutorials/getting_started/getting_started_with_data_and_plotting.jl b/docs/src/tutorials/getting_started/getting_started_with_data_and_plotting.jl index fb543ea8844..34e8cf9882d 100644 --- a/docs/src/tutorials/getting_started/getting_started_with_data_and_plotting.jl +++ b/docs/src/tutorials/getting_started/getting_started_with_data_and_plotting.jl @@ -366,8 +366,7 @@ solution_summary(model) # Just to be sure, check that the solver found an optimal solution: -@assert termination_status(model) == OPTIMAL -@assert primal_status(model) == FEASIBLE_POINT +@assert has_optimal_solution(model) # ### Solution diff --git a/docs/src/tutorials/linear/callbacks.jl b/docs/src/tutorials/linear/callbacks.jl index d73fd111b8d..920c5906eb7 100644 --- a/docs/src/tutorials/linear/callbacks.jl +++ b/docs/src/tutorials/linear/callbacks.jl @@ -60,8 +60,7 @@ function example_lazy_constraint() end set_attribute(model, MOI.LazyConstraintCallback(), 
my_callback_function) optimize!(model) - Test.@test termination_status(model) == OPTIMAL - Test.@test primal_status(model) == FEASIBLE_POINT + Test.@test has_optimal_solution(model) Test.@test lazy_called Test.@test value(x) == 1 Test.@test value(y) == 2 @@ -100,8 +99,7 @@ function example_user_cut_constraint() end set_attribute(model, MOI.UserCutCallback(), my_callback_function) optimize!(model) - Test.@test termination_status(model) == OPTIMAL - Test.@test primal_status(model) == FEASIBLE_POINT + Test.@test has_optimal_solution(model) Test.@test callback_called @show callback_called return @@ -135,8 +133,7 @@ function example_heuristic_solution() end set_attribute(model, MOI.HeuristicCallback(), my_callback_function) optimize!(model) - Test.@test termination_status(model) == OPTIMAL - Test.@test primal_status(model) == FEASIBLE_POINT + Test.@test has_optimal_solution(model) Test.@test callback_called return end @@ -174,8 +171,7 @@ function example_solver_dependent_callback() end set_attribute(model, GLPK.CallbackFunction(), my_callback_function) optimize!(model) - Test.@test termination_status(model) == OPTIMAL - Test.@test primal_status(model) == FEASIBLE_POINT + Test.@test has_optimal_solution(model) Test.@test lazy_called Test.@test value(x) == 1 Test.@test value(y) == 2 diff --git a/docs/src/tutorials/linear/cannery.jl b/docs/src/tutorials/linear/cannery.jl index fd0993e9390..6d84c9bf998 100644 --- a/docs/src/tutorials/linear/cannery.jl +++ b/docs/src/tutorials/linear/cannery.jl @@ -121,8 +121,7 @@ solution_summary(model) # What's the optimal shipment? 
-Test.@test termination_status(model) == OPTIMAL -Test.@test primal_status(model) == FEASIBLE_POINT +Test.@test has_optimal_solution(model) Test.@test objective_value(model) == 1_680.0 #src for p in P, m in M println(p, " => ", m, ": ", value(x[p, m])) diff --git a/docs/src/tutorials/linear/constraint_programming.jl b/docs/src/tutorials/linear/constraint_programming.jl index 5039c56ce12..d897b2f7303 100644 --- a/docs/src/tutorials/linear/constraint_programming.jl +++ b/docs/src/tutorials/linear/constraint_programming.jl @@ -29,8 +29,7 @@ set_silent(model) @variable(model, 1 <= x[1:4] <= 4, Int) @constraint(model, x in MOI.AllDifferent(4)) optimize!(model) -@assert termination_status(model) == OPTIMAL -@assert primal_status(model) == FEASIBLE_POINT +@assert has_optimal_solution(model) value.(x) # ## BinPacking @@ -46,8 +45,7 @@ set_silent(model) @variable(model, 1 <= x[1:length(weights)] <= number_of_bins, Int) @constraint(model, x in MOI.BinPacking(capacity, weights)) optimize!(model) -@assert termination_status(model) == OPTIMAL -@assert primal_status(model) == FEASIBLE_POINT +@assert has_optimal_solution(model) value.(x) # Here, the value of `x[i]` is the bin that item `i` was placed into. 
@@ -63,8 +61,7 @@ set_silent(model) @variable(model, x[1:4], Int) @constraint(model, x in MOI.Circuit(4)) optimize!(model) -@assert termination_status(model) == OPTIMAL -@assert primal_status(model) == FEASIBLE_POINT +@assert has_optimal_solution(model) # Let's see what tour was found, starting at node number `1`: y = round.(Int, value.(x)) @@ -118,8 +115,7 @@ n = 1 # Let's check that we found a valid solution: optimize!(model) -@assert termination_status(model) == OPTIMAL -@assert primal_status(model) == FEASIBLE_POINT +@assert has_optimal_solution(model) value.(x) # ## CountBelongs @@ -138,8 +134,7 @@ set_silent(model) set = Set([2, 3]) @constraint(model, [n; x] in MOI.CountBelongs(1 + length(x), set)) optimize!(model) -@assert termination_status(model) == OPTIMAL -@assert primal_status(model) == FEASIBLE_POINT +@assert has_optimal_solution(model) value(n), value.(x) # ## CountDistinct @@ -154,8 +149,7 @@ set_silent(model) @objective(model, Max, sum(x)) @constraint(model, [n; x] in MOI.CountDistinct(1 + length(x))) optimize!(model) -@assert termination_status(model) == OPTIMAL -@assert primal_status(model) == FEASIBLE_POINT +@assert has_optimal_solution(model) value(n), value.(x) # ## CountGreaterThan @@ -175,8 +169,7 @@ set_silent(model) @objective(model, Max, sum(x)) @constraint(model, [n; y; x] in MOI.CountGreaterThan(1 + 1 + length(x))) optimize!(model) -@assert termination_status(model) == OPTIMAL -@assert primal_status(model) == FEASIBLE_POINT +@assert has_optimal_solution(model) value(n), value(y), value.(x) # Here `n` is strictly greater than the count, and there is no limit on how @@ -201,6 +194,5 @@ set_silent(model) @variable(model, x[i = 1:3], Int) @constraint(model, x in MOI.Table(table)) optimize!(model) -@assert termination_status(model) == OPTIMAL -@assert primal_status(model) == FEASIBLE_POINT +@assert has_optimal_solution(model) value.(x) diff --git a/docs/src/tutorials/linear/diet.jl b/docs/src/tutorials/linear/diet.jl index 
3789e9a0de0..a5c0061023a 100644 --- a/docs/src/tutorials/linear/diet.jl +++ b/docs/src/tutorials/linear/diet.jl @@ -145,8 +145,7 @@ print(model) # Let's optimize and take a look at the solution: optimize!(model) -@assert termination_status(model) == OPTIMAL -@assert primal_status(model) == FEASIBLE_POINT +@assert has_optimal_solution(model) Test.@test objective_value(model) ≈ 11.8288 atol = 1e-4 #hide solution_summary(model) @@ -179,6 +178,7 @@ dairy_foods = ["milk", "ice cream"] is_dairy = map(name -> name in dairy_foods, foods.name) dairy_constraint = @constraint(model, sum(foods[is_dairy, :x]) <= 6) optimize!(model) +Test.@test !has_optimal_solution(model) Test.@test termination_status(model) == INFEASIBLE Test.@test primal_status(model) == NO_SOLUTION solution_summary(model) diff --git a/docs/src/tutorials/linear/facility_location.jl b/docs/src/tutorials/linear/facility_location.jl index 495229d8b9e..9badd958295 100644 --- a/docs/src/tutorials/linear/facility_location.jl +++ b/docs/src/tutorials/linear/facility_location.jl @@ -130,8 +130,7 @@ set_silent(model) # Solve the uncapacitated facility location problem with HiGHS optimize!(model) -@assert termination_status(model) == OPTIMAL -@assert primal_status(model) == FEASIBLE_POINT +@assert has_optimal_solution(model) println("Optimal value: ", objective_value(model)) # ### Visualizing the solution @@ -258,8 +257,7 @@ set_silent(model) # Solve the problem optimize!(model) -@assert termination_status(model) == OPTIMAL -@assert primal_status(model) == FEASIBLE_POINT +@assert has_optimal_solution(model) println("Optimal value: ", objective_value(model)) # ### Visualizing the solution diff --git a/docs/src/tutorials/linear/factory_schedule.jl b/docs/src/tutorials/linear/factory_schedule.jl index d94d5c9f582..661247c843c 100644 --- a/docs/src/tutorials/linear/factory_schedule.jl +++ b/docs/src/tutorials/linear/factory_schedule.jl @@ -186,8 +186,7 @@ function solve_factory_scheduling( ) ) optimize!(model) - @assert 
termination_status(model) == OPTIMAL - @assert primal_status(model) == FEASIBLE_POINT + @assert has_optimal_solution(model) schedules = Dict{Symbol,Vector{Float64}}( Symbol(f) => value.(production[:, f]) for f in factories ) diff --git a/docs/src/tutorials/linear/finance.jl b/docs/src/tutorials/linear/finance.jl index 1e717d95e26..39db0640cfd 100644 --- a/docs/src/tutorials/linear/finance.jl +++ b/docs/src/tutorials/linear/finance.jl @@ -92,8 +92,7 @@ end) ) optimize!(financing) -@assert termination_status(financing) == OPTIMAL -@assert primal_status(financing) == FEASIBLE_POINT +@assert has_optimal_solution(financing) objective_value(financing) # ## Combinatorial auctions @@ -138,8 +137,7 @@ for i in 1:6 @constraint(auction, sum(y[j] for j in 1:6 if i in bid_items[j]) <= 1) end optimize!(auction) -@assert termination_status(auction) == OPTIMAL -@assert primal_status(auction) == FEASIBLE_POINT +@assert has_optimal_solution(auction) objective_value(auction) #- diff --git a/docs/src/tutorials/linear/geographic_clustering.jl b/docs/src/tutorials/linear/geographic_clustering.jl index 0ea500bfaba..4598a04ebac 100644 --- a/docs/src/tutorials/linear/geographic_clustering.jl +++ b/docs/src/tutorials/linear/geographic_clustering.jl @@ -151,8 +151,7 @@ end # We can then call `optimize!` and review the results. optimize!(model) -@assert termination_status(model) == OPTIMAL -@assert primal_status(model) == FEASIBLE_POINT +@assert has_optimal_solution(model) # ### Reviewing the Results diff --git a/docs/src/tutorials/linear/knapsack.jl b/docs/src/tutorials/linear/knapsack.jl index e7eca7ce265..82ac83c847e 100644 --- a/docs/src/tutorials/linear/knapsack.jl +++ b/docs/src/tutorials/linear/knapsack.jl @@ -96,8 +96,7 @@ print(model) # We can now solve the optimization problem and inspect the results. 
optimize!(model) -@assert termination_status(model) == OPTIMAL -@assert primal_status(model) == FEASIBLE_POINT +@assert has_optimal_solution(model) solution_summary(model) # The items chosen are @@ -126,8 +125,7 @@ function solve_knapsack_problem(; @objective(model, Max, profit' * x) @constraint(model, weight' * x <= capacity) optimize!(model) - @assert termination_status(model) == OPTIMAL - @assert primal_status(model) == FEASIBLE_POINT + @assert has_optimal_solution(model) println("Objective is: ", objective_value(model)) println("Solution is:") for i in 1:n diff --git a/docs/src/tutorials/linear/lp_sensitivity.jl b/docs/src/tutorials/linear/lp_sensitivity.jl index cdd545a5897..ec8eee83000 100644 --- a/docs/src/tutorials/linear/lp_sensitivity.jl +++ b/docs/src/tutorials/linear/lp_sensitivity.jl @@ -39,8 +39,7 @@ model = Model(HiGHS.Optimizer) @constraint(model, c2, 7x + 12y >= 120) @constraint(model, c3, x + y <= 20) optimize!(model) -@assert termination_status(model) == OPTIMAL -@assert primal_status(model) == FEASIBLE_POINT +@assert has_optimal_solution(model) solution_summary(model; verbose = true) # Can you identify: diff --git a/docs/src/tutorials/linear/mip_duality.jl b/docs/src/tutorials/linear/mip_duality.jl index 3c0e5dee0ea..92b2ca58843 100644 --- a/docs/src/tutorials/linear/mip_duality.jl +++ b/docs/src/tutorials/linear/mip_duality.jl @@ -58,8 +58,7 @@ print(model) # If we optimize this model, we obtain a [`dual_status`](@ref) of [`NO_SOLUTION`](@ref): optimize!(model) -@assert termination_status(model) == OPTIMAL -@assert primal_status(model) == FEASIBLE_POINT +@assert has_optimal_solution(model) dual_status(model) # This is because HiGHS cannot compute the duals of a mixed-integer program. 
We @@ -75,8 +74,7 @@ print(model) # dual: optimize!(model) -@assert termination_status(model) == OPTIMAL -@assert primal_status(model) == FEASIBLE_POINT +@assert has_optimal_solution(model) dual_status(model) # and a marginal price of electricity of \$100/MWh: @@ -98,8 +96,7 @@ print(model) # the [`fix_discrete_variables`](@ref) function: optimize!(model) -@assert termination_status(model) == OPTIMAL -@assert primal_status(model) == FEASIBLE_POINT +@assert has_optimal_solution(model) dual_status(model) #- @@ -119,8 +116,7 @@ print(model) #- optimize!(model) -@assert termination_status(model) == OPTIMAL -@assert primal_status(model) == FEASIBLE_POINT +@assert has_optimal_solution(model) dual_status(model) #- diff --git a/docs/src/tutorials/linear/multi.jl b/docs/src/tutorials/linear/multi.jl index 9cd6d8fcfa3..bf7fbe66e60 100644 --- a/docs/src/tutorials/linear/multi.jl +++ b/docs/src/tutorials/linear/multi.jl @@ -177,8 +177,7 @@ end # Finally, we can optimize the model: optimize!(model) -Test.@test termination_status(model) == OPTIMAL -Test.@test primal_status(model) == FEASIBLE_POINT +Test.@test has_optimal_solution(model) Test.@test objective_value(model) == 225_700.0 #src solution_summary(model) diff --git a/docs/src/tutorials/linear/multi_commodity_network.jl b/docs/src/tutorials/linear/multi_commodity_network.jl index 5623d94107a..e7778f7eda8 100644 --- a/docs/src/tutorials/linear/multi_commodity_network.jl +++ b/docs/src/tutorials/linear/multi_commodity_network.jl @@ -201,8 +201,7 @@ df = DataFrames.leftjoin( # Finally, we can optimize the model: optimize!(model) -Test.@test termination_status(model) == OPTIMAL -Test.@test primal_status(model) == FEASIBLE_POINT +Test.@test has_optimal_solution(model) solution_summary(model) # update the solution in the DataFrames: diff --git a/docs/src/tutorials/linear/multi_objective_examples.jl b/docs/src/tutorials/linear/multi_objective_examples.jl index 87e4fdc412f..d3aa67cb926 100644 --- 
a/docs/src/tutorials/linear/multi_objective_examples.jl +++ b/docs/src/tutorials/linear/multi_objective_examples.jl @@ -32,13 +32,12 @@ set_silent(model) set_optimizer(model, () -> MOA.Optimizer(HiGHS.Optimizer)) set_attribute(model, MOA.Algorithm(), MOA.Lexicographic()) optimize!(model) -@assert termination_status(model) == OPTIMAL solution_summary(model) #- for i in 1:result_count(model) - @assert primal_status(model; result = i) == FEASIBLE_POINT + @assert has_optimal_solution(model; result = i) print(i, ": z = ", round.(Int, objective_value(model; result = i)), " | ") println("x = ", value.([x1, x2]; result = i)) end @@ -62,13 +61,12 @@ set_silent(model) set_optimizer(model, () -> MOA.Optimizer(HiGHS.Optimizer)) set_attribute(model, MOA.Algorithm(), MOA.EpsilonConstraint()) optimize!(model) -@assert termination_status(model) == OPTIMAL solution_summary(model) #- for i in 1:result_count(model) - @assert primal_status(model; result = i) == FEASIBLE_POINT + @assert has_optimal_solution(model; result = i) print(i, ": z = ", round.(Int, objective_value(model; result = i)), " | ") println("x = ", round.(Int, value.(x; result = i))) end @@ -108,13 +106,12 @@ set_silent(model) set_optimizer(model, () -> MOA.Optimizer(HiGHS.Optimizer)) set_attribute(model, MOA.Algorithm(), MOA.EpsilonConstraint()) optimize!(model) -@assert termination_status(model) == OPTIMAL solution_summary(model) #- for i in 1:result_count(model) - @assert primal_status(model; result = i) == FEASIBLE_POINT + @assert has_optimal_solution(model; result = i) print(i, ": z = ", round.(Int, objective_value(model; result = i)), " | ") X = round.(Int, value.(x; result = i)) print("Path:") diff --git a/docs/src/tutorials/linear/multi_objective_knapsack.jl b/docs/src/tutorials/linear/multi_objective_knapsack.jl index 5235b114fdb..bc9ecc0da77 100644 --- a/docs/src/tutorials/linear/multi_objective_knapsack.jl +++ b/docs/src/tutorials/linear/multi_objective_knapsack.jl @@ -142,6 +142,10 @@ 
solution_summary(model; result = 5) #- +@assert has_optimal_solution(model; result = 5) + +#- + objective_value(model; result = 5) # Note that because we set a vector of two objective functions, the objective diff --git a/docs/src/tutorials/linear/n-queens.jl b/docs/src/tutorials/linear/n-queens.jl index 44e795a38e6..aae78b045ba 100644 --- a/docs/src/tutorials/linear/n-queens.jl +++ b/docs/src/tutorials/linear/n-queens.jl @@ -66,8 +66,7 @@ end # a feasible solution: optimize!(model) -@assert termination_status(model) == OPTIMAL -@assert primal_status(model) == FEASIBLE_POINT +@assert has_optimal_solution(model) # We can now review the solution that our model found: diff --git a/docs/src/tutorials/linear/network_flows.jl b/docs/src/tutorials/linear/network_flows.jl index 12c87e44c14..45f11692e02 100644 --- a/docs/src/tutorials/linear/network_flows.jl +++ b/docs/src/tutorials/linear/network_flows.jl @@ -79,8 +79,7 @@ set_silent(shortest_path) @constraint(shortest_path, [i = 1:n], sum(x[i, :]) - sum(x[:, i]) == b[i],) @objective(shortest_path, Min, sum(G .* x)) optimize!(shortest_path) -@assert termination_status(shortest_path) == OPTIMAL -@assert primal_status(shortest_path) == FEASIBLE_POINT +@assert has_optimal_solution(shortest_path) objective_value(shortest_path) #- value.(x) @@ -125,8 +124,7 @@ set_silent(assignment) @constraint(assignment, [j = 1:n], sum(y[j, :]) == 1) @objective(assignment, Max, sum(G .* y)) optimize!(assignment) -@assert termination_status(assignment) == OPTIMAL -@assert primal_status(assignment) == FEASIBLE_POINT +@assert has_optimal_solution(assignment) objective_value(assignment) #- value.(y) @@ -167,8 +165,7 @@ max_flow = Model(HiGHS.Optimizer) @constraint(max_flow, [i = 1:n; i != 1 && i != 8], sum(f[i, :]) == sum(f[:, i])) @objective(max_flow, Max, sum(f[1, :])) optimize!(max_flow) -@assert termination_status(max_flow) == OPTIMAL -@assert primal_status(max_flow) == FEASIBLE_POINT +@assert has_optimal_solution(max_flow) 
objective_value(max_flow) #- value.(f) diff --git a/docs/src/tutorials/linear/piecewise_linear.jl b/docs/src/tutorials/linear/piecewise_linear.jl index a6fbfdb05fa..c816f4d9417 100644 --- a/docs/src/tutorials/linear/piecewise_linear.jl +++ b/docs/src/tutorials/linear/piecewise_linear.jl @@ -52,8 +52,7 @@ function outer_approximate_x_squared(x̄) @objective(model, Min, y) @constraint(model, x == x̄) # <-- a trivial constraint just for testing. optimize!(model) - @assert termination_status(model) == OPTIMAL - @assert primal_status(model) == FEASIBLE_POINT + @assert has_optimal_solution(model) return value(y) end @@ -104,8 +103,7 @@ function outer_approximate_log(x̄) @objective(model, Max, y) @constraint(model, x == x̄) # <-- a trivial constraint just for testing. optimize!(model) - @assert termination_status(model) == OPTIMAL - @assert primal_status(model) == FEASIBLE_POINT + @assert has_optimal_solution(model) return value(y) end @@ -171,8 +169,7 @@ function inner_approximate_x_squared(x̄) @objective(model, Min, y) @constraint(model, x == x̄) # <-- a trivial constraint just for testing. optimize!(model) - @assert termination_status(model) == OPTIMAL - @assert primal_status(model) == FEASIBLE_POINT + @assert has_optimal_solution(model) return value(y) end @@ -215,8 +212,7 @@ function inner_approximate_log(x̄) @objective(model, Max, y) @constraint(model, x == x̄) # <-- a trivial constraint just for testing. optimize!(model) - @assert termination_status(model) == OPTIMAL - @assert primal_status(model) == FEASIBLE_POINT + @assert has_optimal_solution(model) return value(y) end @@ -270,8 +266,7 @@ function piecewise_linear_sin(x̄) end) @constraint(model, x == x̄) # <-- a trivial constraint just for testing. 
optimize!(model) - @assert termination_status(model) == OPTIMAL - @assert primal_status(model) == FEASIBLE_POINT + @assert has_optimal_solution(model) return value(y) end diff --git a/docs/src/tutorials/linear/sudoku.jl b/docs/src/tutorials/linear/sudoku.jl index 27b3af247ea..da27ae8d39f 100644 --- a/docs/src/tutorials/linear/sudoku.jl +++ b/docs/src/tutorials/linear/sudoku.jl @@ -134,8 +134,7 @@ end # solve problem optimize!(sudoku) -@assert termination_status(sudoku) == OPTIMAL -@assert primal_status(sudoku) == FEASIBLE_POINT +@assert has_optimal_solution(sudoku) # Extract the values of x x_val = value.(x); @@ -204,8 +203,7 @@ for i in 1:9, j in 1:9 end optimize!(model) -@assert termination_status(model) == OPTIMAL -@assert primal_status(model) == FEASIBLE_POINT +@assert has_optimal_solution(model) # Display the solution diff --git a/docs/src/tutorials/linear/transp.jl b/docs/src/tutorials/linear/transp.jl index 9b16de51c42..3cce287cc97 100644 --- a/docs/src/tutorials/linear/transp.jl +++ b/docs/src/tutorials/linear/transp.jl @@ -120,8 +120,7 @@ function solve_transportation_problem(data::Containers.DenseAxisArray) @constraint(model, [o in O], sum(x[o, :]) <= data[o, "SUPPLY"]) @constraint(model, [d in D], sum(x[:, d]) == data["DEMAND", d]) optimize!(model) - @assert termination_status(model) == OPTIMAL - @assert primal_status(model) == FEASIBLE_POINT + @assert has_optimal_solution(model) ## Pretty print the solution in the format of the input print(" ", join(lpad.(D, 7, ' '))) for o in O diff --git a/docs/src/tutorials/nonlinear/classifiers.jl b/docs/src/tutorials/nonlinear/classifiers.jl index 9d2a051c2fe..896a10c792e 100644 --- a/docs/src/tutorials/nonlinear/classifiers.jl +++ b/docs/src/tutorials/nonlinear/classifiers.jl @@ -127,8 +127,7 @@ function solve_SVM_classifier(P::Matrix, labels::Vector; C::Float64 = C_0) D = LinearAlgebra.Diagonal(labels) @constraint(model, D * (P * w .- g) .+ y .>= 1) optimize!(model) - Test.@test termination_status(model) == 
LOCALLY_SOLVED - Test.@test primal_status(model) == FEASIBLE_POINT + Test.@test has_optimal_solution(model) slack = extrema(value.(y)) println("Minimum slack: ", slack[1], "\nMaximum slack: ", slack[2]) classifier(x) = line(x; w = value.(w), g = value(g)) @@ -234,8 +233,7 @@ function solve_dual_SVM_classifier(P::Matrix, labels::Vector; C::Float64 = C_0) @objective(model, Min, 1 / 2 * u' * D * P * P' * D * u - sum(u)) @constraint(model, con, sum(D * u) == 0) optimize!(model) - Test.@test termination_status(model) == LOCALLY_SOLVED - Test.@test primal_status(model) == FEASIBLE_POINT + Test.@test has_optimal_solution(model) w = P' * D * value.(u) g = dual(con) classifier(x) = line(x; w = w, g = g) @@ -322,8 +320,7 @@ function solve_kernel_SVM_classifier( con = @constraint(model, sum(D * u) == 0) @objective(model, Min, 1 / 2 * u' * D * K * D * u - sum(u)) optimize!(model) - Test.@test termination_status(model) == LOCALLY_SOLVED - Test.@test primal_status(model) == FEASIBLE_POINT + Test.@test has_optimal_solution(model) u_sol, g_sol = value.(u), dual(con) function classifier(v::Vector) return sum( diff --git a/docs/src/tutorials/nonlinear/complementarity.jl b/docs/src/tutorials/nonlinear/complementarity.jl index 702c86406a9..8a0f65d1ac6 100644 --- a/docs/src/tutorials/nonlinear/complementarity.jl +++ b/docs/src/tutorials/nonlinear/complementarity.jl @@ -47,8 +47,7 @@ set_silent(model) @variable(model, 0 <= x[1:4] <= 10, start = 0) @constraint(model, M * x + q ⟂ x) optimize!(model) -@assert termination_status(model) == LOCALLY_SOLVED -@assert primal_status(model) == FEASIBLE_POINT +@assert has_optimal_solution(model) Test.@test value.(x) ≈ [2.8, 0.0, 0.8, 1.2] #src value.(x) @@ -69,8 +68,7 @@ set_silent(model) @constraint(model, w + 2x - 2y + 4z - 6 ⟂ z) @constraint(model, w - x + 2y - 2z - 2 ⟂ y) optimize!(model) -@assert termination_status(model) == LOCALLY_SOLVED -@assert primal_status(model) == FEASIBLE_POINT +@assert has_optimal_solution(model) Test.@test value.([w, 
x, y, z]) ≈ [2.8, 0.0, 0.8, 1.2] #src value.([w, x, y, z]) @@ -106,8 +104,7 @@ set_silent(model) end ) optimize!(model) -@assert termination_status(model) == LOCALLY_SOLVED -@assert primal_status(model) == FEASIBLE_POINT +@assert has_optimal_solution(model) Test.@test isapprox(value(p["new-york"]), 0.225; atol = 1e-3) #src value.(p) @@ -143,8 +140,7 @@ set_silent(model) end ) optimize!(model) -@assert termination_status(model) == LOCALLY_SOLVED -@assert primal_status(model) == FEASIBLE_POINT +@assert has_optimal_solution(model) Test.@test isapprox(value(C_G), 0.996; atol = 1e-3) #src value(K) @@ -197,8 +193,7 @@ set_silent(model) ## Production does not exceed capacity @constraint(model, [ω = 1:5], x - Y[ω] ⟂ μ[ω]) optimize!(model) -@assert termination_status(model) == LOCALLY_SOLVED -@assert primal_status(model) == FEASIBLE_POINT +@assert has_optimal_solution(model) solution_summary(model) # An equilibrium solution is to build 389 MW: diff --git a/docs/src/tutorials/nonlinear/nested_problems.jl b/docs/src/tutorials/nonlinear/nested_problems.jl index 9a6972e93d5..370646120c2 100644 --- a/docs/src/tutorials/nonlinear/nested_problems.jl +++ b/docs/src/tutorials/nonlinear/nested_problems.jl @@ -86,8 +86,7 @@ function solve_lower_level(x...) 
) @constraint(model, (y[1] - 10)^2 + (y[2] - 10)^2 <= 25) optimize!(model) - @assert termination_status(model) == LOCALLY_SOLVED - @assert primal_status(model) == FEASIBLE_POINT + @assert has_optimal_solution(model) return objective_value(model), value.(y) end @@ -150,8 +149,7 @@ model = Model(Ipopt.Optimizer) @operator(model, op_V, 2, V, ∇V, ∇²V) @objective(model, Min, x[1]^2 + x[2]^2 + op_V(x[1], x[2])) optimize!(model) -@assert termination_status(model) == LOCALLY_SOLVED -@assert primal_status(model) == FEASIBLE_POINT +@assert has_optimal_solution(model) solution_summary(model) # The optimal objective value is: @@ -231,8 +229,7 @@ cache = Cache(Float64[], NaN, Float64[]) ) @objective(model, Min, x[1]^2 + x[2]^2 + op_cached_f(x[1], x[2])) optimize!(model) -@assert termination_status(model) == LOCALLY_SOLVED -@assert primal_status(model) == FEASIBLE_POINT +@assert has_optimal_solution(model) solution_summary(model) # an we can check we get the same objective value: diff --git a/docs/src/tutorials/nonlinear/portfolio.jl b/docs/src/tutorials/nonlinear/portfolio.jl index 4fc9dd66283..47f02182784 100644 --- a/docs/src/tutorials/nonlinear/portfolio.jl +++ b/docs/src/tutorials/nonlinear/portfolio.jl @@ -158,8 +158,7 @@ set_silent(model) @constraint(model, sum(x) <= 1000) @constraint(model, r' * x >= 50) optimize!(model) -@assert termination_status(model) == LOCALLY_SOLVED -@assert primal_status(model) == FEASIBLE_POINT +@assert has_optimal_solution(model) solution_summary(model) # The optimal allocation of our assets is: diff --git a/docs/src/tutorials/nonlinear/querying_hessians.jl b/docs/src/tutorials/nonlinear/querying_hessians.jl index f488416e459..da0e18a574a 100644 --- a/docs/src/tutorials/nonlinear/querying_hessians.jl +++ b/docs/src/tutorials/nonlinear/querying_hessians.jl @@ -71,8 +71,7 @@ set_silent(model) @constraint(model, g_2, (x[1] + x[2])^2 <= 2) @objective(model, Min, (1 - x[1])^2 + 100 * (x[2] - x[1]^2)^2) optimize!(model) -@assert 
termination_status(model) == LOCALLY_SOLVED -@assert primal_status(model) == FEASIBLE_POINT +@assert has_optimal_solution(model) # ## The analytic solution diff --git a/docs/src/tutorials/nonlinear/rocket_control.jl b/docs/src/tutorials/nonlinear/rocket_control.jl index ceb4710e53a..2cf3d1ac55e 100644 --- a/docs/src/tutorials/nonlinear/rocket_control.jl +++ b/docs/src/tutorials/nonlinear/rocket_control.jl @@ -127,8 +127,7 @@ ddt(x::Vector, t::Int) = (x[t] - x[t-1]) / Δt # Now we optimize the model and check that we found a solution: optimize!(model) -@assert termination_status(model) == LOCALLY_SOLVED -@assert primal_status(model) == FEASIBLE_POINT +@assert has_optimal_solution(model) solution_summary(model) # Finally, we plot the solution: diff --git a/docs/src/tutorials/nonlinear/simple_examples.jl b/docs/src/tutorials/nonlinear/simple_examples.jl index 000857d44d3..5d138dbcc8a 100644 --- a/docs/src/tutorials/nonlinear/simple_examples.jl +++ b/docs/src/tutorials/nonlinear/simple_examples.jl @@ -25,8 +25,7 @@ function example_rosenbrock() @variable(model, y) @objective(model, Min, (1 - x)^2 + 100 * (y - x^2)^2) optimize!(model) - Test.@test termination_status(model) == LOCALLY_SOLVED - Test.@test primal_status(model) == FEASIBLE_POINT + Test.@test has_optimal_solution(model) Test.@test objective_value(model) ≈ 0.0 atol = 1e-10 Test.@test value(x) ≈ 1.0 Test.@test value(y) ≈ 1.0 @@ -87,8 +86,7 @@ function example_clnlbeam() primal_status = $(primal_status(model)) objective_value = $(objective_value(model)) """) - Test.@test termination_status(model) == LOCALLY_SOLVED - Test.@test primal_status(model) == FEASIBLE_POINT + Test.@test has_optimal_solution(model) Test.@test objective_value(model) ≈ 350.0 #src return end @@ -116,8 +114,7 @@ function example_mle() sum((data[i] - μ)^2 for i in 1:n) / (2 * σ^2) ) optimize!(model) - @assert termination_status(model) == LOCALLY_SOLVED - @assert primal_status(model) == FEASIBLE_POINT + @assert has_optimal_solution(model) 
println("μ = ", value(μ)) println("mean(data) = ", Statistics.mean(data)) println("σ^2 = ", value(σ)^2) @@ -128,8 +125,7 @@ function example_mle() ## You can even do constrained MLE! @constraint(model, μ == σ^2) optimize!(model) - @assert termination_status(model) == LOCALLY_SOLVED - @assert primal_status(model) == FEASIBLE_POINT + @assert has_optimal_solution(model) Test.@test value(μ) ≈ value(σ)^2 println() println("With constraint μ == σ^2:") @@ -157,8 +153,7 @@ function example_qcp() @constraint(model, x * x + y * y - z * z <= 0) @constraint(model, x * x - y * z <= 0) optimize!(model) - Test.@test termination_status(model) == LOCALLY_SOLVED - Test.@test primal_status(model) == FEASIBLE_POINT + Test.@test has_optimal_solution(model) print(model) println("Objective value: ", objective_value(model)) println("x = ", value(x)) diff --git a/docs/src/tutorials/nonlinear/space_shuttle_reentry_trajectory.jl b/docs/src/tutorials/nonlinear/space_shuttle_reentry_trajectory.jl index 6f9e4da41b6..b5bb6324518 100644 --- a/docs/src/tutorials/nonlinear/space_shuttle_reentry_trajectory.jl +++ b/docs/src/tutorials/nonlinear/space_shuttle_reentry_trajectory.jl @@ -303,8 +303,7 @@ end set_silent(model) # Hide solver's verbose output optimize!(model) # Solve for the control and state -@assert termination_status(model) == LOCALLY_SOLVED -@assert primal_status(model) == FEASIBLE_POINT +@assert has_optimal_solution(model) ## Show final cross-range of the solution println( diff --git a/docs/src/tutorials/nonlinear/tips_and_tricks.jl b/docs/src/tutorials/nonlinear/tips_and_tricks.jl index 4720373de7b..1394d0640c7 100644 --- a/docs/src/tutorials/nonlinear/tips_and_tricks.jl +++ b/docs/src/tutorials/nonlinear/tips_and_tricks.jl @@ -52,8 +52,7 @@ set_silent(model) @constraint(model, op_foo_2(x[1], x[2]) <= 2) function_calls = 0 optimize!(model) -@assert termination_status(model) == LOCALLY_SOLVED -@assert primal_status(model) == FEASIBLE_POINT +@assert has_optimal_solution(model) Test.@test 
objective_value(model) ≈ √3 atol = 1e-4 Test.@test value.(x) ≈ [1.0, 1.0] atol = 1e-4 println("Naive approach: function calls = $(function_calls)") @@ -122,8 +121,7 @@ set_silent(model) @constraint(model, op_foo_2(x[1], x[2]) <= 2) function_calls = 0 optimize!(model) -@assert termination_status(model) == LOCALLY_SOLVED -@assert primal_status(model) == FEASIBLE_POINT +@assert has_optimal_solution(model) Test.@test objective_value(model) ≈ √3 atol = 1e-4 Test.@test value.(x) ≈ [1.0, 1.0] atol = 1e-4 println("Memoized approach: function_calls = $(function_calls)") diff --git a/docs/src/tutorials/nonlinear/user_defined_hessians.jl b/docs/src/tutorials/nonlinear/user_defined_hessians.jl index 9fc8eb2b9d4..568ba6b5803 100644 --- a/docs/src/tutorials/nonlinear/user_defined_hessians.jl +++ b/docs/src/tutorials/nonlinear/user_defined_hessians.jl @@ -72,6 +72,5 @@ model = Model(Ipopt.Optimizer) @operator(model, op_rosenbrock, 2, rosenbrock, ∇rosenbrock, ∇²rosenbrock) @objective(model, Min, op_rosenbrock(x[1], x[2])) optimize!(model) -@assert termination_status(model) == LOCALLY_SOLVED -@assert primal_status(model) == FEASIBLE_POINT +@assert has_optimal_solution(model) solution_summary(model; verbose = true) diff --git a/src/optimizer_interface.jl b/src/optimizer_interface.jl index 533b4ed0c5b..ead7b12073e 100644 --- a/src/optimizer_interface.jl +++ b/src/optimizer_interface.jl @@ -582,6 +582,45 @@ function dual_status(model::GenericModel; result::Int = 1) return MOI.get(model, MOI.DualStatus(result))::MOI.ResultStatusCode end +""" + has_optimal_solution( + model::Model; + dual::Bool = false, + allow_local::Bool = true, + result::Int = 1, + ) + +Return `true` if the model has an optimal primal solution associated with result +index `result`. + +If `dual`, additionally check that a feasible dual solution is available. 
+ +If `allow_local`, the [`termination_status`](@ref) may be [`OPTIMAL`](@ref) (the +global optimum is obtained) or [`LOCALLY_SOLVED`](@ref) (a local optimum is +obtained, which may be the global optimum, but the solver could not prove so). + +If `allow_local = false`, then this function returns `true` only if the +[`termination_status`](@ref) is [`OPTIMAL`](@ref). + +If this function returns `false`, use [`termination_status`](@ref), +[`result_count`](@ref), [`primal_status`](@ref) and [`dual_status`](@ref) to +understand what solutions are available (if any). +""" +function has_optimal_solution( + model::GenericModel; + dual::Bool = false, + allow_local::Bool = true, + result::Int = 1, +) + status = termination_status(model) + ret = status == OPTIMAL || (allow_local && status == LOCALLY_SOLVED) + ret &= primal_status(model; result) == FEASIBLE_POINT + if dual + ret &= dual_status(model; result) == FEASIBLE_POINT + end + return ret +end + """ solve_time(model::GenericModel) From d6d561dba80be2170c07e424ca00b30dc754150c Mon Sep 17 00:00:00 2001 From: odow Date: Wed, 7 Feb 2024 09:38:05 +1300 Subject: [PATCH 06/19] Update --- docs/src/manual/solutions.md | 40 +++++++++++++++++-- .../tutorials/applications/power_systems.jl | 2 +- .../design_patterns_for_larger_models.jl | 5 +-- .../getting_started_with_JuMP.jl | 21 +++++++--- 4 files changed, 54 insertions(+), 14 deletions(-) diff --git a/docs/src/manual/solutions.md b/docs/src/manual/solutions.md index ef814441d73..3f409bcbda8 100644 --- a/docs/src/manual/solutions.md +++ b/docs/src/manual/solutions.md @@ -295,11 +295,37 @@ And data, a 2-element Vector{Float64}: ## Recommended workflow -The recommended workflow for solving a model and querying the solution is -something like the following: +You should always check whether the solver found a solution before calling +solution functions like [`value`](@ref) or [`objective_value`](@ref). 
+ +A simple approach is to use [`has_optimal_solution`](@ref): + ```jldoctest solutions -julia> begin - if termination_status(model) == OPTIMAL +julia> function solve_and_print_solution(model) + optimize!(model) + if !has_optimal_solution(model; dual = true) + error("The model was not solved correctly.") + end + println("Solution is optimal") + println(" objective value = ", objective_value(model)) + println(" primal solution: x = ", value(x)) + println(" dual solution: c1 = ", dual(c1)) + return + end +solve_and_print_solution (generic function with 1 method) + +julia> solve_and_print_solution(model) +Solution is optimal + objective value = -205.14285714285714 + primal solution: x = 15.428571428571429 + dual solution: c1 = 1.7142857142857142 +``` + +You can also use a more advanced workflow that deals with a broader range of +statues: +```jldoctest solutions +julia> function solve_and_print_solution(model) + if termination_status(model) in (OPTIMAL, LOCALLY_SOLVED) println("Solution is optimal") elseif termination_status(model) == TIME_LIMIT && has_values(model) println("Solution is suboptimal due to a time limit, but a primal solution is available") @@ -312,8 +338,14 @@ julia> begin end if dual_status(model) == FEASIBLE_POINT println(" dual solution: c1 = ", dual(c1)) + else + println(" dual solution: NO SOLUTION") end + return end +solve_and_print_solution (generic function with 1 method) + +julia> solve_and_print_solution(model) Solution is optimal objective value = -205.14285714285714 primal solution: x = 15.428571428571429 diff --git a/docs/src/tutorials/applications/power_systems.jl b/docs/src/tutorials/applications/power_systems.jl index 7079a9c7253..4f6f386443e 100644 --- a/docs/src/tutorials/applications/power_systems.jl +++ b/docs/src/tutorials/applications/power_systems.jl @@ -217,7 +217,7 @@ function solve_economic_dispatch_inplace( wind.variable_cost * w, ) optimize!(model) - @assert has_optimal_solution(sudoku) + @assert has_optimal_solution(model) 
push!(obj_out, objective_value(model)) push!(w_out, value(w)) push!(g1_out, value(g[1])) diff --git a/docs/src/tutorials/getting_started/design_patterns_for_larger_models.jl b/docs/src/tutorials/getting_started/design_patterns_for_larger_models.jl index 77dbcadfa3c..f5e8382631c 100644 --- a/docs/src/tutorials/getting_started/design_patterns_for_larger_models.jl +++ b/docs/src/tutorials/getting_started/design_patterns_for_larger_models.jl @@ -525,12 +525,9 @@ function _solve_knapsack( _add_knapsack_constraints(model, data, config) _add_knapsack_objective(model, data, config) JuMP.optimize!(model) - if JuMP.termination_status(model) != JuMP.OPTIMAL + if !JuMP.has_optimal_solution(model) @warn("Model not solved to optimality") return nothing - elseif JuMP.primal_status(model) != JuMP.FEASIBLE_POINT - @warn("No feasible point to return") - return nothing end return JuMP.value.(model[:x]) end diff --git a/docs/src/tutorials/getting_started/getting_started_with_JuMP.jl b/docs/src/tutorials/getting_started/getting_started_with_JuMP.jl index 8cb2432d92b..5475a44725a 100644 --- a/docs/src/tutorials/getting_started/getting_started_with_JuMP.jl +++ b/docs/src/tutorials/getting_started/getting_started_with_JuMP.jl @@ -180,6 +180,11 @@ optimize!(model) # Now let's see what information we can query about the solution. +has_optimal_solution(model) + +# We can get more information about the solution by querying the three types of +# statuses. + # [`termination_status`](@ref) tells us why the solver stopped: termination_status(model) @@ -198,11 +203,6 @@ dual_status(model) # Now we know that our solver found an optimal solution, and that it has a # primal and a dual solution to query. 
-# It is good practice in your code to include statements like this so that an -# error will be thrown if the solver did not find an optimal solution: - -@assert has_optimal_solution(model) - # Query the objective value using [`objective_value`](@ref): objective_value(model) @@ -217,6 +217,17 @@ value(y) shadow_price(c1) shadow_price(c2) +# !!! warning +# You should always check whether the solver found a solution before calling +# solution functions like [`value`](@ref) or [`objective_value`](@ref). A +# common workflow is: +# ```julia +# optimize!(model) +# if !has_optimal_solution(model) +# error("Solver did not find an optimal solution") +# end +# ``` + # That's it for our simple model. In the rest of this tutorial, we expand on # some of the basic JuMP operations. From 03697e28d49ea9e9aecd032d5868f8ae3d81c501 Mon Sep 17 00:00:00 2001 From: odow Date: Wed, 7 Feb 2024 09:49:08 +1300 Subject: [PATCH 07/19] Update --- .../applications/optimal_power_flow.jl | 3 +-- src/optimizer_interface.jl | 23 ++++++++++++++++--- 2 files changed, 21 insertions(+), 5 deletions(-) diff --git a/docs/src/tutorials/applications/optimal_power_flow.jl b/docs/src/tutorials/applications/optimal_power_flow.jl index 94934ff74a4..70d679b3ee9 100644 --- a/docs/src/tutorials/applications/optimal_power_flow.jl +++ b/docs/src/tutorials/applications/optimal_power_flow.jl @@ -421,8 +421,7 @@ optimize!(model) #- sdp_relaxation_lower_bound = round(objective_value(model); digits = 2) -Test.@test termination_status(model) in (OPTIMAL, ALMOST_OPTIMAL) -Test.@test primal_status(model) in (FEASIBLE_POINT, NEARLY_FEASIBLE_POINT) +Test.@test has_optimal_solution(model; allow_almost = true) Test.@test isapprox(sdp_relaxation_lower_bound, 2753.04; rtol = 1e-3) #src println( "Objective value (W & V relax. 
lower bound): $sdp_relaxation_lower_bound", diff --git a/src/optimizer_interface.jl b/src/optimizer_interface.jl index ead7b12073e..5673f171244 100644 --- a/src/optimizer_interface.jl +++ b/src/optimizer_interface.jl @@ -587,6 +587,7 @@ end model::Model; dual::Bool = false, allow_local::Bool = true, + allow_almost::Bool = false, result::Int = 1, ) @@ -602,6 +603,11 @@ obtained, which may be the global optimum, but the solver could not prove so). If `allow_local = false`, then this function returns `true` only if the [`termination_status`](@ref) is [`OPTIMAL`](@ref). +If `allow_almost = true`, then the [`termination_status`](@ref) may additionally +be [`ALMOST_OPTIMAL`](@ref) or [`ALMOST_LOCALLY_SOLVED`](@ref) (if `allow_local`), +and the [`primal_status`](@ref) and [`dual_status`](@ref) may additionally be +[`NEARLY_FEASIBLE_POINT`](@ref). + If this function returns `false`, use [`termination_status`](@ref), [`result_count`](@ref), [`primal_status`](@ref) and [`dual_status`](@ref) to understand what solutions are available (if any). 
@@ -610,13 +616,24 @@ function has_optimal_solution( model::GenericModel; dual::Bool = false, allow_local::Bool = true, + allow_almost::Bool = false, result::Int = 1, ) status = termination_status(model) - ret = status == OPTIMAL || (allow_local && status == LOCALLY_SOLVED) - ret &= primal_status(model; result) == FEASIBLE_POINT + ret = + (status == OPTIMAL) || + (allow_local && (status == LOCALLY_SOLVED)) || + (allow_almost && (status == ALMOST_OPTIMAL)) || + (allow_almost && allow_local && (status == ALMOST_LOCALLY_SOLVED)) + primal = primal_status(model; result) + ret &= + (primal == FEASIBLE_POINT) || + (allow_almost && (primal == NEARLY_FEASIBLE_POINT)) if dual - ret &= dual_status(model; result) == FEASIBLE_POINT + dual_stat = dual_status(model; result) + ret &= + (dual_stat == FEASIBLE_POINT) || + (allow_almost && (dual_stat == NEARLY_FEASIBLE_POINT)) end return ret end From 7fdcdec89647b712273107ec1a7b42d3ce59bd99 Mon Sep 17 00:00:00 2001 From: odow Date: Wed, 7 Feb 2024 10:33:53 +1300 Subject: [PATCH 08/19] Add tests --- docs/src/tutorials/linear/cannery.jl | 2 +- test/test_model.jl | 69 ++++++++++++++++++++++++++++ 2 files changed, 70 insertions(+), 1 deletion(-) diff --git a/docs/src/tutorials/linear/cannery.jl b/docs/src/tutorials/linear/cannery.jl index 6d84c9bf998..9986c8e97a5 100644 --- a/docs/src/tutorials/linear/cannery.jl +++ b/docs/src/tutorials/linear/cannery.jl @@ -122,7 +122,7 @@ solution_summary(model) # What's the optimal shipment? 
Test.@test has_optimal_solution(model) -Test.@test objective_value(model) == 1_680.0 #src +Test.@test isapprox(objective_value(model), 1_680.0, atol = 1e-6) #src for p in P, m in M println(p, " => ", m, ": ", value(x[p, m])) end diff --git a/test/test_model.jl b/test/test_model.jl index 20bcb345d6e..8f9e2954770 100644 --- a/test/test_model.jl +++ b/test/test_model.jl @@ -1244,4 +1244,73 @@ function test_caching_mps_model() return end +function test_has_optimal_solution() + mock = MOI.Utilities.MockOptimizer(MOI.Utilities.Model{Float64}()) + model = direct_model(mock) + for term in [ + MOI.OPTIMAL, + MOI.LOCALLY_SOLVED, + MOI.ALMOST_OPTIMAL, + MOI.ALMOST_LOCALLY_SOLVED, + MOI.TIME_LIMIT, + ] + _global = term == MOI.OPTIMAL + has_local = _global || (term == MOI.LOCALLY_SOLVED) + _almost_global = _global || (term == MOI.ALMOST_OPTIMAL) + _almost_local = + has_local || _almost_global || (term == MOI.ALMOST_LOCALLY_SOLVED) + for primal in + [MOI.FEASIBLE_POINT, MOI.NEARLY_FEASIBLE_POINT, MOI.NO_SOLUTION] + _primal = primal == MOI.FEASIBLE_POINT + _almost_primal = _primal || primal == MOI.NEARLY_FEASIBLE_POINT + for dual in + [MOI.FEASIBLE_POINT, MOI.NEARLY_FEASIBLE_POINT, MOI.NO_SOLUTION] + _dual = dual == MOI.FEASIBLE_POINT + _almost_dual = _dual || dual == MOI.NEARLY_FEASIBLE_POINT + MOI.set(mock, MOI.TerminationStatus(), term) + MOI.set(mock, MOI.PrimalStatus(), primal) + MOI.set(mock, MOI.DualStatus(), dual) + @test has_optimal_solution(model) == (has_local && _primal) + @test has_optimal_solution(model; dual = true) == + (has_local && _primal && _dual) + @test has_optimal_solution(model; allow_local = false) == + (_global && _primal) + @test has_optimal_solution( + model; + dual = true, + allow_local = false, + ) == (_global && _primal && _dual) + @test has_optimal_solution(model; allow_almost = true) == + (_almost_local && _almost_primal) + @test has_optimal_solution( + model; + dual = true, + allow_almost = true, + ) == (_almost_local && _almost_primal && 
_almost_dual) + @test has_optimal_solution( + model; + allow_local = false, + allow_almost = true, + ) == (_almost_global && _almost_primal) + @test has_optimal_solution( + model; + dual = true, + allow_local = false, + allow_almost = true, + ) == (_almost_global && _almost_primal && _almost_dual) + MOI.set(mock, MOI.ResultCount(), 3) + MOI.set(mock, MOI.PrimalStatus(3), primal) + MOI.set(mock, MOI.DualStatus(3), dual) + @test !has_optimal_solution(model; result = 2) + @test !has_optimal_solution(model; dual = true, result = 2) + @test has_optimal_solution(model; result = 3) == + (has_local && _primal) + @test has_optimal_solution(model; dual = true, result = 3) == + (has_local && _primal && _dual) + end + end + end + return +end + end # module TestModels From f431e1fee94911b3d6833e533598ce1da762f103 Mon Sep 17 00:00:00 2001 From: odow Date: Wed, 7 Feb 2024 10:37:15 +1300 Subject: [PATCH 09/19] Fix docs --- docs/src/tutorials/conic/ellipse_approx.jl | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/docs/src/tutorials/conic/ellipse_approx.jl b/docs/src/tutorials/conic/ellipse_approx.jl index 632928f5fa2..caf5a57df45 100644 --- a/docs/src/tutorials/conic/ellipse_approx.jl +++ b/docs/src/tutorials/conic/ellipse_approx.jl @@ -49,7 +49,7 @@ import LinearAlgebra import Plots import Random import SCS -import Test #src +import Test # ## Data @@ -210,6 +210,7 @@ f = [1 - S[i, :]' * Z * S[i, :] + 2 * S[i, :]' * z - s for i in 1:m] ## The former @objective(model, Max, t) @objective(model, Max, 1 * t + 0) optimize!(model) +Test.@test has_optimal_solution(model) Test.@test isapprox(D, value.(Z); atol = 1e-6) #src solve_time_1 = solve_time(model) @@ -232,6 +233,7 @@ print_active_bridges(model) remove_bridge(model, MOI.Bridges.Constraint.GeoMeanToPowerBridge) optimize!(model) +Test.@test has_optimal_solution(model) # This time, the solve took: From 2469bda66fc3e2298023e8d7298c5973af7702a9 Mon Sep 17 00:00:00 2001 From: odow Date: Wed, 7 Feb 2024 10:41:37 +1300 
Subject: [PATCH 10/19] Fix docstring --- src/optimizer_interface.jl | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/src/optimizer_interface.jl b/src/optimizer_interface.jl index 5673f171244..76e0b5e9728 100644 --- a/src/optimizer_interface.jl +++ b/src/optimizer_interface.jl @@ -584,7 +584,7 @@ end """ has_optimal_solution( - model::Model; + model::GenericModel; dual::Bool = false, allow_local::Bool = true, allow_almost::Bool = false, @@ -597,8 +597,9 @@ index `result`. If `dual`, additionally check that a feasible dual solution is available. If `allow_local`, the [`termination_status`](@ref) may be [`OPTIMAL`](@ref) (the -global optimum is obtained) or [`LOCALLY_SOLVED`](@ref) (a local optimum is -obtained, which may be the global optimum, but the solver could not prove so). +solver found a global optimum) or [`LOCALLY_SOLVED`](@ref) (the solver found a +local optimum, which may also be the global optimum, but the solver could not +prove so). If `allow_local = false`, then this function returns `true` only if the [`termination_status`](@ref) is [`OPTIMAL`](@ref). From 29a93862b119fcab64d2e24911d62b9603cfe1e4 Mon Sep 17 00:00:00 2001 From: odow Date: Wed, 7 Feb 2024 10:42:02 +1300 Subject: [PATCH 11/19] Update --- src/optimizer_interface.jl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/optimizer_interface.jl b/src/optimizer_interface.jl index 76e0b5e9728..358beb04a32 100644 --- a/src/optimizer_interface.jl +++ b/src/optimizer_interface.jl @@ -594,7 +594,7 @@ end Return `true` if the model has an optimal primal solution associated with result index `result`. -If `dual`, additionally check that a feasible dual solution is available. +If `dual`, additionally check that an optimal dual solution is available. 
If `allow_local`, the [`termination_status`](@ref) may be [`OPTIMAL`](@ref) (the solver found a global optimum) or [`LOCALLY_SOLVED`](@ref) (the solver found a From 5ac8b785c9fd3941a5f2ad1adc37d783c1ce2714 Mon Sep 17 00:00:00 2001 From: Oscar Dowson Date: Wed, 7 Feb 2024 11:15:23 +1300 Subject: [PATCH 12/19] Apply suggestions from code review --- docs/src/manual/solutions.md | 6 +++--- .../tutorials/getting_started/getting_started_with_JuMP.jl | 3 ++- 2 files changed, 5 insertions(+), 4 deletions(-) diff --git a/docs/src/manual/solutions.md b/docs/src/manual/solutions.md index 3f409bcbda8..7ce66acba02 100644 --- a/docs/src/manual/solutions.md +++ b/docs/src/manual/solutions.md @@ -36,7 +36,7 @@ Subject to Use [`has_optimal_solution`](@ref) to check if the solver found an optimal solution: -```julia solutions +```jldoctest solutions julia> has_optimal_solution(model) true ``` @@ -44,14 +44,14 @@ true By default [`has_optimal_solution`](@ref) returns `true` for both global and local optima. Pass `allow_local = false` to check if the solver found a globally optimal solution: -```julia solutions +```jldoctest solutions julia> has_optimal_solution(model; allow_local = false) true ``` Pass `dual = true` to check if the solver found an optimal dual solution in addition to an optimal primal solution: -```solutions +```jldoctest solutions julia> has_optimal_solution(model; dual = true) true ``` diff --git a/docs/src/tutorials/getting_started/getting_started_with_JuMP.jl b/docs/src/tutorials/getting_started/getting_started_with_JuMP.jl index 5475a44725a..f3a21c7ca22 100644 --- a/docs/src/tutorials/getting_started/getting_started_with_JuMP.jl +++ b/docs/src/tutorials/getting_started/getting_started_with_JuMP.jl @@ -178,7 +178,8 @@ optimize!(model) # Julia has a convention that functions which mutate their arguments should # end in `!`. A common example is `push!`. -# Now let's see what information we can query about the solution. 
+# Now let's see what information we can query about the solution, +# starting with [`has_optimal_solution`](@ref): has_optimal_solution(model) From 6a18c4d77aff51fb0a8daccf628244cf245060b2 Mon Sep 17 00:00:00 2001 From: Oscar Dowson Date: Wed, 7 Feb 2024 12:28:51 +1300 Subject: [PATCH 13/19] Apply suggestions from code review --- docs/src/manual/solutions.md | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/docs/src/manual/solutions.md b/docs/src/manual/solutions.md index 7ce66acba02..14582cbfbc3 100644 --- a/docs/src/manual/solutions.md +++ b/docs/src/manual/solutions.md @@ -32,7 +32,7 @@ Subject to y[b] ≤ 1 ``` -## Check if optimal solution exists +## Check if an optimal solution exists Use [`has_optimal_solution`](@ref) to check if the solver found an optimal solution: @@ -41,7 +41,7 @@ julia> has_optimal_solution(model) true ``` -By default [`has_optimal_solution`](@ref) returns `true` for both global and +By default, [`has_optimal_solution`](@ref) returns `true` for both global and local optima. 
Pass `allow_local = false` to check if the solver found a globally optimal solution: ```jldoctest solutions @@ -322,7 +322,7 @@ Solution is optimal ``` You can also use a more advanced workflow that deals with a broader range of -statues: +statuses: ```jldoctest solutions julia> function solve_and_print_solution(model) if termination_status(model) in (OPTIMAL, LOCALLY_SOLVED) From 7987d0db5809edfcf898a366866936bf917048f7 Mon Sep 17 00:00:00 2001 From: Oscar Dowson Date: Wed, 7 Feb 2024 17:26:01 +1300 Subject: [PATCH 14/19] Update optimizer_interface.jl --- src/optimizer_interface.jl | 12 +++++++----- 1 file changed, 7 insertions(+), 5 deletions(-) diff --git a/src/optimizer_interface.jl b/src/optimizer_interface.jl index 358beb04a32..02d81aafd04 100644 --- a/src/optimizer_interface.jl +++ b/src/optimizer_interface.jl @@ -626,11 +626,13 @@ function has_optimal_solution( (allow_local && (status == LOCALLY_SOLVED)) || (allow_almost && (status == ALMOST_OPTIMAL)) || (allow_almost && allow_local && (status == ALMOST_LOCALLY_SOLVED)) - primal = primal_status(model; result) - ret &= - (primal == FEASIBLE_POINT) || - (allow_almost && (primal == NEARLY_FEASIBLE_POINT)) - if dual + if ret + primal = primal_status(model; result) + ret &= + (primal == FEASIBLE_POINT) || + (allow_almost && (primal == NEARLY_FEASIBLE_POINT)) + end + if ret && dual dual_stat = dual_status(model; result) ret &= (dual_stat == FEASIBLE_POINT) || From 3056246bd8395c31607ba9532b441ba2cad26b2e Mon Sep 17 00:00:00 2001 From: odow Date: Thu, 8 Feb 2024 11:58:00 +1300 Subject: [PATCH 15/19] Add more checks --- docs/src/background/algebraic_modeling_languages.md | 12 ++++++++++++ .../src/tutorials/applications/optimal_power_flow.jl | 2 +- docs/src/tutorials/conic/arbitrary_precision.jl | 3 +++ docs/src/tutorials/conic/dualization.jl | 4 ++++ docs/src/tutorials/conic/experiment_design.jl | 3 +++ docs/src/tutorials/conic/logistic_regression.jl | 10 ++++++---- 
docs/src/tutorials/conic/quantum_discrimination.jl | 2 ++ docs/src/tutorials/conic/simple_examples.jl | 5 +++++ docs/src/tutorials/conic/start_values.jl | 1 + docs/src/tutorials/conic/tips_and_tricks.jl | 12 ++++++++++++ 10 files changed, 49 insertions(+), 5 deletions(-) diff --git a/docs/src/background/algebraic_modeling_languages.md b/docs/src/background/algebraic_modeling_languages.md index 2256b849238..6ff34d1d948 100644 --- a/docs/src/background/algebraic_modeling_languages.md +++ b/docs/src/background/algebraic_modeling_languages.md @@ -138,6 +138,9 @@ julia> function algebraic_knapsack(c, w, b) @objective(model, Max, sum(c[i] * x[i] for i = 1:n)) @constraint(model, sum(w[i] * x[i] for i = 1:n) <= b) optimize!(model) + if termination_status(model) != OPTIMAL + error("Not solved correctly") + end return value.(x) end algebraic_knapsack (generic function with 1 method) @@ -179,6 +182,9 @@ julia> function nonalgebraic_knapsack(c, w, b) con = build_constraint(error, lhs, MOI.LessThan(b)) add_constraint(model, con) optimize!(model) + if termination_status(model) != OPTIMAL + error("Not solved correctly") + end return value.(x) end nonalgebraic_knapsack (generic function with 1 method) @@ -219,6 +225,9 @@ julia> function mathoptinterface_knapsack(optimizer, c, w, b) MOI.LessThan(b), ) MOI.optimize!(model) + if MOI.get(model, MOI.TerminationStatus()) != MOI.OPTIMAL + error("Not solved correctly") + end return MOI.get.(model, MOI.VariablePrimal(), x) end mathoptinterface_knapsack (generic function with 1 method) @@ -257,6 +266,9 @@ julia> function highs_knapsack(c, w, b) w, ) Highs_run(model) + if Highs_getModelStatus(model) != kHighsModelStatusOptimal + error("Not solved correctly") + end x = fill(NaN, 2) Highs_getSolution(model, x, C_NULL, C_NULL, C_NULL) Highs_destroy(model) diff --git a/docs/src/tutorials/applications/optimal_power_flow.jl b/docs/src/tutorials/applications/optimal_power_flow.jl index 70d679b3ee9..cbf682a0a21 100644 --- 
a/docs/src/tutorials/applications/optimal_power_flow.jl +++ b/docs/src/tutorials/applications/optimal_power_flow.jl @@ -420,8 +420,8 @@ optimize!(model) #- -sdp_relaxation_lower_bound = round(objective_value(model); digits = 2) Test.@test has_optimal_solution(model; allow_almost = true) +sdp_relaxation_lower_bound = round(objective_value(model); digits = 2) Test.@test isapprox(sdp_relaxation_lower_bound, 2753.04; rtol = 1e-3) #src println( "Objective value (W & V relax. lower bound): $sdp_relaxation_lower_bound", diff --git a/docs/src/tutorials/conic/arbitrary_precision.jl b/docs/src/tutorials/conic/arbitrary_precision.jl index c06a3def722..4d0e84fdec9 100644 --- a/docs/src/tutorials/conic/arbitrary_precision.jl +++ b/docs/src/tutorials/conic/arbitrary_precision.jl @@ -76,6 +76,7 @@ print(model) # Let's solve and inspect the solution: optimize!(model) +@assert has_optimal_solution(model; dual = true) solution_summary(model) # The value of each decision variable is a `BigFloat`: @@ -100,6 +101,7 @@ value.(x) .- [3 // 7, 3 // 14] set_attribute(model, "tol_gap_abs", 1e-32) set_attribute(model, "tol_gap_rel", 1e-32) optimize!(model) +@assert has_optimal_solution(model) value.(x) .- [3 // 7, 3 // 14] # ## Rational arithmetic @@ -141,6 +143,7 @@ print(model) # Let's solve and inspect the solution: optimize!(model) +@assert has_optimal_solution(model) solution_summary(model) # The optimal values are given in exact rational arithmetic: diff --git a/docs/src/tutorials/conic/dualization.jl b/docs/src/tutorials/conic/dualization.jl index 149c2d3f5f2..045fc92f9da 100644 --- a/docs/src/tutorials/conic/dualization.jl +++ b/docs/src/tutorials/conic/dualization.jl @@ -130,6 +130,7 @@ print(model_dual) set_optimizer(model_primal, SCS.Optimizer) optimize!(model_primal) +@assert has_optimal_solution(model_primal; dual = true) # (There are five rows in the constraint matrix because SCS expects problems in # geometric conic form, and so JuMP has reformulated the `X, PSD` variable @@ 
-153,6 +154,7 @@ objective_value(model_primal) set_optimizer(model_dual, SCS.Optimizer) optimize!(model_dual) +@assert has_optimal_solution(model_dual; dual = true) # and the solution we obtain is: @@ -182,6 +184,7 @@ objective_value(model_dual) set_optimizer(model_primal, Dualization.dual_optimizer(SCS.Optimizer)) optimize!(model_primal) +@assert has_optimal_solution(model_primal; dual = true) # The performance is the same as if we solved `model_dual`, and the correct # solution is returned to `X`: @@ -197,6 +200,7 @@ dual.(primal_c) set_optimizer(model_dual, Dualization.dual_optimizer(SCS.Optimizer)) optimize!(model_dual) +@assert has_optimal_solution(model_dual; dual = true) #- diff --git a/docs/src/tutorials/conic/experiment_design.jl b/docs/src/tutorials/conic/experiment_design.jl index 02c07b15fab..fe45c620dfc 100644 --- a/docs/src/tutorials/conic/experiment_design.jl +++ b/docs/src/tutorials/conic/experiment_design.jl @@ -141,6 +141,7 @@ for i in 1:q end @objective(aOpt, Min, sum(u)) optimize!(aOpt) +@assert has_optimal_solution(aOpt) objective_value(aOpt) #- @@ -182,6 +183,7 @@ set_silent(eOpt) @constraint(eOpt, sum(np) <= n) @objective(eOpt, Max, t) optimize!(eOpt) +@assert has_optimal_solution(eOpt) objective_value(eOpt) #- value.(np) @@ -212,6 +214,7 @@ set_silent(dOpt) E = V * LinearAlgebra.diagm(0 => np ./ n) * V' @constraint(dOpt, [t; 1; triangle_vec(E)] in MOI.LogDetConeTriangle(q)) optimize!(dOpt) +@assert has_optimal_solution(dOpt) objective_value(dOpt) #- value.(np) diff --git a/docs/src/tutorials/conic/logistic_regression.jl b/docs/src/tutorials/conic/logistic_regression.jl index cdfa33fe74a..63f72256283 100644 --- a/docs/src/tutorials/conic/logistic_regression.jl +++ b/docs/src/tutorials/conic/logistic_regression.jl @@ -195,11 +195,12 @@ X, y = generate_dataset(n, p; shift = 10.0); model = build_logit_model(X, y, λ) set_optimizer(model, SCS.Optimizer) set_silent(model) -JuMP.optimize!(model) +optimize!(model) +@assert has_optimal_solution(model) 
#- -θ♯ = JuMP.value.(model[:θ]) +θ♯ = value.(model[:θ]) # It appears that the speed of convergence is not that impacted by the correlation # of the dataset, nor by the penalty $\lambda$. @@ -237,11 +238,12 @@ count_nonzero(v::Vector; tol = 1e-6) = sum(abs.(v) .>= tol) sparse_model = build_sparse_logit_model(X, y, λ) set_optimizer(sparse_model, SCS.Optimizer) set_silent(sparse_model) -JuMP.optimize!(sparse_model) +optimize!(sparse_model) +@assert has_optimal_solution(sparse_model) #- -θ♯ = JuMP.value.(sparse_model[:θ]) +θ♯ = value.(sparse_model[:θ]) println( "Number of non-zero components: ", count_nonzero(θ♯), diff --git a/docs/src/tutorials/conic/quantum_discrimination.jl b/docs/src/tutorials/conic/quantum_discrimination.jl index d743cc0b005..27303e2fa40 100644 --- a/docs/src/tutorials/conic/quantum_discrimination.jl +++ b/docs/src/tutorials/conic/quantum_discrimination.jl @@ -98,6 +98,7 @@ E = [@variable(model, [1:d, 1:d] in HermitianPSDCone()) for i in 1:N] # Now we optimize: optimize!(model) +@assert has_optimal_solution(model) solution_summary(model) # The probability of guessing correctly is: @@ -140,6 +141,7 @@ push!(E, E_N) # Then we can check that we get the same solution: optimize!(model) +@assert has_optimal_solution(model) solution_summary(model) #- diff --git a/docs/src/tutorials/conic/simple_examples.jl b/docs/src/tutorials/conic/simple_examples.jl index 7aad8d02f81..98e5541bcd2 100644 --- a/docs/src/tutorials/conic/simple_examples.jl +++ b/docs/src/tutorials/conic/simple_examples.jl @@ -64,6 +64,7 @@ function solve_max_cut_sdp(weights) @objective(model, Max, 0.25 * LinearAlgebra.dot(L, X)) @constraint(model, LinearAlgebra.diag(X) .== 1) optimize!(model) + @assert has_optimal_solution(model) V = svd_cholesky(value(X)) Random.seed!(N) r = rand(N) @@ -133,6 +134,7 @@ function example_k_means_clustering() @constraint(model, [i = 1:m], sum(Z[i, :]) .== 1) @constraint(model, LinearAlgebra.tr(Z) == num_clusters) optimize!(model) + @assert 
has_optimal_solution(model) Z_val = value.(Z) current_cluster, visited = 0, Set{Int}() solution = [1, 1, 2, 1, 2, 2] #src @@ -185,10 +187,12 @@ function example_correlation_problem() @constraint(model, 0.4 <= ρ["B", "C"] <= 0.5) @objective(model, Max, ρ["A", "C"]) optimize!(model) + @assert has_optimal_solution(model) println("An upper bound for ρ_AC is $(value(ρ["A", "C"]))") Test.@test value(ρ["A", "C"]) ≈ 0.87195 atol = 1e-4 #src @objective(model, Min, ρ["A", "C"]) optimize!(model) + @assert has_optimal_solution(model) println("A lower bound for ρ_AC is $(value(ρ["A", "C"]))") Test.@test value(ρ["A", "C"]) ≈ -0.978 atol = 1e-3 #src return @@ -380,6 +384,7 @@ function example_robust_uncertainty_sets() @constraint(model, [((1-ɛ)/ɛ) (u - μ)'; (u-μ) Σ] >= 0, PSDCone()) @objective(model, Max, c' * u) optimize!(model) + @assert has_optimal_solution(model) exact = μhat' * c + Γ1(𝛿 / 2, N) * LinearAlgebra.norm(c) + diff --git a/docs/src/tutorials/conic/start_values.jl b/docs/src/tutorials/conic/start_values.jl index d632fc62045..49d09916458 100644 --- a/docs/src/tutorials/conic/start_values.jl +++ b/docs/src/tutorials/conic/start_values.jl @@ -67,6 +67,7 @@ model = Model(SCS.Optimizer) @constraint(model, sum(x) <= 1) @objective(model, Max, sum(i * x[i] for i in 1:3)) optimize!(model) +@assert has_optimal_solution(model) # By looking at the log, we can see that SCS took 75 iterations to find the optimal # solution. 
Now we set the optimal solution as our starting point: diff --git a/docs/src/tutorials/conic/tips_and_tricks.jl b/docs/src/tutorials/conic/tips_and_tricks.jl index 6d558a2116c..217499e63c9 100644 --- a/docs/src/tutorials/conic/tips_and_tricks.jl +++ b/docs/src/tutorials/conic/tips_and_tricks.jl @@ -98,6 +98,7 @@ set_silent(model) @constraint(model, [t; x] in SecondOrderCone()) @objective(model, Min, t) optimize!(model) +@assert has_optimal_solution(model) value(t), value.(x) # ## Rotated Second-Order Cone @@ -120,6 +121,7 @@ set_silent(model) @constraint(model, [t; 0.5; residuals] in RotatedSecondOrderCone()) @objective(model, Min, t) optimize!(model) +@assert has_optimal_solution(model) value(θ), value(t) # ## Exponential Cone @@ -142,6 +144,7 @@ set_silent(model) @objective(model, Min, z) @constraint(model, [x, 1, z] in MOI.ExponentialCone()) optimize!(model) +@assert has_optimal_solution(model) value(z), exp(1.5) # ### Logarithm @@ -155,6 +158,7 @@ set_silent(model) @objective(model, Max, x) @constraint(model, [x, 1, z] in MOI.ExponentialCone()) optimize!(model) +@assert has_optimal_solution(model) value(x), log(1.5) # ### Log-sum-exp @@ -216,6 +220,7 @@ set_silent(model) @constraint(model, A * x .<= b) @constraint(model, [i = 1:n], [t[i], x[i], 1] in MOI.ExponentialCone()) optimize!(model) +@assert has_optimal_solution(model) objective_value(model) # The [`MOI.ExponentialCone`](@ref) has a dual, the [`MOI.DualExponentialCone`](@ref), @@ -234,6 +239,7 @@ set_silent(model) @constraint(model, A * x .<= b) @constraint(model, [t; ones(n); x] in MOI.RelativeEntropyCone(2n + 1)) optimize!(model) +@assert has_optimal_solution(model) objective_value(model) # ## PowerCone @@ -255,6 +261,7 @@ set_silent(model) @constraint(model, [t, 1, x] in MOI.PowerCone(1 / 3)) @objective(model, Min, t) optimize!(model) +@assert has_optimal_solution(model) value(t), value(x) # The [`MOI.PowerCone`](@ref) has a dual, the [`MOI.DualPowerCone`](@ref), @@ -277,6 +284,7 @@ function 
p_norm(x::Vector, p) @constraint(model, sum(r) == t) @objective(model, Min, t) optimize!(model) + @assert has_optimal_solution(model) return value(t) end @@ -320,6 +328,7 @@ set_silent(model) @objective(model, Min, t) @constraint(model, t .* I - A in PSDCone()) optimize!(model) +@assert has_optimal_solution(model) objective_value(model) # ## GeometricMeanCone @@ -353,6 +362,7 @@ set_silent(model) @constraint(model, [t; vec(X)] in MOI.RootDetConeSquare(2)) @constraint(model, X .== [2 1; 1 3]) optimize!(model) +@assert has_optimal_solution(model) value(t), sqrt(LinearAlgebra.det(value.(X))) # If `X` is symmetric, then you can use [`MOI.RootDetConeTriangle`](@ref) @@ -390,6 +400,7 @@ set_silent(model) @constraint(model, X .== [2 1; 1 3]) @constraint(model, u == 0.5) optimize!(model) +@assert has_optimal_solution(model) value(t), 0.5 * log(LinearAlgebra.det(value.(X) ./ 0.5)) # If `X` is symmetric, then you can use [`MOI.LogDetConeTriangle`](@ref) @@ -410,6 +421,7 @@ set_silent(model) @constraint(model, X .== [2 1; 1 3]) @constraint(model, u == 0.5) optimize!(model) +@assert has_optimal_solution(model) value(t), 0.5 * log(LinearAlgebra.det(value.(X) ./ 0.5)) # ## Other Cones and Functions From 5364a3105300633bcc3d09c043ec99cead3261c6 Mon Sep 17 00:00:00 2001 From: odow Date: Tue, 13 Feb 2024 15:13:30 +1300 Subject: [PATCH 16/19] Change to is_solved_and_feasible --- docs/src/manual/solutions.md | 14 +++++----- .../algorithms/benders_decomposition.jl | 14 +++++----- .../cutting_stock_column_generation.jl | 10 +++---- docs/src/tutorials/algorithms/parallelism.md | 6 ++--- .../algorithms/tsp_lazy_constraints.jl | 6 ++--- .../applications/optimal_power_flow.jl | 6 ++--- .../tutorials/applications/power_systems.jl | 6 ++--- .../applications/two_stage_stochastic.jl | 6 ++--- .../tutorials/conic/arbitrary_precision.jl | 6 ++--- docs/src/tutorials/conic/dualization.jl | 8 +++--- docs/src/tutorials/conic/ellipse_approx.jl | 6 ++--- docs/src/tutorials/conic/experiment_design.jl |
6 ++--- .../tutorials/conic/logistic_regression.jl | 4 +-- docs/src/tutorials/conic/min_ellipse.jl | 2 +- .../tutorials/conic/quantum_discrimination.jl | 4 +-- docs/src/tutorials/conic/simple_examples.jl | 14 +++++----- docs/src/tutorials/conic/start_values.jl | 2 +- docs/src/tutorials/conic/tips_and_tricks.jl | 24 ++++++++--------- .../tutorials/getting_started/debugging.jl | 2 +- .../design_patterns_for_larger_models.jl | 16 ++++++------ .../getting_started_with_JuMP.jl | 10 +++---- .../getting_started_with_data_and_plotting.jl | 2 +- docs/src/tutorials/linear/callbacks.jl | 8 +++--- docs/src/tutorials/linear/cannery.jl | 2 +- .../linear/constraint_programming.jl | 16 ++++++------ docs/src/tutorials/linear/diet.jl | 4 +-- .../src/tutorials/linear/facility_location.jl | 4 +-- docs/src/tutorials/linear/factory_schedule.jl | 2 +- docs/src/tutorials/linear/finance.jl | 4 +-- .../tutorials/linear/geographic_clustering.jl | 2 +- docs/src/tutorials/linear/knapsack.jl | 4 +-- docs/src/tutorials/linear/lp_sensitivity.jl | 2 +- docs/src/tutorials/linear/mip_duality.jl | 8 +++--- docs/src/tutorials/linear/multi.jl | 2 +- .../linear/multi_commodity_network.jl | 2 +- .../linear/multi_objective_examples.jl | 6 ++--- .../linear/multi_objective_knapsack.jl | 2 +- docs/src/tutorials/linear/n-queens.jl | 2 +- docs/src/tutorials/linear/network_flows.jl | 6 ++--- docs/src/tutorials/linear/piecewise_linear.jl | 10 +++---- docs/src/tutorials/linear/sudoku.jl | 4 +-- docs/src/tutorials/linear/transp.jl | 2 +- docs/src/tutorials/nonlinear/classifiers.jl | 6 ++--- .../tutorials/nonlinear/complementarity.jl | 10 +++---- .../tutorials/nonlinear/nested_problems.jl | 6 ++--- docs/src/tutorials/nonlinear/portfolio.jl | 2 +- .../tutorials/nonlinear/querying_hessians.jl | 2 +- .../src/tutorials/nonlinear/rocket_control.jl | 2 +- .../tutorials/nonlinear/simple_examples.jl | 10 +++---- .../space_shuttle_reentry_trajectory.jl | 2 +- .../tutorials/nonlinear/tips_and_tricks.jl | 4 +-- 
.../nonlinear/user_defined_hessians.jl | 2 +- src/optimizer_interface.jl | 16 +++++------- test/test_model.jl | 26 +++++++++---------- 54 files changed, 176 insertions(+), 178 deletions(-) diff --git a/docs/src/manual/solutions.md b/docs/src/manual/solutions.md index 14582cbfbc3..8bf3db83b00 100644 --- a/docs/src/manual/solutions.md +++ b/docs/src/manual/solutions.md @@ -34,25 +34,25 @@ Subject to ## Check if an optimal solution exists -Use [`has_optimal_solution`](@ref) to check if the solver found an optimal +Use [`is_solved_and_feasible`](@ref) to check if the solver found an optimal solution: ```jldoctest solutions -julia> has_optimal_solution(model) +julia> is_solved_and_feasible(model) true ``` -By default, [`has_optimal_solution`](@ref) returns `true` for both global and +By default, [`is_solved_and_feasible`](@ref) returns `true` for both global and local optima. Pass `allow_local = false` to check if the solver found a globally optimal solution: ```jldoctest solutions -julia> has_optimal_solution(model; allow_local = false) +julia> is_solved_and_feasible(model; allow_local = false) true ``` Pass `dual = true` to check if the solver found an optimal dual solution in addition to an optimal primal solution: ```jldoctest solutions -julia> has_optimal_solution(model; dual = true) +julia> is_solved_and_feasible(model; dual = true) true ``` @@ -298,12 +298,12 @@ And data, a 2-element Vector{Float64}: You should always check whether the solver found a solution before calling solution functions like [`value`](@ref) or [`objective_value`](@ref). 
-A simple approach is to use [`has_optimal_solution`](@ref): +A simple approach is to use [`is_solved_and_feasible`](@ref): ```jldoctest solutions julia> function solve_and_print_solution(model) optimize!(model) - if !has_optimal_solution(model; dual = true) + if !is_solved_and_feasible(model; dual = true) error("The model was not solved correctly.") end println("Solution is optimal") diff --git a/docs/src/tutorials/algorithms/benders_decomposition.jl b/docs/src/tutorials/algorithms/benders_decomposition.jl index f96905fb2aa..fcab4850e91 100644 --- a/docs/src/tutorials/algorithms/benders_decomposition.jl +++ b/docs/src/tutorials/algorithms/benders_decomposition.jl @@ -164,7 +164,7 @@ function solve_subproblem(x) con = @constraint(model, A_2 * y .<= b - A_1 * x) @objective(model, Min, c_2' * y) optimize!(model) - @assert has_optimal_solution(model; dual = true) + @assert is_solved_and_feasible(model; dual = true) return (obj = objective_value(model), y = value.(y), π = dual.(con)) end @@ -194,7 +194,7 @@ ABSOLUTE_OPTIMALITY_GAP = 1e-6 println("Iteration Lower Bound Upper Bound Gap") for k in 1:MAXIMUM_ITERATIONS optimize!(model) - @assert has_optimal_solution(model) + @assert is_solved_and_feasible(model) lower_bound = objective_value(model) x_k = value.(x) ret = solve_subproblem(x_k) @@ -212,7 +212,7 @@ end # Finally, we can obtain the optimal solution optimize!(model) -@assert has_optimal_solution(model) +@assert is_solved_and_feasible(model) Test.@test value.(x) == [0.0, 1.0] #src x_optimal = value.(x) @@ -270,7 +270,7 @@ set_attribute(lazy_model, MOI.LazyConstraintCallback(), my_callback) # Now when we optimize!, our callback is run: optimize!(lazy_model) -@assert has_optimal_solution(lazy_model) +@assert is_solved_and_feasible(lazy_model) # For this model, the callback algorithm required more solves of the subproblem: @@ -326,7 +326,7 @@ print(subproblem) function solve_subproblem(model, x) fix.(model[:x_copy], x) optimize!(model) - @assert 
has_optimal_solution(model; dual = true) + @assert is_solved_and_feasible(model; dual = true) return ( obj = objective_value(model), y = value.(model[:y]), @@ -344,7 +344,7 @@ end println("Iteration Lower Bound Upper Bound Gap") for k in 1:MAXIMUM_ITERATIONS optimize!(model) - @assert has_optimal_solution(model) + @assert is_solved_and_feasible(model) lower_bound = objective_value(model) x_k = value.(x) ret = solve_subproblem(subproblem, x_k) @@ -362,7 +362,7 @@ end # Finally, we can obtain the optimal solution: optimize!(model) -@assert has_optimal_solution(model) +@assert is_solved_and_feasible(model) Test.@test value.(x) == [0.0, 1.0] #src x_optimal = value.(x) diff --git a/docs/src/tutorials/algorithms/cutting_stock_column_generation.jl b/docs/src/tutorials/algorithms/cutting_stock_column_generation.jl index 1a2ef3ff6d7..9e7809772b4 100644 --- a/docs/src/tutorials/algorithms/cutting_stock_column_generation.jl +++ b/docs/src/tutorials/algorithms/cutting_stock_column_generation.jl @@ -235,7 +235,7 @@ set_silent(model) @objective(model, Min, sum(x)) @constraint(model, demand[i in 1:I], patterns[i]' * x >= data.pieces[i].d) optimize!(model) -@assert has_optimal_solution(model) +@assert is_solved_and_feasible(model) solution_summary(model) # This solution requires 421 rolls. 
This solution is sub-optimal because the @@ -253,7 +253,7 @@ solution_summary(model) unset_integer.(x) optimize!(model) -@assert has_optimal_solution(model; dual = true) +@assert is_solved_and_feasible(model; dual = true) π_13 = dual(demand[13]) # Using the economic interpretation of the dual variable, we can say that a one @@ -284,7 +284,7 @@ function solve_pricing(data::Data, π::Vector{Float64}) @constraint(model, sum(data.pieces[i].w * y[i] for i in 1:I) <= data.W) @objective(model, Max, sum(π[i] * y[i] for i in 1:I)) optimize!(model) - @assert has_optimal_solution(model) + @assert is_solved_and_feasible(model) number_of_rolls_saved = objective_value(model) if number_of_rolls_saved > 1 + 1e-8 ## Benefit of pattern is more than the cost of a new roll plus some @@ -315,7 +315,7 @@ solve_pricing(data, zeros(I)) while true ## Solve the linear relaxation optimize!(model) - @assert has_optimal_solution(model; dual = true) + @assert is_solved_and_feasible(model; dual = true) ## Obtain a new dual vector π = dual.(demand) ## Solve the pricing problem @@ -366,7 +366,7 @@ sum(ceil.(Int, solution.rolls)) set_integer.(x) optimize!(model) -@assert has_optimal_solution(model) +@assert is_solved_and_feasible(model) solution = DataFrames.DataFrame([ (pattern = p, rolls = value(x_p)) for (p, x_p) in enumerate(x) ]) diff --git a/docs/src/tutorials/algorithms/parallelism.md b/docs/src/tutorials/algorithms/parallelism.md index 97f10b8d3d2..978760b5bd0 100644 --- a/docs/src/tutorials/algorithms/parallelism.md +++ b/docs/src/tutorials/algorithms/parallelism.md @@ -215,7 +215,7 @@ my_lock = Threads.ReentrantLock() Threads.@threads for i in 1:10 set_lower_bound(x, i) optimize!(model) - @assert has_optimal_solution(model) + @assert is_solved_and_feasible(model) Threads.lock(my_lock) do push!(solutions, i => objective_value(model)) end @@ -252,7 +252,7 @@ julia> Threads.@threads for i in 1:10 @objective(model, Min, x) set_lower_bound(x, i) optimize!(model) - @assert 
has_optimal_solution(sudoku) + @assert is_solved_and_feasible(model) Threads.lock(my_lock) do push!(solutions, i => objective_value(model)) end @@ -297,7 +297,7 @@ julia> Distributed.@everywhere begin @objective(model, Min, x) set_lower_bound(x, i) optimize!(model) - @assert has_optimal_solution(sudoku) + @assert is_solved_and_feasible(model) return objective_value(model) end end diff --git a/docs/src/tutorials/algorithms/tsp_lazy_constraints.jl b/docs/src/tutorials/algorithms/tsp_lazy_constraints.jl index 735b3c8bc9f..51add8abeb0 100644 --- a/docs/src/tutorials/algorithms/tsp_lazy_constraints.jl +++ b/docs/src/tutorials/algorithms/tsp_lazy_constraints.jl @@ -199,7 +199,7 @@ subtour(x::AbstractMatrix{VariableRef}) = subtour(value.(x)) iterative_model = build_tsp_model(d, n) optimize!(iterative_model) -@assert has_optimal_solution(iterative_model) +@assert is_solved_and_feasible(iterative_model) time_iterated = solve_time(iterative_model) cycle = subtour(iterative_model[:x]) while 1 < length(cycle) < n @@ -210,7 +210,7 @@ while 1 < length(cycle) < n sum(iterative_model[:x][i, j] for (i, j) in S) <= length(cycle) - 1, ) optimize!(iterative_model) - @assert has_optimal_solution(iterative_model) + @assert is_solved_and_feasible(iterative_model) global time_iterated += solve_time(iterative_model) global cycle = subtour(iterative_model[:x]) end @@ -264,7 +264,7 @@ set_attribute( subtour_elimination_callback, ) optimize!(lazy_model) -@assert has_optimal_solution(lazy_model) +@assert is_solved_and_feasible(lazy_model) objective_value(lazy_model) # This finds the same optimal tour: diff --git a/docs/src/tutorials/applications/optimal_power_flow.jl b/docs/src/tutorials/applications/optimal_power_flow.jl index cbf682a0a21..a54bbafa338 100644 --- a/docs/src/tutorials/applications/optimal_power_flow.jl +++ b/docs/src/tutorials/applications/optimal_power_flow.jl @@ -137,7 +137,7 @@ println("Objective value (basic lower bound) : $basic_lower_bound") @constraint(model, sum(P_G)
>= sum(P_Demand)) optimize!(model) -@assert has_optimal_solution(model) +@assert is_solved_and_feasible(model) better_lower_bound = round(objective_value(model); digits = 2) println("Objective value (better lower bound): $better_lower_bound") @@ -281,7 +281,7 @@ P_G = real(S_G) # We're finally ready to solve our nonlinear AC-OPF problem: optimize!(model) -@assert has_optimal_solution(model) +@assert is_solved_and_feasible(model) Test.@test isapprox(objective_value(model), 3087.84; atol = 1e-2) #src solution_summary(model) @@ -420,7 +420,7 @@ optimize!(model) #- -Test.@test has_optimal_solution(model; allow_almost = true) +Test.@test is_solved_and_feasible(model; allow_almost = true) sdp_relaxation_lower_bound = round(objective_value(model); digits = 2) Test.@test isapprox(sdp_relaxation_lower_bound, 2753.04; rtol = 1e-3) #src println( diff --git a/docs/src/tutorials/applications/power_systems.jl b/docs/src/tutorials/applications/power_systems.jl index 4f6f386443e..9cd58397271 100644 --- a/docs/src/tutorials/applications/power_systems.jl +++ b/docs/src/tutorials/applications/power_systems.jl @@ -115,7 +115,7 @@ function solve_economic_dispatch(generators::Vector, wind, scenario) @constraint(model, sum(g[i] for i in 1:N) + w == scenario.demand) ## Solve statement optimize!(model) - @assert has_optimal_solution(model) + @assert is_solved_and_feasible(model) ## return the optimal value of the objective function and its minimizers return ( g = value.(g), @@ -217,7 +217,7 @@ function solve_economic_dispatch_inplace( wind.variable_cost * w, ) optimize!(model) - @assert has_optimal_solution(model) + @assert is_solved_and_feasible(model) push!(obj_out, objective_value(model)) push!(w_out, value(w)) push!(g1_out, value(g[1])) @@ -528,7 +528,7 @@ function solve_nonlinear_economic_dispatch( ) @constraint(model, sum(g[i] for i in 1:N) + sqrt(w) == scenario.demand) optimize!(model) - @assert has_optimal_solution(model) + @assert is_solved_and_feasible(model) return ( g = 
value.(g), w = value(w), diff --git a/docs/src/tutorials/applications/two_stage_stochastic.jl b/docs/src/tutorials/applications/two_stage_stochastic.jl index e62835ee8b8..ac7a8bdce70 100644 --- a/docs/src/tutorials/applications/two_stage_stochastic.jl +++ b/docs/src/tutorials/applications/two_stage_stochastic.jl @@ -86,7 +86,7 @@ set_silent(model) @expression(model, z[ω in Ω], 5y[ω] - 0.1 * (x - y[ω])) @objective(model, Max, -2x + sum(P[ω] * z[ω] for ω in Ω)) optimize!(model) -@assert has_optimal_solution(model) +@assert is_solved_and_feasible(model) solution_summary(model) # The optimal number of pies to make is: @@ -159,7 +159,7 @@ function CVaR(Z::Vector{Float64}, P::Vector{Float64}; γ::Float64) @constraint(model, [i in 1:N], z[i] >= ξ - Z[i]) @objective(model, Max, ξ - 1 / γ * sum(P[i] * z[i] for i in 1:N)) optimize!(model) - @assert has_optimal_solution(model) + @assert is_solved_and_feasible(model) return objective_value(model) end @@ -218,7 +218,7 @@ set_silent(model) @constraint(model, [ω in Ω], z[ω] >= ξ - Z[ω]) @objective(model, Max, -2x + ξ - 1 / γ * sum(P[ω] * z[ω] for ω in Ω)) optimize!(model) -@assert has_optimal_solution(model) +@assert is_solved_and_feasible(model) # When ``\gamma = 0.4``, the optimal number of pies to bake is: diff --git a/docs/src/tutorials/conic/arbitrary_precision.jl b/docs/src/tutorials/conic/arbitrary_precision.jl index 4d0e84fdec9..2323d25bbb5 100644 --- a/docs/src/tutorials/conic/arbitrary_precision.jl +++ b/docs/src/tutorials/conic/arbitrary_precision.jl @@ -76,7 +76,7 @@ print(model) # Let's solve and inspect the solution: optimize!(model) -@assert has_optimal_solution(model; dual = true) +@assert is_solved_and_feasible(model; dual = true) solution_summary(model) # The value of each decision variable is a `BigFloat`: @@ -101,7 +101,7 @@ value.(x) .- [3 // 7, 3 // 14] set_attribute(model, "tol_gap_abs", 1e-32) set_attribute(model, "tol_gap_rel", 1e-32) optimize!(model) -@assert has_optimal_solution(model) +@assert 
is_solved_and_feasible(model) value.(x) .- [3 // 7, 3 // 14] # ## Rational arithmetic @@ -143,7 +143,7 @@ print(model) # Let's solve and inspect the solution: optimize!(model) -@assert has_optimal_solution(model) +@assert is_solved_and_feasible(model) solution_summary(model) # The optimal values are given in exact rational arithmetic: diff --git a/docs/src/tutorials/conic/dualization.jl b/docs/src/tutorials/conic/dualization.jl index 045fc92f9da..8a2fc18fde2 100644 --- a/docs/src/tutorials/conic/dualization.jl +++ b/docs/src/tutorials/conic/dualization.jl @@ -130,7 +130,7 @@ print(model_dual) set_optimizer(model_primal, SCS.Optimizer) optimize!(model_primal) -@assert has_optimal_solution(model_primal; dual = true) +@assert is_solved_and_feasible(model_primal; dual = true) # (There are five rows in the constraint matrix because SCS expects problems in # geometric conic form, and so JuMP has reformulated the `X, PSD` variable @@ -154,7 +154,7 @@ objective_value(model_primal) set_optimizer(model_dual, SCS.Optimizer) optimize!(model_dual) -@assert has_optimal_solution(model_dual; dual = true) +@assert is_solved_and_feasible(model_dual; dual = true) # and the solution we obtain is: @@ -184,7 +184,7 @@ objective_value(model_dual) set_optimizer(model_primal, Dualization.dual_optimizer(SCS.Optimizer)) optimize!(model_primal) -@assert has_optimal_solution(model_primal; dual = true) +@assert is_solved_and_feasible(model_primal; dual = true) # The performance is the same as if we solved `model_dual`, and the correct # solution is returned to `X`: @@ -200,7 +200,7 @@ dual.(primal_c) set_optimizer(model_dual, Dualization.dual_optimizer(SCS.Optimizer)) optimize!(model_dual) -@assert has_optimal_solution(model_dual; dual = true) +@assert is_solved_and_feasible(model_dual; dual = true) #- diff --git a/docs/src/tutorials/conic/ellipse_approx.jl b/docs/src/tutorials/conic/ellipse_approx.jl index caf5a57df45..2b08720ca47 100644 --- a/docs/src/tutorials/conic/ellipse_approx.jl +++ 
b/docs/src/tutorials/conic/ellipse_approx.jl @@ -110,7 +110,7 @@ m, n = size(S) @constraint(model, [t; vec(Z)] in MOI.RootDetConeSquare(n)) @objective(model, Max, t) optimize!(model) -Test.@test has_optimal_solution(model) +Test.@test is_solved_and_feasible(model) solution_summary(model) # ## Results @@ -210,7 +210,7 @@ f = [1 - S[i, :]' * Z * S[i, :] + 2 * S[i, :]' * z - s for i in 1:m] ## The former @objective(model, Max, t) @objective(model, Max, 1 * t + 0) optimize!(model) -Test.@test has_optimal_solution(model) +Test.@test is_solved_and_feasible(model) Test.@test isapprox(D, value.(Z); atol = 1e-6) #src solve_time_1 = solve_time(model) @@ -233,7 +233,7 @@ print_active_bridges(model) remove_bridge(model, MOI.Bridges.Constraint.GeoMeanToPowerBridge) optimize!(model) -Test.@test has_optimal_solution(model) +Test.@test is_solved_and_feasible(model) # This time, the solve took: diff --git a/docs/src/tutorials/conic/experiment_design.jl b/docs/src/tutorials/conic/experiment_design.jl index fe45c620dfc..75680c92dbd 100644 --- a/docs/src/tutorials/conic/experiment_design.jl +++ b/docs/src/tutorials/conic/experiment_design.jl @@ -141,7 +141,7 @@ for i in 1:q end @objective(aOpt, Min, sum(u)) optimize!(aOpt) -@assert has_optimal_solution(aOpt) +@assert is_solved_and_feasible(aOpt) objective_value(aOpt) #- @@ -183,7 +183,7 @@ set_silent(eOpt) @constraint(eOpt, sum(np) <= n) @objective(eOpt, Max, t) optimize!(eOpt) -@assert has_optimal_solution(eOpt) +@assert is_solved_and_feasible(eOpt) objective_value(eOpt) #- value.(np) @@ -214,7 +214,7 @@ set_silent(dOpt) E = V * LinearAlgebra.diagm(0 => np ./ n) * V' @constraint(dOpt, [t; 1; triangle_vec(E)] in MOI.LogDetConeTriangle(q)) optimize!(dOpt) -@assert has_optimal_solution(dOpt) +@assert is_solved_and_feasible(dOpt) objective_value(dOpt) #- value.(np) diff --git a/docs/src/tutorials/conic/logistic_regression.jl b/docs/src/tutorials/conic/logistic_regression.jl index 63f72256283..890bde86267 100644 --- 
a/docs/src/tutorials/conic/logistic_regression.jl +++ b/docs/src/tutorials/conic/logistic_regression.jl @@ -196,7 +196,7 @@ model = build_logit_model(X, y, λ) set_optimizer(model, SCS.Optimizer) set_silent(model) optimize!(model) -@assert has_optimal_solution(model) +@assert is_solved_and_feasible(model) #- @@ -239,7 +239,7 @@ sparse_model = build_sparse_logit_model(X, y, λ) set_optimizer(sparse_model, SCS.Optimizer) set_silent(sparse_model) optimize!(sparse_model) -@assert has_optimal_solution(sparse_model) +@assert is_solved_and_feasible(sparse_model) #- diff --git a/docs/src/tutorials/conic/min_ellipse.jl b/docs/src/tutorials/conic/min_ellipse.jl index be82f69e1c0..d6a6f4e74fb 100644 --- a/docs/src/tutorials/conic/min_ellipse.jl +++ b/docs/src/tutorials/conic/min_ellipse.jl @@ -125,7 +125,7 @@ end # Now, solve the program: optimize!(model) -Test.@test has_optimal_solution(model) +Test.@test is_solved_and_feasible(model) solution_summary(model) # ## Results diff --git a/docs/src/tutorials/conic/quantum_discrimination.jl b/docs/src/tutorials/conic/quantum_discrimination.jl index 27303e2fa40..252fd95d7f8 100644 --- a/docs/src/tutorials/conic/quantum_discrimination.jl +++ b/docs/src/tutorials/conic/quantum_discrimination.jl @@ -98,7 +98,7 @@ E = [@variable(model, [1:d, 1:d] in HermitianPSDCone()) for i in 1:N] # Now we optimize: optimize!(model) -@assert has_optimal_solution(model) +@assert is_solved_and_feasible(model) solution_summary(model) # The probability of guessing correctly is: @@ -141,7 +141,7 @@ push!(E, E_N) # Then we can check that we get the same solution: optimize!(model) -@assert has_optimal_solution(model) +@assert is_solved_and_feasible(model) solution_summary(model) #- diff --git a/docs/src/tutorials/conic/simple_examples.jl b/docs/src/tutorials/conic/simple_examples.jl index 98e5541bcd2..4d5951f15ce 100644 --- a/docs/src/tutorials/conic/simple_examples.jl +++ b/docs/src/tutorials/conic/simple_examples.jl @@ -64,7 +64,7 @@ function 
solve_max_cut_sdp(weights) @objective(model, Max, 0.25 * LinearAlgebra.dot(L, X)) @constraint(model, LinearAlgebra.diag(X) .== 1) optimize!(model) - @assert has_optimal_solution(model) + @assert is_solved_and_feasible(model) V = svd_cholesky(value(X)) Random.seed!(N) r = rand(N) @@ -134,7 +134,7 @@ function example_k_means_clustering() @constraint(model, [i = 1:m], sum(Z[i, :]) .== 1) @constraint(model, LinearAlgebra.tr(Z) == num_clusters) optimize!(model) - @assert has_optimal_solution(model) + @assert is_solved_and_feasible(model) Z_val = value.(Z) current_cluster, visited = 0, Set{Int}() solution = [1, 1, 2, 1, 2, 2] #src @@ -187,12 +187,12 @@ function example_correlation_problem() @constraint(model, 0.4 <= ρ["B", "C"] <= 0.5) @objective(model, Max, ρ["A", "C"]) optimize!(model) - @assert has_optimal_solution(model) + @assert is_solved_and_feasible(model) println("An upper bound for ρ_AC is $(value(ρ["A", "C"]))") Test.@test value(ρ["A", "C"]) ≈ 0.87195 atol = 1e-4 #src @objective(model, Min, ρ["A", "C"]) optimize!(model) - @assert has_optimal_solution(model) + @assert is_solved_and_feasible(model) println("A lower bound for ρ_AC is $(value(ρ["A", "C"]))") Test.@test value(ρ["A", "C"]) ≈ -0.978 atol = 1e-3 #src return @@ -269,7 +269,7 @@ function example_minimum_distortion() fix(Q[1, 1], 0) @objective(model, Min, c²) optimize!(model) - Test.@test has_optimal_solution(model) + Test.@test is_solved_and_feasible(model) Test.@test objective_value(model) ≈ 4 / 3 atol = 1e-4 ## Recover the minimal distorted embedding: X = [zeros(3) sqrt(value.(Q)[2:end, 2:end])] @@ -353,7 +353,7 @@ function example_theta_problem() J = ones(Int, 5, 5) @objective(model, Max, LinearAlgebra.dot(J, X)) optimize!(model) - Test.@test has_optimal_solution(model) + Test.@test is_solved_and_feasible(model) Test.@test objective_value(model) ≈ sqrt(5) rtol = 1e-4 println("The Lovász number is: $(objective_value(model))") return @@ -384,7 +384,7 @@ function example_robust_uncertainty_sets() 
@constraint(model, [((1-ɛ)/ɛ) (u - μ)'; (u-μ) Σ] >= 0, PSDCone()) @objective(model, Max, c' * u) optimize!(model) - @assert has_optimal_solution(model) + @assert is_solved_and_feasible(model) exact = μhat' * c + Γ1(𝛿 / 2, N) * LinearAlgebra.norm(c) + diff --git a/docs/src/tutorials/conic/start_values.jl b/docs/src/tutorials/conic/start_values.jl index 49d09916458..0ebc7d62f4c 100644 --- a/docs/src/tutorials/conic/start_values.jl +++ b/docs/src/tutorials/conic/start_values.jl @@ -67,7 +67,7 @@ model = Model(SCS.Optimizer) @constraint(model, sum(x) <= 1) @objective(model, Max, sum(i * x[i] for i in 1:3)) optimize!(model) -@assert has_optimal_solution(model) +@assert is_solved_and_feasible(model) # By looking at the log, we can see that SCS took 75 iterations to find the optimal # solution. Now we set the optimal solution as our starting point: diff --git a/docs/src/tutorials/conic/tips_and_tricks.jl b/docs/src/tutorials/conic/tips_and_tricks.jl index 217499e63c9..9de21fb0463 100644 --- a/docs/src/tutorials/conic/tips_and_tricks.jl +++ b/docs/src/tutorials/conic/tips_and_tricks.jl @@ -98,7 +98,7 @@ set_silent(model) @constraint(model, [t; x] in SecondOrderCone()) @objective(model, Min, t) optimize!(model) -@assert has_optimal_solution(model) +@assert is_solved_and_feasible(model) value(t), value.(x) # ## Rotated Second-Order Cone @@ -121,7 +121,7 @@ set_silent(model) @constraint(model, [t; 0.5; residuals] in RotatedSecondOrderCone()) @objective(model, Min, t) optimize!(model) -@assert has_optimal_solution(model) +@assert is_solved_and_feasible(model) value(θ), value(t) # ## Exponential Cone @@ -144,7 +144,7 @@ set_silent(model) @objective(model, Min, z) @constraint(model, [x, 1, z] in MOI.ExponentialCone()) optimize!(model) -@assert has_optimal_solution(model) +@assert is_solved_and_feasible(model) value(z), exp(1.5) # ### Logarithm @@ -158,7 +158,7 @@ set_silent(model) @objective(model, Max, x) @constraint(model, [x, 1, z] in MOI.ExponentialCone()) optimize!(model) 
-@assert has_optimal_solution(model) +@assert is_solved_and_feasible(model) value(x), log(1.5) # ### Log-sum-exp @@ -220,7 +220,7 @@ set_silent(model) @constraint(model, A * x .<= b) @constraint(model, [i = 1:n], [t[i], x[i], 1] in MOI.ExponentialCone()) optimize!(model) -@assert has_optimal_solution(model) +@assert is_solved_and_feasible(model) objective_value(model) # The [`MOI.ExponentialCone`](@ref) has a dual, the [`MOI.DualExponentialCone`](@ref), @@ -239,7 +239,7 @@ set_silent(model) @constraint(model, A * x .<= b) @constraint(model, [t; ones(n); x] in MOI.RelativeEntropyCone(2n + 1)) optimize!(model) -@assert has_optimal_solution(model) +@assert is_solved_and_feasible(model) objective_value(model) # ## PowerCone @@ -261,7 +261,7 @@ set_silent(model) @constraint(model, [t, 1, x] in MOI.PowerCone(1 / 3)) @objective(model, Min, t) optimize!(model) -@assert has_optimal_solution(model) +@assert is_solved_and_feasible(model) value(t), value(x) # The [`MOI.PowerCone`](@ref) has a dual, the [`MOI.DualPowerCone`](@ref), @@ -284,7 +284,7 @@ function p_norm(x::Vector, p) @constraint(model, sum(r) == t) @objective(model, Min, t) optimize!(model) - @assert has_optimal_solution(model) + @assert is_solved_and_feasible(model) return value(t) end @@ -328,7 +328,7 @@ set_silent(model) @objective(model, Min, t) @constraint(model, t .* I - A in PSDCone()) optimize!(model) -@assert has_optimal_solution(model) +@assert is_solved_and_feasible(model) objective_value(model) # ## GeometricMeanCone @@ -362,7 +362,7 @@ set_silent(model) @constraint(model, [t; vec(X)] in MOI.RootDetConeSquare(2)) @constraint(model, X .== [2 1; 1 3]) optimize!(model) -@assert has_optimal_solution(model) +@assert is_solved_and_feasible(model) value(t), sqrt(LinearAlgebra.det(value.(X))) # If `X` is symmetric, then you can use [`MOI.RootDetConeTriangle`](@ref) @@ -400,7 +400,7 @@ set_silent(model) @constraint(model, X .== [2 1; 1 3]) @constraint(model, u == 0.5) optimize!(model) -@assert 
has_optimal_solution(model) +@assert is_solved_and_feasible(model) value(t), 0.5 * log(LinearAlgebra.det(value.(X) ./ 0.5)) # If `X` is symmetric, then you can use [`MOI.LogDetConeTriangle`](@ref) @@ -421,7 +421,7 @@ set_silent(model) @constraint(model, X .== [2 1; 1 3]) @constraint(model, u == 0.5) optimize!(model) -@assert has_optimal_solution(model) +@assert is_solved_and_feasible(model) value(t), 0.5 * log(LinearAlgebra.det(value.(X) ./ 0.5)) # ## Other Cones and Functions diff --git a/docs/src/tutorials/getting_started/debugging.jl b/docs/src/tutorials/getting_started/debugging.jl index 694a16c8b26..8a6be8d6841 100644 --- a/docs/src/tutorials/getting_started/debugging.jl +++ b/docs/src/tutorials/getting_started/debugging.jl @@ -346,7 +346,7 @@ set_silent(model) # for variables with large positive or negative values in the optimal solution. optimize!(model) -@assert has_optimal_solution(model) +@assert is_solved_and_feasible(model) for var in all_variables(model) if var == objective continue diff --git a/docs/src/tutorials/getting_started/design_patterns_for_larger_models.jl b/docs/src/tutorials/getting_started/design_patterns_for_larger_models.jl index f5e8382631c..634365c2469 100644 --- a/docs/src/tutorials/getting_started/design_patterns_for_larger_models.jl +++ b/docs/src/tutorials/getting_started/design_patterns_for_larger_models.jl @@ -55,7 +55,7 @@ model = Model(HiGHS.Optimizer) @objective(model, Max, sum(profit[i] * x[i] for i in 1:N)) @constraint(model, sum(weight[i] * x[i] for i in 1:N) <= capacity) optimize!(model) -@assert has_optimal_solution(model) +@assert is_solved_and_feasible(model) value.(x) # The benefits of this approach are: @@ -88,7 +88,7 @@ function solve_knapsack_1(profit::Vector, weight::Vector, capacity::Real) @objective(model, Max, sum(profit[i] * x[i] for i in 1:N)) @constraint(model, sum(weight[i] * x[i] for i in 1:N) <= capacity) optimize!(model) - @assert has_optimal_solution(model) + @assert is_solved_and_feasible(model) return 
value.(x) end @@ -161,7 +161,7 @@ function solve_knapsack_2(data::KnapsackData) sum(v.weight * x[k] for (k, v) in data.objects) <= data.capacity, ) optimize!(model) - @assert has_optimal_solution(model) + @assert is_solved_and_feasible(model) return value.(x) end @@ -233,7 +233,7 @@ function solve_knapsack_3(data::KnapsackData; binary_knapsack::Bool) sum(v.weight * x[k] for (k, v) in data.objects) <= data.capacity, ) optimize!(model) - @assert has_optimal_solution(model) + @assert is_solved_and_feasible(model) return value.(x) end @@ -276,7 +276,7 @@ function solve_knapsack_4(data::KnapsackData, config::AbstractConfiguration) sum(v.weight * x[k] for (k, v) in data.objects) <= data.capacity, ) optimize!(model) - @assert has_optimal_solution(model) + @assert is_solved_and_feasible(model) return value.(x) end @@ -364,7 +364,7 @@ function solve_knapsack_5(data::KnapsackData, config::AbstractConfiguration) add_knapsack_constraints(model, data, config) add_knapsack_objective(model, data, config) optimize!(model) - @assert has_optimal_solution(model) + @assert is_solved_and_feasible(model) return value.(model[:x]) end @@ -390,7 +390,7 @@ function solve_knapsack_6( add_knapsack_constraints(model, data, config) add_knapsack_objective(model, data, config) optimize!(model) - if !has_optimal_solution(model) + if !is_solved_and_feasible(model) @warn("Model not solved to optimality") return nothing end @@ -525,7 +525,7 @@ function _solve_knapsack( _add_knapsack_constraints(model, data, config) _add_knapsack_objective(model, data, config) JuMP.optimize!(model) - if !JuMP.has_optimal_solution(model) + if !JuMP.is_solved_and_feasible(model) @warn("Model not solved to optimality") return nothing end diff --git a/docs/src/tutorials/getting_started/getting_started_with_JuMP.jl b/docs/src/tutorials/getting_started/getting_started_with_JuMP.jl index f3a21c7ca22..9814bdaa54d 100644 --- a/docs/src/tutorials/getting_started/getting_started_with_JuMP.jl +++ 
b/docs/src/tutorials/getting_started/getting_started_with_JuMP.jl @@ -179,9 +179,9 @@ optimize!(model) # end in `!`. A common example is `push!`. # Now let's see what information we can query about the solution, -# starting with [`has_optimal_solution`](@ref): +# starting with [`is_solved_and_feasible`](@ref): -has_optimal_solution(model) +is_solved_and_feasible(model) # We can get more information about the solution by querying the three types of # statuses. @@ -224,7 +224,7 @@ shadow_price(c2) # common workflow is: # ```julia # optimize!(model) -# if !has_optimal_solution(model) +# if !is_solved_and_feasible(model) # error("Solver did not find an optimal solution") # end # ``` @@ -286,7 +286,7 @@ function solve_infeasible() @constraint(model, x + y >= 3) @objective(model, Max, x + 2y) optimize!(model) - if !has_optimal_solution(model) + if !is_solved_and_feasible(model) @warn("The model was not solved correctly.") return end @@ -523,5 +523,5 @@ c = [1, 3, 5, 2] @constraint(vector_model, A * x .== b) @objective(vector_model, Min, c' * x) optimize!(vector_model) -@assert has_optimal_solution(vector_model) +@assert is_solved_and_feasible(vector_model) objective_value(vector_model) diff --git a/docs/src/tutorials/getting_started/getting_started_with_data_and_plotting.jl b/docs/src/tutorials/getting_started/getting_started_with_data_and_plotting.jl index 34e8cf9882d..e617457cc8a 100644 --- a/docs/src/tutorials/getting_started/getting_started_with_data_and_plotting.jl +++ b/docs/src/tutorials/getting_started/getting_started_with_data_and_plotting.jl @@ -366,7 +366,7 @@ solution_summary(model) # Just to be sure, check that the solver found an optimal solution: -@assert has_optimal_solution(model) +@assert is_solved_and_feasible(model) # ### Solution diff --git a/docs/src/tutorials/linear/callbacks.jl b/docs/src/tutorials/linear/callbacks.jl index 920c5906eb7..b647337a507 100644 --- a/docs/src/tutorials/linear/callbacks.jl +++ b/docs/src/tutorials/linear/callbacks.jl @@ 
-60,7 +60,7 @@ function example_lazy_constraint() end set_attribute(model, MOI.LazyConstraintCallback(), my_callback_function) optimize!(model) - Test.@test has_optimal_solution(model) + Test.@test is_solved_and_feasible(model) Test.@test lazy_called Test.@test value(x) == 1 Test.@test value(y) == 2 @@ -99,7 +99,7 @@ function example_user_cut_constraint() end set_attribute(model, MOI.UserCutCallback(), my_callback_function) optimize!(model) - Test.@test has_optimal_solution(model) + Test.@test is_solved_and_feasible(model) Test.@test callback_called @show callback_called return @@ -133,7 +133,7 @@ function example_heuristic_solution() end set_attribute(model, MOI.HeuristicCallback(), my_callback_function) optimize!(model) - Test.@test has_optimal_solution(model) + Test.@test is_solved_and_feasible(model) Test.@test callback_called return end @@ -171,7 +171,7 @@ function example_solver_dependent_callback() end set_attribute(model, GLPK.CallbackFunction(), my_callback_function) optimize!(model) - Test.@test has_optimal_solution(model) + Test.@test is_solved_and_feasible(model) Test.@test lazy_called Test.@test value(x) == 1 Test.@test value(y) == 2 diff --git a/docs/src/tutorials/linear/cannery.jl b/docs/src/tutorials/linear/cannery.jl index 9986c8e97a5..13fae00c5a1 100644 --- a/docs/src/tutorials/linear/cannery.jl +++ b/docs/src/tutorials/linear/cannery.jl @@ -121,7 +121,7 @@ solution_summary(model) # What's the optimal shipment? 
-Test.@test has_optimal_solution(model) +Test.@test is_solved_and_feasible(model) Test.@test isapprox(objective_value(model), 1_680.0, atol = 1e-6) #src for p in P, m in M println(p, " => ", m, ": ", value(x[p, m])) diff --git a/docs/src/tutorials/linear/constraint_programming.jl b/docs/src/tutorials/linear/constraint_programming.jl index d897b2f7303..a420186effa 100644 --- a/docs/src/tutorials/linear/constraint_programming.jl +++ b/docs/src/tutorials/linear/constraint_programming.jl @@ -29,7 +29,7 @@ set_silent(model) @variable(model, 1 <= x[1:4] <= 4, Int) @constraint(model, x in MOI.AllDifferent(4)) optimize!(model) -@assert has_optimal_solution(model) +@assert is_solved_and_feasible(model) value.(x) # ## BinPacking @@ -45,7 +45,7 @@ set_silent(model) @variable(model, 1 <= x[1:length(weights)] <= number_of_bins, Int) @constraint(model, x in MOI.BinPacking(capacity, weights)) optimize!(model) -@assert has_optimal_solution(model) +@assert is_solved_and_feasible(model) value.(x) # Here, the value of `x[i]` is the bin that item `i` was placed into. 
@@ -61,7 +61,7 @@ set_silent(model) @variable(model, x[1:4], Int) @constraint(model, x in MOI.Circuit(4)) optimize!(model) -@assert has_optimal_solution(model) +@assert is_solved_and_feasible(model) # Let's see what tour was found, starting at node number `1`: y = round.(Int, value.(x)) @@ -115,7 +115,7 @@ n = 1 # Let's check that we found a valid solution: optimize!(model) -@assert has_optimal_solution(model) +@assert is_solved_and_feasible(model) value.(x) # ## CountBelongs @@ -134,7 +134,7 @@ set_silent(model) set = Set([2, 3]) @constraint(model, [n; x] in MOI.CountBelongs(1 + length(x), set)) optimize!(model) -@assert has_optimal_solution(model) +@assert is_solved_and_feasible(model) value(n), value.(x) # ## CountDistinct @@ -149,7 +149,7 @@ set_silent(model) @objective(model, Max, sum(x)) @constraint(model, [n; x] in MOI.CountDistinct(1 + length(x))) optimize!(model) -@assert has_optimal_solution(model) +@assert is_solved_and_feasible(model) value(n), value.(x) # ## CountGreaterThan @@ -169,7 +169,7 @@ set_silent(model) @objective(model, Max, sum(x)) @constraint(model, [n; y; x] in MOI.CountGreaterThan(1 + 1 + length(x))) optimize!(model) -@assert has_optimal_solution(model) +@assert is_solved_and_feasible(model) value(n), value(y), value.(x) # Here `n` is strictly greater than the count, and there is no limit on how @@ -194,5 +194,5 @@ set_silent(model) @variable(model, x[i = 1:3], Int) @constraint(model, x in MOI.Table(table)) optimize!(model) -@assert has_optimal_solution(model) +@assert is_solved_and_feasible(model) value.(x) diff --git a/docs/src/tutorials/linear/diet.jl b/docs/src/tutorials/linear/diet.jl index a5c0061023a..413608f942e 100644 --- a/docs/src/tutorials/linear/diet.jl +++ b/docs/src/tutorials/linear/diet.jl @@ -145,7 +145,7 @@ print(model) # Let's optimize and take a look at the solution: optimize!(model) -@assert has_optimal_solution(model) +@assert is_solved_and_feasible(model) Test.@test objective_value(model) ≈ 11.8288 atol = 1e-4 #hide 
solution_summary(model) @@ -178,7 +178,7 @@ dairy_foods = ["milk", "ice cream"] is_dairy = map(name -> name in dairy_foods, foods.name) dairy_constraint = @constraint(model, sum(foods[is_dairy, :x]) <= 6) optimize!(model) -Test.@test !has_optimal_solution(model) +Test.@test !is_solved_and_feasible(model) Test.@test termination_status(model) == INFEASIBLE Test.@test primal_status(model) == NO_SOLUTION solution_summary(model) diff --git a/docs/src/tutorials/linear/facility_location.jl b/docs/src/tutorials/linear/facility_location.jl index 9badd958295..8b87955f072 100644 --- a/docs/src/tutorials/linear/facility_location.jl +++ b/docs/src/tutorials/linear/facility_location.jl @@ -130,7 +130,7 @@ set_silent(model) # Solve the uncapacitated facility location problem with HiGHS optimize!(model) -@assert has_optimal_solution(model) +@assert is_solved_and_feasible(model) println("Optimal value: ", objective_value(model)) # ### Visualizing the solution @@ -257,7 +257,7 @@ set_silent(model) # Solve the problem optimize!(model) -@assert has_optimal_solution(model) +@assert is_solved_and_feasible(model) println("Optimal value: ", objective_value(model)) # ### Visualizing the solution diff --git a/docs/src/tutorials/linear/factory_schedule.jl b/docs/src/tutorials/linear/factory_schedule.jl index 661247c843c..7e2e96f3def 100644 --- a/docs/src/tutorials/linear/factory_schedule.jl +++ b/docs/src/tutorials/linear/factory_schedule.jl @@ -186,7 +186,7 @@ function solve_factory_scheduling( ) ) optimize!(model) - @assert has_optimal_solution(model) + @assert is_solved_and_feasible(model) schedules = Dict{Symbol,Vector{Float64}}( Symbol(f) => value.(production[:, f]) for f in factories ) diff --git a/docs/src/tutorials/linear/finance.jl b/docs/src/tutorials/linear/finance.jl index 39db0640cfd..ae95dc92336 100644 --- a/docs/src/tutorials/linear/finance.jl +++ b/docs/src/tutorials/linear/finance.jl @@ -92,7 +92,7 @@ end) ) optimize!(financing) -@assert has_optimal_solution(financing) 
+@assert is_solved_and_feasible(financing) objective_value(financing) # ## Combinatorial auctions @@ -137,7 +137,7 @@ for i in 1:6 @constraint(auction, sum(y[j] for j in 1:6 if i in bid_items[j]) <= 1) end optimize!(auction) -@assert has_optimal_solution(auction) +@assert is_solved_and_feasible(auction) objective_value(auction) #- diff --git a/docs/src/tutorials/linear/geographic_clustering.jl b/docs/src/tutorials/linear/geographic_clustering.jl index 4598a04ebac..681f7cf9155 100644 --- a/docs/src/tutorials/linear/geographic_clustering.jl +++ b/docs/src/tutorials/linear/geographic_clustering.jl @@ -151,7 +151,7 @@ end # We can then call `optimize!` and review the results. optimize!(model) -@assert has_optimal_solution(model) +@assert is_solved_and_feasible(model) # ### Reviewing the Results diff --git a/docs/src/tutorials/linear/knapsack.jl b/docs/src/tutorials/linear/knapsack.jl index 82ac83c847e..24077e416e6 100644 --- a/docs/src/tutorials/linear/knapsack.jl +++ b/docs/src/tutorials/linear/knapsack.jl @@ -96,7 +96,7 @@ print(model) # We can now solve the optimization problem and inspect the results. 
optimize!(model) -@assert has_optimal_solution(model) +@assert is_solved_and_feasible(model) solution_summary(model) # The items chosen are @@ -125,7 +125,7 @@ function solve_knapsack_problem(; @objective(model, Max, profit' * x) @constraint(model, weight' * x <= capacity) optimize!(model) - @assert has_optimal_solution(model) + @assert is_solved_and_feasible(model) println("Objective is: ", objective_value(model)) println("Solution is:") for i in 1:n diff --git a/docs/src/tutorials/linear/lp_sensitivity.jl b/docs/src/tutorials/linear/lp_sensitivity.jl index ec8eee83000..0bc608cf19a 100644 --- a/docs/src/tutorials/linear/lp_sensitivity.jl +++ b/docs/src/tutorials/linear/lp_sensitivity.jl @@ -39,7 +39,7 @@ model = Model(HiGHS.Optimizer) @constraint(model, c2, 7x + 12y >= 120) @constraint(model, c3, x + y <= 20) optimize!(model) -@assert has_optimal_solution(model) +@assert is_solved_and_feasible(model) solution_summary(model; verbose = true) # Can you identify: diff --git a/docs/src/tutorials/linear/mip_duality.jl b/docs/src/tutorials/linear/mip_duality.jl index 92b2ca58843..b82dd1ab379 100644 --- a/docs/src/tutorials/linear/mip_duality.jl +++ b/docs/src/tutorials/linear/mip_duality.jl @@ -58,7 +58,7 @@ print(model) # If we optimize this model, we obtain a [`dual_status`](@ref) of [`NO_SOLUTION`](@ref): optimize!(model) -@assert has_optimal_solution(model) +@assert is_solved_and_feasible(model) dual_status(model) # This is because HiGHS cannot compute the duals of a mixed-integer program. 
We @@ -74,7 +74,7 @@ print(model) # dual: optimize!(model) -@assert has_optimal_solution(model) +@assert is_solved_and_feasible(model) dual_status(model) # and a marginal price of electricity of \$100/MWh: @@ -96,7 +96,7 @@ print(model) # the [`fix_discrete_variables`](@ref) function: optimize!(model) -@assert has_optimal_solution(model) +@assert is_solved_and_feasible(model) dual_status(model) #- @@ -116,7 +116,7 @@ print(model) #- optimize!(model) -@assert has_optimal_solution(model) +@assert is_solved_and_feasible(model) dual_status(model) #- diff --git a/docs/src/tutorials/linear/multi.jl b/docs/src/tutorials/linear/multi.jl index bf7fbe66e60..3916ac2cb34 100644 --- a/docs/src/tutorials/linear/multi.jl +++ b/docs/src/tutorials/linear/multi.jl @@ -177,7 +177,7 @@ end # Finally, we can optimize the model: optimize!(model) -Test.@test has_optimal_solution(model) +Test.@test is_solved_and_feasible(model) Test.@test objective_value(model) == 225_700.0 #src solution_summary(model) diff --git a/docs/src/tutorials/linear/multi_commodity_network.jl b/docs/src/tutorials/linear/multi_commodity_network.jl index e7778f7eda8..124d1e9af66 100644 --- a/docs/src/tutorials/linear/multi_commodity_network.jl +++ b/docs/src/tutorials/linear/multi_commodity_network.jl @@ -201,7 +201,7 @@ df = DataFrames.leftjoin( # Finally, we can optimize the model: optimize!(model) -Test.@test has_optimal_solution(model) +Test.@test is_solved_and_feasible(model) solution_summary(model) # update the solution in the DataFrames: diff --git a/docs/src/tutorials/linear/multi_objective_examples.jl b/docs/src/tutorials/linear/multi_objective_examples.jl index d3aa67cb926..cbca784b18e 100644 --- a/docs/src/tutorials/linear/multi_objective_examples.jl +++ b/docs/src/tutorials/linear/multi_objective_examples.jl @@ -37,7 +37,7 @@ solution_summary(model) #- for i in 1:result_count(model) - @assert has_optimal_solution(model; result = i) + @assert is_solved_and_feasible(model; result = i) print(i, ": z = ", 
round.(Int, objective_value(model; result = i)), " | ") println("x = ", value.([x1, x2]; result = i)) end @@ -66,7 +66,7 @@ solution_summary(model) #- for i in 1:result_count(model) - @assert has_optimal_solution(model; result = i) + @assert is_solved_and_feasible(model; result = i) print(i, ": z = ", round.(Int, objective_value(model; result = i)), " | ") println("x = ", round.(Int, value.(x; result = i))) end @@ -111,7 +111,7 @@ solution_summary(model) #- for i in 1:result_count(model) - @assert has_optimal_solution(model; result = i) + @assert is_solved_and_feasible(model; result = i) print(i, ": z = ", round.(Int, objective_value(model; result = i)), " | ") X = round.(Int, value.(x; result = i)) print("Path:") diff --git a/docs/src/tutorials/linear/multi_objective_knapsack.jl b/docs/src/tutorials/linear/multi_objective_knapsack.jl index bc9ecc0da77..a95b98497cf 100644 --- a/docs/src/tutorials/linear/multi_objective_knapsack.jl +++ b/docs/src/tutorials/linear/multi_objective_knapsack.jl @@ -142,7 +142,7 @@ solution_summary(model; result = 5) #- -@assert has_optimal_solution(model; result = 5) +@assert is_solved_and_feasible(model; result = 5) #- diff --git a/docs/src/tutorials/linear/n-queens.jl b/docs/src/tutorials/linear/n-queens.jl index aae78b045ba..c75ff6d824c 100644 --- a/docs/src/tutorials/linear/n-queens.jl +++ b/docs/src/tutorials/linear/n-queens.jl @@ -66,7 +66,7 @@ end # a feasible solution: optimize!(model) -@assert has_optimal_solution(model) +@assert is_solved_and_feasible(model) # We can now review the solution that our model found: diff --git a/docs/src/tutorials/linear/network_flows.jl b/docs/src/tutorials/linear/network_flows.jl index 45f11692e02..89c3e98391f 100644 --- a/docs/src/tutorials/linear/network_flows.jl +++ b/docs/src/tutorials/linear/network_flows.jl @@ -79,7 +79,7 @@ set_silent(shortest_path) @constraint(shortest_path, [i = 1:n], sum(x[i, :]) - sum(x[:, i]) == b[i],) @objective(shortest_path, Min, sum(G .* x)) 
optimize!(shortest_path) -@assert has_optimal_solution(shortest_path) +@assert is_solved_and_feasible(shortest_path) objective_value(shortest_path) #- value.(x) @@ -124,7 +124,7 @@ set_silent(assignment) @constraint(assignment, [j = 1:n], sum(y[j, :]) == 1) @objective(assignment, Max, sum(G .* y)) optimize!(assignment) -@assert has_optimal_solution(assignment) +@assert is_solved_and_feasible(assignment) objective_value(assignment) #- value.(y) @@ -165,7 +165,7 @@ max_flow = Model(HiGHS.Optimizer) @constraint(max_flow, [i = 1:n; i != 1 && i != 8], sum(f[i, :]) == sum(f[:, i])) @objective(max_flow, Max, sum(f[1, :])) optimize!(max_flow) -@assert has_optimal_solution(max_flow) +@assert is_solved_and_feasible(max_flow) objective_value(max_flow) #- value.(f) diff --git a/docs/src/tutorials/linear/piecewise_linear.jl b/docs/src/tutorials/linear/piecewise_linear.jl index c816f4d9417..6ad6fff89ee 100644 --- a/docs/src/tutorials/linear/piecewise_linear.jl +++ b/docs/src/tutorials/linear/piecewise_linear.jl @@ -52,7 +52,7 @@ function outer_approximate_x_squared(x̄) @objective(model, Min, y) @constraint(model, x == x̄) # <-- a trivial constraint just for testing. optimize!(model) - @assert has_optimal_solution(model) + @assert is_solved_and_feasible(model) return value(y) end @@ -103,7 +103,7 @@ function outer_approximate_log(x̄) @objective(model, Max, y) @constraint(model, x == x̄) # <-- a trivial constraint just for testing. optimize!(model) - @assert has_optimal_solution(model) + @assert is_solved_and_feasible(model) return value(y) end @@ -169,7 +169,7 @@ function inner_approximate_x_squared(x̄) @objective(model, Min, y) @constraint(model, x == x̄) # <-- a trivial constraint just for testing. optimize!(model) - @assert has_optimal_solution(model) + @assert is_solved_and_feasible(model) return value(y) end @@ -212,7 +212,7 @@ function inner_approximate_log(x̄) @objective(model, Max, y) @constraint(model, x == x̄) # <-- a trivial constraint just for testing. 
optimize!(model) - @assert has_optimal_solution(model) + @assert is_solved_and_feasible(model) return value(y) end @@ -266,7 +266,7 @@ function piecewise_linear_sin(x̄) end) @constraint(model, x == x̄) # <-- a trivial constraint just for testing. optimize!(model) - @assert has_optimal_solution(model) + @assert is_solved_and_feasible(model) return value(y) end diff --git a/docs/src/tutorials/linear/sudoku.jl b/docs/src/tutorials/linear/sudoku.jl index da27ae8d39f..b13b6d2389f 100644 --- a/docs/src/tutorials/linear/sudoku.jl +++ b/docs/src/tutorials/linear/sudoku.jl @@ -134,7 +134,7 @@ end # solve problem optimize!(sudoku) -@assert has_optimal_solution(sudoku) +@assert is_solved_and_feasible(sudoku) # Extract the values of x x_val = value.(x); @@ -203,7 +203,7 @@ for i in 1:9, j in 1:9 end optimize!(model) -@assert has_optimal_solution(model) +@assert is_solved_and_feasible(model) # Display the solution diff --git a/docs/src/tutorials/linear/transp.jl b/docs/src/tutorials/linear/transp.jl index 3cce287cc97..a62810ca3e8 100644 --- a/docs/src/tutorials/linear/transp.jl +++ b/docs/src/tutorials/linear/transp.jl @@ -120,7 +120,7 @@ function solve_transportation_problem(data::Containers.DenseAxisArray) @constraint(model, [o in O], sum(x[o, :]) <= data[o, "SUPPLY"]) @constraint(model, [d in D], sum(x[:, d]) == data["DEMAND", d]) optimize!(model) - @assert has_optimal_solution(model) + @assert is_solved_and_feasible(model) ## Pretty print the solution in the format of the input print(" ", join(lpad.(D, 7, ' '))) for o in O diff --git a/docs/src/tutorials/nonlinear/classifiers.jl b/docs/src/tutorials/nonlinear/classifiers.jl index 896a10c792e..351742b74c6 100644 --- a/docs/src/tutorials/nonlinear/classifiers.jl +++ b/docs/src/tutorials/nonlinear/classifiers.jl @@ -127,7 +127,7 @@ function solve_SVM_classifier(P::Matrix, labels::Vector; C::Float64 = C_0) D = LinearAlgebra.Diagonal(labels) @constraint(model, D * (P * w .- g) .+ y .>= 1) optimize!(model) - Test.@test 
has_optimal_solution(model) + Test.@test is_solved_and_feasible(model) slack = extrema(value.(y)) println("Minimum slack: ", slack[1], "\nMaximum slack: ", slack[2]) classifier(x) = line(x; w = value.(w), g = value(g)) @@ -233,7 +233,7 @@ function solve_dual_SVM_classifier(P::Matrix, labels::Vector; C::Float64 = C_0) @objective(model, Min, 1 / 2 * u' * D * P * P' * D * u - sum(u)) @constraint(model, con, sum(D * u) == 0) optimize!(model) - Test.@test has_optimal_solution(model) + Test.@test is_solved_and_feasible(model) w = P' * D * value.(u) g = dual(con) classifier(x) = line(x; w = w, g = g) @@ -320,7 +320,7 @@ function solve_kernel_SVM_classifier( con = @constraint(model, sum(D * u) == 0) @objective(model, Min, 1 / 2 * u' * D * K * D * u - sum(u)) optimize!(model) - Test.@test has_optimal_solution(model) + Test.@test is_solved_and_feasible(model) u_sol, g_sol = value.(u), dual(con) function classifier(v::Vector) return sum( diff --git a/docs/src/tutorials/nonlinear/complementarity.jl b/docs/src/tutorials/nonlinear/complementarity.jl index 8a0f65d1ac6..3dde3f22350 100644 --- a/docs/src/tutorials/nonlinear/complementarity.jl +++ b/docs/src/tutorials/nonlinear/complementarity.jl @@ -47,7 +47,7 @@ set_silent(model) @variable(model, 0 <= x[1:4] <= 10, start = 0) @constraint(model, M * x + q ⟂ x) optimize!(model) -@assert has_optimal_solution(model) +@assert is_solved_and_feasible(model) Test.@test value.(x) ≈ [2.8, 0.0, 0.8, 1.2] #src value.(x) @@ -68,7 +68,7 @@ set_silent(model) @constraint(model, w + 2x - 2y + 4z - 6 ⟂ z) @constraint(model, w - x + 2y - 2z - 2 ⟂ y) optimize!(model) -@assert has_optimal_solution(model) +@assert is_solved_and_feasible(model) Test.@test value.([w, x, y, z]) ≈ [2.8, 0.0, 0.8, 1.2] #src value.([w, x, y, z]) @@ -104,7 +104,7 @@ set_silent(model) end ) optimize!(model) -@assert has_optimal_solution(model) +@assert is_solved_and_feasible(model) Test.@test isapprox(value(p["new-york"]), 0.225; atol = 1e-3) #src value.(p) @@ -140,7 +140,7 @@ 
set_silent(model) end ) optimize!(model) -@assert has_optimal_solution(model) +@assert is_solved_and_feasible(model) Test.@test isapprox(value(C_G), 0.996; atol = 1e-3) #src value(K) @@ -193,7 +193,7 @@ set_silent(model) ## Production does not exceed capacity @constraint(model, [ω = 1:5], x - Y[ω] ⟂ μ[ω]) optimize!(model) -@assert has_optimal_solution(model) +@assert is_solved_and_feasible(model) solution_summary(model) # An equilibrium solution is to build 389 MW: diff --git a/docs/src/tutorials/nonlinear/nested_problems.jl b/docs/src/tutorials/nonlinear/nested_problems.jl index 370646120c2..27b069469e9 100644 --- a/docs/src/tutorials/nonlinear/nested_problems.jl +++ b/docs/src/tutorials/nonlinear/nested_problems.jl @@ -86,7 +86,7 @@ function solve_lower_level(x...) ) @constraint(model, (y[1] - 10)^2 + (y[2] - 10)^2 <= 25) optimize!(model) - @assert has_optimal_solution(model) + @assert is_solved_and_feasible(model) return objective_value(model), value.(y) end @@ -149,7 +149,7 @@ model = Model(Ipopt.Optimizer) @operator(model, op_V, 2, V, ∇V, ∇²V) @objective(model, Min, x[1]^2 + x[2]^2 + op_V(x[1], x[2])) optimize!(model) -@assert has_optimal_solution(model) +@assert is_solved_and_feasible(model) solution_summary(model) # The optimal objective value is: @@ -229,7 +229,7 @@ cache = Cache(Float64[], NaN, Float64[]) ) @objective(model, Min, x[1]^2 + x[2]^2 + op_cached_f(x[1], x[2])) optimize!(model) -@assert has_optimal_solution(model) +@assert is_solved_and_feasible(model) solution_summary(model) # an we can check we get the same objective value: diff --git a/docs/src/tutorials/nonlinear/portfolio.jl b/docs/src/tutorials/nonlinear/portfolio.jl index 47f02182784..420c13bacf0 100644 --- a/docs/src/tutorials/nonlinear/portfolio.jl +++ b/docs/src/tutorials/nonlinear/portfolio.jl @@ -158,7 +158,7 @@ set_silent(model) @constraint(model, sum(x) <= 1000) @constraint(model, r' * x >= 50) optimize!(model) -@assert has_optimal_solution(model) +@assert 
is_solved_and_feasible(model) solution_summary(model) # The optimal allocation of our assets is: diff --git a/docs/src/tutorials/nonlinear/querying_hessians.jl b/docs/src/tutorials/nonlinear/querying_hessians.jl index da0e18a574a..e740a33e829 100644 --- a/docs/src/tutorials/nonlinear/querying_hessians.jl +++ b/docs/src/tutorials/nonlinear/querying_hessians.jl @@ -71,7 +71,7 @@ set_silent(model) @constraint(model, g_2, (x[1] + x[2])^2 <= 2) @objective(model, Min, (1 - x[1])^2 + 100 * (x[2] - x[1]^2)^2) optimize!(model) -@assert has_optimal_solution(model) +@assert is_solved_and_feasible(model) # ## The analytic solution diff --git a/docs/src/tutorials/nonlinear/rocket_control.jl b/docs/src/tutorials/nonlinear/rocket_control.jl index 2cf3d1ac55e..478e8319c87 100644 --- a/docs/src/tutorials/nonlinear/rocket_control.jl +++ b/docs/src/tutorials/nonlinear/rocket_control.jl @@ -127,7 +127,7 @@ ddt(x::Vector, t::Int) = (x[t] - x[t-1]) / Δt # Now we optimize the model and check that we found a solution: optimize!(model) -@assert has_optimal_solution(model) +@assert is_solved_and_feasible(model) solution_summary(model) # Finally, we plot the solution: diff --git a/docs/src/tutorials/nonlinear/simple_examples.jl b/docs/src/tutorials/nonlinear/simple_examples.jl index 5d138dbcc8a..1eade4f5462 100644 --- a/docs/src/tutorials/nonlinear/simple_examples.jl +++ b/docs/src/tutorials/nonlinear/simple_examples.jl @@ -25,7 +25,7 @@ function example_rosenbrock() @variable(model, y) @objective(model, Min, (1 - x)^2 + 100 * (y - x^2)^2) optimize!(model) - Test.@test has_optimal_solution(model) + Test.@test is_solved_and_feasible(model) Test.@test objective_value(model) ≈ 0.0 atol = 1e-10 Test.@test value(x) ≈ 1.0 Test.@test value(y) ≈ 1.0 @@ -86,7 +86,7 @@ function example_clnlbeam() primal_status = $(primal_status(model)) objective_value = $(objective_value(model)) """) - Test.@test has_optimal_solution(model) + Test.@test is_solved_and_feasible(model) Test.@test objective_value(model) ≈ 
350.0 #src return end @@ -114,7 +114,7 @@ function example_mle() sum((data[i] - μ)^2 for i in 1:n) / (2 * σ^2) ) optimize!(model) - @assert has_optimal_solution(model) + @assert is_solved_and_feasible(model) println("μ = ", value(μ)) println("mean(data) = ", Statistics.mean(data)) println("σ^2 = ", value(σ)^2) @@ -125,7 +125,7 @@ function example_mle() ## You can even do constrained MLE! @constraint(model, μ == σ^2) optimize!(model) - @assert has_optimal_solution(model) + @assert is_solved_and_feasible(model) Test.@test value(μ) ≈ value(σ)^2 println() println("With constraint μ == σ^2:") @@ -153,7 +153,7 @@ function example_qcp() @constraint(model, x * x + y * y - z * z <= 0) @constraint(model, x * x - y * z <= 0) optimize!(model) - Test.@test has_optimal_solution(model) + Test.@test is_solved_and_feasible(model) print(model) println("Objective value: ", objective_value(model)) println("x = ", value(x)) diff --git a/docs/src/tutorials/nonlinear/space_shuttle_reentry_trajectory.jl b/docs/src/tutorials/nonlinear/space_shuttle_reentry_trajectory.jl index b5bb6324518..c6725f9ad32 100644 --- a/docs/src/tutorials/nonlinear/space_shuttle_reentry_trajectory.jl +++ b/docs/src/tutorials/nonlinear/space_shuttle_reentry_trajectory.jl @@ -303,7 +303,7 @@ end set_silent(model) # Hide solver's verbose output optimize!(model) # Solve for the control and state -@assert has_optimal_solution(model) +@assert is_solved_and_feasible(model) ## Show final cross-range of the solution println( diff --git a/docs/src/tutorials/nonlinear/tips_and_tricks.jl b/docs/src/tutorials/nonlinear/tips_and_tricks.jl index 1394d0640c7..a51f5e6108a 100644 --- a/docs/src/tutorials/nonlinear/tips_and_tricks.jl +++ b/docs/src/tutorials/nonlinear/tips_and_tricks.jl @@ -52,7 +52,7 @@ set_silent(model) @constraint(model, op_foo_2(x[1], x[2]) <= 2) function_calls = 0 optimize!(model) -@assert has_optimal_solution(model) +@assert is_solved_and_feasible(model) Test.@test objective_value(model) ≈ √3 atol = 1e-4 
Test.@test value.(x) ≈ [1.0, 1.0] atol = 1e-4 println("Naive approach: function calls = $(function_calls)") @@ -121,7 +121,7 @@ set_silent(model) @constraint(model, op_foo_2(x[1], x[2]) <= 2) function_calls = 0 optimize!(model) -@assert has_optimal_solution(model) +@assert is_solved_and_feasible(model) Test.@test objective_value(model) ≈ √3 atol = 1e-4 Test.@test value.(x) ≈ [1.0, 1.0] atol = 1e-4 println("Memoized approach: function_calls = $(function_calls)") diff --git a/docs/src/tutorials/nonlinear/user_defined_hessians.jl b/docs/src/tutorials/nonlinear/user_defined_hessians.jl index 568ba6b5803..f2e272229d8 100644 --- a/docs/src/tutorials/nonlinear/user_defined_hessians.jl +++ b/docs/src/tutorials/nonlinear/user_defined_hessians.jl @@ -72,5 +72,5 @@ model = Model(Ipopt.Optimizer) @operator(model, op_rosenbrock, 2, rosenbrock, ∇rosenbrock, ∇²rosenbrock) @objective(model, Min, op_rosenbrock(x[1], x[2])) optimize!(model) -@assert has_optimal_solution(model) +@assert is_solved_and_feasible(model) solution_summary(model; verbose = true) diff --git a/src/optimizer_interface.jl b/src/optimizer_interface.jl index 02d81aafd04..aa42f18186b 100644 --- a/src/optimizer_interface.jl +++ b/src/optimizer_interface.jl @@ -583,20 +583,16 @@ function dual_status(model::GenericModel; result::Int = 1) end """ - has_optimal_solution( + is_solved_and_feasible( model::GenericModel; - dual::Bool = false, allow_local::Bool = true, allow_almost::Bool = false, + dual::Bool = false, result::Int = 1, ) -Return `true` if the model has an optimal primal solution associated with result -index `result`. - -If `dual`, additionally check that an optimal dual solution is available. 
- -If `allow_local`, the [`termination_status`](@ref) may be [`OPTIMAL`](@ref) (the +Return `true` if the model has a feasible primal solution associated with result +index `result` and the [`termination_status`](@ref) is [`OPTIMAL`](@ref) (the solver found a global optimum) or [`LOCALLY_SOLVED`](@ref) (the solver found a local optimum, which may also be the global optimum, but the solver could not prove so). @@ -609,11 +605,13 @@ be [`ALMOST_OPTIMAL`](@ref) or [`ALMOST_LOCALLY_SOLVED`](@ref) (if `allow_local` and the [`primal_status`](@ref) and [`dual_status`](@ref) may additionally be [`NEARLY_FEASIBLE_POINT`](@ref). +If `dual`, additionally check that an optimal dual solution is available. + If this function returns `false`, use [`termination_status`](@ref), [`result_count`](@ref), [`primal_status`](@ref) and [`dual_status`](@ref) to understand what solutions are available (if any). """ -function has_optimal_solution( +function is_solved_and_feasible( model::GenericModel; dual::Bool = false, allow_local::Bool = true, diff --git a/test/test_model.jl b/test/test_model.jl index 8f9e2954770..c9134eab2ad 100644 --- a/test/test_model.jl +++ b/test/test_model.jl @@ -1244,7 +1244,7 @@ function test_caching_mps_model() return end -function test_has_optimal_solution() +function test_is_solved_and_feasible() mock = MOI.Utilities.MockOptimizer(MOI.Utilities.Model{Float64}()) model = direct_model(mock) for term in [ @@ -1270,29 +1270,29 @@ function test_has_optimal_solution() MOI.set(mock, MOI.TerminationStatus(), term) MOI.set(mock, MOI.PrimalStatus(), primal) MOI.set(mock, MOI.DualStatus(), dual) - @test has_optimal_solution(model) == (has_local && _primal) - @test has_optimal_solution(model; dual = true) == + @test is_solved_and_feasible(model) == (has_local && _primal) + @test is_solved_and_feasible(model; dual = true) == (has_local && _primal && _dual) - @test has_optimal_solution(model; allow_local = false) == + @test is_solved_and_feasible(model; allow_local = false) 
== (_global && _primal) - @test has_optimal_solution( + @test is_solved_and_feasible( model; dual = true, allow_local = false, ) == (_global && _primal && _dual) - @test has_optimal_solution(model; allow_almost = true) == + @test is_solved_and_feasible(model; allow_almost = true) == (_almost_local && _almost_primal) - @test has_optimal_solution( + @test is_solved_and_feasible( model; dual = true, allow_almost = true, ) == (_almost_local && _almost_primal && _almost_dual) - @test has_optimal_solution( + @test is_solved_and_feasible( model; allow_local = false, allow_almost = true, ) == (_almost_global && _almost_primal) - @test has_optimal_solution( + @test is_solved_and_feasible( model; dual = true, allow_local = false, @@ -1301,11 +1301,11 @@ function test_has_optimal_solution() MOI.set(mock, MOI.ResultCount(), 3) MOI.set(mock, MOI.PrimalStatus(3), primal) MOI.set(mock, MOI.DualStatus(3), dual) - @test !has_optimal_solution(model; result = 2) - @test !has_optimal_solution(model; dual = true, result = 2) - @test has_optimal_solution(model; result = 3) == + @test !is_solved_and_feasible(model; result = 2) + @test !is_solved_and_feasible(model; dual = true, result = 2) + @test is_solved_and_feasible(model; result = 3) == (has_local && _primal) - @test has_optimal_solution(model; dual = true, result = 3) == + @test is_solved_and_feasible(model; dual = true, result = 3) == (has_local && _primal && _dual) end end From 773303a74f18ea10fdae4ecff209c34dcc00dd9b Mon Sep 17 00:00:00 2001 From: odow Date: Wed, 14 Feb 2024 09:29:20 +1300 Subject: [PATCH 17/19] Update docs --- docs/src/manual/solutions.md | 72 +++++++++++++++++++++++++++++------- 1 file changed, 59 insertions(+), 13 deletions(-) diff --git a/docs/src/manual/solutions.md b/docs/src/manual/solutions.md index 8bf3db83b00..1fdf0ce9686 100644 --- a/docs/src/manual/solutions.md +++ b/docs/src/manual/solutions.md @@ -298,13 +298,24 @@ And data, a 2-element Vector{Float64}: You should always check whether the solver 
found a solution before calling solution functions like [`value`](@ref) or [`objective_value`](@ref). -A simple approach is to use [`is_solved_and_feasible`](@ref): +A simple approach for small scripts and notebooks is to use +[`is_solved_and_feasible`](@ref): ```jldoctest solutions julia> function solve_and_print_solution(model) optimize!(model) if !is_solved_and_feasible(model; dual = true) - error("The model was not solved correctly.") + term = termination_status(model) + error( + """ + The model was not solved correctly: + + termination_status : $(termination_status(model)) + primal_status : $(primal_status(model)) + dual_status : $(dual_status(model)) + raw_status : $(raw_status(model)) + """, + ) end println("Solution is optimal") println(" objective value = ", objective_value(model)) @@ -321,25 +332,60 @@ Solution is optimal dual solution: c1 = 1.7142857142857142 ``` -You can also use a more advanced workflow that deals with a broader range of -statuses: +For code like libraries that should be more robust to the range of possible +termination and result statuses, do some variation of the following: ```jldoctest solutions julia> function solve_and_print_solution(model) - if termination_status(model) in (OPTIMAL, LOCALLY_SOLVED) + status = termination_status(model) + if status in (OPTIMAL, LOCALLY_SOLVED) println("Solution is optimal") - elseif termination_status(model) == TIME_LIMIT && has_values(model) - println("Solution is suboptimal due to a time limit, but a primal solution is available") + elseif status in (ALMOST_OPTIMAL, ALMOST_LOCALLY_SOLVED) + println("Solution is optimal to a relaxed tolerance") + elseif status == TIME_LIMIT + println( + "Solver stopped due to a time limit. If a solution is available, " * + "it may be suboptimal." + ) + elseif status in ( + ITERATION_LIMIT, NODE_LIMIT, SOLUTION_LIMIT, MEMORY_LIMIT, + OBJECTIVE_LIMIT, NORM_LIMIT, OTHER_LIMIT, + ) + println( + "Solver stopped due to a limit. 
If a solution is available, it " * + "may be suboptimal." + ) + elseif status in (INFEASIBLE, LOCALLY_INFEASIBLE) + println("The problem is primal infeasible") + elseif status == DUAL_INFEASIBLE + println( + "The problem is dual infeasible. If a primal feasible solution " * + "exists, the problem is unbounded. To check, set the objective " * + "to `@objective(model, Min, 0)` and re-solve. If the problem is " * + "feasible, the primal is unbounded. If the problem is " * + "infeasible, both the primal and dual are infeasible.", + ) + elseif status == INFEASIBLE_OR_UNBOUNDED + println( + "The model is either infeasible or unbounded. Set the objective " * + "to `@objective(model, Min, 0)` and re-solve to disambiguate. If " * + "the problem was infeasible, it will still be infeasible. If the " * + "problem was unbounded, it will now have a finite optimal solution.", + ) else - error("The model was not solved correctly.") + println( + "The model was not solved correctly. The termination status is $status", + ) end - println(" objective value = ", objective_value(model)) - if primal_status(model) == FEASIBLE_POINT + if primal_status(model) in (FEASIBLE_POINT, NEARLY_FEASIBLE_POINT) + println(" objective value = ", objective_value(model)) println(" primal solution: x = ", value(x)) + elseif primal_status(model) == INFEASIBILITY_CERTIFICATE + println(" primal certificate: x = ", value(x)) end - if dual_status(model) == FEASIBLE_POINT + if dual_status(model) in (FEASIBLE_POINT, NEARLY_FEASIBLE_POINT) println(" dual solution: c1 = ", dual(c1)) - else - println(" dual solution: NO SOLUTION") + elseif dual_status(model) == INFEASIBILITY_CERTIFICATE + println(" dual certificate: c1 = ", dual(c1)) end return end From 632d4a3966ea953b997f1ce643d44e497a729583 Mon Sep 17 00:00:00 2001 From: Oscar Dowson Date: Wed, 14 Feb 2024 12:05:58 +1300 Subject: [PATCH 18/19] Update docs/src/manual/solutions.md --- docs/src/manual/solutions.md | 1 - 1 file changed, 1 deletion(-) diff --git 
a/docs/src/manual/solutions.md b/docs/src/manual/solutions.md index 1fdf0ce9686..701d0688d48 100644 --- a/docs/src/manual/solutions.md +++ b/docs/src/manual/solutions.md @@ -309,7 +309,6 @@ julia> function solve_and_print_solution(model) error( """ The model was not solved correctly: - termination_status : $(termination_status(model)) primal_status : $(primal_status(model)) dual_status : $(dual_status(model)) From a9a9257b73e7fcbbe7c39a00cbb69afcdd38162e Mon Sep 17 00:00:00 2001 From: Oscar Dowson Date: Wed, 14 Feb 2024 13:28:14 +1300 Subject: [PATCH 19/19] Update docs/src/manual/solutions.md --- docs/src/manual/solutions.md | 1 - 1 file changed, 1 deletion(-) diff --git a/docs/src/manual/solutions.md b/docs/src/manual/solutions.md index 701d0688d48..99d83b4d164 100644 --- a/docs/src/manual/solutions.md +++ b/docs/src/manual/solutions.md @@ -305,7 +305,6 @@ A simple approach for small scripts and notebooks is to use julia> function solve_and_print_solution(model) optimize!(model) if !is_solved_and_feasible(model; dual = true) - term = termination_status(model) error( """ The model was not solved correctly: