Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Add set_optimize_hook #330

Merged
merged 3 commits into from
Nov 6, 2023
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions docs/src/manual/model.md
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,7 @@ InfiniteModel()
JuMP.object_dictionary(::InfiniteModel)
has_internal_supports
Base.empty!(::InfiniteModel)
JuMP.set_optimize_hook(::InfiniteModel, ::Union{Function, Nothing})
```

## Abstract Dependencies
Expand Down
11 changes: 8 additions & 3 deletions src/TranscriptionOpt/optimize.jl
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@
Return the transcription model stored in `model` if that is what is stored in
`model.optimizer_model`.
"""
function transcription_model(model::InfiniteOpt.InfiniteModel)::JuMP.Model
function transcription_model(model::InfiniteOpt.InfiniteModel)
trans_model = InfiniteOpt.optimizer_model(model)
if !is_transcription_model(trans_model)
error("The model does not contain a transcription model.")
Expand All @@ -26,8 +26,13 @@
function InfiniteOpt.build_optimizer_model!(
model::InfiniteOpt.InfiniteModel,
key::Val{:TransData};
check_support_dims::Bool = true
)::Nothing
check_support_dims::Bool = true,
extra_kwargs...
)
# throw error for extra keywords
for (kw, _) in extra_kwargs
error("Unrecognized keyword argument `$kw` for building transcription models.")
end

Check warning on line 35 in src/TranscriptionOpt/optimize.jl

View check run for this annotation

Codecov / codecov/patch

src/TranscriptionOpt/optimize.jl#L35

Added line #L35 was not covered by tests
# clear the optimizer model contents
trans_model = InfiniteOpt.clear_optimizer_model_build!(model)
# build the transcription model based on model
Expand Down
14 changes: 9 additions & 5 deletions src/datatypes.jl
Original file line number Diff line number Diff line change
Expand Up @@ -1316,6 +1316,7 @@ mutable struct InfiniteModel <: JuMP.AbstractModel

# Extensions
ext::Dict{Symbol, Any}
optimize_hook::Any
end

"""
Expand Down Expand Up @@ -1402,7 +1403,8 @@ function InfiniteModel(;
# Optimize data
nothing, OptimizerModel(; kwargs...), false,
# Extensions
Dict{Symbol, Any}()
Dict{Symbol, Any}(),
nothing
)
end

Expand All @@ -1411,13 +1413,13 @@ end
function _set_optimizer_constructor(
model::InfiniteModel,
constructor::MOI.OptimizerWithAttributes
)::Nothing
)
model.optimizer_constructor = constructor.optimizer_constructor
return
end

# No attributes
function _set_optimizer_constructor(model::InfiniteModel, constructor)::Nothing
function _set_optimizer_constructor(model::InfiniteModel, constructor)
model.optimizer_constructor = constructor
return
end
Expand All @@ -1427,7 +1429,7 @@ function InfiniteModel(
optimizer_constructor;
OptimizerModel::Function = TranscriptionModel,
kwargs...
)::InfiniteModel
)
model = InfiniteModel()
model.optimizer_model = OptimizerModel(optimizer_constructor; kwargs...)
_set_optimizer_constructor(model, optimizer_constructor)
Expand All @@ -1436,6 +1438,7 @@ end

# Define basic InfiniteModel extension functions
Base.broadcastable(model::InfiniteModel) = Ref(model)
JuMP.variable_ref_type(::Type{InfiniteModel}) = GeneralVariableRef

"""
JuMP.object_dictionary(model::InfiniteModel)::Dict{Symbol, Any}
Expand All @@ -1446,7 +1449,7 @@ registered to a specific symbol in the macros. For example,
`@variable(model, x[1:2, 1:2])` registers the array of variables
`x` to the symbol `:x`.
"""
JuMP.object_dictionary(model::InfiniteModel)::Dict{Symbol, Any} = model.obj_dict
JuMP.object_dictionary(model::InfiniteModel) = model.obj_dict

"""
Base.empty!(model::InfiniteModel)::InfiniteModel
Expand Down Expand Up @@ -1492,6 +1495,7 @@ function Base.empty!(model::InfiniteModel)
empty!(model.optimizer_model)
model.ready_to_optimize = false
empty!(model.ext)
model.optimize_hook = nothing
return model
end

Expand Down
41 changes: 38 additions & 3 deletions src/optimize.jl
Original file line number Diff line number Diff line change
Expand Up @@ -903,13 +903,42 @@ end
################################################################################
# OPTIMIZATION METHODS
################################################################################
"""
JuMP.set_optimize_hook(
model::InfiniteModel,
hook::Union{Function, Nothing}
)::Nothing

Set the function `hook` as the optimize hook for `model` where `hook` should
have be of the form `hook(model::GenericModel; hook_specfic_kwargs..., kwargs...)`.
The `kwargs` are those passed to [`optimize!`](@ref). The `hook_specifc_kwargs`
are passed as additional keywords by the user when they call [`optimize!`](@ref).

## Notes

* The optimize hook should generally modify the model, or some external state
in some way, and then call `optimize!(model; ignore_optimize_hook = true)` to
optimize the problem, bypassing the hook.
* Use `set_optimize_hook(model, nothing)` to unset an optimize hook.
"""
function JuMP.set_optimize_hook(
model::InfiniteModel,
hook::Union{Function, Nothing}
)
model.optimize_hook = hook
set_optimizer_model_ready(model, false)
return
end

"""
JuMP.optimize!(model::InfiniteModel; [kwargs...])

Extend `JuMP.optimize!` to optimize infinite models using the internal
optimizer model. Will call [`build_optimizer_model!`](@ref) if the optimizer
optimizer model. Calls [`build_optimizer_model!`](@ref) if the optimizer
model isn't up to date. The `kwargs` correspond to keyword arguments passed to
[`build_optimizer_model!`](@ref) if any are defined.
[`build_optimizer_model!`](@ref) if any are defined. The `kwargs` can also
include arguments that are passed to an optimize hook if one was set with
[`JuMP.set_optimize_hook`](@ref).

**Example**
```julia-repl
Expand All @@ -919,7 +948,13 @@ julia> has_values(model)
true
```
"""
function JuMP.optimize!(model::InfiniteModel; kwargs...)
function JuMP.optimize!(
model::InfiniteModel;
ignore_optimize_hook = isnothing(model.optimize_hook),
kwargs...)
if !ignore_optimize_hook
return model.optimize_hook(model; kwargs...)
end
if !optimizer_model_ready(model)
build_optimizer_model!(model; kwargs...)
end
Expand Down
2 changes: 2 additions & 0 deletions test/datatypes.jl
Original file line number Diff line number Diff line change
Expand Up @@ -198,6 +198,8 @@ end
@test InfiniteOpt._param_object_indices(m) isa Vector{Union{IndependentParameterIndex, DependentParametersIndex}}
# test other methods
@test empty!(InfiniteModel(mockoptimizer)).optimizer_constructor == mockoptimizer
@test variable_ref_type(InfiniteModel) == GeneralVariableRef
@test variable_ref_type(InfiniteModel()) == GeneralVariableRef
end

# Test reference variable datatypes
Expand Down
21 changes: 21 additions & 0 deletions test/optimizer.jl
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,8 @@
@constraint(m, c4, meas2 - 2y0 + x <= 1, DomainRestrictions(par => [0.5, 1]))
@constraint(m, c5, meas2 == 0)
@objective(m, Min, x0 + meas1)
# test extra keywords
@test_throws ErrorException build_optimizer_model!(m, bad = 42)
# test normal usage
@test isa(build_optimizer_model!(m), Nothing)
@test optimizer_model_ready(m)
Expand Down Expand Up @@ -181,6 +183,25 @@ end
@test isa(optimize!(m, check_support_dims = false), Nothing)
@test optimizer_model_ready(m)
@test num_variables(optimizer_model(m)) == 8
# test optimize hook
# Optimize hook used to exercise hook pass-through: optionally set an upper
# bound on the variable named `n`, then re-run `optimize!` with the hook
# bypassed so the underlying solve still happens.
function myhook(model; n = "", ub = 2, kwargs...)
    isempty(n) || set_upper_bound(variable_by_name(model, n), ub)
    optimize!(model; ignore_optimize_hook = true, kwargs...)
    return
end
@test set_optimize_hook(m, myhook) isa Nothing
@test optimize!(m, n = "x", check_support_dims = false) isa Nothing
@test optimizer_model_ready(m)
@test num_variables(optimizer_model(m)) == 8
@test upper_bound(x) == 2
@test set_optimize_hook(m, nothing) isa Nothing
@test isnothing(m.optimize_hook)
@test_throws ErrorException optimize!(m, n = "x")
@test optimize!(m) isa Nothing
@test optimizer_model_ready(m)
end

# Test JuMP.result_count
Expand Down
Loading