Update for JuMP v0.18 on julia 1.0 (#82)
* update for julia 1.0

* update travis script

* add LinearAlgebra to REQUIRE

* some updates

* more changes

* add constructors

* define constructor for numbers too

* don't broadcast over norm expressions

* revert to map

* switch back to working version of GLPK

* comment out flakey test for now

it is correct, but the GLPK solver has been problematic on it. See
#74 (comment) and 8327d3648b0aa59f5355f9bd56b65e8dda6fa642
for workarounds in the past.

* test on different versions of julia
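
Most of the diff below is the mechanical Julia 0.6-to-1.0 syntax migration described by the bullets above. For reference, a minimal self-contained sketch of the patterns involved (toy names such as `Thing`; not code from this repository):

    using LinearAlgebra                     # norm/dot now come from a stdlib, not Base

    abstract type AbstractThing end         # was: abstract AbstractThing

    mutable struct Thing <: AbstractThing   # was: type Thing ... end
        values::Vector{Float64}
    end

    ThingVec = Vector{Thing}                # was: typealias ThingVec Vector{Thing}

    scenarios = Union{Thing, Missing}[]     # was: Nullable{Thing}[]
    push!(scenarios, missing)               # `missing` replaces an empty Nullable

    lookup = IdDict{Any, Any}()             # was: ObjectIdDict()
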
yeesian authored Feb 23, 2019
1 parent 090ce1d commit a118b1b
Showing 29 changed files with 467 additions and 461 deletions.
17 changes: 10 additions & 7 deletions .travis.yml
@@ -1,8 +1,11 @@
language: julia
os:
- linux
- osx
julia:
- 0.5
- 0.7
- 1.0
- 1.1
notifications:
email: false
sudo: false
@@ -12,10 +15,10 @@ addons:
- liblapack-dev
- libgmp-dev
- libglpk-dev
script:
- if [[ -a .git/shallow ]]; then git fetch --unshallow; fi
- julia -e 'Pkg.clone(pwd())'
- julia -e 'Pkg.update()'
- julia -e 'Pkg.test("JuMPeR", coverage=true)'
# script:
# - if [[ -a .git/shallow ]]; then git fetch --unshallow; fi
# - julia -e 'Pkg.clone(pwd())'
# - julia -e 'Pkg.update()'
# - julia -e 'Pkg.test("JuMPeR", coverage=true)'
after_success:
- julia -e 'cd(Pkg.dir("JuMPeR")); Pkg.add("Coverage"); using Coverage; Codecov.submit(process_folder())'
- julia -e 'using Pkg; cd(Pkg.dir("JuMPeR")); Pkg.add("Coverage"); using Coverage; Codecov.submit(process_folder())'
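
The `using Pkg;` prefix added to after_success is needed because, from Julia 0.7 on, the package-manager API lives in the Pkg standard library rather than in Base; the commented-out script block relied on the old built-in API (`Pkg.clone` in particular is presumably no longer available there). A rough sketch of the equivalent calls under the new API (my annotations, not lines from this commit):

    using Pkg                          # Pkg is a standard library on Julia ≥ 0.7
    Pkg.add("Coverage")                # as in the updated after_success step
    Pkg.test("JuMPeR"; coverage=true)  # same test call as before, but now requires `using Pkg` first
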
6 changes: 3 additions & 3 deletions REQUIRE
@@ -1,3 +1,3 @@
julia 0.5
JuMP 0.16.2 0.17
MathProgBase 0.6.4 0.7
julia 0.7
JuMP 0.18.5 0.19
MathProgBase
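
For readers unfamiliar with the pre-Project.toml REQUIRE format: versions listed after a package name form half-open intervals, so the new bounds pin 0.18.5 <= JuMP < 0.19 (the last JuMP series before the MathOptInterface rewrite), require Julia at least 0.7, and leave MathProgBase unbounded. An annotated reading (comment lines are mine, for illustration only):

    # 0.18.5 <= JuMP < 0.19: a version pair forms the half-open interval [lower, upper)
    JuMP 0.18.5 0.19
    # a bare entry places no version bound
    MathProgBase
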
8 changes: 4 additions & 4 deletions example/cap_budget.jl
@@ -20,15 +20,15 @@
# Requires a mixed-integer linear optimization problem solver.
#-----------------------------------------------------------------------

using JuMP, JuMPeR
using JuMP, JuMPeR, LinearAlgebra

"""
TreeScenario
Stores the values of "active" uncertain parameters, as well as the
associated tree structure described in the paper.
"""
type TreeScenario
mutable struct TreeScenario
ξ::Vector{Float64}
parent
children::Vector
@@ -136,12 +136,12 @@ function solve_partitioned_problem(N::Int, θ::Float64, B::Float64,
# Extend the scenario tree
for p in 1:P
# Extract the active uncertain parameter values
profit_scen = get(getscenario(profit_con_refs[p]))
profit_scen = getscenario(profit_con_refs[p])
profit_scen_ξ = [uncvalue(profit_scen, ξ[i]) for i in 1:4]
# Create a new child in the tree under this leaf
profit_child = TreeScenario(profit_scen_ξ, leaf_scenarios[p], [])
# Same for budget
budget_scen = get(getscenario(budget_con_refs[p]))
budget_scen = getscenario(budget_con_refs[p])
budget_scen_ξ = [uncvalue(budget_scen, ξ[i]) for i in 1:4]
budget_child = TreeScenario(budget_scen_ξ, leaf_scenarios[p], [])
# Add to the tree
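
The dropped `get(...)` wrappers above follow from the storage change in src/JuMPeR.jl further down: scenarios are now held as `Union{Scenario, Missing}` rather than `Nullable{Scenario}`, so the value returned by `getscenario` is used directly instead of being unwrapped. A self-contained sketch of that access-pattern change (toy `Record` type, not JuMPeR code):

    struct Record
        values::Vector{Float64}
    end

    # Julia 1.0 style: `missing` marks an absent entry instead of an empty Nullable.
    records = Union{Record, Missing}[Record([1.0, 2.0]), missing]

    for r in records
        if r === missing          # was: isnull(r) on a Nullable{Record}
            println("no record")
        else                      # was: r = get(r) to unwrap the Nullable
            println(r.values)
        end
    end
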
1 change: 1 addition & 0 deletions example/portfolio.jl
@@ -15,6 +15,7 @@

using JuMP, JuMPeR # Modeling
using Distributions # For generating data
using LinearAlgebra

# Number of stocks
const NUM_ASSET = 10
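
The added import reflects that `norm` and `dot` moved from Base into the LinearAlgebra standard library in Julia 0.7; the same import is added to src/JuMPeR.jl below. A quick self-contained check, for reference:

    using LinearAlgebra
    norm([3.0, 4.0]) == 5.0 && dot([3.0, 4.0], [3.0, 4.0]) == 25.0   # true on Julia ≥ 0.7
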
35 changes: 18 additions & 17 deletions src/JuMPeR.jl
@@ -16,10 +16,10 @@

module JuMPeR

importall Base.Operators
import MathProgBase
importall JuMP # So we can build on it, but prefer explict qualification
import JuMP: JuMPContainer, GenericNorm, GenericNormExpr
using JuMP # So we can build on it, but prefer explicit qualification
import JuMP: JuMPContainer, GenericAffExpr, GenericNorm, GenericNormExpr, getname
import LinearAlgebra: dot, norm

# JuMPeRs exported interface
export RobustModel, @uncertain, @adaptive,
@@ -34,7 +34,7 @@ export RobustModel, @uncertain, @adaptive,
All uncertainty sets implement the interface defined by AbstractUncertaintySet.
Parent type is JuMP.AbstractModel, to enable JuMP's `@constraint`, etc.
"""
abstract AbstractUncertaintySet <: JuMP.AbstractModel
abstract type AbstractUncertaintySet <: JuMP.AbstractModel end


"""
@@ -73,7 +73,7 @@ Fields:
Misc:
solved Flags if solved already (to prevent resolves)
"""
type RobustModelExt{S,T,U}
mutable struct RobustModelExt{S,T,U}
# Uncertain parameters
num_uncs::Int
unc_names::Vector{String}
@@ -97,22 +97,22 @@ type RobustModelExt{S,T,U}
default_uncset::AbstractUncertaintySet
constraint_uncsets::Vector{Any}
# Scenarios
scenarios::Vector{Nullable{U}}
scenarios::Vector{Union{U, Missing}}
# Misc
solved::Bool
# Pretty printing magic
dictList::Vector
uncDict::Dict{Symbol,Any}
uncData::ObjectIdDict
uncData::IdDict{Any, Any}
end
RobustModelExt(cutsolver) =
RobustModelExt{UncConstraint, AdaptConstraint, Scenario}(
# Uncertain parameters
0, String[], # num_uncs, unc_names
0, String[], # num_uncs, unc_names
Float64[], Float64[], # unc_lower, unc_upper
Symbol[], # unc_cat
# Adaptive variables
0, String[], # num_adps, adp_names
0, String[], # num_adps, adp_names
Float64[], Float64[], # adp_lower, adp_upper
Symbol[], Symbol[], Any[], # adp_cat, adp_policy,adp_arguments
# Constraints
@@ -124,13 +124,13 @@ RobustModelExt(cutsolver) =
BasicUncertaintySet(), # default_uncset
Any[], # constraint_uncsets
# Scenarios
Nullable{Scenario}[], # scenarios
Union{Scenario, Missing}[], # scenarios
# Misc
false, # solved
# Pretty printing magic
Any[], # dictList
Dict{Symbol,Any}(), # uncDict
ObjectIdDict()) # uncData
IdDict{Any, Any}()) # uncData


"""
@@ -181,7 +181,7 @@ end
JuMP.registercon(m::AbstractUncertaintySet, conname, value) = value


# Uncertain, UncExpr, UncSetConstraint, UncSetNorm, UncSetNormConstraint
# Uncertain, UncExpr, UncSetConstraint, UncSetNormConstraint
include("uncertain.jl")


@@ -194,8 +194,7 @@ include("adaptive.jl")
`∑ⱼ (∑ᵢ aᵢⱼ uᵢ) xⱼ` -- affine expression of unc. parameters and variables.
"""
typealias UncVarExpr JuMP.GenericAffExpr{UncExpr,JuMPeRVar}
UncVarExpr() = zero(UncVarExpr)
UncVarExpr = JuMP.GenericAffExpr{UncExpr,JuMPeRVar}
Base.convert(::Type{UncVarExpr}, c::Number) =
UncVarExpr(JuMPeRVar[], UncExpr[], UncExpr(c))
Base.convert(::Type{UncVarExpr}, x::JuMPeRVar) =
@@ -204,6 +203,8 @@ Base.convert(::Type{UncVarExpr}, aff::AffExpr) =
UncVarExpr(copy(aff.vars), map(UncExpr,aff.coeffs), UncExpr(aff.constant))
Base.convert(::Type{UncVarExpr}, uaff::UncExpr) =
UncVarExpr(JuMPeRVar[], UncExpr[], uaff)
JuMP.GenericAffExpr{U,V}() where {U<:UncExpr,V<:JuMPeRVar} = zero(UncVarExpr)
JuMP.GenericAffExpr{U,V}(x::Union{Number,JuMPeRVar,AffExpr,UncExpr}) where {U<:UncExpr,V<:JuMPeRVar} = convert(UncExpr, x)
function Base.push!(faff::UncVarExpr, new_coeff::Union{Real,Uncertain}, new_var::JuMPeRVar)
push!(faff.vars, new_var)
push!(faff.coeffs, UncExpr(new_coeff))
@@ -215,7 +216,7 @@ end
A constraint with uncertain parameters and variables (i.e., `UncVarExpr`).
"""
typealias UncConstraint JuMP.GenericRangeConstraint{UncVarExpr}
UncConstraint = JuMP.GenericRangeConstraint{UncVarExpr}
function JuMP.addconstraint(m::Model, c::UncConstraint; uncset=nothing)
# Handle the odd special case where there are actually no variables in
# the constraint - arises from use of macros
@@ -241,7 +242,7 @@ end
A realization of some or all of the uncertain parameters in a model.
"""
type Scenario
mutable struct Scenario
values::Vector{Float64} # Using NaN as undefined
end

@@ -257,7 +258,7 @@ uncvalue(scen::Scenario, u::Uncertain) = scen.values[u.id]
"""
getscenario(ConstraintRef{RobustModel,UncConstraint})
Get the Scenario for a constraint (as a `Nullable{Scenario}`)
Get the Scenario for a constraint (as a `Union{Scenario, Missing}`)
"""
getscenario(uc::ConstraintRef{Model,UncConstraint}) = get_robust(uc.m).scenarios[uc.idx]

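
The two added `JuMP.GenericAffExpr{U,V}(...)` methods above correspond to the "add constructors" and "define constructor for numbers too" bullets: with `typealias` gone, the old `UncVarExpr() = zero(UncVarExpr)` definition is replaced by constructors attached to the underlying parametric type, restricted to the alias's parameters. A toy sketch of the same pattern (hypothetical `MyAff` type, not JuMPeR code):

    struct MyAff{C,V}
        coeffs::Vector{C}
        vars::Vector{V}
        constant::C
    end

    MyAffExpr = MyAff{Float64,Symbol}    # plain-assignment alias, as in the diff
    Base.zero(::Type{MyAffExpr}) = MyAffExpr(Float64[], Symbol[], 0.0)

    # Reattach zero- and one-argument constructors on the parametric type itself:
    MyAff{C,V}() where {C<:Float64,V<:Symbol} = zero(MyAffExpr)
    MyAff{C,V}(x::Number) where {C<:Float64,V<:Symbol} = MyAffExpr(Float64[], Symbol[], Float64(x))

    MyAffExpr()    # zero expression, as the old alias constructor produced
    MyAffExpr(3)   # constant expression built from a number
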
13 changes: 6 additions & 7 deletions src/adaptive.jl
@@ -20,7 +20,7 @@
An adaptive variable, a variable whose value depends on the realized
values of the uncertain parameters.
"""
type Adaptive <: JuMP.AbstractJuMPScalar
mutable struct Adaptive <: JuMP.AbstractJuMPScalar
m::Model
id::Int
end
@@ -37,9 +37,9 @@ function Adaptive(m::Model, lower::Real, upper::Real,
push!(rmext.adp_arguments, depends_on)
return Adaptive(m, rmext.num_adps)
end
Base.zero(::Type{Adaptive}) = AdaptExpr()
Base.zero(::Type{Adaptive}) = zero(AdaptExpr)
Base.zero( ::Adaptive) = zero(Adaptive)
Base.one(::Type{Adaptive}) = AdaptExpr(1)
Base.one(::Type{Adaptive}) = one(AdaptExpr)
Base.one( ::Adaptive) = one(Adaptive)
Base.isequal(a::Adaptive, b::Adaptive) = (a.m === b.m) && (a.id == b.id)
getname(x::Adaptive) = get_robust(x.m).adp_names[x.id]
@@ -50,16 +50,15 @@ getname(x::Adaptive) = get_robust(x.m).adp_names[x.id]
Either a plain JuMP Variable, or a JuMPeR Adaptive variable.
"""
typealias JuMPeRVar Union{Variable,Adaptive}
JuMPeRVar = Union{Variable,Adaptive}


"""
AdaptExpr
`∑ᵢ aᵢ vᵢ` -- affine expression of JuMPeRVars and numbers.
"""
typealias AdaptExpr JuMP.GenericAffExpr{Float64,JuMPeRVar}
AdaptExpr() = zero(AdaptExpr)
AdaptExpr = JuMP.GenericAffExpr{Float64,JuMPeRVar}
Base.convert(::Type{AdaptExpr}, c::Number) =
AdaptExpr(JuMPeRVar[ ], Float64[ ], 0.0)
Base.convert(::Type{AdaptExpr}, x::JuMPeRVar) =
@@ -73,7 +72,7 @@ Base.convert(::Type{AdaptExpr}, aff::AffExpr) =
A constraint with just JuMPeRVars and numbers (i.e., `AdaptExpr`).
"""
typealias AdaptConstraint JuMP.GenericRangeConstraint{AdaptExpr}
AdaptConstraint = JuMP.GenericRangeConstraint{AdaptExpr}
function JuMP.addconstraint(m::Model, c::AdaptConstraint)
rm = get_robust(m)::RobustModelExt
push!(rm.adapt_constraints, c)
2 changes: 1 addition & 1 deletion src/expand.jl
@@ -11,7 +11,7 @@
# Adaptive robust optimization support - pre-solve expansion
#-----------------------------------------------------------------------

any_adaptive(u::UncVarExpr) = any(v->isa(v,Adaptive), u.vars)
any_adaptive(u::UncVarExpr) = any(isa.(u.vars,Adaptive))


function expand_adaptive(rm::Model)
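
The two forms of `any_adaptive` are equivalent: the old version passes a predicate (and can short-circuit), the new one broadcasts `isa` into a `BitArray` first. A quick self-contained comparison (toy data, `String` standing in for `Adaptive`):

    vars = Any[1, 2.5, "x"]
    any(v -> isa(v, String), vars)   # predicate form, as before this commit
    any(isa.(vars, String))          # broadcast form, as after this commit
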
(Diffs for the remaining 22 changed files are not shown.)
