diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index beb8d28..e623086 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -35,6 +35,13 @@ jobs:
             ${{ runner.os }}-test-${{ env.cache-name }}-
             ${{ runner.os }}-test-
             ${{ runner.os }}-
+      - name: MOI
+        shell: julia --project=@. {0}
+        run: |
+          using Pkg
+          Pkg.add([
+            PackageSpec(name="MathOptInterface", rev="bl/lmi"),
+          ])
       - uses: julia-actions/julia-buildpkg@v1
       - uses: julia-actions/julia-runtest@v1
       - uses: julia-actions/julia-processcoverage@v1
diff --git a/src/MOI_wrapper.jl b/src/MOI_wrapper.jl
index 04179a8..d5c720f 100644
--- a/src/MOI_wrapper.jl
+++ b/src/MOI_wrapper.jl
@@ -14,12 +14,18 @@ const PIECES_MAP = Dict{String,Int}(
     "overallsc" => 8,
 )
 
+const _SetWithDotProd = MOI.SetWithDotProducts{
+    MOI.PositiveSemidefiniteConeTriangle,
+    MOI.TriangleVectorization{MOI.LowRankMatrix{Cdouble}},
+}
+
 const SupportedSets =
-    Union{MOI.Nonnegatives,MOI.PositiveSemidefiniteConeTriangle}
+    Union{MOI.Nonnegatives,MOI.PositiveSemidefiniteConeTriangle,_SetWithDotProd}
 
 mutable struct Optimizer <: MOI.AbstractOptimizer
     objective_constant::Float64
     objective_sign::Int
+    dot_product::Vector{Union{Nothing,MOI.LowRankMatrix{Cdouble}}}
     blksz::Vector{Cptrdiff_t}
     blktype::Vector{Cchar}
     varmap::Vector{Tuple{Int,Int,Int}} # Variable Index vi -> blk, i, j
@@ -48,6 +54,7 @@ mutable struct Optimizer <: MOI.AbstractOptimizer
         return new(
             0.0,
             1,
+            Union{Nothing,MOI.LowRankMatrix{Cdouble}}[],
             Cptrdiff_t[],
             Cchar[],
             Tuple{Int,Int,Int}[],
@@ -142,6 +149,7 @@ function _new_block(model::Optimizer, set::MOI.Nonnegatives)
     blk = length(model.blksz)
     for i in 1:MOI.dimension(set)
         push!(model.varmap, (blk, i, i))
+        push!(model.dot_product, nothing)
     end
     return
 end
@@ -153,11 +161,22 @@ function _new_block(model::Optimizer, set::MOI.PositiveSemidefiniteConeTriangle)
     for j in 1:set.side_dimension
         for i in 1:j
             push!(model.varmap, (blk, i, j))
+            push!(model.dot_product, nothing)
         end
     end
     return
 end
 
+function _new_block(model::Optimizer, set::_SetWithDotProd)
+    blk = length(model.blksz) + 1
+    for i in eachindex(set.vectors)
+        push!(model.varmap, (-blk, i, i))
+        push!(model.dot_product, set.vectors[i].matrix)
+    end
+    _new_block(model, set.set)
+    return
+end
+
 function MOI.add_constrained_variables(model::Optimizer, set::SupportedSets)
     reset_solution!(model)
     offset = length(model.varmap)
@@ -239,14 +258,36 @@ function _fill!(
 )
     for t in MOI.Utilities.canonical(func).terms
         blk, i, j = model.varmap[t.variable.value]
-        _fill_until(model, blk, entptr, type, length(ent))
-        coef = t.coefficient
-        if i != j
-            coef /= 2
+        _fill_until(model, abs(blk), entptr, type, length(ent))
+        if type[end] == Cchar('l')
+            error(
+                "Can either have one dot product variable or several normal variables in the same constraint",
+            )
+        end
+        if blk < 0
+            type[end] = Cchar('l')
+            mat = model.dot_product[t.variable.value]
+            for i in eachindex(mat.diagonal)
+                push!(ent, mat.diagonal[i])
+                push!(row, i)
+                push!(col, i)
+            end
+            for j in axes(mat.factor, 2)
+                for i in axes(mat.factor, 1)
+                    push!(ent, mat.factor[i, j])
+                    push!(row, i)
+                    push!(col, j)
+                end
+            end
+        else
+            coef = t.coefficient
+            if i != j
+                coef /= 2
+            end
+            push!(ent, coef)
+            push!(row, i)
+            push!(col, j)
         end
-        push!(ent, coef)
-        push!(row, i)
-        push!(col, j)
     end
     _fill_until(model, length(model.blksz), entptr, type, length(ent))
     @assert length(entptr) == length(model.blksz)
@@ -367,6 +408,7 @@ end
 function MOI.empty!(optimizer::Optimizer)
     optimizer.objective_constant = 0.0
     optimizer.objective_sign = 1
+    empty!(optimizer.dot_product)
     empty!(optimizer.blksz)
     empty!(optimizer.blktype)
     empty!(optimizer.varmap)
diff --git a/src/SDPLR.jl b/src/SDPLR.jl
index 285f3a5..7d3f03b 100644
--- a/src/SDPLR.jl
+++ b/src/SDPLR.jl
@@ -129,14 +129,17 @@ function solve(
         k += 1
         @assert CAinfo_entptr[k] <= CAinfo_entptr[k+1]
         for j in ((CAinfo_entptr[k]+1):CAinfo_entptr[k+1])
-            @assert blktype[blk] == CAinfo_type[k]
             @assert 1 <= CArow[j] <= blksz[blk]
             @assert 1 <= CAcol[j] <= blksz[blk]
             if CAinfo_type[k] == Cchar('s')
+                @assert blktype[blk] == Cchar('s')
                 @assert CArow[j] <= CAcol[j]
-            else
-                @assert CAinfo_type[k] == Cchar('d')
+            elseif CAinfo_type[k] == Cchar('d')
+                @assert blktype[blk] == Cchar('d')
                 @assert CArow[j] == CAcol[j]
+            else
+                @assert CAinfo_type[k] == Cchar('l')
+                @assert blktype[blk] == Cchar('s')
             end
         end
     end
diff --git a/test/simple_lowrank.jl b/test/simple_lowrank.jl
new file mode 100644
index 0000000..3149e01
--- /dev/null
+++ b/test/simple_lowrank.jl
@@ -0,0 +1,8 @@
+blksz = Cptrdiff_t[2]
+blktype = Cchar['s']
+b = Cdouble[1]
+CAinfo_entptr = Csize_t[0, 2, 8]
+CAent = Cdouble[1, 1, -0.25, 0.25, -1, 1, 1, 1]
+CArow = Csize_t[1, 2, 1, 2, 1, 2, 1, 2]
+CAcol = Csize_t[1, 2, 1, 2, 1, 1, 2, 2]
+CAinfo_type = Cchar['s', 'l']
diff --git a/test/simple.jl b/test/simple_sparse.jl
similarity index 100%
rename from test/simple.jl
rename to test/simple_sparse.jl
diff --git a/test/test_simple.jl b/test/test_simple.jl
index 491cd9b..641c6cd 100644
--- a/test/test_simple.jl
+++ b/test/test_simple.jl
@@ -4,8 +4,11 @@ import Random
 import MathOptInterface as MOI
 
 # This is `test_conic_PositiveSemidefiniteConeTriangle_VectorOfVariables`
-@testset "Solve simple with sdplrlib" begin
-    include("simple.jl")
+@testset "Solve simple with sdplrlib with $file" for file in [
+    "simple_sparse.jl",
+    "simple_lowrank.jl",
+]
+    include(file)
     # The `925` seed is taken from SDPLR's `main.c`
     Random.seed!(925)
     ret, R, lambda, ranks, pieces = SDPLR.solve(
@@ -26,7 +29,7 @@ import MathOptInterface as MOI
     @test ranks == Csize_t[2]
 end
 
-function simple_model()
+function simple_sparse_model()
     model = SDPLR.Optimizer()
     X, _ = MOI.add_constrained_variables(
         model,
@@ -40,6 +43,30 @@
     return model, X, c
 end
 
+function simple_lowrank_model()
+    model = SDPLR.Optimizer()
+    A = MOI.LowRankMatrix(
+        [-1 / 4, 1 / 4],
+        [
+            -1.0 1.0
+            1.0 1.0
+        ],
+    )
+    X, _ = MOI.add_constrained_variables(
+        model,
+        MOI.SetWithDotProducts(
+            MOI.PositiveSemidefiniteConeTriangle(2),
+            [MOI.TriangleVectorization(A)],
+        ),
+    )
+    c = MOI.add_constraint(model, 1.0 * X[1], MOI.EqualTo(1.0))
+    MOI.set(model, MOI.ObjectiveSense(), MOI.MIN_SENSE)
+    obj = 1.0 * X[2] + 1.0 * X[4]
+    MOI.set(model, MOI.ObjectiveFunction{typeof(obj)}(), obj)
+    @test MOI.get(model, MOI.TerminationStatus()) == MOI.OPTIMIZE_NOT_CALLED
+    return model, X[2:4], c
+end
+
 function simple_test(model, X, c)
     atol = rtol = 1e-2
     @test MOI.get(model, MOI.TerminationStatus()) == MOI.LOCALLY_SOLVED
@@ -54,14 +81,15 @@ function simple_test(model, X, c)
     @test σ ≈ sigma
 end
 
-@testset "MOI wrapper" begin
-    model, X, c = simple_model()
+@testset "MOI wrapper for $f" for f in
+    [simple_sparse_model, simple_lowrank_model]
+    model, X, c = f()
     MOI.optimize!(model)
     simple_test(model, X, c)
 end
 
 function _test_limit(attr, val, term)
-    model, _, _ = simple_model()
+    model, _, _ = simple_sparse_model()
     MOI.set(model, MOI.RawOptimizerAttribute(attr), val)
     MOI.optimize!(model)
     @test MOI.get(model, MOI.TerminationStatus()) == term
@@ -89,7 +117,7 @@ end
 end
 
 @testset "continuity between solve" begin
-    model, X, c = simple_model()
+    model, X, c = simple_sparse_model()
     MOI.set(model, MOI.RawOptimizerAttribute("majiter"), SDPLR.MAX_MAJITER - 2)
     @test MOI.get(model, MOI.RawOptimizerAttribute("majiter")) ==
           SDPLR.MAX_MAJITER - 2