Commit ac1e290

Add support for low rank constraint (#26)
* Add support for low rank constraint
* Fix format
* Fixes
* Drops Julia v1.6
* Add LRO tests
* Fix format
* Fixes
* Renam
* Fix
* Fixes
* Fix
* Fix
* Debug
* Cchar
* Fix
* Fix format
* Fix
* Fix
* Add def of N in README
* Add support for Factor in low-rank case
* Fix
* Add tests for sum of low rank
* Add support for multiple factor in same block
* Fixes
* Use LowRankOpt v0.1
* Fix format
* Mention in README
* Fix for mac os
Parent commit: 43f64e2

8 files changed: +277, -36 lines

.github/workflows/ci.yml — 2 additions, 2 deletions

@@ -15,12 +15,12 @@ jobs:
     strategy:
       fail-fast: false
       matrix:
-        version: ['1.6', '1'] # Test against LTS
+        version: ['1.10', '1'] # Test against LTS
         os: [ubuntu-latest, macOS-latest, windows-latest]
         arch: [x64]
         include:
           # Also test against 32-bit Linux on LTS.
-          - version: '1.6'
+          - version: '1.10'
             os: ubuntu-latest
             arch: x86
     steps:

Project.toml — 4 additions, 2 deletions

@@ -1,13 +1,15 @@
 name = "SDPLR"
 uuid = "56161740-ea4e-4253-9d15-43c62ff94d95"
 authors = ["Benoît Legat <[email protected]>"]
-version = "0.1.2"
+version = "0.2.0"

 [deps]
+LowRankOpt = "607ca3ad-272e-43c8-bcbe-fc71b56c935c"
 MathOptInterface = "b8f27783-ece8-5eb3-8dc8-9495eed66fee"
 SDPLR_jll = "3a057b76-36a0-51f0-a66f-6d580b8e8efd"

 [compat]
+LowRankOpt = "0.1"
 MathOptInterface = "1.24"
 SDPLR_jll = "=100.2.300"
-julia = "1.6"
+julia = "1.10"

README.md — 3 additions, 0 deletions

@@ -46,6 +46,9 @@ To use SDPLR with [JuMP](https://github.com/jump-dev/JuMP.jl), use
 using JuMP, SDPLR
 model = Model(SDPLR.Optimizer)
 ```
+In order to use low-rank **constraints** (not to be confused with low-rank
+**solutions** detailed in the next section), use the
+[LowRankOpt](https://github.com/blegat/LowRankOpt.jl/) extension of JuMP.

 ## Example: modifying the rank and checking optimality
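The README points to LowRankOpt for the modeling syntax; at the MOI level, the wrapper changes below accept a `SetDotProducts` set directly through `MOI.add_constrained_variables`. Here is a minimal sketch, assuming `LRO.Factorization`, `LRO.TriangleVectorization`, and `LRO.SetDotProducts{LRO.WITH_SET}` can be constructed from their fields in the order their type parameters suggest; those constructor signatures are not shown in this commit and may differ in LowRankOpt.

```julia
# Hypothetical MOI-level sketch; the LRO constructor calls below are assumptions,
# not taken from this commit.
import MathOptInterface as MOI
import LowRankOpt as LRO
import SDPLR

optimizer = SDPLR.Optimizer()

# Low-rank data matrix A = F * Diagonal(d) * F' paired with a 3x3 PSD block.
F = reshape([1.0, 2.0, 3.0], 3, 1)
d = [1.0]
A = LRO.TriangleVectorization(LRO.Factorization(F, d))

set = LRO.SetDotProducts{LRO.WITH_SET}(
    MOI.PositiveSemidefiniteConeTriangle(3),
    [A],
)

# The first variable is the dot product ⟨A, X⟩; the remaining ones are the
# upper-triangular entries of the PSD matrix X.
x, cx = MOI.add_constrained_variables(optimizer, set)
```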

src/MOI_wrapper.jl — 99 additions, 12 deletions
@@ -17,12 +17,23 @@ const PIECES_MAP = Dict{String,Int}(
     "overallsc" => 8,
 )

+const _LowRankMatrix{F<:AbstractMatrix{Cdouble},D<:AbstractVector{Cdouble}} =
+    LRO.Factorization{Cdouble,F,D}
+
+const _SetDotProd{F<:AbstractMatrix{Cdouble},D<:AbstractVector{Cdouble}} =
+    LRO.SetDotProducts{
+        LRO.WITH_SET,
+        MOI.PositiveSemidefiniteConeTriangle,
+        LRO.TriangleVectorization{Cdouble,_LowRankMatrix{F,D}},
+    }
+
 const SupportedSets =
-    Union{MOI.Nonnegatives,MOI.PositiveSemidefiniteConeTriangle}
+    Union{MOI.Nonnegatives,MOI.PositiveSemidefiniteConeTriangle,_SetDotProd}

 mutable struct Optimizer <: MOI.AbstractOptimizer
     objective_constant::Float64
     objective_sign::Int
+    dot_product::Vector{Union{Nothing,_LowRankMatrix}}
     blksz::Vector{Cptrdiff_t}
     blktype::Vector{Cchar}
     varmap::Vector{Tuple{Int,Int,Int}} # Variable Index vi -> blk, i, j
@@ -51,6 +62,7 @@ mutable struct Optimizer <: MOI.AbstractOptimizer
         return new(
             0.0,
             1,
+            Union{Nothing,_LowRankMatrix}[],
             Cptrdiff_t[],
             Cchar[],
             Tuple{Int,Int,Int}[],
@@ -151,6 +163,7 @@ function _new_block(model::Optimizer, set::MOI.Nonnegatives)
     blk = length(model.blksz)
     for i in 1:MOI.dimension(set)
         push!(model.varmap, (blk, i, i))
+        push!(model.dot_product, nothing)
     end
     return
 end
@@ -162,11 +175,22 @@ function _new_block(model::Optimizer, set::MOI.PositiveSemidefiniteConeTriangle)
     for j in 1:set.side_dimension
         for i in 1:j
             push!(model.varmap, (blk, i, j))
+            push!(model.dot_product, nothing)
         end
     end
     return
 end

+function _new_block(model::Optimizer, set::_SetDotProd)
+    blk = length(model.blksz) + 1
+    for i in eachindex(set.vectors)
+        push!(model.varmap, (-blk, i, i))
+        push!(model.dot_product, set.vectors[i].matrix)
+    end
+    _new_block(model, set.set)
+    return
+end
+
 function MOI.add_constrained_variables(model::Optimizer, set::SupportedSets)
     reset_solution!(model)
     offset = length(model.varmap)
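The new `_new_block` method records each dot-product variable with a negated block index so later passes can tell it apart from a plain entry of the PSD block. A standalone sketch of that bookkeeping (illustrative only; the helper name is hypothetical and not part of this commit):

```julia
# Sketch of the index layout produced for a `SetDotProducts` set with k low-rank
# vectors over an n x n PSD block appended as block `nblocks_so_far + 1`.
function sketch_varmap(k::Int, n::Int, nblocks_so_far::Int)
    varmap = Tuple{Int,Int,Int}[]
    blk = nblocks_so_far + 1
    for i in 1:k
        push!(varmap, (-blk, i, i))  # variable i is the dot product ⟨Aᵢ, X⟩
    end
    for j in 1:n, i in 1:j
        push!(varmap, (blk, i, j))   # upper-triangular entry X[i, j]
    end
    return varmap
end

sketch_varmap(2, 2, 0)  # [(-1, 1, 1), (-1, 2, 2), (1, 1, 1), (1, 1, 2), (1, 2, 2)]
```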
@@ -208,6 +232,9 @@ function _next(model::Optimizer, i)
     return length(model.Aent)
 end

+# For the variables that are not in the dot product, we need to fill the
+# `entptr` and `type`. This is because the solver needs to have an entry
+# for each block.
 function _fill_until(
     model::Optimizer,
     numblk,
@@ -226,6 +253,42 @@ function _fill_until(
     return
 end

+# We have ∑ αᵢ Fᵢ Dᵢ Fᵢ' = [F₁ ... Fₖ] * Diag(α₁D₁, ..., αₖDₖ) * [F₁ ... Fₖ]'
+function merge_low_rank_terms(
+    ent,
+    row,
+    col,
+    type::Vector{Cchar},
+    mats::Vector{Tuple{Cdouble,_LowRankMatrix}},
+)
+    if isempty(mats)
+        return
+    end
+    type[end] = Cchar('l')
+    offset = 0
+    for (coef, mat) in mats
+        for i in eachindex(mat.scaling)
+            push!(ent, coef * mat.scaling[i])
+            push!(row, offset + i)
+            push!(col, offset + i)
+        end
+        offset += length(mat.scaling)
+    end
+    offset = 0
+    for (_, mat) in mats
+        for j in axes(mat.factor, 2)
+            for i in axes(mat.factor, 1)
+                push!(ent, mat.factor[i, j])
+                push!(row, i)
+                push!(col, offset + j)
+            end
+        end
+        offset += length(mat.scaling)
+    end
+    empty!(mats)
+    return
+end
+
 function _fill!(
     model,
     ent,
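The comment above `merge_low_rank_terms` states the key identity: a coefficient-weighted sum of low-rank terms is itself a single factorization whose factor concatenates the Fᵢ and whose diagonal stacks the coefficient-scaled Dᵢ. A quick numerical check of that identity in plain Julia (not part of the package):

```julia
using LinearAlgebra

F1, d1, a1 = randn(4, 2), [1.0, 3.0], 2.0   # first term: a1 * F1 * Diagonal(d1) * F1'
F2, d2, a2 = randn(4, 1), [0.5], -1.0       # second term: a2 * F2 * Diagonal(d2) * F2'

lhs = a1 * F1 * Diagonal(d1) * F1' + a2 * F2 * Diagonal(d2) * F2'

F = hcat(F1, F2)              # merged factor [F₁ F₂]
d = vcat(a1 .* d1, a2 .* d2)  # merged diagonal Diag(α₁D₁, α₂D₂)
rhs = F * Diagonal(d) * F'

@assert lhs ≈ rhs
```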
@@ -235,17 +298,28 @@
     type::Vector{Cchar},
     func,
 )
+    prev_blk = 0
+    mats = Tuple{Cdouble,_LowRankMatrix}[]
     for t in MOI.Utilities.canonical(func).terms
         blk, i, j = model.varmap[t.variable.value]
-        _fill_until(model, blk, entptr, type, length(ent))
-        coef = t.coefficient
-        if i != j
-            coef /= 2
+        if blk != prev_blk
+            merge_low_rank_terms(ent, row, col, type, mats)
+        end
+        _fill_until(model, abs(blk), entptr, type, length(ent))
+        if blk < 0
+            prev_blk = blk
+            push!(mats, (t.coefficient, model.dot_product[t.variable.value]))
+        else
+            coef = t.coefficient
+            if i != j
+                coef /= 2
+            end
+            push!(ent, coef)
+            push!(row, i)
+            push!(col, j)
         end
-        push!(ent, coef)
-        push!(row, i)
-        push!(col, j)
     end
+    merge_low_rank_terms(ent, row, col, type, mats)
     _fill_until(model, length(model.blksz), entptr, type, length(ent))
     @assert length(entptr) == length(model.blksz)
     @assert length(type) == length(model.blksz)
@@ -366,6 +440,7 @@ end
 function MOI.empty!(optimizer::Optimizer)
     optimizer.objective_constant = 0.0
     optimizer.objective_sign = 1
+    empty!(optimizer.dot_product)
     empty!(optimizer.blksz)
     empty!(optimizer.blktype)
     empty!(optimizer.varmap)
@@ -479,9 +554,10 @@ function MOI.get(
     # The constraint index corresponds to the variable index of the `1, 1` entry
     blk, i, j = optimizer.varmap[ci.value]
     @assert i == j == 1
+    blk = abs(blk) # In the Low-Rank case, we just take the factor of the PSD matrix
     I = (optimizer.Rmap[blk]+1):optimizer.Rmap[blk+1]
     r = optimizer.R[I]
-    if S === MOI.PositiveSemidefiniteConeTriangle
+    if S === MOI.PositiveSemidefiniteConeTriangle || S <: _SetDotProd
         @assert optimizer.blktype[blk] == Cchar('s')
         d = optimizer.blksz[blk]
         return reshape(r, d, div(length(I), d))
@@ -497,12 +573,23 @@ function MOI.get(
     vi::MOI.VariableIndex,
 )
     MOI.check_result_index_bounds(optimizer, attr)
-    blk, i, j = optimizer.varmap[vi.value]
-    I = (optimizer.Rmap[blk]+1):optimizer.Rmap[blk+1]
+    _blk, i, j = optimizer.varmap[vi.value]
+    blk = abs(_blk)
+    I = (optimizer.Rmap[abs(blk)]+1):optimizer.Rmap[abs(blk)+1]
     if optimizer.blktype[blk] == Cchar('s')
         d = optimizer.blksz[blk]
         U = reshape(optimizer.R[I], d, div(length(I), d))
-        return U[i, :]' * U[j, :]
+        if _blk < 0
+            # result of dot product
+            mat = optimizer.dot_product[vi.value]
+            return sum(eachindex(mat.scaling); init = 0.0) do i
+                v = mat.factor[:, i]
+                vU = U' * v
+                return mat.scaling[i] * (vU' * vU)
+            end
+        else
+            return U[i, :]' * U[j, :]
+        end
     else
         @assert optimizer.blktype[blk] == Cchar('d')
         return optimizer.R[I[i]]^2
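The new branch for dot-product variables evaluates ⟨A, X⟩ with A = F·Diag(d)·F' and X = U·U' without forming either matrix, using ⟨A, X⟩ = ∑ᵢ dᵢ ‖U'·F[:, i]‖². A standalone check of that formula (plain Julia, illustrative only):

```julia
using LinearAlgebra

U = randn(5, 2)                       # low-rank factor of the solved PSD block, X = U * U'
F = randn(5, 3)                       # data factor
d = [1.0, -2.0, 0.5]                  # data scaling, A = F * Diagonal(d) * F'

lhs = tr(F * Diagonal(d) * F' * U * U')                       # ⟨A, X⟩
rhs = sum(d[i] * norm(U' * F[:, i])^2 for i in eachindex(d))  # formula used in MOI.get

@assert lhs ≈ rhs
```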

src/SDPLR.jl — 7 additions, 3 deletions
@@ -6,6 +6,7 @@
 module SDPLR

 import MathOptInterface as MOI
+import LowRankOpt as LRO
 import SDPLR_jll

 include("bounds.jl")
@@ -138,14 +139,17 @@ function solve(
         k += 1
         @assert CAinfo_entptr[k] <= CAinfo_entptr[k+1]
         for j in ((CAinfo_entptr[k]+1):CAinfo_entptr[k+1])
-            @assert blktype[blk] == CAinfo_type[k]
             @assert 1 <= CArow[j] <= blksz[blk]
             @assert 1 <= CAcol[j] <= blksz[blk]
             if CAinfo_type[k] == Cchar('s')
+                @assert blktype[blk] == Cchar('s')
                 @assert CArow[j] <= CAcol[j]
-            else
-                @assert CAinfo_type[k] == Cchar('d')
+            elseif CAinfo_type[k] == Cchar('d')
+                @assert blktype[blk] == Cchar('d')
                 @assert CArow[j] == CAcol[j]
+            else
+                @assert CAinfo_type[k] == Cchar('l')
+                @assert blktype[blk] == Cchar('s')
             end
         end
     end

test/Project.toml — 1 addition, 0 deletions

@@ -1,5 +1,6 @@
 [deps]
 LinearAlgebra = "37e2e46d-f89d-539d-b4ee-838fcccc9c8e"
+LowRankOpt = "607ca3ad-272e-43c8-bcbe-fc71b56c935c"
 MathOptInterface = "b8f27783-ece8-5eb3-8dc8-9495eed66fee"
 Random = "9a3f8284-a2c9-5f02-9a11-845980a1fd5c"
 SDPLR = "56161740-ea4e-4253-9d15-43c62ff94d95"
