@@ -29,13 +29,12 @@ abstract type AbstractQuadraticModel{T, S} <: AbstractNLPModel{T, S} end

    qp = QuadraticModel(c, H; A = A, lcon = lcon, ucon = ucon, lvar = lvar, uvar = uvar, coo_matrices = true)

-Create a Quadratic model ``min ~ \\tfrac{1}{2} x^T Q x + c^T x + c_0`` with optional bounds
+Create a Quadratic model ``min ~ \\tfrac{1}{2} x^T H x + c^T x + c_0`` with optional bounds
`lvar ≦ x ≦ uvar` and optional linear constraints `lcon ≦ Ax ≦ ucon`.
+The user should only give the lower triangle of `H` to the `QuadraticModel` constructor.

With the first constructor, if `sortcols = true`, then `Hcols` and `Acols` are sorted in ascending order
(`Hrows`, `Hvals` and `Arows`, `Avals` are then sorted accordingly).
-With the second constructor, if `coo_matrices = true`, `H` and/or `A` will be converted to SparseMatricesCOO
-(this will be ignored if they already are SparseMatricesCOO).

You can also use [`QPSReader.jl`](https://github.com/JuliaSmoothOptimizers/QPSReader.jl) to
create a Quadratic model from a QPS file:
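A minimal usage sketch of the two constructions described above (illustration only, not part of the patch; `readqps` from QPSReader and the `QuadraticModel(qps)` constructor are assumed, and the file name is a placeholder):

```julia
using QuadraticModels, QPSReader, SparseArrays, LinearAlgebra

# From a QPS file (assumed API: readqps and a QuadraticModel constructor taking its output).
qps = readqps("problem.qps")   # placeholder file name
qp_from_file = QuadraticModel(qps)

# Directly from matrices, as in the docstring above: minimize ½ xᵀHx + cᵀx
# subject to 1 ≤ x₁ + x₂ ≤ 2 and x ≥ 0, passing only the lower triangle of H.
c = [-1.0, -2.0]
H = tril([2.0 1.0; 1.0 2.0])
A = sparse([1.0 1.0])
qp = QuadraticModel(c, H; A = A, lcon = [1.0], ucon = [2.0],
                    lvar = zeros(2), uvar = fill(Inf, 2))
```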
@@ -65,6 +64,11 @@ mutable struct QuadraticModel{T, S, M1, M2} <: AbstractQuadraticModel{T, S}
  data::QPData{T, S, M1, M2}
end

+function Base.convert(::Type{QuadraticModel{T, S, Mconv, Mconv}}, qm::QuadraticModel{T, S, M1, M2}) where {T, S, M1 <: AbstractMatrix, M2 <: AbstractMatrix, Mconv}
+  data_conv = convert(QPData{T, S, Mconv, Mconv}, qm.data)
+  return QuadraticModel(qm.meta, qm.counters, data_conv)
+end
+
function QuadraticModel(
  c::S,
  Hrows::AbstractVector{<:Integer},
@@ -140,7 +144,6 @@ function QuadraticModel(
  lvar::S = fill!(S(undef, length(c)), T(-Inf)),
  uvar::S = fill!(S(undef, length(c)), T(Inf)),
  c0::T = zero(T),
-  coo_matrices = true,
  kwargs...,
) where {T, S}
  ncon, nvar = size(A)
@@ -149,25 +152,9 @@ function QuadraticModel(
    nnzj = 0
    data = QPData(c0, c, H, A)
  else
-    if coo_matrices
-      if typeof(H) <: Symmetric && !(typeof(H.data) <: SparseMatrixCOO)
-        tril!(H.data)
-        HCOO = SparseMatrixCOO(H.data)
-      elseif !(typeof(H) <: SparseMatrixCOO)
-        tril!(H)
-        HCOO = SparseMatrixCOO(H)
-      else
-        HCOO = H
-      end
-      ACOO = !(typeof(A) <: SparseMatrixCOO) ? SparseMatrixCOO(A) : A
-      nnzh = nnz(HCOO)
-      nnzj = nnz(ACOO)
-      data = QPData(c0, c, HCOO, ACOO)
-    else
-      nnzh = typeof(H) <: DenseMatrix ? nvar * (nvar + 1) / 2 : nnz(H)
-      nnzj = nnz(A)
-      data = QPData(c0, c, H, A)
-    end
+    nnzh = typeof(H) <: DenseMatrix ? nvar * (nvar + 1) / 2 : nnz(H)
+    nnzj = nnz(A)
+    data = typeof(H) <: Symmetric ? QPData(c0, c, H.data, A) : QPData(c0, c, H, A)
  end

  QuadraticModel(
@@ -271,6 +258,48 @@ function NLPModels.hess_structure!(
  return rows, cols
end

+function fill_structure!(S::SparseMatrixCSC, rows, cols)
+  count = 1
+  @inbounds for col = 1:size(S, 2), k = S.colptr[col]:(S.colptr[col + 1] - 1)
+    rows[count] = S.rowval[k]
+    cols[count] = col
+    count += 1
+  end
+end
+
+function fill_coord!(S::SparseMatrixCSC, vals, obj_weight)
+  count = 1
+  @inbounds for col = 1:size(S, 2), k = S.colptr[col]:(S.colptr[col + 1] - 1)
+    vals[count] = obj_weight * S.nzval[k]
+    count += 1
+  end
+end
+
+function NLPModels.hess_structure!(
+  qp::QuadraticModel{T, S, M1},
+  rows::AbstractVector{<:Integer},
+  cols::AbstractVector{<:Integer},
+) where {T, S, M1 <: SparseMatrixCSC}
+  fill_structure!(qp.data.H, rows, cols)
+  return rows, cols
+end
+
+function NLPModels.hess_structure!(
+  qp::QuadraticModel{T, S, M1},
+  rows::AbstractVector{<:Integer},
+  cols::AbstractVector{<:Integer},
+) where {T, S, M1 <: Matrix}
+  count = 1
+  for j = 1:qp.meta.nvar
+    for i = j:qp.meta.nvar
+      rows[count] = i
+      cols[count] = j
+      count += 1
+    end
+  end
+  return rows, cols
+end
+
function NLPModels.hess_coord!(
  qp::QuadraticModel{T, S, M1},
  x::AbstractVector{T},
@@ -282,6 +311,34 @@ function NLPModels.hess_coord!(
  return vals
end

+function NLPModels.hess_coord!(
+  qp::QuadraticModel{T, S, M1},
+  x::AbstractVector{T},
+  vals::AbstractVector{T};
+  obj_weight::Real = one(eltype(x)),
+) where {T, S, M1 <: SparseMatrixCSC}
+  NLPModels.increment!(qp, :neval_hess)
+  fill_coord!(qp.data.H, vals, obj_weight)
+  return vals
+end
+
+function NLPModels.hess_coord!(
+  qp::QuadraticModel{T, S, M1},
+  x::AbstractVector{T},
+  vals::AbstractVector{T};
+  obj_weight::Real = one(eltype(x)),
+) where {T, S, M1 <: Matrix}
+  NLPModels.increment!(qp, :neval_hess)
+  count = 1
+  for j = 1:qp.meta.nvar
+    for i = j:qp.meta.nvar
+      vals[count] = obj_weight * qp.data.H[i, j]
+      count += 1
+    end
+  end
+  return vals
+end
+
NLPModels.hess_coord!(
  qp::QuadraticModel,
  x::AbstractVector,
@@ -300,6 +357,31 @@ function NLPModels.jac_structure!(
  return rows, cols
end

+function NLPModels.jac_structure!(
+  qp::QuadraticModel{T, S, M1, M2},
+  rows::AbstractVector{<:Integer},
+  cols::AbstractVector{<:Integer},
+) where {T, S, M1, M2 <: SparseMatrixCSC}
+  fill_structure!(qp.data.A, rows, cols)
+  return rows, cols
+end
+
+function NLPModels.jac_structure!(
+  qp::QuadraticModel{T, S, M1, M2},
+  rows::AbstractVector{<:Integer},
+  cols::AbstractVector{<:Integer},
+) where {T, S, M1, M2 <: DenseMatrix}
+  count = 1
+  for j = 1:qp.meta.nvar
+    for i = 1:qp.meta.ncon
+      rows[count] = i
+      cols[count] = j
+      count += 1
+    end
+  end
+  return rows, cols
+end
+
function NLPModels.jac_coord!(
  qp::QuadraticModel{T, S, M1, M2},
  x::AbstractVector,
@@ -310,6 +392,32 @@ function NLPModels.jac_coord!(
  return vals
end

+function NLPModels.jac_coord!(
+  qp::QuadraticModel{T, S, M1, M2},
+  x::AbstractVector,
+  vals::AbstractVector
+) where {T, S, M1, M2 <: SparseMatrixCSC}
+  NLPModels.increment!(qp, :neval_jac)
+  fill_coord!(qp.data.A, vals, one(T))
+  return vals
+end
+
+function NLPModels.jac_coord!(
+  qp::QuadraticModel{T, S, M1, M2},
+  x::AbstractVector,
+  vals::AbstractVector
+) where {T, S, M1, M2 <: DenseMatrix}
+  NLPModels.increment!(qp, :neval_jac)
+  count = 1
+  for j = 1:qp.meta.nvar
+    for i = 1:qp.meta.ncon
+      vals[count] = qp.data.A[i, j]
+      count += 1
+    end
+  end
+  return vals
+end
+
function NLPModels.cons!(qp::AbstractQuadraticModel, x::AbstractVector, c::AbstractVector)
  NLPModels.increment!(qp, :neval_cons)
  mul!(c, qp.data.A, x)
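To show how the methods added in this patch are consumed through the NLPModels API, here is a hedged sketch (illustration only, not part of the patch; it assumes a dense `H`, a sparse `A`, and the standard NLPModels accessors `hess_structure!`, `hess_coord!`, `jac_structure!`, `jac_coord!`):

```julia
using QuadraticModels, NLPModels, SparseArrays

c = [1.0, 1.0]
H = [2.0 0.0; 1.0 2.0]          # lower triangle of the Hessian, stored dense
A = sparse([1.0 1.0])
qp = QuadraticModel(c, H; A = A, lcon = [0.0], ucon = [1.0])

# Hessian in COO form: the dense methods above walk the lower triangle column by column,
# so we expect rows = [1, 2, 2], cols = [1, 1, 2], vals = [2.0, 1.0, 2.0].
hrows = zeros(Int, qp.meta.nnzh)
hcols = zeros(Int, qp.meta.nnzh)
hess_structure!(qp, hrows, hcols)
hvals = zeros(qp.meta.nnzh)
hess_coord!(qp, qp.meta.x0, hvals)

# Jacobian in COO form, dispatched to the SparseMatrixCSC methods since A is sparse.
jrows = zeros(Int, qp.meta.nnzj)
jcols = zeros(Int, qp.meta.nnzj)
jac_structure!(qp, jrows, jcols)
jvals = zeros(qp.meta.nnzj)
jac_coord!(qp, qp.meta.x0, jvals)
```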