|
28 | 28 | from cvxpy.atoms.affine.affine_atom import AffAtom |
29 | 29 | from cvxpy.atoms.affine.broadcast_to import broadcast_to |
30 | 30 | from cvxpy.atoms.affine.conj import conj |
31 | | -from cvxpy.atoms.affine.promote import Promote |
32 | 31 | from cvxpy.atoms.affine.reshape import deep_flatten, reshape |
33 | 32 | from cvxpy.atoms.affine.sum import sum as cvxpy_sum |
34 | 33 | from cvxpy.constraints.constraint import Constraint |
|
38 | 37 | is_param_free, |
39 | 38 | ) |
40 | 39 | from cvxpy.expressions.expression import Expression |
41 | | -from cvxpy.expressions.variable import Variable |
42 | 40 | from cvxpy.utilities import bounds as bounds_utils |
43 | 41 |
|
44 | 42 |
|
@@ -311,137 +309,6 @@ def _grad(self, values): |
311 | 309 | DY = sp.kron(sp.eye_array(n), X, format='csc').T |
312 | 310 |
|
313 | 311 | return [DX, DY] |
314 | | - |
315 | | - def _verify_hess_vec_args(self): |
316 | | - X = self.args[0] |
317 | | - Y = self.args[1] |
318 | | - |
319 | | - # if X is an atom, Y must be constant |
320 | | - if not isinstance(X, Variable) and not X.is_constant(): |
321 | | - if not Y.is_constant(): |
322 | | - return False |
323 | | - |
324 | | - # if Y is an atom, X must be constant |
325 | | - if not isinstance(Y, Variable) and not Y.is_constant(): |
326 | | - if not X.is_constant(): |
327 | | - return False |
328 | | - |
329 | | - # if both are variables, check that they are not the same variable |
330 | | - if isinstance(X, Variable) and isinstance(Y, Variable): |
331 | | - if X.id == Y.id: |
332 | | - return False |
333 | | - |
334 | | - return True |
335 | | - |
336 | | - def _hess_vec(self, vec): |
337 | | - X = self.args[0] |
338 | | - Y = self.args[1] |
339 | | - |
340 | | - m, n = self.get_dimensions(X) |
341 | | - _, p = self.get_dimensions(Y) |
342 | | - |
343 | | - if X.is_constant(): |
344 | | - B = X.value.T @ np.reshape(vec, (m, p), order='F') |
345 | | - hess_dict = Y.hess_vec(B.flatten(order='F')) |
346 | | - return hess_dict |
347 | | - |
348 | | - if Y.is_constant(): |
349 | | - B = np.reshape(vec, (m, p), order='F') @ Y.value.T |
350 | | - hess_dict = X.hess_vec(B.flatten(order='F')) |
351 | | - return hess_dict |
352 | | - |
353 | | - # here both are variables by themselves so we only get a cross term |
354 | | - rows = np.tile(np.arange(m * n), p) |
355 | | - cols = np.repeat(np.arange(n * p), m) |
356 | | - vals = vec[(cols // n) * m + (rows % m)] |
357 | | - return {(X, Y): (rows, cols, vals), (Y, X): (cols, rows, vals)} |
358 | | - |
359 | | - def _verify_jacobian_args(self): |
360 | | - X = self.args[0] |
361 | | - Y = self.args[1] |
362 | | - |
363 | | - X_vars = X.variables() |
364 | | - Y_vars = Y.variables() |
365 | | - |
366 | | - # no variable can appear in both arguments |
367 | | - for x_var in X_vars: |
368 | | - for y_var in Y_vars: |
369 | | - if x_var.id == y_var.id: |
370 | | - return False |
371 | | - |
372 | | - return True |
373 | | - |
374 | | - def get_dimensions(self, X): |
375 | | - """Get the dimensions of X as (rows, cols). |
376 | | - """ |
377 | | - if len(X.shape) == 0: |
378 | | - return (1, 1) |
379 | | - elif len(X.shape) == 1: |
380 | | - return (X.shape[0], 1) |
381 | | - else: |
382 | | - return X.shape |
383 | | - |
384 | | - def _jacobian(self): |
385 | | - """ |
386 | | - The atom is phi(X, Y) = X @ Y. It is vectorized as |
387 | | - z = vec(phi(X, Y)) = (I ⊗ X) vec(Y) = (Y.T ⊗ I) vec(X). |
388 | | - Let x = vec(X) and y = vec(Y). Then the Jacobian is given by |
389 | | - dz/dx = kron(Y.T, I) and dz/dy = kron(I, X). |
390 | | - """ |
391 | | - |
392 | | - X = self.args[0] |
393 | | - Y = self.args[1] |
394 | | - |
395 | | - m, _ = self.get_dimensions(X) |
396 | | - _, p = self.get_dimensions(Y) |
397 | | - |
398 | | - dx_dict = {} |
399 | | - dy_dict = {} |
400 | | - |
401 | | - if not X.is_constant(): |
402 | | - dx = sp.kron(Y.value.T, sp.eye(m), format='csr') |
403 | | - |
404 | | - if not isinstance(X, Variable): |
405 | | - X_jac_dict = X.jacobian() |
406 | | - for var in X_jac_dict: |
407 | | - rows, cols, vals = X_jac_dict[var] |
408 | | - X_jac = sp.coo_array((vals, (rows, cols)), |
409 | | - shape=(dx.shape[1], var.size)).tocsc() |
410 | | - X_jac = (dx @ X_jac).tocoo() |
411 | | - X_jac_dict[var] = (X_jac.row, X_jac.col, X_jac.data) |
412 | | - |
413 | | - dx_dict = X_jac_dict |
414 | | - else: |
415 | | - dx = dx.tocoo() |
416 | | - dx_dict = {X: (dx.row, dx.col, dx.data)} |
417 | | - |
418 | | - |
419 | | - if not Y.is_constant(): |
420 | | - dy = sp.kron(sp.eye(p), X.value, format='csr') |
421 | | - |
422 | | - if not isinstance(Y, Variable): |
423 | | - Y_jac_dict = Y.jacobian() |
424 | | - for var in Y_jac_dict: |
425 | | - rows, cols, vals = Y_jac_dict[var] |
426 | | - Y_jac = sp.coo_array((vals, (rows, cols)), |
427 | | - shape=(dy.shape[1], var.size)).tocsc() |
428 | | - Y_jac = (dy @ Y_jac).tocoo() |
429 | | - Y_jac_dict[var] = (Y_jac.row, Y_jac.col, Y_jac.data) |
430 | | - |
431 | | - dy_dict = Y_jac_dict |
432 | | - else: |
433 | | - dy = dy.tocoo() |
434 | | - dy_dict = {Y: (dy.row, dy.col, dy.data)} |
435 | | - |
436 | | - if X.is_constant() and not Y.is_constant(): |
437 | | - return dy_dict |
438 | | - |
439 | | - if not X.is_constant() and Y.is_constant(): |
440 | | - return dx_dict |
441 | | - |
442 | | - # merge the two dictionaries together |
443 | | - dx_dict.update(dy_dict) |
444 | | - return dx_dict |
445 | 312 |
|
446 | 313 | def graph_implementation( |
447 | 314 | self, arg_objs, shape: Tuple[int, ...], data=None |
@@ -574,114 +441,6 @@ def _grad(self, values): |
574 | 441 |
|
575 | 442 | return [DX, DY] |
576 | 443 |
|
577 | | - def _verify_hess_vec_args(self): |
578 | | - x = self.args[0] |
579 | | - y = self.args[1] |
580 | | - if x.size != y.size: |
581 | | - return False |
582 | | - |
583 | | - if x.is_constant() and y.is_constant(): |
584 | | - return False |
585 | | - |
586 | | - # one of the following must be true: |
587 | | - # 1. both arguments are variables |
588 | | - # 2. one argument is a constant |
589 | | - # 3. one argument is a Promote of a variable and the other is a variable |
590 | | - both_are_variables = isinstance(x, Variable) and isinstance(y, Variable) |
591 | | - one_is_constant = x.is_constant() or y.is_constant() |
592 | | - x_is_promote = type(x) == Promote and isinstance(y, Variable) |
593 | | - y_is_promote = type(y) == Promote and isinstance(x, Variable) |
594 | | - |
595 | | - if not (both_are_variables or one_is_constant or x_is_promote or y_is_promote): |
596 | | - return False |
597 | | - |
598 | | - if both_are_variables and x.id == y.id: |
599 | | - return False |
600 | | - |
601 | | - return True |
602 | | - |
603 | | - def _hess_vec(self, vec): |
604 | | - x = self.args[0] |
605 | | - y = self.args[1] |
606 | | - |
607 | | - # constant * atom |
608 | | - if x.is_constant(): |
609 | | - y_hess_vec = y.hess_vec(x.value.flatten(order='F') * vec) |
610 | | - return y_hess_vec |
611 | | - |
612 | | - # atom * constant |
613 | | - if y.is_constant(): |
614 | | - x_hess_vec = x.hess_vec(y.value.flatten(order='F') * vec) |
615 | | - return x_hess_vec |
616 | | - |
617 | | - # x * y with x a scalar variable, y a vector variable |
618 | | - if not isinstance(x, Variable) and x.is_affine(): |
619 | | - assert(type(x) == Promote) |
620 | | - x_var = x.args[0] # here x is a Promote because of how we canonicalize |
621 | | - zeros_x = np.zeros(x_var.size, dtype=int) |
622 | | - cols = np.arange(y.size, dtype=int) |
623 | | - return {(x_var, y): (zeros_x, cols, vec), |
624 | | - (y, x_var): (cols, zeros_x, vec)} |
625 | | - |
626 | | - # x * y with x a vector variable, y a scalar |
627 | | - if not isinstance(y, Variable) and y.is_affine(): |
628 | | - assert(type(y) == Promote) |
629 | | - y_var = y.args[0] # here y is a Promote because of how we canonicalize |
630 | | - zeros_y = np.zeros(y_var.size, dtype=int) |
631 | | - cols = np.arange(x.size, dtype=int) |
632 | | - return {(x, y_var): (cols, zeros_y, vec), |
633 | | - (y_var, x): (zeros_y, cols, vec)} |
634 | | - |
635 | | - # if we arrive here both arguments are variables of the same size |
636 | | - rows = np.arange(x.size, dtype=int) |
637 | | - cols = np.arange(x.size, dtype=int) |
638 | | - return {(x, y): (rows, cols, vec), (y, x): (rows, cols, vec)} |
639 | | - |
640 | | - def _verify_jacobian_args(self): |
641 | | - return self._verify_hess_vec_args() |
642 | | - |
643 | | - |
644 | | - def _jacobian(self): |
645 | | - x = self.args[0] |
646 | | - y = self.args[1] |
647 | | - |
648 | | - if x.is_constant(): |
649 | | - dy = y.jacobian() |
650 | | - for k in dy: |
651 | | - rows, cols, vals = dy[k] |
652 | | - # this is equivalent to forming the matrix defined by |
653 | | - # rows, cols, vals and scaling each row i by x.value[i] |
654 | | - dy[k] = (rows, cols, np.atleast_1d(x.value).flatten(order='F')[rows] * vals) |
655 | | - return dy |
656 | | - |
657 | | - if y.is_constant(): |
658 | | - dx = x.jacobian() |
659 | | - for k in dx: |
660 | | - rows, cols, vals = dx[k] |
661 | | - dx[k] = (rows, cols, np.atleast_1d(y.value).flatten(order='F')[rows] * vals) |
662 | | - return dx |
663 | | - |
664 | | - if not isinstance(x, Variable) and x.is_affine(): |
665 | | - assert(type(x) == Promote) |
666 | | - x_var = x.args[0] # here x is a Promote because of how we canonicalize |
667 | | - idxs = np.arange(y.size, dtype=int) |
668 | | - return {(x_var): (idxs, np.zeros(y.size, dtype=int), y.value), |
669 | | - (y): (idxs, idxs, x.value)} |
670 | | - |
671 | | - # x * y with x a vector variable, y a scalar |
672 | | - if not isinstance(y, Variable) and y.is_affine(): |
673 | | - assert(type(y) == Promote) |
674 | | - y_var = y.args[0] # here y is a Promote because of how we canonicalize |
675 | | - idxs = np.arange(x.size, dtype=int) |
676 | | - return {(x): (idxs, idxs, y.value), |
677 | | - (y_var): (idxs, np.zeros(x.size, dtype=int), x.value)} |
678 | | - |
679 | | - # here both are variables |
680 | | - idxs = np.arange(x.size, dtype=int) |
681 | | - jacobian_dict = {x: (idxs, idxs, y.value.flatten(order='F')), |
682 | | - y: (idxs, idxs, x.value.flatten(order='F'))} |
683 | | - return jacobian_dict |
684 | | - |
685 | 444 | def graph_implementation( |
686 | 445 | self, arg_objs, shape: Tuple[int, ...], data=None |
687 | 446 | ) -> Tuple[lo.LinOp, List[Constraint]]: |
@@ -799,14 +558,6 @@ def is_decr(self, idx) -> bool: |
799 | 558 | def point_in_domain(self): |
800 | 559 | return np.ones(self.args[1].shape) |
801 | 560 |
|
802 | | - def _verify_hess_vec_args(self): |
803 | | - raise RuntimeError("The _verify_hess_vec_args method of" |
804 | | - " the division atom should never be called.") |
805 | | - |
806 | | - def _hess_vec(self, vec): |
807 | | - raise RuntimeError("The hess_vec method of the division atom should never " |
808 | | - "be called.") |
809 | | - |
810 | 561 | def graph_implementation( |
811 | 562 | self, arg_objs, shape: Tuple[int, ...], data=None |
812 | 563 | ) -> Tuple[lo.LinOp, List[Constraint]]: |
|
0 commit comments