Skip to content

Commit 441c21a

Browse files
Transurgeon and claude authored
Remove Python _jacobian and _hess_vec oracle methods (#141)
These methods are now provided by the C-based diff_engine_core library. The NLP solver Oracles class uses c_problem from diff_engine, not the Python methods on atoms. Removed from base classes: - atom.py: jacobian(), hess_vec(), _jacobian(), _hess_vec(), _verify_jacobian_args(), _verify_hess_vec_args() - variable.py: jacobian(), hess_vec() - constant.py: jacobian(), hess_vec() Removed from 13 affine atoms, 10 elementwise atoms, and 4 non-elementwise atoms (pnorm, prod, quad_form, quad_over_lin). Deleted test files: - cvxpy/tests/nlp_tests/jacobian_tests/ (10 files) - cvxpy/tests/nlp_tests/hess_tests/ (10 files) Co-authored-by: Claude Opus 4.5 <noreply@anthropic.com>
1 parent 150f104 commit 441c21a

Some content is hidden

Large Commits have some content hidden by default. Use the searchbox below for content that may be hidden.

50 files changed

+4
-4652
lines changed

cvxpy/atoms/affine/add_expr.py

Lines changed: 0 additions & 79 deletions
Original file line numberDiff line numberDiff line change
@@ -18,7 +18,6 @@
1818
from typing import Any, Iterable, List, Tuple
1919

2020
import numpy as np
21-
from scipy.sparse import coo_matrix
2221

2322
import cvxpy.lin_ops.lin_op as lo
2423
import cvxpy.lin_ops.lin_utils as lu
@@ -156,81 +155,3 @@ def graph_implementation(
156155
if arg.shape != shape and lu.is_scalar(arg):
157156
arg_objs[i] = lu.promote(arg, shape)
158157
return (lu.sum_expr(arg_objs), [])
159-
160-
def _verify_hess_vec_args(self):
161-
return True
162-
163-
def _hess_vec(self, vec):
164-
"""
165-
Computes the merged Hessian-vector product dictionary for all arguments.
166-
If a key appears in several, their values are summed.
167-
"""
168-
hess_dict = {}
169-
keys_require_summing = []
170-
171-
for arg in self.args:
172-
if not arg.is_affine():
173-
arg_hess = arg.hess_vec(vec)
174-
for k, v in arg_hess.items():
175-
if k in hess_dict:
176-
hess_dict[k][0].extend(v[0])
177-
hess_dict[k][1].extend(v[1])
178-
hess_dict[k][2].extend(v[2])
179-
keys_require_summing.append(k)
180-
else:
181-
hess_dict[k] = ([], [], [])
182-
hess_dict[k][0].extend(np.atleast_1d(v[0]))
183-
hess_dict[k][1].extend(np.atleast_1d(v[1]))
184-
hess_dict[k][2].extend(np.atleast_1d(v[2]))
185-
186-
# sum duplicates
187-
for key in set(keys_require_summing):
188-
rows, cols, vals = hess_dict[key]
189-
shape = (key[0].size, key[0].size)
190-
coo = coo_matrix((vals, (rows, cols)), shape=shape)
191-
coo.sum_duplicates()
192-
hess_dict[key] = (coo.row, coo.col, coo.data)
193-
194-
# convert lists to arrays
195-
for k, v in hess_dict.items():
196-
rows, cols, vals = v
197-
hess_dict[k] = (np.array(rows), np.array(cols), np.array(vals))
198-
199-
return hess_dict
200-
201-
def _verify_jacobian_args(self):
202-
return True
203-
204-
def _jacobian(self):
205-
jacobian_dict = {}
206-
keys_require_summing = []
207-
208-
for arg in self.args:
209-
if not arg.is_constant():
210-
arg_jac = arg.jacobian()
211-
212-
for k, v in arg_jac.items():
213-
if k in jacobian_dict:
214-
jacobian_dict[k][0].extend(v[0])
215-
jacobian_dict[k][1].extend(v[1])
216-
jacobian_dict[k][2].extend(v[2])
217-
keys_require_summing.append(k)
218-
else:
219-
jacobian_dict[k] = ([], [], [])
220-
jacobian_dict[k][0].extend(np.atleast_1d(v[0]))
221-
jacobian_dict[k][1].extend(np.atleast_1d(v[1]))
222-
jacobian_dict[k][2].extend(np.atleast_1d(v[2]))
223-
224-
# sum duplicates
225-
for key in set(keys_require_summing):
226-
rows, cols, vals = jacobian_dict[key]
227-
coo = coo_matrix((vals, (rows, cols)), shape=(self.size, key.size))
228-
coo.sum_duplicates()
229-
jacobian_dict[key] = (coo.row, coo.col, coo.data)
230-
231-
# convert lists to arrays
232-
for k, v in jacobian_dict.items():
233-
rows, cols, vals = v
234-
jacobian_dict[k] = (np.array(rows), np.array(cols), np.array(vals))
235-
236-
return jacobian_dict

cvxpy/atoms/affine/affine_atom.py

Lines changed: 1 addition & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -169,7 +169,4 @@ def _grad(self, values) -> List[Any]:
169169
grad_list.append(grad_matrix[var_start:var_end, :])
170170
var_start = var_end
171171

172-
return grad_list
173-
174-
def _verify_jacobian_args(self):
175-
return True
172+
return grad_list

cvxpy/atoms/affine/binary_operators.py

Lines changed: 0 additions & 249 deletions
Original file line numberDiff line numberDiff line change
@@ -28,7 +28,6 @@
2828
from cvxpy.atoms.affine.affine_atom import AffAtom
2929
from cvxpy.atoms.affine.broadcast_to import broadcast_to
3030
from cvxpy.atoms.affine.conj import conj
31-
from cvxpy.atoms.affine.promote import Promote
3231
from cvxpy.atoms.affine.reshape import deep_flatten, reshape
3332
from cvxpy.atoms.affine.sum import sum as cvxpy_sum
3433
from cvxpy.constraints.constraint import Constraint
@@ -38,7 +37,6 @@
3837
is_param_free,
3938
)
4039
from cvxpy.expressions.expression import Expression
41-
from cvxpy.expressions.variable import Variable
4240
from cvxpy.utilities import bounds as bounds_utils
4341

4442

@@ -311,137 +309,6 @@ def _grad(self, values):
311309
DY = sp.kron(sp.eye_array(n), X, format='csc').T
312310

313311
return [DX, DY]
314-
315-
def _verify_hess_vec_args(self):
316-
X = self.args[0]
317-
Y = self.args[1]
318-
319-
# if X is an atom, Y must be constant
320-
if not isinstance(X, Variable) and not X.is_constant():
321-
if not Y.is_constant():
322-
return False
323-
324-
# if Y is an atom, X must be constant
325-
if not isinstance(Y, Variable) and not Y.is_constant():
326-
if not X.is_constant():
327-
return False
328-
329-
# if both are variables, check that they are not the same variable
330-
if isinstance(X, Variable) and isinstance(Y, Variable):
331-
if X.id == Y.id:
332-
return False
333-
334-
return True
335-
336-
def _hess_vec(self, vec):
337-
X = self.args[0]
338-
Y = self.args[1]
339-
340-
m, n = self.get_dimensions(X)
341-
_, p = self.get_dimensions(Y)
342-
343-
if X.is_constant():
344-
B = X.value.T @ np.reshape(vec, (m, p), order='F')
345-
hess_dict = Y.hess_vec(B.flatten(order='F'))
346-
return hess_dict
347-
348-
if Y.is_constant():
349-
B = np.reshape(vec, (m, p), order='F') @ Y.value.T
350-
hess_dict = X.hess_vec(B.flatten(order='F'))
351-
return hess_dict
352-
353-
# here both are variables by themselves so we only get a cross term
354-
rows = np.tile(np.arange(m * n), p)
355-
cols = np.repeat(np.arange(n * p), m)
356-
vals = vec[(cols // n) * m + (rows % m)]
357-
return {(X, Y): (rows, cols, vals), (Y, X): (cols, rows, vals)}
358-
359-
def _verify_jacobian_args(self):
360-
X = self.args[0]
361-
Y = self.args[1]
362-
363-
X_vars = X.variables()
364-
Y_vars = Y.variables()
365-
366-
# no variable can appear in both arguments
367-
for x_var in X_vars:
368-
for y_var in Y_vars:
369-
if x_var.id == y_var.id:
370-
return False
371-
372-
return True
373-
374-
def get_dimensions(self, X):
375-
"""Get the dimensions of X as (rows, cols).
376-
"""
377-
if len(X.shape) == 0:
378-
return (1, 1)
379-
elif len(X.shape) == 1:
380-
return (X.shape[0], 1)
381-
else:
382-
return X.shape
383-
384-
def _jacobian(self):
385-
"""
386-
The atom is phi(X, Y) = X @ Y. It is vectorized as
387-
z = vec(phi(X, Y)) = (I ⊗ X) vec(Y) = (Y.T ⊗ I) vec(X).
388-
Let x = vec(X) and y = vec(Y). Then the Jacobian is given by
389-
dz/dx = kron(Y.T, I) and dz/dy = kron(I, X).
390-
"""
391-
392-
X = self.args[0]
393-
Y = self.args[1]
394-
395-
m, _ = self.get_dimensions(X)
396-
_, p = self.get_dimensions(Y)
397-
398-
dx_dict = {}
399-
dy_dict = {}
400-
401-
if not X.is_constant():
402-
dx = sp.kron(Y.value.T, sp.eye(m), format='csr')
403-
404-
if not isinstance(X, Variable):
405-
X_jac_dict = X.jacobian()
406-
for var in X_jac_dict:
407-
rows, cols, vals = X_jac_dict[var]
408-
X_jac = sp.coo_array((vals, (rows, cols)),
409-
shape=(dx.shape[1], var.size)).tocsc()
410-
X_jac = (dx @ X_jac).tocoo()
411-
X_jac_dict[var] = (X_jac.row, X_jac.col, X_jac.data)
412-
413-
dx_dict = X_jac_dict
414-
else:
415-
dx = dx.tocoo()
416-
dx_dict = {X: (dx.row, dx.col, dx.data)}
417-
418-
419-
if not Y.is_constant():
420-
dy = sp.kron(sp.eye(p), X.value, format='csr')
421-
422-
if not isinstance(Y, Variable):
423-
Y_jac_dict = Y.jacobian()
424-
for var in Y_jac_dict:
425-
rows, cols, vals = Y_jac_dict[var]
426-
Y_jac = sp.coo_array((vals, (rows, cols)),
427-
shape=(dy.shape[1], var.size)).tocsc()
428-
Y_jac = (dy @ Y_jac).tocoo()
429-
Y_jac_dict[var] = (Y_jac.row, Y_jac.col, Y_jac.data)
430-
431-
dy_dict = Y_jac_dict
432-
else:
433-
dy = dy.tocoo()
434-
dy_dict = {Y: (dy.row, dy.col, dy.data)}
435-
436-
if X.is_constant() and not Y.is_constant():
437-
return dy_dict
438-
439-
if not X.is_constant() and Y.is_constant():
440-
return dx_dict
441-
442-
# merge the two dictionaries together
443-
dx_dict.update(dy_dict)
444-
return dx_dict
445312

446313
def graph_implementation(
447314
self, arg_objs, shape: Tuple[int, ...], data=None
@@ -574,114 +441,6 @@ def _grad(self, values):
574441

575442
return [DX, DY]
576443

577-
def _verify_hess_vec_args(self):
578-
x = self.args[0]
579-
y = self.args[1]
580-
if x.size != y.size:
581-
return False
582-
583-
if x.is_constant() and y.is_constant():
584-
return False
585-
586-
# one of the following must be true:
587-
# 1. both arguments are variables
588-
# 2. one argument is a constant
589-
# 3. one argument is a Promote of a variable and the other is a variable
590-
both_are_variables = isinstance(x, Variable) and isinstance(y, Variable)
591-
one_is_constant = x.is_constant() or y.is_constant()
592-
x_is_promote = type(x) == Promote and isinstance(y, Variable)
593-
y_is_promote = type(y) == Promote and isinstance(x, Variable)
594-
595-
if not (both_are_variables or one_is_constant or x_is_promote or y_is_promote):
596-
return False
597-
598-
if both_are_variables and x.id == y.id:
599-
return False
600-
601-
return True
602-
603-
def _hess_vec(self, vec):
604-
x = self.args[0]
605-
y = self.args[1]
606-
607-
# constant * atom
608-
if x.is_constant():
609-
y_hess_vec = y.hess_vec(x.value.flatten(order='F') * vec)
610-
return y_hess_vec
611-
612-
# atom * constant
613-
if y.is_constant():
614-
x_hess_vec = x.hess_vec(y.value.flatten(order='F') * vec)
615-
return x_hess_vec
616-
617-
# x * y with x a scalar variable, y a vector variable
618-
if not isinstance(x, Variable) and x.is_affine():
619-
assert(type(x) == Promote)
620-
x_var = x.args[0] # here x is a Promote because of how we canonicalize
621-
zeros_x = np.zeros(x_var.size, dtype=int)
622-
cols = np.arange(y.size, dtype=int)
623-
return {(x_var, y): (zeros_x, cols, vec),
624-
(y, x_var): (cols, zeros_x, vec)}
625-
626-
# x * y with x a vector variable, y a scalar
627-
if not isinstance(y, Variable) and y.is_affine():
628-
assert(type(y) == Promote)
629-
y_var = y.args[0] # here y is a Promote because of how we canonicalize
630-
zeros_y = np.zeros(y_var.size, dtype=int)
631-
cols = np.arange(x.size, dtype=int)
632-
return {(x, y_var): (cols, zeros_y, vec),
633-
(y_var, x): (zeros_y, cols, vec)}
634-
635-
# if we arrive here both arguments are variables of the same size
636-
rows = np.arange(x.size, dtype=int)
637-
cols = np.arange(x.size, dtype=int)
638-
return {(x, y): (rows, cols, vec), (y, x): (rows, cols, vec)}
639-
640-
def _verify_jacobian_args(self):
641-
return self._verify_hess_vec_args()
642-
643-
644-
def _jacobian(self):
645-
x = self.args[0]
646-
y = self.args[1]
647-
648-
if x.is_constant():
649-
dy = y.jacobian()
650-
for k in dy:
651-
rows, cols, vals = dy[k]
652-
# this is equivalent to forming the matrix defined
653-
# rows, cols, vals and scaling each row i by y.value[i]
654-
dy[k] = (rows, cols, np.atleast_1d(x.value).flatten(order='F')[rows] * vals)
655-
return dy
656-
657-
if y.is_constant():
658-
dx = x.jacobian()
659-
for k in dx:
660-
rows, cols, vals = dx[k]
661-
dx[k] = (rows, cols, np.atleast_1d(y.value).flatten(order='F')[rows] * vals)
662-
return dx
663-
664-
if not isinstance(x, Variable) and x.is_affine():
665-
assert(type(x) == Promote)
666-
x_var = x.args[0] # here x is a Promote because of how we canonicalize
667-
idxs = np.arange(y.size, dtype=int)
668-
return {(x_var): (idxs, np.zeros(y.size, dtype=int), y.value),
669-
(y): (idxs, idxs, x.value)}
670-
671-
# x * y with x a vector variable, y a scalar
672-
if not isinstance(y, Variable) and y.is_affine():
673-
assert(type(y) == Promote)
674-
y_var = y.args[0] # here y is a Promote because of how we canonicalize
675-
idxs = np.arange(x.size, dtype=int)
676-
return {(x): (idxs, idxs, y.value),
677-
(y_var): (idxs, np.zeros(x.size, dtype=int), x.value)}
678-
679-
# here both are variables
680-
idxs = np.arange(x.size, dtype=int)
681-
jacobian_dict = {x: (idxs, idxs, y.value.flatten(order='F')),
682-
y: (idxs, idxs, x.value.flatten(order='F'))}
683-
return jacobian_dict
684-
685444
def graph_implementation(
686445
self, arg_objs, shape: Tuple[int, ...], data=None
687446
) -> Tuple[lo.LinOp, List[Constraint]]:
@@ -799,14 +558,6 @@ def is_decr(self, idx) -> bool:
799558
def point_in_domain(self):
800559
return np.ones(self.args[1].shape)
801560

802-
def _verify_hess_vec_args(self):
803-
raise RuntimeError("The _verify_hess_vec_args method of"
804-
" the division atom should never be called.")
805-
806-
def _hess_vec(self, vec):
807-
raise RuntimeError("The hess_vec method of the division atom should never "
808-
"be called.")
809-
810561
def graph_implementation(
811562
self, arg_objs, shape: Tuple[int, ...], data=None
812563
) -> Tuple[lo.LinOp, List[Constraint]]:

0 commit comments

Comments (0)