Skip to content

Commit 0c4f10e

Browse files
committed
Merge upstream cvxpy/cvxpy master (includes Parameter attribute handling cvxpy#3146)
# Conflicts:
#   cvxpy/reductions/cvx_attr2constr.py
2 parents 25d65da + 33fe669 commit 0c4f10e

File tree

14 files changed

+540
-182
lines changed

14 files changed

+540
-182
lines changed

cvxpy/cvxcore/python/canonInterface.py

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -47,7 +47,9 @@ def get_parameter_vector(param_size,
4747
-------
4848
A flattened NumPy array of parameter values, of length param_size + 1
4949
"""
50-
#TODO handle parameters with structure.
50+
# Parameters with dim-reducing attributes (sparsity, diag, symmetric, PSD,
51+
# NSD) are handled by CvxAttr2Constr, which replaces them with reduced-size
52+
# parameters before this point.
5153
if param_size == 0:
5254
return None
5355
param_vec = np.zeros(param_size + 1)

cvxpy/expressions/leaf.py

Lines changed: 32 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -166,6 +166,7 @@ def __init__(
166166
"Sparsity and diag attributes force zeros, which contradicts "
167167
"strict positivity/negativity."
168168
)
169+
self._leaf_of_provenance = None
169170
self.args = []
170171
self.bounds = self._ensure_valid_bounds(bounds)
171172
self.attributes['bounds'] = self.bounds
@@ -514,7 +515,7 @@ def save_value(self, val, sparse_path=False) -> None:
514515
if val is None:
515516
self._value = None
516517
elif self.sparse_idx is not None and not sparse_path:
517-
self._value = sp.coo_array((val[self.sparse_idx], self.sparse_idx), shape=self.shape)
518+
self._value = val[self.sparse_idx]
518519
elif self.sparse_idx is not None and sparse_path:
519520
self._value = val.data
520521
else:
@@ -531,7 +532,7 @@ def value(self) -> Optional[np.ndarray]:
531532
if self._value is None:
532533
return None
533534
val = np.zeros(self.shape, dtype=self._value.dtype)
534-
val[self.sparse_idx] = self._value.data
535+
val[self.sparse_idx] = self._value
535536
return val
536537

537538
@value.setter
@@ -546,10 +547,7 @@ def value_sparse(self) -> Optional[...]:
546547
"""The numeric value of the expression if it is a sparse variable."""
547548
if self._value is None:
548549
return None
549-
if isinstance(self._value, np.ndarray):
550-
return sp.coo_array((self._value, self.sparse_idx), shape=self.shape)
551-
else:
552-
return self._value
550+
return sp.coo_array((self._value, self.sparse_idx), shape=self.shape)
553551

554552
@value_sparse.setter
555553
def value_sparse(self, val) -> None:
@@ -719,6 +717,34 @@ def is_dpp(self, context: str = 'dcp') -> bool:
719717
def atoms(self) -> list[Atom]:
720718
return []
721719

720+
def attributes_were_lowered(self) -> bool:
721+
"""True iff this leaf was generated when lowering a leaf with attributes."""
722+
return self._leaf_of_provenance is not None
723+
724+
def set_leaf_of_provenance(self, leaf: Leaf) -> None:
725+
assert leaf.attributes
726+
self._leaf_of_provenance = leaf
727+
728+
def leaf_of_provenance(self) -> Leaf | None:
729+
"""Returns a leaf with attributes from which this leaf was generated."""
730+
return self._leaf_of_provenance
731+
732+
@property
733+
def _has_dim_reducing_attr(self) -> bool:
734+
return (self.sparse_idx is not None or self.attributes['diag'] or
735+
self.attributes['symmetric'] or self.attributes['PSD'] or
736+
self.attributes['NSD'])
737+
738+
@property
739+
def _reduced_size(self) -> int:
740+
if self.sparse_idx is not None:
741+
return len(self.sparse_idx[0])
742+
elif self.attributes['diag']:
743+
return self.shape[0]
744+
elif self.attributes['symmetric'] or self.attributes['PSD'] or self.attributes['NSD']:
745+
return self.shape[0] * (self.shape[0] + 1) // 2
746+
return self.size
747+
722748
def _validate_sparse_bound(self, val):
723749
"""Validate a single sparse bound entry.
724750

cvxpy/expressions/variable.py

Lines changed: 4 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -48,7 +48,6 @@ def __init__(
4848
else:
4949
raise TypeError("Variable name %s must be a string." % name)
5050

51-
self._variable_with_attributes: Variable | None = None
5251
self._value = None
5352
self.delta = None
5453
self.gradient = None
@@ -118,17 +117,13 @@ def canonicalize(self) -> Tuple[Expression, list[Constraint]]:
118117
obj = lu.create_var(self.shape, self.id)
119118
return (obj, [])
120119

121-
def attributes_were_lowered(self) -> bool:
122-
"""True iff variable generated when lowering a variable with attributes."""
123-
return self._variable_with_attributes is not None
124-
125120
def set_variable_of_provenance(self, variable: Variable) -> None:
126-
assert variable.attributes
127-
self._variable_with_attributes = variable
121+
"""Deprecated: use set_leaf_of_provenance instead."""
122+
self.set_leaf_of_provenance(variable)
128123

129124
def variable_of_provenance(self) -> Optional[Variable]:
130-
"""Returns a variable with attributes from which this variable was generated."""
131-
return self._variable_with_attributes
125+
"""Deprecated: use leaf_of_provenance instead."""
126+
return self.leaf_of_provenance()
132127

133128
def __repr__(self) -> str:
134129
"""String to recreate the variable."""

cvxpy/problems/problem.py

Lines changed: 11 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -1325,13 +1325,17 @@ def backward(self) -> None:
13251325
# reductions that transform parameters (e.g., DGP log transform,
13261326
# Complex2Real split into real/imag).
13271327
for param in self.parameters():
1328-
# Start with the direct gradient if the param passed through unchanged
1329-
grad = 0.0 if param.id not in dparams else dparams[param.id]
1328+
grad = 0.0
1329+
handled = False
13301330
# Apply chain rule through any reductions that transformed this param
13311331
for reduction in self._cache.solving_chain.reductions:
13321332
reduction_grad = reduction.param_backward(param, dparams)
13331333
if reduction_grad is not None:
13341334
grad = grad + reduction_grad
1335+
handled = True
1336+
# Fall back to the direct gradient if no reduction transformed this param
1337+
if not handled and param.id in dparams:
1338+
grad = dparams[param.id]
13351339
param.gradient = grad
13361340

13371341
def derivative(self) -> None:
@@ -1401,14 +1405,16 @@ def derivative(self) -> None:
14011405
# Complex2Real split into real/imag).
14021406
for param in self.parameters():
14031407
delta = param.delta if param.delta is not None else np.zeros(param.shape)
1404-
# If param passed through unchanged, add its delta directly
1405-
if param.id in param_prog.param_id_to_col:
1406-
param_deltas[param.id] = np.asarray(delta, dtype=np.float64)
1408+
handled = False
14071409
# Apply chain rule through any reductions that transformed this param
14081410
for reduction in self._cache.solving_chain.reductions:
14091411
transformed_deltas = reduction.param_forward(param, delta)
14101412
if transformed_deltas is not None:
14111413
param_deltas.update(transformed_deltas)
1414+
handled = True
1415+
# If no reduction transformed this param, add its delta directly
1416+
if not handled and param.id in param_prog.param_id_to_col:
1417+
param_deltas[param.id] = np.asarray(delta, dtype=np.float64)
14121418
dc, _, dA, db = param_prog.apply_parameters(param_deltas,
14131419
zero_offset=True)
14141420
start = time.time()

cvxpy/reductions/cvx_attr2constr.py

Lines changed: 103 additions & 43 deletions
Original file line numberDiff line numberDiff line change
@@ -23,6 +23,7 @@
2323
from cvxpy.atoms.affine.upper_tri import upper_tri_to_full
2424
from cvxpy.expressions import cvxtypes
2525
from cvxpy.expressions.constants import Constant
26+
from cvxpy.expressions.constants.parameter import Parameter
2627
from cvxpy.expressions.variable import Variable
2728
from cvxpy.reductions.reduction import Reduction
2829
from cvxpy.reductions.solution import Solution
@@ -73,7 +74,7 @@ def attributes_present(variables, attr_map) -> list[str]:
7374
in variables)]
7475

7576

76-
def recover_value_for_variable(variable, lowered_value, project: bool = True):
77+
def recover_value_for_leaf(variable, lowered_value, project: bool = True):
7778
if variable.attributes['diag']:
7879
return sp.diags_array(lowered_value.flatten(order='F'))
7980
elif attributes_present([variable], SYMMETRIC_ATTRIBUTES):
@@ -93,21 +94,66 @@ def recover_value_for_variable(variable, lowered_value, project: bool = True):
9394
return lowered_value
9495

9596

96-
def lower_value(variable, value) -> np.ndarray:
97+
def lower_value(variable, value=None) -> np.ndarray:
98+
"""Extract the reduced representation of a leaf's value.
99+
100+
Args:
101+
variable: The leaf whose attributes determine the reduction.
102+
value: If provided, a full-size value (e.g. a differentiation delta)
103+
to reduce. If ``None``, reads the leaf's stored ``_value``.
104+
105+
Notes:
106+
Called without *value* by ``update_parameters`` and ``apply`` to read
107+
the current parameter value into the reduced parameter. Called *with*
108+
an explicit value by ``param_forward`` to reduce a full-size delta.
109+
110+
For sparse leaves ``Leaf.save_value`` already stores only the nonzero
111+
entries, so when ``value is None`` the sparse branch can return
112+
``_value`` directly. An explicit *value* is always full-size and must
113+
be extracted at the sparse indices.
114+
"""
115+
# Track whether the caller supplied a full-size value. When value is None
116+
# we read _value, which for sparse leaves is already in reduced form.
117+
full_size = value is not None
118+
if value is None:
119+
value = variable._value
97120
if attributes_present([variable], SYMMETRIC_ATTRIBUTES):
98121
return value[np.triu_indices(variable.shape[0])]
99122
elif variable.attributes['diag']:
100123
return np.diag(value)
124+
elif variable.attributes['sparsity']:
125+
if full_size:
126+
return np.asarray(value)[variable.sparse_idx]
127+
else:
128+
# _value already stores only the nonzero data (see Leaf.save_value).
129+
return np.asarray(value)
101130
else:
102131
return value
103132

104133

134+
def build_dim_reduced_expression(leaf, reduced_leaf):
135+
"""Build Expression that reconstructs full shape from a reduced-size leaf."""
136+
if attributes_present([leaf], SYMMETRIC_ATTRIBUTES):
137+
n = leaf.shape[0]
138+
return reshape(Constant(upper_tri_to_full(n)) @ reduced_leaf, (n, n), order='F')
139+
elif leaf.sparse_idx is not None:
140+
n = len(leaf.sparse_idx[0])
141+
row_idx = np.ravel_multi_index(leaf.sparse_idx, leaf.shape, order='F')
142+
coeff = Constant(sp.csc_array((np.ones(n), (row_idx, np.arange(n))),
143+
shape=(np.prod(leaf.shape, dtype=int), n)), name="sparse_coeff")
144+
return reshape(coeff @ reduced_leaf, leaf.shape, order='F')
145+
elif leaf.attributes['diag']:
146+
return diag(reduced_leaf)
147+
return reduced_leaf
148+
149+
105150
class CvxAttr2Constr(Reduction):
106151
"""Expand convex variable attributes into constraints."""
107152

108153
def __init__(self, problem=None, reduce_bounds: bool = False) -> None:
109154
"""If reduce_bounds, reduce lower and upper bounds on variables."""
110155
self.reduce_bounds = reduce_bounds
156+
self._parameters = {} # {orig_param: reduced_param}
111157
super(CvxAttr2Constr, self).__init__(problem=problem)
112158

113159
def reduction_attributes(self) -> List[str]:
@@ -123,7 +169,9 @@ def accepts(self, problem) -> bool:
123169
return True
124170

125171
def apply(self, problem):
126-
if not attributes_present(problem.variables(), CONVEX_ATTRIBUTES):
172+
has_var_attrs = attributes_present(problem.variables(), CONVEX_ATTRIBUTES)
173+
has_param_attrs = any(p._has_dim_reducing_attr for p in problem.parameters())
174+
if not has_var_attrs and not has_param_attrs:
127175
return problem, ()
128176

129177
# The attributes to be reduced.
@@ -144,31 +192,21 @@ def apply(self, problem):
144192
new_var = True
145193
new_attr[key] = None if key == 'bounds' else False
146194

147-
if attributes_present([var], SYMMETRIC_ATTRIBUTES):
148-
n = var.shape[0]
149-
shape = (n*(n+1)//2, 1)
150-
upper_tri = Variable(shape, var_id=var.id, **new_attr)
151-
upper_tri.set_variable_of_provenance(var)
152-
id2new_var[var.id] = upper_tri
153-
fill_coeff = Constant(upper_tri_to_full(n))
154-
full_mat = fill_coeff @ upper_tri
155-
obj = reshape(full_mat, (n, n), order='F')
156-
elif var.attributes['sparsity']:
157-
n = len(var.sparse_idx[0])
158-
159-
# Transform bounds for reduced variable if bounds are not being reduced
160-
if 'bounds' not in reduction_attributes and new_attr.get('bounds'):
195+
if var._has_dim_reducing_attr:
196+
n = var._reduced_size
197+
198+
# Transform bounds for sparse reduced variable
199+
if var.attributes['sparsity'] and \
200+
'bounds' not in reduction_attributes and new_attr.get('bounds'):
161201
bounds = new_attr['bounds']
162202
transformed_bounds = []
163203
for bound in bounds:
164204
if sp.issparse(bound):
165-
# Extract data from sparse bound
166-
# (already validated to match sparsity)
167205
coo = sp.coo_array(bound)
168206
coo.sum_duplicates()
169207
transformed_bounds.append(coo.data)
170-
elif np.isscalar(bound) or (hasattr(bound, 'ndim') and bound.ndim == 0):
171-
# Scalar bounds - keep as-is
208+
elif np.isscalar(bound) or (
209+
hasattr(bound, 'ndim') and bound.ndim == 0):
172210
transformed_bounds.append(bound)
173211
else:
174212
raise ValueError(
@@ -177,29 +215,13 @@ def apply(self, problem):
177215
)
178216
new_attr['bounds'] = transformed_bounds
179217

180-
sparse_var = Variable(n, var_id=var.id, **new_attr)
181-
if var.value_sparse is not None:
182-
sparse_var.value = var.value_sparse.data
183-
sparse_var.set_variable_of_provenance(var)
184-
id2new_var[var.id] = sparse_var
185-
row_idx = np.ravel_multi_index(var.sparse_idx, var.shape, order='F')
186-
col_idx = np.arange(n)
187-
coeff_matrix = Constant(sp.csc_array((np.ones(n), (row_idx, col_idx)),
188-
shape=(np.prod(var.shape, dtype=int), n)),
189-
name="sparse_coeff")
190-
obj = reshape(coeff_matrix @ sparse_var, var.shape, order='F')
191-
elif var.attributes['diag']:
192-
diag_var = Variable(var.shape[0], var_id=var.id, **new_attr)
193-
if var.value is not None and sp.issparse(var.value):
194-
diag_var.value = var.value.diagonal()
195-
elif var.value is not None:
196-
diag_var.value = np.diag(var.value)
197-
diag_var.set_variable_of_provenance(var)
198-
id2new_var[var.id] = diag_var
199-
obj = diag(diag_var)
218+
reduced_var = Variable(n, var_id=var.id, **new_attr)
219+
reduced_var.set_leaf_of_provenance(var)
220+
id2new_var[var.id] = reduced_var
221+
obj = build_dim_reduced_expression(var, reduced_var)
200222
elif new_var:
201223
obj = Variable(var.shape, var_id=var.id, **new_attr)
202-
obj.set_variable_of_provenance(var)
224+
obj.set_leaf_of_provenance(var)
203225
id2new_var[var.id] = obj
204226
else:
205227
obj = var
@@ -215,6 +237,23 @@ def apply(self, problem):
215237
if self.reduce_bounds:
216238
var._bound_domain(obj, constr)
217239

240+
# For each unique parameter with dim-reducing attributes, create a
241+
# reduced parameter and a reconstruction expression.
242+
for param in problem.parameters():
243+
if param._has_dim_reducing_attr and id(param) not in id2new_obj:
244+
n = param._reduced_size
245+
new_attr = param.attributes.copy()
246+
for key in reduction_attributes:
247+
if new_attr[key]:
248+
new_attr[key] = None if key == 'bounds' else False
249+
reduced_param = Parameter(n, id=param.id, **new_attr)
250+
reduced_param.set_leaf_of_provenance(param)
251+
self._parameters[param] = reduced_param
252+
if param.value is not None:
253+
reduced_param.value = lower_value(param)
254+
obj = build_dim_reduced_expression(param, reduced_param)
255+
id2new_obj[id(param)] = obj
256+
218257
# Create new problem.
219258
obj = problem.objective.tree_copy(id_objects=id2new_obj)
220259
cons_id_map = {}
@@ -224,6 +263,27 @@ def apply(self, problem):
224263
inverse_data = (id2new_var, id2old_var, cons_id_map)
225264
return cvxtypes.problem()(obj, constr), inverse_data
226265

266+
def update_parameters(self, problem) -> None:
267+
"""Update reduced parameter values from original parameters."""
268+
for param, reduced_param in self._parameters.items():
269+
reduced_param.value = lower_value(param)
270+
271+
def param_backward(self, param, dparams):
272+
"""Recover full-size gradient from reduced-size gradient."""
273+
if param not in self._parameters:
274+
return None
275+
reduced_param = self._parameters[param]
276+
if reduced_param.id not in dparams:
277+
return None
278+
return recover_value_for_leaf(param, dparams[reduced_param.id])
279+
280+
def param_forward(self, param, delta):
281+
"""Transform full-size delta to reduced-size delta."""
282+
if param not in self._parameters:
283+
return None
284+
reduced_param = self._parameters[param]
285+
return {reduced_param.id: lower_value(param, delta)}
286+
227287
def invert(self, solution, inverse_data) -> Solution:
228288
if not inverse_data:
229289
return solution
@@ -233,7 +293,7 @@ def invert(self, solution, inverse_data) -> Solution:
233293
for id, var in id2old_var.items():
234294
new_var = id2new_var[id]
235295
if new_var.id in solution.primal_vars:
236-
pvars[id] = recover_value_for_variable(
296+
pvars[id] = recover_value_for_leaf(
237297
var, solution.primal_vars[new_var.id])
238298

239299
dvars = {orig_id: solution.dual_vars[vid]

0 commit comments

Comments (0)