Skip to content

Commit 25d65da

Browse files
[WIP] Left matmul refactor (#149)
* clarified 0-iteration termination * added tests * removed print statements * triggered CI --------- Co-authored-by: William Zijie Zhang <william@gridmatic.com>
1 parent da4ca72 commit 25d65da

File tree

5 files changed

+98
-11
lines changed

5 files changed

+98
-11
lines changed

cvxpy/reductions/solvers/nlp_solvers/ipopt_nlpif.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -182,7 +182,7 @@ def solve_via_data(self, data, warm_start: bool, verbose: bool, solver_opts, sol
182182

183183
_, info = nlp.solve(data["x0"])
184184

185-
if oracles.iterations == 0:
185+
if oracles.iterations == 0 and info['status'] == s.OPTIMAL:
186186
print("Warning: IPOPT returned after 0 iterations. This may indicate that\n"
187187
"the initial point passed to Ipopt is a stationary point, and it is\n"
188188
"quite unlikely that the initial point is also a local minimizer. \n"

cvxpy/reductions/solvers/nlp_solvers/nlp_solver.py

Lines changed: 0 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -418,7 +418,6 @@ def lagrangian_gradient(x_eval):
418418

419419
def check_objective_value(self, x=None):
420420
""" Compare objective value from C implementation with Python implementation. """
421-
print("Checking objective value...")
422421
if x is None:
423422
x = self.x0
424423

@@ -443,7 +442,6 @@ def check_gradient(self, x=None, epsilon=1e-8):
443442
""" Compare C-based gradient with numerical approximation using finite differences. """
444443
if x is None:
445444
x = self.x0
446-
print("Checking gradient...")
447445
# Get gradient from C implementation
448446
self.c_problem.objective_forward(x)
449447
c_grad = self.c_problem.gradient()
@@ -474,10 +472,8 @@ def objective_func(x_eval):
474472
def run(self, x=None):
475473
""" Run all derivative checks (constraints, Jacobian, and Hessian). """
476474

477-
print("initializing derivatives for derivative checking...")
478475
self.c_problem.init_jacobian()
479476
self.c_problem.init_hessian()
480-
print("done initializing derivatives.")
481477
objective_result = self.check_objective_value(x)
482478
gradient_result = self.check_gradient(x)
483479
constraints_result = self.check_constraint_values()

cvxpy/tests/nlp_tests/stress_tests_diff_engine/test_affine_matrix_atoms.py

Lines changed: 24 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -147,3 +147,27 @@ def test_three_diag_vec(self):
147147
checker = DerivativeChecker(prob)
148148
checker.run_and_assert()
149149

150+
def test_one_left_matmul(self):
151+
np.random.seed(0)
152+
Y = cp.Variable((15, 5), bounds=[0.5, 1])
153+
X = cp.Variable((15, 5), bounds=[0.5, 1])
154+
A = np.random.rand(5, 15)
155+
obj = cp.Minimize(cp.Trace(A @ (cp.log(Y) - 3 * cp.log(X))))
156+
constr =[]
157+
prob = cp.Problem(obj, constr)
158+
prob.solve(solver=cp.IPOPT, nlp=True, verbose=False)
159+
checker = DerivativeChecker(prob)
160+
checker.run_and_assert()
161+
162+
def test_two_left_matmul(self):
163+
np.random.seed(0)
164+
Y = cp.Variable((15, 5), bounds=[0.5, 1])
165+
X = cp.Variable((15, 5), bounds=[0.5, 1])
166+
A = np.random.rand(5, 15)
167+
obj = cp.Minimize(cp.Trace(A @ (cp.log(Y) - 3 * cp.log(X))))
168+
constr = [A @ Y <= 2 * A @ X]
169+
prob = cp.Problem(obj, constr)
170+
prob.solve(solver=cp.IPOPT, nlp=True, verbose=False)
171+
checker = DerivativeChecker(prob)
172+
checker.run_and_assert()
173+

cvxpy/tests/nlp_tests/stress_tests_diff_engine/test_quad_form.py

Lines changed: 50 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -10,6 +10,50 @@
1010
class TestQuadFormDifferentFormats:
1111

1212
def test_quad_form_dense_sparse_sparse(self):
13+
# Generate a random non-trivial quadratic program.
14+
m = 15
15+
n = 10
16+
p = 5
17+
np.random.seed(1)
18+
P = np.random.randn(n, n)
19+
P = P.T @ P
20+
q = np.random.randn(n)
21+
G = np.random.randn(m, n)
22+
h = G @ np.random.randn(n, 1)
23+
A = np.random.randn(p, n)
24+
b = np.random.randn(p, 1)
25+
x = cp.Variable((n, 1))
26+
27+
constraints = [G @ x <= h,
28+
A @ x == b]
29+
30+
# dense problem
31+
x.value = None
32+
prob = cp.Problem(cp.Minimize((1/2)*cp.quad_form(x, P) + q.T @ x),
33+
constraints)
34+
prob.solve(nlp=True, verbose=False)
35+
dense_val = x.value
36+
37+
# CSR problem
38+
x.value = None
39+
P_csr = sp.csr_matrix(P)
40+
prob = cp.Problem(cp.Minimize((1/2)*cp.quad_form(x, P_csr) + q.T @ x),
41+
constraints)
42+
prob.solve(nlp=True, verbose=False)
43+
csr_val = x.value
44+
45+
# CSC problem
46+
x.value = None
47+
P_csc = sp.csc_matrix(P)
48+
prob = cp.Problem(cp.Minimize((1/2)*cp.quad_form(x, P_csc) + q.T @ x),
49+
constraints)
50+
prob.solve(nlp=True, verbose=False)
51+
csc_val = x.value
52+
53+
assert np.allclose(dense_val, csr_val)
54+
assert np.allclose(dense_val, csc_val)
55+
56+
def test_quad_form_dense_sparse_sparse_different_x(self):
1357
# Generate a random non-trivial quadratic program.
1458
m = 15
1559
n = 10
@@ -22,27 +66,28 @@ def test_quad_form_dense_sparse_sparse(self):
2266
h = G @ np.random.randn(n)
2367
A = np.random.randn(p, n)
2468
b = np.random.randn(p)
69+
x = cp.Variable(n)
2570

26-
constraints = [G @ cp.Variable(n) <= h,
27-
A @ cp.Variable(n) == b]
71+
constraints = [G @ x <= h,
72+
A @ x == b]
2873

2974
# dense problem
30-
x = cp.Variable(n)
75+
x.value = None
3176
prob = cp.Problem(cp.Minimize((1/2)*cp.quad_form(x, P) + q.T @ x),
3277
constraints)
3378
prob.solve(nlp=True, verbose=False)
3479
dense_val = x.value
3580

3681
# CSR problem
37-
x = cp.Variable(n)
82+
x.value = None
3883
P_csr = sp.csr_matrix(P)
3984
prob = cp.Problem(cp.Minimize((1/2)*cp.quad_form(x, P_csr) + q.T @ x),
4085
constraints)
4186
prob.solve(nlp=True, verbose=False)
4287
csr_val = x.value
4388

4489
# CSC problem
45-
x = cp.Variable(n)
90+
x.value = None
4691
P_csc = sp.csc_matrix(P)
4792
prob = cp.Problem(cp.Minimize((1/2)*cp.quad_form(x, P_csc) + q.T @ x),
4893
constraints)

cvxpy/tests/nlp_tests/test_broadcast.py

Lines changed: 23 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -54,4 +54,26 @@ def test_column_broadcast(self):
5454
assert(np.allclose(x.value.flatten(), np.mean(A, axis=1)))
5555

5656
checker = DerivativeChecker(problem)
57-
checker.run_and_assert()
57+
checker.run_and_assert()
58+
59+
def test_subtle_broadcast1(self):
60+
n = 5
61+
x = cp.Variable((n, 1))
62+
b = np.ones(n)
63+
constraints = [cp.log(x) == b]
64+
x.value = np.random.rand(n, 1) + 0.1
65+
66+
prob = cp.Problem(cp.Minimize(0), constraints)
67+
checker = DerivativeChecker(prob)
68+
checker.run_and_assert()
69+
70+
def test_subtle_broadcast2(self):
71+
n = 5
72+
x = cp.Variable((n, 1))
73+
b = np.ones((1, n))
74+
constraints = [cp.log(x) == b]
75+
x.value = np.random.rand(n, 1) + 0.1
76+
77+
prob = cp.Problem(cp.Minimize(0), constraints)
78+
checker = DerivativeChecker(prob)
79+
checker.run_and_assert()

0 commit comments

Comments (0)