Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion cvxpy/reductions/solvers/nlp_solvers/ipopt_nlpif.py
Original file line number Diff line number Diff line change
Expand Up @@ -182,7 +182,7 @@ def solve_via_data(self, data, warm_start: bool, verbose: bool, solver_opts, sol

_, info = nlp.solve(data["x0"])

if oracles.iterations == 0:
if oracles.iterations == 0 and info['status'] == s.OPTIMAL:
print("Warning: IPOPT returned after 0 iterations. This may indicate that\n"
"the initial point passed to Ipopt is a stationary point, and it is\n"
"quite unlikely that the initial point is also a local minimizer. \n"
Expand Down
4 changes: 0 additions & 4 deletions cvxpy/reductions/solvers/nlp_solvers/nlp_solver.py
Original file line number Diff line number Diff line change
Expand Up @@ -418,7 +418,6 @@ def lagrangian_gradient(x_eval):

def check_objective_value(self, x=None):
""" Compare objective value from C implementation with Python implementation. """
print("Checking objective value...")
if x is None:
x = self.x0

Expand All @@ -443,7 +442,6 @@ def check_gradient(self, x=None, epsilon=1e-8):
""" Compare C-based gradient with numerical approximation using finite differences. """
if x is None:
x = self.x0
print("Checking gradient...")
# Get gradient from C implementation
self.c_problem.objective_forward(x)
c_grad = self.c_problem.gradient()
Expand Down Expand Up @@ -474,10 +472,8 @@ def objective_func(x_eval):
def run(self, x=None):
""" Run all derivative checks (constraints, Jacobian, and Hessian). """

print("initializing derivatives for derivative checking...")
self.c_problem.init_jacobian()
self.c_problem.init_hessian()
print("done initializing derivatives.")
objective_result = self.check_objective_value(x)
gradient_result = self.check_gradient(x)
constraints_result = self.check_constraint_values()
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -147,3 +147,27 @@ def test_three_diag_vec(self):
checker = DerivativeChecker(prob)
checker.run_and_assert()

def test_one_left_matmul(self):
    """Derivative check for a left matmul applied to an elementwise objective.

    Two bounded matrix variables enter the objective through a single
    left multiplication by a random matrix; there are no constraints.
    """
    np.random.seed(0)
    Y = cp.Variable((15, 5), bounds=[0.5, 1])
    X = cp.Variable((15, 5), bounds=[0.5, 1])
    A = np.random.rand(5, 15)
    objective = cp.Minimize(cp.Trace(A @ (cp.log(Y) - 3 * cp.log(X))))
    problem = cp.Problem(objective, [])
    problem.solve(solver=cp.IPOPT, nlp=True, verbose=False)
    # Compare analytic derivatives against finite differences.
    DerivativeChecker(problem).run_and_assert()

def test_two_left_matmul(self):
    """Derivative check for left matmuls in both objective and constraint.

    Same objective as the one-matmul test, plus a linear matrix
    inequality coupling the two variables through the same multiplier.
    """
    np.random.seed(0)
    Y = cp.Variable((15, 5), bounds=[0.5, 1])
    X = cp.Variable((15, 5), bounds=[0.5, 1])
    A = np.random.rand(5, 15)
    objective = cp.Minimize(cp.Trace(A @ (cp.log(Y) - 3 * cp.log(X))))
    problem = cp.Problem(objective, [A @ Y <= 2 * A @ X])
    problem.solve(solver=cp.IPOPT, nlp=True, verbose=False)
    # Compare analytic derivatives against finite differences.
    DerivativeChecker(problem).run_and_assert()

55 changes: 50 additions & 5 deletions cvxpy/tests/nlp_tests/stress_tests_diff_engine/test_quad_form.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,50 @@
class TestQuadFormDifferentFormats:

def test_quad_form_dense_sparse_sparse(self):
    """Solving the same QP with dense, CSR, and CSC ``P`` must agree.

    Builds one random non-trivial quadratic program and solves it three
    times, varying only the storage format of the quadratic-form matrix;
    the optimal point must be identical in every case.
    """
    m, n, p = 15, 10, 5
    np.random.seed(1)
    P = np.random.randn(n, n)
    P = P.T @ P  # symmetrize so the quadratic form is PSD
    q = np.random.randn(n)
    G = np.random.randn(m, n)
    h = G @ np.random.randn(n, 1)
    A = np.random.randn(p, n)
    b = np.random.randn(p, 1)
    x = cp.Variable((n, 1))

    constraints = [G @ x <= h, A @ x == b]

    def solve_with(P_mat):
        # Solve the QP with the given representation of P; return x*.
        x.value = None
        problem = cp.Problem(
            cp.Minimize((1 / 2) * cp.quad_form(x, P_mat) + q.T @ x),
            constraints,
        )
        problem.solve(nlp=True, verbose=False)
        return x.value

    dense_val = solve_with(P)
    csr_val = solve_with(sp.csr_matrix(P))
    csc_val = solve_with(sp.csc_matrix(P))

    assert np.allclose(dense_val, csr_val)
    assert np.allclose(dense_val, csc_val)

def test_quad_form_dense_sparse_sparse_different_x(self):
# Generate a random non-trivial quadratic program.
m = 15
n = 10
Expand All @@ -22,27 +66,28 @@ def test_quad_form_dense_sparse_sparse(self):
h = G @ np.random.randn(n)
A = np.random.randn(p, n)
b = np.random.randn(p)
x = cp.Variable(n)

constraints = [G @ cp.Variable(n) <= h,
A @ cp.Variable(n) == b]
constraints = [G @ x <= h,
A @ x == b]

# dense problem
x = cp.Variable(n)
x.value = None
prob = cp.Problem(cp.Minimize((1/2)*cp.quad_form(x, P) + q.T @ x),
constraints)
prob.solve(nlp=True, verbose=False)
dense_val = x.value

# CSR problem
x = cp.Variable(n)
x.value = None
P_csr = sp.csr_matrix(P)
prob = cp.Problem(cp.Minimize((1/2)*cp.quad_form(x, P_csr) + q.T @ x),
constraints)
prob.solve(nlp=True, verbose=False)
csr_val = x.value

# CSC problem
x = cp.Variable(n)
x.value = None
P_csc = sp.csc_matrix(P)
prob = cp.Problem(cp.Minimize((1/2)*cp.quad_form(x, P_csc) + q.T @ x),
constraints)
Expand Down
24 changes: 23 additions & 1 deletion cvxpy/tests/nlp_tests/test_broadcast.py
Original file line number Diff line number Diff line change
Expand Up @@ -54,4 +54,26 @@ def test_column_broadcast(self):
assert(np.allclose(x.value.flatten(), np.mean(A, axis=1)))

checker = DerivativeChecker(problem)
checker.run_and_assert()
checker.run_and_assert()

def test_subtle_broadcast1(self):
    """Derivative check: (n, 1) variable equated to a 1-D array of shape (n,)."""
    n = 5
    x = cp.Variable((n, 1))
    # Start strictly positive so log(x) is defined at the initial point.
    x.value = np.random.rand(n, 1) + 0.1
    problem = cp.Problem(cp.Minimize(0), [cp.log(x) == np.ones(n)])
    DerivativeChecker(problem).run_and_assert()

def test_subtle_broadcast2(self):
    """Derivative check: (n, 1) variable equated to a row vector of shape (1, n)."""
    n = 5
    x = cp.Variable((n, 1))
    # Start strictly positive so log(x) is defined at the initial point.
    x.value = np.random.rand(n, 1) + 0.1
    problem = cp.Problem(cp.Minimize(0), [cp.log(x) == np.ones((1, n))])
    DerivativeChecker(problem).run_and_assert()
Loading