-
Notifications
You must be signed in to change notification settings - Fork 54
BUG: optimize.minimize: COBYLA callback not called at final iteration #273
Copy link
Copy link
Open
Description
As reported in the scipy repo, when using the COBYLA method in scipy.optimize.minimize, the user-provided callback function is not called with the final optimizer state, unlike all other methods. This inconsistency means users cannot rely on the callback to capture the final result, which may lead to unexpected behavior in iterative logging or post-optimization processing.
Here's an MRE using scipy:
from scipy.optimize import minimize
import numpy as np
# Shared cell holding the most recent iterate the optimizer reported.
last_callback_value = [None]

def callback(xk, state=None):
    """Log the current iterate and remember a copy of it for later comparison."""
    print(f"Callback called with xk: {xk}")
    last_callback_value[0] = xk.copy()
# Simple quadratic test problem: f(x) = x0^2 + x1^2, minimized at the origin.
def objective(x):
    """Return the value of the quadratic objective at *x*."""
    return x[0] * x[0] + x[1] * x[1]
def jacobian(x):
    """Return the gradient of the quadratic objective: (2*x0, 2*x1)."""
    return 2 * np.array([x[0], x[1]])
def hessian(x):
    """Return the (constant) Hessian of the quadratic objective, 2*I."""
    return np.diag([2, 2])
# All minimize() methods exercised by this reproduction, in a fixed order.
optimizers = (
    "Nelder-Mead Powell CG BFGS Newton-CG L-BFGS-B TNC COBYLA SLSQP "
    "trust-constr dogleg trust-ncg trust-exact trust-krylov"
).split()
# Test each optimizer: run 4 iterations, then compare the final result
# against the last iterate the callback saw.  Fixes vs. the pasted snippet:
# the stray "" literal inside the kwargs dict is removed and the
# indentation lost in the paste is restored.
for optimizer in optimizers:
    print(f"\nTesting method: {optimizer}")
    last_callback_value[0] = None
    options = {
        'maxiter': 4,
        'disp': False,
    }
    kwargs = {
        "fun": objective,
        "x0": [2, 2],
        "method": optimizer,
        "callback": callback,
        "options": options,
    }
    try:
        # These methods require a gradient.
        if optimizer in ["Newton-CG", "L-BFGS-B", "TNC", "SLSQP", "dogleg", "trust-ncg", "trust-exact", "trust-krylov"]:
            kwargs["jac"] = jacobian
        if optimizer in ["L-BFGS-B", "TNC", "SLSQP"]:
            kwargs["bounds"] = [(-5, 5), (-5, 5)]
        # The trust-region family additionally requires the Hessian.
        if optimizer in ["dogleg", "trust-ncg", "trust-exact", "trust-krylov"]:
            kwargs["hess"] = hessian
        # TNC names its evaluation cap "maxfun" instead of "maxiter".
        if optimizer in ["TNC"]:
            options["maxfun"] = options.pop("maxiter")
        # The example drops "disp" for L-BFGS-B (its options differ here).
        if optimizer in ["L-BFGS-B"]:
            options.pop("disp")
        result = minimize(**kwargs)
        print(f"Final result: {result.x}")
        if last_callback_value[0] is not None:
            print(f"Last callback value: {last_callback_value[0]}")
            print(f"Are they equal? {np.allclose(last_callback_value[0], result.x)}")
        else:
            print("No callback value recorded.")
    except Exception as e:
        print(f"Error: {e}")
and one using PRIMA directly (this snippet reuses the `objective` function and the `numpy` import from the example above):
from scipy._lib.pyprima import minimize

# NOTE: this snippet reuses `objective` and `np` from the scipy example
# above.  Fixes vs. the pasted snippet: the print no longer references the
# undefined (stale) loop variable `optimizer`, and indentation is restored.
print("\nTesting method: COBYLA (pyprima)")
last_callback_value = [None]

def callback(xk, *args):
    """Log the current iterate and remember a copy of it for later comparison."""
    last_callback_value[0] = xk.copy()
    print(f"Callback called with xk: {xk}")

options = {
    "maxfun": 4,
}
kwargs = {
    "fun": objective,
    "x0": [2, 2],
    "callback": callback,
    "options": options,
}
try:
    result = minimize(**kwargs)
    print(f"Final result: {result.x}")
    if last_callback_value[0] is not None:
        print(f"Last callback value: {last_callback_value[0]}")
        print(f"Are they equal? {np.allclose(last_callback_value[0], result.x)}")
    else:
        print("No callback value recorded.")
except Exception as e:
    print(f"Error: {e}")
Reactions are currently unavailable
Metadata
Metadata
Assignees
Labels
No labels