-
Notifications
You must be signed in to change notification settings - Fork 358
Open
Labels
question — Further information is requested
Description
Question
When optimising a really simple toy problem I get a number of messages like:
Optimization failed in `gen_candidates_scipy` with the following warning(s):
[NumericalWarning('A not p.d., added jitter of 1.0e-08 to the diagonal'), OptimizationWarning('Optimization failed within `scipy.optimize.minimize` with status 2 and message ABNORMAL: .'), NumericalWarning('A not p.d., added jitter of 1.0e-08 to the diagonal'), NumericalWarning('A not p.d., added jitter of 1.0e-08 to the diagonal'), OptimizationWarning('Optimization failed within `scipy.optimize.minimize` with status 2 and message ABNORMAL: .'), NumericalWarning('A not p.d., added jitter of 1.0e-08 to the diagonal'), OptimizationWarning('Optimization failed within `scipy.optimize.minimize` with status 2 and message ABNORMAL: .'), NumericalWarning('A not p.d., added jitter of 1.0e-08 to the diagonal'), NumericalWarning('A not p.d., added jitter of 1.0e-08 to the diagonal'), OptimizationWarning('Optimization failed within `scipy.optimize.minimize` with status 2 and message ABNORMAL: .'), NumericalWarning('A not p.d., added jitter of 1.0e-08 to the diagonal'), OptimizationWarning('Optimization failed within `scipy.optimize.minimize` with status 2 and message ABNORMAL: .'), NumericalWarning('A not p.d., added jitter of 1.0e-08 to the diagonal'), OptimizationWarning('Optimization failed within `scipy.optimize.minimize` with status 2 and message ABNORMAL: .'), NumericalWarning('A not p.d., added jitter of 1.0e-08 to the diagonal'), NumericalWarning('A not p.d., added jitter of 1.0e-08 to the diagonal'), NumericalWarning('A not p.d., added jitter of 1.0e-08 to the diagonal'), OptimizationWarning('Optimization failed within `scipy.optimize.minimize` with status 2 and message ABNORMAL: .'), NumericalWarning('A not p.d., added jitter of 1.0e-08 to the diagonal'), OptimizationWarning('Optimization failed within `scipy.optimize.minimize` with status 2 and message ABNORMAL: .'), NumericalWarning('A not p.d., added jitter of 1.0e-08 to the diagonal'), NumericalWarning('A not p.d., added jitter of 1.0e-08 to the diagonal'), OptimizationWarning('Optimization failed within 
`scipy.optimize.minimize` with status 2 and message ABNORMAL: .'), NumericalWarning('A not p.d., added jitter of 1.0e-08 to the diagonal')]
### Please provide any relevant code snippet if applicable.
```python
from ax import Client, RangeParameterConfig
# -----------------------------
# Objective function to MINIMIZE
# -----------------------------
def f(x: float) -> float:
    """Toy objective to minimize: x + 1/x (minimum value 2.0 at x = 1)."""
    reciprocal = 1.0 / x
    return x + reciprocal
# -----------------------------
# Helper: increase acquisition optimizer restarts (BoTorch step)
# -----------------------------
def set_acq_optimizer_kwargs(
    client: "Client",
    num_restarts: int = 64,
    raw_samples: int = 4096,
    maxiter: int = 500,
) -> None:
    """Increase acquisition-optimizer effort on the MBM (BoTorch) node.

    Walks ``client._generation_strategy`` (internal Ax API — may change
    between versions) to find the node named ``"MBM"`` and installs
    ``optimizer_kwargs`` into the first generator spec's gen-kwargs dict,
    where BoTorch's ``optimize_acqf`` picks them up.

    Args:
        client: Ax ``Client`` whose generation strategy is patched in place.
        num_restarts: Number of restart points for acquisition optimization.
        raw_samples: Number of raw samples used to seed the restarts.
        maxiter: ``maxiter`` option forwarded to ``scipy.optimize.minimize``.

    Raises:
        AttributeError: If the strategy exposes neither ``.nodes`` nor
            ``._nodes``, or the MBM node has no generator/model specs.
        ValueError: If no node named "MBM" exists in the strategy.
    """
    gs = client._generation_strategy  # internal API

    # The node-list attribute name varies across Ax versions.
    nodes = getattr(gs, "nodes", None)
    if nodes is None:
        nodes = getattr(gs, "_nodes", None)
    if nodes is None:
        raise AttributeError("Could not find GenerationStrategy nodes (no .nodes or ._nodes).")

    # Find the MBM node (BoTorch Bayesian optimization step).
    mbm_node = next((n for n in nodes if getattr(n, "name", None) == "MBM"), None)
    if mbm_node is None:
        raise ValueError("Could not find MBM node in generation strategy nodes.")

    # Get the first GeneratorSpec on MBM ("model_specs" in very old Ax).
    specs = getattr(mbm_node, "generator_specs", None)
    if specs is None:
        specs = getattr(mbm_node, "model_specs", None)
    if not specs:
        raise AttributeError("Could not find generator specs on MBM node.")
    spec = specs[0]

    # Locate (or create) the gen-kwargs dict, remembering WHICH attribute it
    # came from.  The previous version unconditionally wrote back to
    # ``generator_gen_kwargs`` whenever that attribute merely existed, which
    # could shadow a legacy ``model_gen_kwargs`` dict on older specs.
    attr_name = "generator_gen_kwargs"
    gkwargs = getattr(spec, attr_name, None)
    if gkwargs is None:
        legacy = getattr(spec, "model_gen_kwargs", None)
        if legacy is not None:
            attr_name = "model_gen_kwargs"
            gkwargs = legacy
        else:
            gkwargs = {}  # modern attribute, created below via setattr

    # Set optimizer kwargs (BoTorch's optimize_acqf reads these).
    gkwargs.setdefault("model_gen_options", {})
    gkwargs["model_gen_options"]["optimizer_kwargs"] = {
        "num_restarts": num_restarts,
        "raw_samples": raw_samples,
        "options": {"maxiter": maxiter},
    }

    # Write back to the attribute the dict came from.  This is a no-op when
    # the dict was already attached (mutated in place), but required when we
    # created a fresh one above.
    setattr(spec, attr_name, gkwargs)
# -----------------------------
# Main script
# -----------------------------
if __name__ == "__main__":
    client = Client(random_seed=123)

    # Keep x strictly positive: f divides by x, so exclude 0 from the range.
    # This is "effectively" [0, 4] for testing.
    eps = 1e-6
    client.configure_experiment(
        parameters=[
            RangeParameterConfig(
                name="x",
                bounds=(eps, 4.0),
                parameter_type="float",
            )
        ],
    )

    # Ax maximizes by default, so minimizing f means maximizing -f.
    client.configure_optimization(objective="-f")

    # Sobol initialization followed by the BoTorch MBM node.
    client.configure_generation_strategy(method="fast", initialization_budget=5)

    # Bump acquisition-optimization effort (restarts / raw samples / iterations).
    set_acq_optimizer_kwargs(client, num_restarts=64, raw_samples=4096, maxiter=500)

    # Run trials one at a time, completing each with the true objective value.
    total_trials = 25
    for _ in range(total_trials):
        trial_index, params = next(iter(client.get_next_trials(max_trials=1).items()))
        x_value = float(params["x"])
        client.complete_trial(trial_index=trial_index, raw_data={"f": f(x_value)})

    best_params, best_vals, best_trial, best_arm = client.get_best_parameterization()
    best_x = float(best_params["x"])
    print("Best x:", best_x)
    print("f(best_x):", f(best_x))
    print("Best vals (objective is -f):", best_vals)
    print("Best trial:", best_trial, "arm:", best_arm)
Code of Conduct
- I agree to follow this Ax's Code of Conduct
Metadata
Assignees
Labels
question — Further information is requested