@@ -532,7 +532,10 @@ def gen_candidates_torch(
         optimizer (Optimizer): The pytorch optimizer to use to perform
             candidate search.
         options: Options used to control the optimization. Includes
-            maxiter: Maximum number of iterations
+
+            - optimizer_options: Dict of additional options to pass to the optimizer
+              (e.g. lr, weight_decay).
+            - stopping_criterion_options: Dict of options for the stopping criterion.
         callback: A callback function accepting the current iteration, loss,
             and gradients as arguments. This function is executed after computing
             the loss and gradients, but before calling the optimizer.
@@ -571,7 +574,6 @@ def gen_candidates_torch(
     # the 1st order optimizers implemented in this method.
     # Here, it does not matter whether one combines multiple optimizations into
     # one or not.
-    options.pop("max_optimization_problem_aggregation_size", None)
     _clamp = partial(columnwise_clamp, lower=lower_bounds, upper=upper_bounds)
     clamped_candidates = _clamp(initial_conditions)
     if fixed_features:
@@ -580,11 +582,30 @@ def gen_candidates_torch(
             [i for i in range(clamped_candidates.shape[-1]) if i not in fixed_features],
         ]
     clamped_candidates = clamped_candidates.requires_grad_(True)
-    _optimizer = optimizer(params=[clamped_candidates], lr=options.get("lr", 0.025))
+
+    # Extract the optimizer- and stopping-criterion-specific options.
+    optimizer_options = options.get("optimizer_options", {}).copy()
+    stopping_criterion_options = options.get("stopping_criterion_options", {}).copy()
+
+    # Backward compatibility: if the old top-level 'maxiter' option is passed,
+    # move it into stopping_criterion_options with a deprecation warning.
+    if "maxiter" in options:
+        warnings.warn(
+            "Passing 'maxiter' directly in options is deprecated. "
+            "Please use options['stopping_criterion_options']['maxiter'] instead.",
+            DeprecationWarning,
+            stacklevel=2,
+        )
+        # Do not override a 'maxiter' that was also set the new way.
+        if "maxiter" not in stopping_criterion_options:
+            stopping_criterion_options["maxiter"] = options["maxiter"]
+
+    optimizer_options.setdefault("lr", 0.025)
+    _optimizer = optimizer(params=[clamped_candidates], **optimizer_options)
 
     i = 0
     stop = False
-    stopping_criterion = ExpMAStoppingCriterion(**options)
+    stopping_criterion = ExpMAStoppingCriterion(**stopping_criterion_options)
     while not stop:
         i += 1
         with torch.no_grad():
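For context, a minimal sketch of how a caller would exercise the reworked `options` dict after this change. The dimension, starting points, and the quadratic stand-in for the acquisition function below are illustrative assumptions, not part of the diff; a real `AcquisitionFunction` would be passed the same way.

```python
import torch

from botorch.generation.gen import gen_candidates_torch

d = 3  # placeholder input dimension
initial_conditions = torch.rand(5, 1, d)  # b x q x d starting points


def acq_function(X: torch.Tensor) -> torch.Tensor:
    # Toy stand-in acquisition function, peaked at X = 0.5; returns one
    # value per batch element, as gen_candidates_torch expects.
    return -(X - 0.5).pow(2).sum(dim=(-1, -2))


candidates, acq_values = gen_candidates_torch(
    initial_conditions=initial_conditions,
    acquisition_function=acq_function,
    lower_bounds=torch.zeros(d),
    upper_bounds=torch.ones(d),
    options={
        # Forwarded to the torch optimizer's constructor (Adam by default).
        "optimizer_options": {"lr": 0.01, "weight_decay": 1e-4},
        # Forwarded to ExpMAStoppingCriterion.
        "stopping_criterion_options": {"maxiter": 150},
    },
)
```

A legacy call that passes `options={"maxiter": 150}` at the top level still runs, but now emits a `DeprecationWarning` and routes the value to the stopping criterion.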