1212from bayes_opt .exception import NotUniqueError
1313from bayes_opt .logger import ScreenLogger
1414from bayes_opt .target_space import TargetSpace
15+ from scipy .optimize import NonlinearConstraint
1516
1617
1718def target_func (** kwargs ):
@@ -350,24 +351,25 @@ def test_save_load_state(tmp_path):
350351 state_path = tmp_path / "optimizer_state.json"
351352 optimizer .save_state (state_path )
352353
353- # Load state into new optimizer
354+ # Create new optimizer and load state
354355 new_optimizer = BayesianOptimization (
355356 f = target_func ,
356357 pbounds = PBOUNDS ,
357358 random_state = 1 ,
358- verbose = 0 ,
359- load_state_path = state_path
359+ verbose = 0
360360 )
361+ new_optimizer .load_state (state_path )
361362
362363 # Test that key properties match
363364 assert len (optimizer .space ) == len (new_optimizer .space )
364365 assert optimizer .max ["target" ] == new_optimizer .max ["target" ]
365366 assert optimizer .max ["params" ] == new_optimizer .max ["params" ]
366367 np .testing .assert_array_equal (optimizer .space .params , new_optimizer .space .params )
367368 np .testing .assert_array_equal (optimizer .space .target , new_optimizer .space .target )
368-
369- def test_save_load_w_string_params (tmp_path ):
370- """Test saving and loading optimizer state with string parameters."""
369+
370+
371+ def test_save_load_w_categorical_params (tmp_path ):
372+ """Test saving and loading optimizer state with categorical parameters."""
371373 def str_target_func (param1 : str , param2 : str ) -> float :
372374 # Simple function that maps strings to numbers
373375 value_map = {
@@ -398,9 +400,9 @@ def str_target_func(param1: str, param2: str) -> float:
398400 f = str_target_func ,
399401 pbounds = str_pbounds ,
400402 random_state = 1 ,
401- verbose = 0 ,
402- load_state_path = state_path
403+ verbose = 0
403404 )
405+ new_optimizer .load_state (state_path )
404406
405407 assert len (optimizer .space ) == len (new_optimizer .space )
406408 assert optimizer .max ["target" ] == new_optimizer .max ["target" ]
@@ -415,24 +417,28 @@ def str_target_func(param1: str, param2: str) -> float:
415417
416418def test_probe_point_returns_same_point (tmp_path ):
417419 """Check that probe returns same point after save/load."""
418- # Initialize optimizer
419420 optimizer = BayesianOptimization (
420421 f = target_func ,
421422 pbounds = PBOUNDS ,
422423 random_state = 1 ,
423424 verbose = 0
424425 )
425426
427+ optimizer .register (
428+ params = {"p1" : 5.0 , "p2" : 5.0 },
429+ target = 10.0
430+ )
431+
426432 state_path = tmp_path / "optimizer_state.json"
427433 optimizer .save_state (state_path )
428434
429435 new_optimizer = BayesianOptimization (
430436 f = target_func ,
431437 pbounds = PBOUNDS ,
432438 random_state = 1 ,
433- verbose = 0 ,
434- load_state_path = state_path
435- )
439+ verbose = 0
440+ )
441+ new_optimizer . load_state ( state_path )
436442
437443 # Both optimizers should probe the same point
438444 point = {"p1" : 1.5 , "p2" : 0.5 }
@@ -441,6 +447,33 @@ def test_probe_point_returns_same_point(tmp_path):
441447 assert probe1 == probe2
442448
443449
def test_suggest_point_returns_same_point(tmp_path):
    """Check that suggest returns same point after save/load."""
    # Fit an optimizer on a few observations so the GP has state worth saving.
    original = BayesianOptimization(
        f=target_func,
        pbounds=PBOUNDS,
        random_state=1,
        verbose=0,
    )
    original.maximize(init_points=2, n_iter=3)

    # Persist the fitted state to disk.
    saved_state = tmp_path / "optimizer_state.json"
    original.save_state(saved_state)

    # Rebuild a fresh optimizer with identical configuration and restore
    # the saved state into it.
    restored = BayesianOptimization(
        f=target_func,
        pbounds=PBOUNDS,
        random_state=1,
        verbose=0,
    )
    restored.load_state(saved_state)

    # With identical data and RNG state, both must propose the same point.
    assert original.suggest() == restored.suggest()
476+
444477def test_save_load_random_state (tmp_path ):
445478 """Test that random state is properly preserved."""
446479 # Initialize optimizer
@@ -460,11 +493,84 @@ def test_save_load_random_state(tmp_path):
460493 f = target_func ,
461494 pbounds = PBOUNDS ,
462495 random_state = 1 ,
463- verbose = 0 ,
464- load_state_path = state_path
496+ verbose = 0
465497 )
466498
467499 # Both optimizers should suggest the same point
468500 suggestion1 = optimizer .suggest ()
469501 suggestion2 = new_optimizer .suggest ()
470502 assert suggestion1 == suggestion2
503+
504+
def test_save_load_w_constraint(tmp_path):
    """Test saving and loading optimizer state with constraints."""

    def constraint_func(x: float, y: float) -> float:
        # Constraint model: the parameter sum must stay inside [lb, ub].
        return x + y

    constraint = NonlinearConstraint(fun=constraint_func, lb=0.0, ub=3.0)
    bounds = {"x": (-1, 3), "y": (0, 5)}

    # Build a constrained optimizer and feed it a mix of feasible and
    # infeasible observations.
    original = BayesianOptimization(
        f=target_func,
        pbounds=bounds,
        constraint=constraint,
        random_state=1,
        verbose=0,
    )

    # (params, target, constraint value) triples: the middle one violates
    # the constraint (sum = 4.0 > 3.0) on purpose.
    observations = [
        ({"x": 1.0, "y": 1.0}, 2.0, 2.0),
        ({"x": 2.0, "y": 2.0}, 4.0, 4.0),
        ({"x": 0.5, "y": 0.5}, 1.0, 1.0),
    ]
    for params, target, c_value in observations:
        original.register(params=params, target=target, constraint_value=c_value)

    # Round-trip the state through disk into a freshly configured optimizer.
    state_file = tmp_path / "optimizer_state.json"
    original.save_state(state_file)

    restored = BayesianOptimization(
        f=target_func,
        pbounds=bounds,
        constraint=constraint,
        random_state=1,
        verbose=0,
    )
    restored.load_state(state_file)

    # The restored space must mirror the original observation-for-observation.
    assert len(original.space) == len(restored.space)
    assert original.max["target"] == restored.max["target"]
    assert original.max["params"] == restored.max["params"]
    np.testing.assert_array_equal(original.space.params, restored.space.params)
    np.testing.assert_array_equal(original.space.target, restored.space.target)

    # Constraint values must survive the round trip as well.
    np.testing.assert_array_equal(
        original.space._constraint_values,
        restored.space._constraint_values,
    )

    # Identical state implies an identical next suggestion.
    suggestion1 = original.suggest()
    suggestion2 = restored.suggest()
    assert suggestion1 == suggestion2

    # The suggestion should also be feasible under the constraint.
    constraint_value = constraint_func(**suggestion1)
    assert 0.0 <= constraint_value <= 3.0, "Suggested point violates constraint"
575+
576+
0 commit comments