88)
99from confopt .utils .configurations .sampling import get_tuning_configurations
1010from confopt .selection .acquisition import QuantileConformalSearcher
11- from confopt .selection .sampling .bound_samplers import LowerBoundSampler
11+ from confopt .selection .sampling .thompson_samplers import ThompsonSampler
1212from confopt .wrapping import FloatRange , IntRange , CategoricalRange , ConformalBounds
1313from sklearn .base import BaseEstimator
1414from confopt .selection .estimator_configuration import (
@@ -136,12 +136,52 @@ def build_estimator_architectures(amended: bool = False):
136136) = build_estimator_architectures (amended = True )
137137
138138
def simple_quadratic_minimization(x):
    """Smooth quadratic bowl used as a minimization test objective.

    The surface is convex with a single global minimum of 0 at
    x = [2, -1], so a conformal-prediction searcher can learn and
    exploit it far more easily than random search can.
    """
    point = np.asarray(x)
    # Distance-squared from the optimum at [2, -1].
    dx = point[0] - 2
    dy = point[1] + 1
    return dx**2 + dy**2
149+
150+
def simple_quadratic_maximization(x):
    """Inverted quadratic bowl used as a maximization test objective.

    The surface is concave with a single global maximum of 0 at
    x = [1, 0.5]; its smoothness makes it easy for conformal
    prediction to learn, unlike a random landscape.
    """
    point = np.asarray(x)
    # Negated distance-squared from the optimum at [1, 0.5].
    dx = point[0] - 1
    dy = point[1] - 0.5
    return -(dx**2 + dy**2)
161+
162+
def rastrigin(x, A=20):
    """Rastrigin benchmark function: highly multimodal, global minimum 0 at the origin.

    Parameters
    ----------
    x : array-like
        Point at which to evaluate. Coerced via ``np.asarray`` so plain
        Python sequences are accepted, consistent with the other benchmark
        functions in this module (``ackley`` and the simple quadratics).
    A : float, optional
        Amplitude of the cosine modulation term (default 20).

    Returns
    -------
    float
        ``A * n + sum(x_i**2 - A * cos(2 * pi * x_i))`` over the n dimensions.
    """
    x = np.asarray(x)  # accept lists/tuples, matching sibling benchmarks
    n = len(x)
    rastrigin_value = A * n + np.sum(x**2 - A * np.cos(2 * np.pi * x))
    return rastrigin_value
143167
144168
def ackley(x, a=20, b=0.2, c=2 * np.pi):
    """Negated Ackley benchmark for maximization tests.

    The classic Ackley function has its global minimum of 0 at the
    origin; this helper returns the negated value, so the global
    maximum is 0 at x = [0, 0, ..., 0].
    """
    x = np.asarray(x)
    n = len(x)
    # Exponential of the RMS distance from the origin.
    radial_term = -a * np.exp(-b * np.sqrt(np.sum(x**2) / n))
    # Exponential of the mean cosine oscillation.
    cosine_term = -np.exp(np.sum(np.cos(c * x)) / n)
    ackley_value = radial_term + cosine_term + a + np.exp(1)
    return -ackley_value  # negate so the benchmark is a maximization target
183+
184+
145185class ObjectiveSurfaceGenerator :
146186 def __init__ (self , generator : str ):
147187 self .generator = generator
@@ -213,6 +253,32 @@ def dummy_parameter_grid():
213253 }
214254
215255
@pytest.fixture
def simple_minimization_parameter_grid():
    """Search space for the simple quadratic minimization objective.

    Bounds are symmetric around the optimum at x1=2, x2=-1 (width 8 per
    dimension), wide enough for exploration yet small enough for the
    conformal search to learn the surface efficiently.
    """
    return dict(
        x1=FloatRange(min_value=-2.0, max_value=6.0),
        x2=FloatRange(min_value=-5.0, max_value=3.0),
    )
267+
268+
@pytest.fixture
def simple_maximization_parameter_grid():
    """Search space for the simple quadratic maximization objective.

    Bounds are symmetric around the optimum at x1=1, x2=0.5 (width 6 per
    dimension), so the conformal search can home in on it efficiently.
    """
    bounds = {
        "x1": FloatRange(min_value=-2.0, max_value=4.0),
        "x2": FloatRange(min_value=-2.5, max_value=3.5),
    }
    return bounds
280+
281+
216282@pytest .fixture
217283def rastrigin_parameter_grid ():
218284 """Parameter grid for 6-dimensional Rastrigin function optimization."""
@@ -226,6 +292,19 @@ def rastrigin_parameter_grid():
226292 }
227293
228294
@pytest.fixture
def ackley_parameter_grid():
    """Parameter grid for 6-dimensional Ackley function optimization."""
    # Canonical Ackley search domain: [-32.768, 32.768] in every dimension.
    limit = 32.768
    return {
        f"x{i}": FloatRange(min_value=-limit, max_value=limit)
        for i in range(1, 7)
    }
306+
307+
229308@pytest .fixture
230309def linear_data_drift ():
231310 np .random .seed (42 )
@@ -651,27 +730,27 @@ def conformal_bounds_deterministic():
651730
652731
653732@pytest .fixture
654- def comprehensive_tuning_setup (rastrigin_parameter_grid ):
655- """Fixture for comprehensive integration test setup (objective, warm starts, tuner, searcher)."""
733+ def comprehensive_minimizing_tuning_setup (simple_minimization_parameter_grid ):
734+ """Fixture for comprehensive integration test setup (objective, warm starts, tuner, searcher).
735+
736+ Uses a simple quadratic minimization function that's easy for conformal prediction to learn,
737+ ensuring the test validates that conformal search outperforms random search.
738+ """
656739
657740 def optimization_objective (configuration : Dict ) -> float :
658- # Extract 6 -dimensional vector from configuration
741+ # Extract 2 -dimensional vector from configuration
659742 x = np .array (
660743 [
661744 configuration ["x1" ],
662745 configuration ["x2" ],
663- configuration ["x3" ],
664- configuration ["x4" ],
665- configuration ["x5" ],
666- configuration ["x6" ],
667746 ]
668747 )
669748
670- # Use Rastrigin function for minimization
671- return rastrigin (x )
749+ # Use simple quadratic function for minimization (minimum at [2, -1])
750+ return simple_quadratic_minimization (x )
672751
673752 warm_start_configs_raw = get_tuning_configurations (
674- parameter_grid = rastrigin_parameter_grid ,
753+ parameter_grid = simple_minimization_parameter_grid ,
675754 n_configurations = 5 ,
676755 random_state = 123 ,
677756 sampling_method = "uniform" ,
@@ -684,20 +763,73 @@ def optimization_objective(configuration: Dict) -> float:
684763 def make_tuner_and_searcher (dynamic_sampling ):
685764 tuner = ConformalTuner (
686765 objective_function = optimization_objective ,
687- search_space = rastrigin_parameter_grid ,
766+ search_space = simple_minimization_parameter_grid ,
688767 minimize = True ,
689768 n_candidates = 1000 ,
690769 warm_starts = warm_start_configs ,
691770 dynamic_sampling = dynamic_sampling ,
692771 )
693772 searcher = QuantileConformalSearcher (
694773 quantile_estimator_architecture = "qgbm" ,
695- sampler = LowerBoundSampler (
696- interval_width = 0.8 ,
774+ sampler = ThompsonSampler (
775+ n_quantiles = 4 ,
776+ adapter = "DtACI" ,
777+ enable_optimistic_sampling = False ,
778+ ),
779+ n_pre_conformal_trials = 32 ,
780+ calibration_split_strategy = "train_test_split" ,
781+ )
782+ return tuner , searcher , warm_start_configs , optimization_objective
783+
784+ return make_tuner_and_searcher
785+
786+
787+ @pytest .fixture
788+ def comprehensive_maximizing_tuning_setup (simple_maximization_parameter_grid ):
789+ """Fixture for comprehensive integration test setup for maximization (objective, warm starts, tuner, searcher).
790+
791+ Uses a simple quadratic maximization function that's easy for conformal prediction to learn,
792+ ensuring the test validates that conformal search outperforms random search.
793+ """
794+
795+ def optimization_objective (configuration : Dict ) -> float :
796+ # Extract 2-dimensional vector from configuration
797+ x = np .array (
798+ [
799+ configuration ["x1" ],
800+ configuration ["x2" ],
801+ ]
802+ )
803+
804+ # Use simple quadratic function for maximization (maximum at [1, 0.5])
805+ return simple_quadratic_maximization (x )
806+
807+ warm_start_configs_raw = get_tuning_configurations (
808+ parameter_grid = simple_maximization_parameter_grid ,
809+ n_configurations = 5 ,
810+ random_state = 123 ,
811+ sampling_method = "uniform" ,
812+ )
813+ warm_start_configs = []
814+ for config in warm_start_configs_raw :
815+ performance = optimization_objective (config )
816+ warm_start_configs .append ((config , performance ))
817+
818+ def make_tuner_and_searcher (dynamic_sampling ):
819+ tuner = ConformalTuner (
820+ objective_function = optimization_objective ,
821+ search_space = simple_maximization_parameter_grid ,
822+ minimize = False , # Set to False for maximization
823+ n_candidates = 1000 ,
824+ warm_starts = warm_start_configs ,
825+ dynamic_sampling = dynamic_sampling ,
826+ )
827+ searcher = QuantileConformalSearcher (
828+ quantile_estimator_architecture = "qgbm" ,
829+ sampler = ThompsonSampler (
830+ n_quantiles = 4 ,
697831 adapter = "DtACI" ,
698- beta_decay = "logarithmic_decay" ,
699- c = 1.0 ,
700- beta_max = 10.0 ,
832+ enable_optimistic_sampling = False ,
701833 ),
702834 n_pre_conformal_trials = 32 ,
703835 calibration_split_strategy = "train_test_split" ,
0 commit comments