// @(#)root/math/scipy $Id$
// Author: Omar Zapata 2023

#include <gtest/gtest.h>

#include "Math/ScipyMinimizer.h"
#include "Math/Functor.h"
#include "Math/Factory.h"
#include "Math/MultiNumGradFunction.h"
#include "Math/FitMethodFunction.h"
12+ // target function
// Rosenbrock "banana" target function: f(x, y) = 100*(y - x^2)^2 + (1 - x)^2.
// Global minimum is f(1, 1) = 0. `xx` must point to at least two doubles.
double RosenBrock(const double *xx)
{
   const double quad = xx[1] - xx[0] * xx[0]; // curved-valley term
   const double lin = 1 - xx[0];              // offset from the minimum in x
   return 100 * quad * quad + lin * lin;
}
21+
// Analytic gradient (Jacobian) of RosenBrock.
// ipar selects the component: 0 -> df/dx, any other value -> df/dy.
double RosenBrockGrad(const double *x, unsigned int ipar)
{
   const double quad = x[1] - x[0] * x[0];
   if (ipar != 0)
      return 200 * quad; // df/dy
   return -2 * (1 - x[0]) + 200 * quad * (-2 * x[0]); // df/dx
}
// Analytic Hessian of RosenBrock, written row-major into hess[0..3]
// (hess must hold at least 4 doubles). Always reports success.
bool RosenBrockHessian(const std::vector<double> &xx, double *hess)
{
   const double x = xx[0];
   const double y = xx[1];

   hess[0] = 1200 * x * x - 400 * y + 2; // d2f/dx2
   hess[1] = hess[2] = -400 * x;         // symmetric mixed term d2f/dxdy
   hess[3] = 200;                        // d2f/dy2

   return true;
}
43+
// https://de.mathworks.com/help/optim/ug/example-nonlinear-constrained-minimization.html
// Inequality-constraint function: g(x, y) = x^2 - y^2 + 1 (required >= 0 by
// the "ineq" registration below).
double ConstRosenBrock(const std::vector<double> &x)
{
   const double u = x[0];
   const double v = x[1];
   return u * u - v * v + 1;
}
// scipy.optimize.minimize algorithms exercised by every test below.
// NOTE: "dogleg" is problematic, requires better tuning of the parameters,
// so it is deliberately absent from this list.
// Declared const: nothing mutates it and the tests only bind const references.
const std::string methods[] = {"Nelder-Mead", "L-BFGS-B", "Powell", "CG", "BFGS",
                               "TNC", "COBYLA", "SLSQP", "trust-constr", "Newton-CG",
                               "trust-ncg", "trust-exact", "trust-krylov"};
54+
55+ // Testing fit using class with gradient
56+ class ScipyFitClass : public ::testing::Test {
57+ public:
58+ ROOT::Math::Experimental::ScipyMinimizer *minimizer = nullptr ;
59+
60+ ScipyFitClass () = default ;
61+ void Fit (const char *method, bool useConstraint = false )
62+ {
63+ ROOT::Math::GradFunctor f (&RosenBrock, &RosenBrockGrad, 2 );
64+
65+ minimizer = new ROOT::Math::Experimental::ScipyMinimizer (method);
66+ minimizer->SetMaxFunctionCalls (1000000 );
67+ minimizer->SetMaxIterations (100000 );
68+ minimizer->SetTolerance (0.001 );
69+
70+ double step[2 ] = {0.01 , 0.01 };
71+ double variable[2 ] = {0.1 , 1.2 };
72+
73+ minimizer->SetFunction (f);
74+ minimizer->SetHessianFunction (RosenBrockHessian);
75+
76+ // Set the free variables to be minimized!
77+ minimizer->SetVariable (0 , " x" , variable[0 ], step[0 ]);
78+ minimizer->SetVariable (1 , " y" , variable[1 ], step[1 ]);
79+ if (useConstraint)
80+ minimizer->AddConstraintFunction (ConstRosenBrock, " ineq" );
81+
82+ minimizer->Minimize ();
83+ }
84+ };
85+
86+ TEST_F (ScipyFitClass, Fit)
87+ {
88+ for (const std::string &text : methods) {
89+ Fit (text.c_str (),false );
90+ EXPECT_EQ (1000000 , minimizer->MaxFunctionCalls ());
91+
92+ EXPECT_EQ (100000 , minimizer->MaxIterations ());
93+
94+ EXPECT_EQ (0.001 , minimizer->Tolerance ());
95+
96+ auto step = minimizer->StepSizes ();
97+ EXPECT_EQ (0.01 , step[0 ]);
98+ EXPECT_EQ (0.01 , step[1 ]);
99+
100+ auto v1name = minimizer->VariableName (0 );
101+ auto v2name = minimizer->VariableName (1 );
102+ EXPECT_EQ (" x" , v1name);
103+ EXPECT_EQ (" y" , v2name);
104+
105+ EXPECT_EQ (0 , minimizer->VariableIndex (" x" ));
106+ EXPECT_EQ (1 , minimizer->VariableIndex (" y" ));
107+
108+ auto opts = minimizer->Options ();
109+
110+ EXPECT_EQ (" Scipy" , opts.MinimizerType ());
111+ EXPECT_EQ (text, opts.MinimizerAlgorithm ());
112+
113+ auto x = minimizer->X ();
114+ ASSERT_NEAR (1 , x[0 ], 0.5 );
115+ ASSERT_NEAR (1 , x[1 ], 0.5 );
116+ ASSERT_NEAR (0 , RosenBrock (x), 0.5 );
117+ }
118+ }
119+
120+
121+ TEST_F (ScipyFitClass, FitContraint) // using constraint function
122+ {
123+ for (const std::string &text : methods) {
124+ Fit (text.c_str (), true );
125+ auto x = minimizer->X ();
126+ ASSERT_NEAR (1 , x[0 ], 0.5 );
127+ ASSERT_NEAR (1 , x[1 ], 0.5 );
128+ ASSERT_NEAR (0 , RosenBrock (x), 0.5 );
129+ }
130+ }