/// \file
/// \ingroup tutorial_fit
/// \notebook -nodraw
/// Example of how to do a profile scan using the numerical minimization of the
/// Rosenbrock function (extending the NumericalMinimization tutorial).
///
/// \macro_code
///
/// \author Lorenzo Moneta

#include "Math/Minimizer.h"
#include "Math/MinimizerOptions.h"
#include "Math/Factory.h"
#include "Math/Functor.h"
#include "TRandom2.h"
#include "TError.h"
#include "TGraph.h"
#include "TF1.h"
#include "TVirtualPad.h"
#include <iostream>
#include <vector>

double RosenBrock(const double *xx)
{
   const Double_t x = xx[0];
   const Double_t y = xx[1];
   const Double_t tmp1 = y - x * x;
   const Double_t tmp2 = 1 - x;
   return 100 * tmp1 * tmp1 + tmp2 * tmp2;
}

int ProfileScan(const char *minName = "Minuit2",
                const char *algoName = "",
                int strategy = 0,
                int printLevel = 1)
{
   // Create the minimizer, giving a name and (optionally) a name for the
   // specific algorithm.
   // Possible choices are:
   //     minName                   algoName
   //  Minuit / Minuit2             Migrad, Simplex, Combined, Scan  (default is Migrad)
   //  Minuit2                      Fumili2
   //  Fumili
   //  GSLMultiMin                  ConjugateFR, ConjugatePR, BFGS,
   //                               BFGS2, SteepestDescent
   //  GSLMultiFit
   //  GSLSimAn
   //  Genetic
   ROOT::Math::Minimizer *minim =
      ROOT::Math::Factory::CreateMinimizer(minName, algoName);
   if (!minim) {
      Error("ProfileScan", "cannot create minimizer %s - %s", minName, algoName);
      return 1;
   }

   // set tolerance, etc.
   minim->SetMaxFunctionCalls(1000000000); // for Minuit/Minuit2
   //minim->SetMaxIterations(10000);       // for GSL
   minim->SetTolerance(0.001);
   minim->SetPrintLevel(1);

   // create the function wrapper for the minimizer
   // (an IMultiGenFunction type)
   ROOT::Math::Functor f(&RosenBrock, 2);
   double step[2] = {0.01, 0.01};
   // starting point
   double variable[2] = {-10., 10.};

   minim->SetFunction(f);

   // set the free variables to be minimized
   minim->SetVariable(0, "x", variable[0], step[0]);
   minim->SetVariable(1, "y", variable[1], step[1]);

   // do first the full minimization
   ROOT::Math::MinimizerOptions mopt;
   mopt.SetMaxFunctionCalls(1000);
   mopt.SetPrintLevel(printLevel);
   mopt.SetStrategy(strategy);
   mopt.Print();
   minim->SetOptions(mopt);

   minim->Minimize();
   std::cout << " Status = " << minim->Status() << std::endl;

   const double *xs = minim->X();
   std::cout << "Minimum: f(" << xs[0] << "," << xs[1] << "): "
             << minim->MinValue() << std::endl;

   // the expected minimum value is 0
   if (minim->MinValue() < 1.E-4 && f(xs) < 1.E-4)
      std::cout << "Minimizer " << minName << " - " << algoName
                << " converged to the right minimum" << std::endl;
   else {
      std::cout << "Minimizer " << minName << " - " << algoName
                << " failed to converge !!!" << std::endl;
      Error("ProfileScan", "failed to converge");
   }

   double xMinimum = xs[0];
   double funcMinimum = minim->MinValue();
   double xError = minim->Errors()[0];

   // now run a scan of the profile likelihood:
   // fix parameter 0 and minimize w.r.t. parameter 1
   int npoints = 50;
   double xmin = -5;
   double xmax = 5;
   mopt.SetMaxFunctionCalls(1000);
   mopt.SetPrintLevel(printLevel - 1);
   mopt.SetStrategy(0);
   mopt.Print();
   minim->SetOptions(mopt);

   std::vector<double> xp(npoints);
   std::vector<double> fp(npoints);
   for (int i = 0; i < npoints; ++i) {
      double x0 = xmin + i * (xmax - xmin) / double(npoints);
      minim->SetVariableValue(0, x0);
      minim->FixVariable(0);
      minim->Minimize();
      xp[i] = x0;
      fp[i] = minim->MinValue();
   }

   auto gr = new TGraph(xp.size(), xp.data(), fp.data());
   gr->SetMarkerStyle(20);
   gr->SetTitle("Profile Scan; x; profile function value");
   gr->Draw("AP");

   // plot also the parabolic approximation obtained from the Hessian:
   // funcMinimum + (x - xMinimum)^2 / xError^2
   auto f1 = new TF1("f1", "[1]*(x-[0])^2+[2]");
   f1->SetParameters(xMinimum, 1. / (xError * xError), funcMinimum);
   f1->SetLineStyle(2);
   f1->SetTitle("Parabolic approximation");
   f1->SetRange(xmin, xmax);
   f1->Draw("SAME");

   gPad->BuildLegend();

   return 0;
}
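
// A minimal usage sketch (assuming this macro is saved as ProfileScan.C, the
// conventional ROOT file name for a macro whose entry point is ProfileScan):
// load and run it from the ROOT prompt, optionally picking one of the
// minimizer/algorithm combinations listed in the comment above, e.g.
//
//    root [0] .L ProfileScan.C
//    root [1] ProfileScan();                        // default: Minuit2 Migrad
//    root [2] ProfileScan("GSLMultiMin", "BFGS2");  // alternative GSL minimizer
//
// or non-interactively:
//
//    root -l -q ProfileScan.C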