// headers needed when compiling the macro (not required in interpreted mode)
#include "TH1D.h"
#include "TF1.h"
#include "TCanvas.h"
#include "TLegend.h"
#include "TFitResult.h"
#include "TVirtualPad.h"
#include <algorithm>
#include <iostream>
#include <vector>

void Chi2test_Example_toys() {

   // some configuration parameters
   // number of bins used for the data histogram and data range
   int nbin = 50;
   double xmin = 0;
   double xmax = 20;
   int n = 500; // size of data sample

   auto h0 = new TH1D("h0", "Original data distribution", nbin, xmin, xmax);

   // generate the data histogram from the true function
   TF1 f1("f1", "[A]*exp(-x/[tau])", xmin, xmax);
   double trueParams[] = {1, 5};
   f1.SetNpx(1000);
   f1.SetParameters(trueParams);
   for (int i = 0; i < n; ++i) {
      h0->Fill(f1.GetRandom());
   }

   // fit the histogram using the likelihood method
   f1.SetLineColor(kRed);
   f1.SetTitle("Likelihood Fit");
   auto r0L = h0->Fit(&f1, "S L +");
   // chi2 value (Baker-Cousins)
   double chi2DataBC = 2. * r0L->MinFcnValue();

   // fit the histogram using the Neyman chi2
   f1.SetLineColor(kBlack);
   f1.SetTitle("Neyman Fit");
   auto r0N = h0->Fit(&f1, "S +");
   double chi2DataN = r0N->Chi2();

   std::cout << "Baker-Cousins chi2 value from the likelihood fit is " << chi2DataBC << std::endl;
   std::cout << "Neyman chi2 value from the least-squares fit is " << chi2DataN << std::endl;

   // just plot the data
   h0->Draw();
   auto legend = new TLegend(0.6, 0.6, 0.88, 0.88);
   auto functions = h0->GetListOfFunctions();
   // loop on the list of histogram functions and add them to the legend
   for (auto *f : *functions) {
      if (f->IsA() == TF1::Class())
         legend->AddEntry(f, f->GetTitle(), "L");
   }
   legend->Draw();
   gPad->Draw();
   gPad->Update();

   double *fit0Params = f1.GetParameters();

   // generate toys
   int nexp = 1000; // number of toys (experiments)
   auto hist   = new TH1D("hist", "Data Sample distribution", nbin, xmin, xmax);
   auto hchi2N = new TH1D("hchi2N", "Neyman chi-square distribution", 50, 0, 2 * nbin);
   auto hchi2L = new TH1D("hchi2L", "Baker-Cousins LR distribution", 50, 0, 3 * nbin);
   double params0[] = {1, 5};

   hchi2N->Reset(); // reset in case we run a second time
   hchi2L->Reset();

   std::vector<double> chi2L;
   std::vector<double> chi2N;

   for (int iexp = 0; iexp < nexp; ++iexp) {
      hist->Reset();
      f1.SetParameters(trueParams);
      for (int i = 0; i < n; ++i) {
         // hist->Fill( h0->GetRandom() ); // use the histogram if the true function is not known
         hist->Fill(f1.GetRandom());
      }

      // set the initial function parameters
      f1.SetParameters(params0);
      // Neyman chi-square fit
      // use option Q to avoid too much printing
      auto rN = hist->Fit(&f1, "S Q");

      f1.SetParameters(params0); // reset parameters
      // Baker-Cousins log-likelihood fit
      auto rL = hist->Fit(&f1, "S Q L +"); // use option L

      // get the results from the fit results and save them in histograms
      if (rN == 0) {
         hchi2N->Fill(rN->Chi2());
         chi2N.push_back(rN->Chi2());
      }
      // the Baker-Cousins value is twice the minimum function value of the likelihood fit
      if (rL == 0) {
         hchi2L->Fill(2. * rL->MinFcnValue());
         chi2L.push_back(2. * rL->MinFcnValue());
      }
   }

   auto canvas = new TCanvas("c", "c", 1000, 1000);
   canvas->Divide(1, 2);
   canvas->cd(1);
   hchi2L->Draw();
   canvas->cd(2);
   hchi2N->Draw();
   canvas->Draw();

   std::sort(chi2N.begin(), chi2N.end());
   std::sort(chi2L.begin(), chi2L.end());

   // compute the p-values from the data values
   auto it1 = std::lower_bound(chi2L.begin(), chi2L.end(), chi2DataBC);
   double pvalueL = 1. - double(it1 - chi2L.begin()) / chi2L.size();
   auto it2 = std::lower_bound(chi2N.begin(), chi2N.end(), chi2DataN);
   double pvalueN = 1. - double(it2 - chi2N.begin()) / chi2N.size();

   std::cout << "computed p-values are " << pvalueL << " " << pvalueN << std::endl;
}