Hello ROOT community,
I am currently working on a project involving the analysis of a distribution, modeled as a combination of known background noise with an exponential decay and a potential signal modeled by a Gaussian function. To achieve a robust estimation of signal and background event counts, I am using the likelihood method. However, I have encountered challenges in implementing and fine-tuning the likelihood fit for optimal discrimination against background noise.
Specifically, I am struggling with the proper implementation of likelihood fitting in ROOT, and I would greatly appreciate any guidance, suggestions, or examples from the experienced members of this community. If you have insights into minimizing background noise and accurately determining signal parameters using likelihood techniques, I would be eager to learn from your expertise.
I wrote some code, but it does not seem to work as expected.
#include <iostream>

#include <TCanvas.h>
#include <TF1.h>
#include <TFile.h>
#include <TFitResult.h>
#include <TGraph.h>
#include <TH1F.h>
#include <TMath.h>
#include <TRandom3.h>

using namespace std;
int projet()
{
TF1 *fonctionPersonnalisee = new TF1("fonctionPersonnalisee", "exp(-x) + gaus(x)", 0, 10);
fonctionPersonnalisee->SetParameters(0.5, 3.0, 0.2); // Amplitude, moyenne, largeur
// Générer des données aléatoires basées sur la fonction personnalisée et les tracer
TH1F *histogramme1 = new TH1F("histogramme", "Distribution fonction personnalisée", 100, 0, 10);
histogramme1->FillRandom("fonctionPersonnalisee", 10000);
TCanvas *canvas1 = new TCanvas("canvas","Distribution fonction personnalisée", 800, 600);
histogramme1->Fit("exp(-x) + gaus(x)","L");
histogramme1->Draw();
canvas1->SaveAs("signal.root");
cout << "Appuyez sur Enter pour quitter...";
cin.get();
TFile *fichier = new TFile("signal.root");
TH1F *histogramme = (TH1F*)fichier->Get("histogramme");
// Définir la fonction de vraisemblance (likelihood)
TF1 *likelihood = new TF1("likelihood", "[0] * exp(-[1] * x) + [2] * TMath::Gaus(x, [3], [4])", 0, 6);
// Initialiser les paramètres de la fonction de vraisemblance
likelihood->SetParameters(8000, 1, 300, 3, 0.2);
// Scanner le likelihood en fonction de la fraction de signal
TGraph *graphLikelihood = new TGraph();
double fractionSignal, likelihoodValue;
for (int i = 0; i <= 100; ++i) {
fractionSignal = i / 100.0;
likelihood->SetParameter(2, fractionSignal);
// Ajuster la fonction de vraisemblance à l'histogramme
TFitResultPtr fitResult = histogramme1->Fit(likelihood, "SQ");
// Obtenir la valeur du log-likelihood
double minimumValue = fitResult->MinFcnValue();
likelihoodValue = -2 * (histogramme1->GetEntries() * (TMath::Log(histogramme1->GetEntries()) - minimumValue));
graphLikelihood->SetPoint(i, fractionSignal, likelihoodValue);
}
// Trouver la valeur maximale du likelihood
int indexMaxLikelihood = TMath::LocMax(graphLikelihood->GetN(), graphLikelihood->GetY());
double maxLikelihood = graphLikelihood->GetX()[indexMaxLikelihood];
double fractionSignalMaxLikelihood = graphLikelihood->GetYaxis()->GetBinCenter(indexMaxLikelihood);
// Ajuster avec une parabole pour obtenir la fraction de signal et son incertitude
TF1 *parabole = new TF1("parabole", "[0] + [1]*(x - [2])*(x - [2])", 0, 1);
parabole->SetParameters(maxLikelihood, -maxLikelihood, fractionSignalMaxLikelihood);
graphLikelihood->Fit(parabole, "Q");
// Afficher les résultats
std::cout << "Fraction de signal : " << parabole->GetParameter(2) << std::endl;
std::cout << "Incertitude sur la fraction de signal : " << 1.0 / TMath::Sqrt(-2 * parabole->GetParameter(1)) << std::endl;
canvas1->SaveAs("signal.root");
// Dessiner le graphique du likelihood
TCanvas *canvas = new TCanvas("canvas2", "Scan du Likelihood", 800, 600);
graphLikelihood->Draw("AP");
// Attendre l'interaction de l'utilisateur avant de fermer la fenêtre
std::cout << "Appuyez sur Enter pour quitter...";
std::cin.get();
return 0;
Thank you in advance for your time and assistance.
Best regards,