I am trying to use TTree::Project within pyROOT, and am having some issues.
My code is as follows:
import ROOT
numBins = 350
minE = 0.0
maxE = 3500.0
# Define the histograms
possible_bjets = ROOT.TH1D("possible_bjets", "Possible b-jets;Jet Energy (GeV);Number of jets", numBins, minE, maxE)
tagged_bjets = ROOT.TH1D("tagged_bjets", "Tagged b-jets;Jet Energy (GeV);Number of jets", numBins, minE, maxE)
possible_mistags = ROOT.TH1D("possible_mistags", "Possible Mistags;Jet Energy (GeV);Number of jets", numBins, minE, maxE)
mistagged_jets = ROOT.TH1D("mistagged_jets", "Mistagged Jets;Jet Energy (GeV);Number of jets", numBins, minE, maxE)
# Define the cuts
etaCut = ROOT.TCut("abs(jeteta) < 2.3")
decayCut = ROOT.TCut("BdecTrans > 25.7 && BdecTrans < 122.5")
bCut = ROOT.TCut("(abs(Bid) < 600 && abs(Bid) > 500) || (abs(Bid) < 6000 && abs(Bid) > 5000)")
notB = ROOT.TCut("!(%s)" % bCut)
hitCut = ROOT.TCut("max( (jethit2_cone1 - jethit1_cone1) / jethit1_cone1," \
" max( (jethit3_cone1 - jethit2_cone1) / jethit2_cone1," \
" (jethit4_cone1 - jethit3_cone1) / jethit3_cone1 ) ) >= 1");
evaluationFile = ROOT.TFile.Open("deep-learning-evaluation.root")
print "Number of possible b-jets: " + str(evaluationFile.Jets.GetEntries(str(etaCut + decayCut + bCut)))
print "Number of possible light-jets: " + str(evaluationFile.Jets.GetEntries(str(etaCut + notB)))
evaluationFile.Jets.Project("possible_bjets", "jetEnergy", str(etaCut + decayCut + bCut))
evaluationFile.Jets.Project("possible_mistags", "jetEnergy", str(etaCut + notB))
evaluationFile.Jets.Project("tagged_bjets", "jetEnergy", str(etaCut + decayCut + bCut + hitCut))
evaluationFile.Jets.Project("mistagged_jets", "jetEnergy", str(etaCut + notB + hitCut))
print "Number of entries for possible_bjets: " + str(possible_bjets.GetEffectiveEntries())
print "Number of entries for tagged_bjets: " + str(tagged_bjets.GetEffectiveEntries())
print "Number of entries for possible_mistags: " + str(possible_mistags.GetEffectiveEntries())
print "Number of entries for mistagged_jets: " + str(mistagged_jets.GetEffectiveEntries())
taggingEfficiency = ROOT.TEfficiency(tagged_bjets, possible_bjets)
mistaggingRate = ROOT.TEfficiency(mistagged_jets, possible_mistags)
canvas = ROOT.TCanvas("canvas", "Efficiency/Mistagging Canvas")
taggingEfficiency.SetLineColor(2)
taggingEfficiency.Draw("e1p")
mistaggingRate.SetLineColor(3)
mistaggingRate.Draw("SAME")
legend = ROOT.TLegend(0.7, 0.7, 0.9, 0.9)
legend.AddEntry(taggingEfficiency, "Efficiency of tagging #b-jets")
legend.AddEntry(mistaggingRate, "Mistagging Rate")
legend.Draw()
canvas.Show()
raw_input("Press enter to exit...")
However, my output goes something like:
Number of possible b-jets: 14223
Number of possible light-jets: 949104
Number of entries for possible_bjets: 0.0
Number of entries for tagged_bjets: 0.0
Number of entries for possible_mistags: 0.0
Number of entries for mistagged_jets: 0.0
Press enter to exit…
This suggests to me that I am doing something wrong with TTree::Project. However, I am using it just as I would within CINT or ACLiC, so I am not sure why it is misbehaving!