#include <iostream>   // Stream declarations
#include <vector>
#include <limits>
#include <map>
#include <string>

#include "TChain.h"
#include "TCut.h"
#include "TDirectory.h"
#include "TH1F.h"
#include "TH1.h"
#include "TMath.h"
#include "TFile.h"
#include "TStopwatch.h"
#include "TROOT.h"
#include "TSystem.h"

#include "TMVA/GeneticAlgorithm.h"
#include "TMVA/GeneticFitter.h"
#include "TMVA/IFitterTarget.h"
#include "TMVA/Factory.h"
#include "TMVA/DataLoader.h"   // required to load the dataset
#include "TMVA/Reader.h"

using namespace std;
using namespace TMVA;

int tmvatest()
{
   // The explicit loading of the shared libTMVA is done in TMVAlogon.C, defined in .rootrc.
   // If you use your private .rootrc, or run from a different directory, please copy the
   // corresponding lines from .rootrc.

   // Methods to be processed can be given as an argument; use the format:
   //    mylinux~> root -l TMVAClassification.C\(\"myMethod1,myMethod2,myMethod3\"\)
   //---------------------------------------------------------------

   // This loads the library
   TMVA::Tools::Instance();

   // Default MVA methods to be trained + tested
   std::map<std::string, int> Use;

   TFile* outputFile = TFile::Open("TMVA.root", "RECREATE");

   TFile* sig_h   = new TFile("/hhamdell/h.root");
   TFile* bkg_tt  = new TFile("/hhamdell/tt.root");
   TFile* bkg_tth = new TFile("/hhamdell/tth.root");

   TTree* sigh   = (TTree*)sig_h->Get("Delphes");
   TTree* bkgtt  = (TTree*)bkg_tt->Get("Delphes");
   TTree* bkgtth = (TTree*)bkg_tth->Get("Delphes");
   // TTree* tree = static_cast<TTree*>(input->Get("Delphes"));

   TMVA::Factory* factory = new TMVA::Factory("tmvaTest", outputFile,
      "!V:!Silent:Color:DrawProgressBar:Transformations=I;D;P;G,D:AnalysisType=Classification");

   TMVA::DataLoader* dataloader = new TMVA::DataLoader("dataset");

   dataloader->AddVariable("MET := MissingET.MET", "MissingET.MET", "GeV", 'F');
   dataloader->AddVariable("Pt_j := Jet.PT", "Jet.PT", "GeV", 'F');
   // .....
   // More variables are needed here, e.g. the invariant mass of the two
   // leading b-jets (M_b1b2), MT_MET(b1), mT2, ...
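   // A minimal sketch of how the derived variables above could be supplied.
   // Quantities such as M_b1b2 or mT2 combine several Delphes array entries
   // per event, which goes beyond what a single TTreeFormula expression can
   // express cleanly, so a common approach is to precompute them into flat
   // scalar branches (in a small loop over the Delphes tree) and register
   // those branches instead. The file and branch names below ("h_flat.root",
   // "M_b1b2", "MT_MET_b1") are hypothetical, not part of the original setup:
   //
   //    sigh->AddFriend("flat", "/hhamdell/h_flat.root");   // hypothetical flat ntuple
   //    dataloader->AddVariable("M_b1b2",    "M(b1,b2)",   "GeV", 'F');
   //    dataloader->AddVariable("MT_MET_b1", "MT(MET,b1)", "GeV", 'F');
   //
   // The precomputation of M_b1b2 itself would look like:
   //
   //    TLorentzVector b1, b2;
   //    b1.SetPtEtaPhiM(pt1, eta1, phi1, m1);   // leading b-tagged jet
   //    b2.SetPtEtaPhiM(pt2, eta2, phi2, m2);   // sub-leading b-tagged jet
   //    Float_t M_b1b2 = (b1 + b2).M();
   //
   // where the kinematics are taken from the two highest-pT jets with
   // Jet.BTag != 0.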
   // Register the training and test trees
   int nSig    = sigh->GetEntries();
   int nBkgtt  = bkgtt->GetEntries();
   int nBkgtth = bkgtth->GetEntries();
   // std::cout << "NSig = \t" << nSig << std::endl;

   // Global event weights per tree (assumed to be 1.0 here; set as appropriate)
   Double_t sigWeight = 1.0;
   Double_t bkgWeight = 1.0;

   // factory->SetInputTrees(sig, bkg, sigWeight, bkgWeight);
   dataloader->AddSignalTree(sigh, sigWeight);
   dataloader->AddBackgroundTree(bkgtt, bkgWeight);
   dataloader->AddBackgroundTree(bkgtth, bkgWeight);

   TCut mycuts = "Electron.PT>30 && Muon.PT>30";
   TCut mycutb = "Electron.PT>30 && Muon.PT>30";

   dataloader->PrepareTrainingAndTestTree(mycuts, mycutb,
      "nTrain_Signal=0:nTrain_Background=0:SplitMode=Random:NormMode=None:!V");

   factory->BookMethod(dataloader, TMVA::Types::kBDT, "BDTG",
      "!H:!V:NTrees=400:MinNodeSize=2%:BoostType=Grad:Shrinkage=0.10:UseBaggedBoost:BaggedSampleFraction=0.5:nCuts=30:MaxDepth=3");

   factory->BookMethod(dataloader, TMVA::Types::kBDT, "BDT",
      "!H:!V:NTrees=800:MinNodeSize=2.5%:MaxDepth=4:BoostType=AdaBoost:AdaBoostBeta=0.5:UseBaggedBoost:BaggedSampleFraction=0.5:SeparationType=GiniIndex:nCuts=20");

   // factory->BookMethod(dataloader, TMVA::Types::kPDERS, "PDERS",
   //    "!H:V:NormTree=T:VolumeRangeMode=Adaptive:KernelEstimator=Gauss:GaussSigma=0.3:NEventsMin=400:NEventsMax=600");

   // k-Nearest Neighbour method (similar to PDE-RS)
   factory->BookMethod(dataloader, TMVA::Types::kKNN, "KNN",
      "H:nkNN=20:ScaleFrac=0.8:SigmaFact=1.0:Kernel=Gaus:UseKernel=F:UseWeight=T:!Trim");

   // Feed-forward multilayer perceptron implementation:
   // five hidden layers (separated by ","), TANH activation in the hidden
   // layers and LINEAR activation for the output layer. The first hidden
   // layer has 32 nodes, then 16, 8, 4 and 2. The error of the neural net
   // is determined by a cross-entropy function.
   TString layoutString("Layout=TANH|32,TANH|16,TANH|8,TANH|4,TANH|2,LINEAR");

   TString training0("LearningRate=1e-1,Momentum=0.0,Repetitions=1,ConvergenceSteps=100,BatchSize=100,TestRepetitions=15,WeightDecay=0.001,Regularization=NONE,DropConfig=0.0+0.5+0.5+0.5,DropRepetitions=1,Multithreading=True,Optimizer=ADAM");
   TString training1("LearningRate=1e-2,Momentum=0.5,Repetitions=1,ConvergenceSteps=100,BatchSize=100,TestRepetitions=7,WeightDecay=0.001,Regularization=L2,Multithreading=True,DropConfig=0.0+0.1+0.1+0.1,DropRepetitions=1,Optimizer=ADAM");
   TString training2("LearningRate=1e-3,Momentum=0.4,Repetitions=1,ConvergenceSteps=50,BatchSize=100,TestRepetitions=7,WeightDecay=0.001,Regularization=L2,Multithreading=True,DropConfig=0.0+0.1+0.1+0.1,DropRepetitions=1,Optimizer=ADAM");
   TString training3("LearningRate=1e-4,Momentum=0.3,Repetitions=1,ConvergenceSteps=50,BatchSize=100,TestRepetitions=7,WeightDecay=0.0001,Regularization=L2,Multithreading=True,Optimizer=ADAM");
   TString training4("LearningRate=1e-4,Momentum=0.1,Repetitions=1,ConvergenceSteps=50,BatchSize=100,TestRepetitions=7,WeightDecay=0.0001,Regularization=NONE,Multithreading=True,Optimizer=ADAM");
   TString training5("LearningRate=1e-4,Momentum=0.1,Repetitions=1,ConvergenceSteps=50,BatchSize=100,TestRepetitions=7,WeightDecay=0.0001,Regularization=L2,Multithreading=True,Optimizer=ADAM");
   /*
   TString training6("LearningRate=1e-4,Momentum=0.1,Repetitions=1,ConvergenceSteps=50,BatchSize=100,TestRepetitions=7,WeightDecay=0.0001,Regularization=NONE,Multithreading=True,Optimizer=ADAM");
   TString training7("LearningRate=1e-4,Momentum=0.1,Repetitions=1,ConvergenceSteps=50,BatchSize=100,TestRepetitions=7,WeightDecay=0.0001,Regularization=NONE,Multithreading=True,Optimizer=ADAM");
   */

   TString trainingStrategyString("TrainingStrategy=");
   trainingStrategyString += training0 + "|" + training1 + "|" + training2 + "|"
                           + training3 + "|" + training4 + "|" + training5;

   TString dnnOptions("!H:V:ErrorStrategy=CROSSENTROPY:VarTransform=G:WeightInitialization=XAVIERUNIFORM");
   dnnOptions.Append(":");
   dnnOptions.Append(layoutString);
   dnnOptions.Append(":");
   dnnOptions.Append(trainingStrategyString);

   TString cpuOptions = dnnOptions + ":Architecture=CPU";
   factory->BookMethod(dataloader, TMVA::Types::kDNN, "DNN", cpuOptions);

   // Artificial Neural Network (multilayer perceptron) - TMVA version
   factory->BookMethod(dataloader, TMVA::Types::kMLP, "MLP",
      "H:!V:NeuronType=tanh:VarTransform=N:NCycles=600:HiddenLayers=N+5:TestRate=5");

   // NN with BFGS quadratic minimisation
   factory->BookMethod(dataloader, TMVA::Types::kMLP, "MLPBFGS",
      "H:!V:NeuronType=tanh:VarTransform=N:NCycles=600:HiddenLayers=N+5:TestRate=5:TrainingMethod=BFGS");

   // NN (multilayer perceptron) - ROOT version
   factory->BookMethod(dataloader, TMVA::Types::kTMlpANN, "TMlpANN",
      "!H:!V:NCycles=200:HiddenLayers=N+1,N:LearningMethod=BFGS:ValidationFraction=0.3");

   // NN (multilayer perceptron) - ALEPH version (deprecated)
   factory->BookMethod(dataloader, TMVA::Types::kCFMlpANN, "CFMlpANN",
      "!H:!V:NCycles=2000:HiddenLayers=N+1,N");

   // Support Vector Machine
   factory->BookMethod(dataloader, TMVA::Types::kSVM, "SVM", "Gamma=0.25:Tol=0.001");

   // Boosted Decision Trees with bagging
   factory->BookMethod(dataloader, TMVA::Types::kBDT, "BDTB",
      "!H:!V:NTrees=400:BoostType=Bagging:SeparationType=GiniIndex:nCuts=20:PruneMethod=NoPruning");

   /*
   factory->BookMethod(dataloader, TMVA::Types::kPyGTB, "PyGTB",
      "H:!V:VarTransform=G:NEstimators=400:LearningRate=0.1:MaxDepth=3");
   factory->BookMethod(dataloader, TMVA::Types::kPyRandomForest, "PyRandomForest",
      "!V:VarTransform=G:NEstimators=400:Criterion=gini:MaxFeatures=auto:MaxDepth=6:MinSamplesLeaf=3:MinWeightFractionLeaf=0:Bootstrap=kTRUE");
   factory->BookMethod(dataloader, TMVA::Types::kPyAdaBoost, "PyAdaBoost",
      "!V:VarTransform=G:NEstimators=400");
   */

   // Train MVAs using the set of training events
   factory->TrainAllMethods();

   // Evaluate all MVAs using the set of test events
   factory->TestAllMethods();

   // Evaluate and compare performance of all configured MVAs
   factory->EvaluateAllMethods();

   // --------------------------------------------------------------

   // Save the output
   outputFile->Close();

   std::cout << "==> Wrote root file: " << outputFile->GetName() << std::endl;
   std::cout << "==> TMVAClassification is done!" << std::endl;

   delete factory;
   delete dataloader;

   return 0;
}
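// Usage sketch (assuming the macro is saved as tmvatest.C and the input
// files under /hhamdell/ are readable):
//
//    root -l tmvatest.C
//
// The trained weights are written under dataset/weights/, and the evaluation
// output in TMVA.root can be inspected interactively with the standard GUI:
//
//    root -l -e 'TMVA::TMVAGui("TMVA.root")'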