Hi Kim,
Thanks for the response. Here are the relevant snippets from the scripts.
For version 6.06.02:
TMVA::Factory *factory = new TMVA::Factory( sampletype+bdtclass, outputFile,"!V:!Silent:Color:DrawProgressBar:Transformations=I;P;G,D:AnalysisType=Classification" );
std::vector<TString> mvaVars_V = splitstr(getVar(bdtclass), " ");
std::vector<TString> spectatorVars_V = splitstr(spectatorVars, " ");
std::cout << "initializing variables..." << std::endl;
for (size_t i = 0; i < mvaVars_V.size(); i++) {
   std::cout << "adding: " << mvaVars_V.at(i) << std::endl;
   factory->AddVariable(mvaVars_V.at(i), mvaVars_V.at(i), "F");
}
for (size_t i = 0; i < spectatorVars_V.size(); i++) {
   std::cout << "adding spectator: " << spectatorVars_V.at(i) << std::endl;
   factory->AddSpectator(spectatorVars_V.at(i), spectatorVars_V.at(i), "F");
}
Double_t signalWeight = 1.0;
Double_t backgroundWeight = 1.0;
factory->AddSignalTree ( signalTree, signalWeight);
factory->AddBackgroundTree(backgroundTree, backgroundWeight);
factory->SetSignalWeightExpression("weight");
factory->SetBackgroundWeightExpression("weight");
TCut mycuts = getcut(bdtclass);
TCut mycutb = getcut(bdtclass);
factory->PrepareTrainingAndTestTree(mycuts, mycutb, "nTrain_Signal=0:nTrain_Background=0:SplitMode=Random:NormMode=EqualNumEvents:!V");
factory->BookMethod(TMVA::Types::kBDT, "BDTG1", "!H:!V:NTrees=800:MinNodeSize=1:BoostType=Grad:Shrinkage=0.06:UseBaggedBoost:BaggedSampleFraction=0.6:nCuts=20:MaxDepth=3");
std::cout << "Training all methods..." << std::endl;
factory->TrainAllMethods();
std::cout << "Testing all methods..." << std::endl;
factory->TestAllMethods();
std::cout << "Evaluate all methods..." << std::endl;
factory->EvaluateAllMethods();
outputFile->Close();
std::cout << "Finished the training run..." << std::endl;
delete factory;
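(For reference, splitstr, used in both versions, is just a small helper that splits a delimiter-separated string into tokens. Roughly along these lines; the exact return type, std::vector<TString>, and the implementation shown here are my assumptions, the real one may differ slightly:)
// Rough sketch of the splitstr helper used above.
// In a compiled macro this needs TString.h, TObjArray.h, TObjString.h and <vector>.
std::vector<TString> splitstr(const TString& input, const TString& delim)
{
   std::vector<TString> tokens;
   TObjArray* parts = input.Tokenize(delim);        // split on the delimiter
   for (int i = 0; i < parts->GetEntries(); i++) {
      tokens.push_back(((TObjString*)parts->At(i))->GetString());
   }
   delete parts;                                     // Tokenize allocates the array
   return tokens;
}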
For version 6.12.02:
TMVA::Factory *factory = new TMVA::Factory(mvaoutname, outputFile,"!V:!Silent:Color:DrawProgressBar:Transformations=I;P;G,D:AnalysisType=Classification" );
TMVA::DataLoader *dataloader = new TMVA::DataLoader("dataset");
std::vector<TString> mvaVars_V = splitstr(getVar(bdtclass), " ");
std::vector<TString> spectatorVars_V = splitstr(spectatorVars, " ");
std::cout << "initializing variables..." << std::endl;
for (size_t i = 0; i < mvaVars_V.size(); i++) {
   std::cout << "adding: " << mvaVars_V.at(i) << std::endl;
   dataloader->AddVariable(mvaVars_V.at(i), mvaVars_V.at(i), "F");
}
for (size_t i = 0; i < spectatorVars_V.size(); i++) {
   std::cout << "adding spectator: " << spectatorVars_V.at(i) << std::endl;
   dataloader->AddSpectator(spectatorVars_V.at(i), spectatorVars_V.at(i), "F");
}
Double_t signalWeight = 1.0;
Double_t backgroundWeight = 1.0;
dataloader->AddSignalTree ( signalTree, signalWeight);
dataloader->AddBackgroundTree(backgroundTree, backgroundWeight);
dataloader->SetSignalWeightExpression("weight");
dataloader->SetBackgroundWeightExpression("weight");
TCut mycuts = getcut(bdtclass);
TCut mycutb = getcut(bdtclass);
dataloader->PrepareTrainingAndTestTree(mycuts, mycutb, "nTrain_Signal=0:nTrain_Background=0:SplitMode=Random:NormMode=EqualNumEvents:!V");
factory->BookMethod(dataloader, TMVA::Types::kBDT, "BDTG1", "!H:!V:NTrees=800:MinNodeSize=1:BoostType=Grad:Shrinkage=0.06:UseBaggedBoost:BaggedSampleFraction=0.6:nCuts=20:MaxDepth=3");
std::cout << "Training all methods..." << std::endl;
factory->TrainAllMethods();
std::cout << "Testing all methods..." << std::endl;
factory->TestAllMethods();
std::cout << "Evaluate all methods..." << std::endl;
factory->EvaluateAllMethods();
outputFile->Close();
std::cout << "Finished the training run..." << std::endl;
delete dataloader;
delete factory;
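In both versions, outputFile, signalTree and backgroundTree are created earlier in the macro, roughly like this; the file and tree names below are placeholders, not the real ones:
// Placeholder setup for the objects used above (actual file/tree names differ).
TFile* inputFile      = TFile::Open("input.root", "READ");
TTree* signalTree     = (TTree*)inputFile->Get("signal_tree");
TTree* backgroundTree = (TTree*)inputFile->Get("background_tree");
TFile* outputFile     = TFile::Open("tmva_output.root", "RECREATE");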
Thanks
Kehinde