==> Start TMVAClassificationApplication
: Booking "BDT method" of type "BDT" from dataset/weights/TMVAClassification_BDT.weights.xml.
: Reading weight file: dataset/weights/TMVAClassification_BDT.weights.xml
<HEADER> DataSetInfo : [Default] : Added class "Signal"
<HEADER> DataSetInfo : [Default] : Added class "Background"
: Booked classifier "BDT" of type: "BDT"
: Booking "Cuts method" of type "Cuts" from dataset/weights/TMVAClassification_Cuts.weights.xml.
: Reading weight file: dataset/weights/TMVAClassification_Cuts.weights.xml
: Read cuts optimised using sample of MC events
: Reading 100 signal efficiency bins for 4 variables
: Booked classifier "Cuts" of type: "Cuts"
: Booking "CutsD method" of type "Cuts" from dataset/weights/TMVAClassification_CutsD.weights.xml.
: Reading weight file: dataset/weights/TMVAClassification_CutsD.weights.xml
: Read cuts optimised using sample of MC events
: Reading 100 signal efficiency bins for 4 variables
: Booked classifier "CutsD" of type: "Cuts"
: Booking "FDA_GA method" of type "FDA" from dataset/weights/TMVAClassification_FDA_GA.weights.xml.
: Reading weight file: dataset/weights/TMVAClassification_FDA_GA.weights.xml
: User-defined formula string : "(0)+(1)*x0+(2)*x1+(3)*x2+(4)*x3"
: TFormula-compatible formula string: "[0]+[1]*[5]+[2]*[6]+[3]*[7]+[4]*[8]"
: Booked classifier "FDA_GA" of type: "FDA"
: Booking "KNN method" of type "KNN" from dataset/weights/TMVAClassification_KNN.weights.xml.
: Reading weight file: dataset/weights/TMVAClassification_KNN.weights.xml
: Creating kd-tree with 2000 events
: Computing scale factor for 1d distributions: (ifrac, bottom, top) = (80%, 10%, 90%)
<HEADER> ModulekNN : Optimizing tree for 4 variables with 2000 values
: <Fill> Class 1 has 1000 events
: <Fill> Class 2 has 1000 events
: Booked classifier "KNN" of type: "KNN"
: Booking "LD method" of type "LD" from dataset/weights/TMVAClassification_LD.weights.xml.
: Reading weight file: dataset/weights/TMVAClassification_LD.weights.xml
: Booked classifier "LD" of type: "LD"
: Booking "Likelihood method" of type "Likelihood" from dataset/weights/TMVAClassification_Likelihood.weights.xml.
: Reading weight file: dataset/weights/TMVAClassification_Likelihood.weights.xml
: Booked classifier "Likelihood" of type: "Likelihood"
: Booking "LikelihoodPCA method" of type "Likelihood" from dataset/weights/TMVAClassification_LikelihoodPCA.weights.xml.
: Reading weight file: dataset/weights/TMVAClassification_LikelihoodPCA.weights.xml
: Booked classifier "LikelihoodPCA" of type: "Likelihood"
: Booking "MLPBNN method" of type "MLP" from dataset/weights/TMVAClassification_MLPBNN.weights.xml.
: Reading weight file: dataset/weights/TMVAClassification_MLPBNN.weights.xml
<HEADER> MLPBNN : Building Network.
: Initializing weights
: Booked classifier "MLPBNN" of type: "MLP"
: Booking "PDEFoam method" of type "PDEFoam" from dataset/weights/TMVAClassification_PDEFoam.weights.xml.
: Reading weight file: dataset/weights/TMVAClassification_PDEFoam.weights.xml
: Read foams from file: dataset/weights/TMVAClassification_PDEFoam.weights_foams.root
: Booked classifier "PDEFoam" of type: "PDEFoam"
: Booking "PDERS method" of type "PDERS" from dataset/weights/TMVAClassification_PDERS.weights.xml.
: Reading weight file: dataset/weights/TMVAClassification_PDERS.weights.xml
: signal and background scales: 0.001 0.001
: Booked classifier "PDERS" of type: "PDERS"
: Booking "RuleFit method" of type "RuleFit" from dataset/weights/TMVAClassification_RuleFit.weights.xml.
: Reading weight file: dataset/weights/TMVAClassification_RuleFit.weights.xml
: Booked classifier "RuleFit" of type: "RuleFit"
: Booking "SVM method" of type "SVM" from dataset/weights/TMVAClassification_SVM.weights.xml.
: Reading weight file: dataset/weights/TMVAClassification_SVM.weights.xml
: Booked classifier "SVM" of type: "SVM"
--- TMVAClassificationApp : Using input file: /github/home/ROOT-CI/build/tutorials/machine_learning/data/tmva_class_example.root
--- Select signal sample
: Rebuilding Dataset Default
--- End of event loop: Real time 0:00:00, CP time 0.860
--- Created root file: "TMVApp.root" containing the MVA output histograms
==> TMVAClassificationApplication is done!
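The log above is produced by the macro listed below. Reduced to its core, the application declares the training variables to a TMVA::Reader, books each weight file, and evaluates the booked methods event by event. The following minimal sketch condenses that pattern for the BDT method only; the function name readerSketch is illustrative, while the variable expressions, the "BDT method" tag and the weight-file path are the ones appearing in this example:

#include <iostream>
#include "TMVA/Reader.h"

// Minimal sketch: declare the variables, book one method, evaluate it.
void readerSketch()
{
   Float_t var1 = 0, var2 = 0, var3 = 0, var4 = 0;
   TMVA::Reader reader("!Color:!Silent");
   reader.AddVariable("myvar1 := var1+var2", &var1);
   reader.AddVariable("myvar2 := var1-var2", &var2);
   reader.AddVariable("var3", &var3);
   reader.AddVariable("var4", &var4);
   // Spectators declared during the training would have to be added here as well.
   reader.BookMVA("BDT method", "dataset/weights/TMVAClassification_BDT.weights.xml");

   // Per event: fill var1..var4 from the data, then ask for the response.
   Double_t mvaValue = reader.EvaluateMVA("BDT method");
   std::cout << "BDT response: " << mvaValue << std::endl;
}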
#include <cstdlib>
#include <vector>
#include <iostream>
#include <map>
#include <string>

#include "TFile.h"
#include "TTree.h"
#include "TString.h"
#include "TSystem.h"
#include "TROOT.h"
#include "TStopwatch.h"

#include "TMVA/Tools.h"
#include "TMVA/Reader.h"
#include "TMVA/MethodCuts.h"

using namespace TMVA;

void TMVAClassificationApplication( TString myMethodList = "" )
{
   // This loads the TMVA library
   TMVA::Tools::Instance();
   std::map<std::string,int> Use;
   Use["Cuts"] = 1;
   Use["CutsD"] = 1;
   Use["CutsPCA"] = 0;
   Use["CutsGA"] = 0;
   Use["CutsSA"] = 0;
   Use["Likelihood"] = 1;
   Use["LikelihoodD"] = 0;
   Use["LikelihoodPCA"] = 1;
   Use["LikelihoodKDE"] = 0;
   Use["LikelihoodMIX"] = 0;
   Use["PDERS"] = 1;
   Use["PDERSD"] = 0;
   Use["PDERSPCA"] = 0;
   Use["PDEFoam"] = 1;
   Use["PDEFoamBoost"] = 0;
   Use["KNN"] = 1;
   Use["LD"] = 1;
   Use["Fisher"] = 0;
   Use["FisherG"] = 0;
   Use["BoostedFisher"] = 0;
   Use["HMatrix"] = 0;
   Use["FDA_GA"] = 1;
   Use["FDA_SA"] = 0;
   Use["FDA_MC"] = 0;
   Use["FDA_MT"] = 0;
   Use["FDA_GAMT"] = 0;
   Use["FDA_MCMT"] = 0;
   Use["MLP"] = 0;
   Use["MLPBFGS"] = 0;
   Use["MLPBNN"] = 1;
   Use["CFMlpANN"] = 0;
   Use["TMlpANN"] = 0;
   Use["DNN_CPU"] = 0;
   Use["DNN_GPU"] = 0;
   Use["SVM"] = 1;
   Use["BDT"] = 1;
   Use["BDTG"] = 0;
   Use["BDTB"] = 0;
   Use["BDTD"] = 0;
   Use["BDTF"] = 0;
   Use["RuleFit"] = 1;
   Use["Plugin"] = 0;
   Use["Category"] = 0;
   Use["SVM_Gauss"] = 0;
   Use["SVM_Poly"] = 0;
   Use["SVM_Lin"] = 0;
   std::cout << std::endl;
   std::cout << "==> Start TMVAClassificationApplication" << std::endl;

   // Select only the methods given on the command line (if any)
   if (myMethodList != "") {
      for (std::map<std::string,int>::iterator it = Use.begin(); it != Use.end(); it++) it->second = 0;

      std::vector<TString> mlist = gTools().SplitString( myMethodList, ',' );
      for (UInt_t i=0; i<mlist.size(); i++) {
         std::string regMethod(mlist[i].Data());
         if (Use.find(regMethod) == Use.end()) {
            std::cout << "Method \"" << regMethod
                      << "\" not known in TMVA under this name. Choose among the following:" << std::endl;
            for (std::map<std::string,int>::iterator it = Use.begin(); it != Use.end(); it++) {
               std::cout << it->first << " ";
            }
            std::cout << std::endl;
            return;
         }
         Use[regMethod] = 1;
      }
   }
   // Create the Reader object
   TMVA::Reader *reader = new TMVA::Reader( "!Color:!Silent" );

   // Declare the input variables (names and types must match those used in the training)
   Float_t var1, var2, var3, var4;
   reader->AddVariable( "myvar1 := var1+var2", &var1 );
   reader->AddVariable( "myvar2 := var1-var2", &var2 );
   reader->AddVariable( "var3", &var3 );
   reader->AddVariable( "var4", &var4 );

   // Spectator variables declared in the training must be added to the reader as well
   Float_t spec1, spec2;
   reader->AddSpectator( "spec1 := var1*2", &spec1 );
   reader->AddSpectator( "spec2 := var1*3", &spec2 );

   if (Use["Category"]) {
      // spectator variables defining the categories (elided)
   }
   // Book the MVA methods from their weight files
   TString dir    = "dataset/weights/";
   TString prefix = "TMVAClassification";

   for (std::map<std::string,int>::iterator it = Use.begin(); it != Use.end(); it++) {
      if (it->second) {
         TString methodName = TString(it->first) + TString(" method");
         TString weightfile = dir + prefix + TString("_") + TString(it->first) + TString(".weights.xml");
         reader->BookMVA( methodName, weightfile );
      }
   }
   // Book output histograms
   UInt_t nbin = 100;
   TH1F *histLk(0), *histLkD(0), *histLkPCA(0), *histLkKDE(0), *histLkMIX(0);
   TH1F *histPD(0), *histPDD(0), *histPDPCA(0), *histKNN(0), *histHm(0);
   TH1F *histFi(0), *histFiG(0), *histFiB(0), *histLD(0);
   TH1F *histNn(0), *histNnbfgs(0), *histNnbnn(0), *histNnC(0), *histNnT(0);
   TH1F *histBdt(0), *histBdtG(0), *histBdtB(0), *histBdtD(0), *histBdtF(0);
   TH1F *histRf(0), *histSVMG(0), *histSVMP(0), *histSVML(0);
   TH1F *histFDAMT(0), *histFDAGA(0), *histCat(0), *histPBdt(0);

   if (Use["Likelihood"])    histLk    = new TH1F( "MVA_Likelihood",    "MVA_Likelihood",    nbin, -1, 1 );
   if (Use["LikelihoodD"])   histLkD   = new TH1F( "MVA_LikelihoodD",   "MVA_LikelihoodD",   nbin, -1, 0.9999 );
   if (Use["LikelihoodPCA"]) histLkPCA = new TH1F( "MVA_LikelihoodPCA", "MVA_LikelihoodPCA", nbin, -1, 1 );
   if (Use["LikelihoodKDE"]) histLkKDE = new TH1F( "MVA_LikelihoodKDE", "MVA_LikelihoodKDE", nbin, -0.00001, 0.99999 );
   if (Use["LikelihoodMIX"]) histLkMIX = new TH1F( "MVA_LikelihoodMIX", "MVA_LikelihoodMIX", nbin, 0, 1 );
   if (Use["PDERS"])         histPD    = new TH1F( "MVA_PDERS",         "MVA_PDERS",         nbin, 0, 1 );
   if (Use["PDERSD"])        histPDD   = new TH1F( "MVA_PDERSD",        "MVA_PDERSD",        nbin, 0, 1 );
   if (Use["PDERSPCA"])      histPDPCA = new TH1F( "MVA_PDERSPCA",      "MVA_PDERSPCA",      nbin, 0, 1 );
   if (Use["KNN"])           histKNN   = new TH1F( "MVA_KNN",           "MVA_KNN",           nbin, -0.5, 1.5 );
   if (Use["HMatrix"])       histHm    = new TH1F( "MVA_HMatrix",       "MVA_HMatrix",       nbin, -0.95, 1.55 );
   if (Use["Fisher"])        histFi    = new TH1F( "MVA_Fisher",        "MVA_Fisher",        nbin, -4, 4 );
   if (Use["FisherG"])       histFiG   = new TH1F( "MVA_FisherG",       "MVA_FisherG",       nbin, -1, 1 );
   if (Use["BoostedFisher"]) histFiB   = new TH1F( "MVA_BoostedFisher", "MVA_BoostedFisher", nbin, -2, 2 );
   if (Use["LD"])            histLD    = new TH1F( "MVA_LD",            "MVA_LD",            nbin, -2, 2 );
   if (Use["MLP"])           histNn    = new TH1F( "MVA_MLP",           "MVA_MLP",           nbin, -1.25, 1.5 );
   if (Use["MLPBFGS"])       histNnbfgs = new TH1F( "MVA_MLPBFGS",      "MVA_MLPBFGS",       nbin, -1.25, 1.5 );
   if (Use["MLPBNN"])        histNnbnn = new TH1F( "MVA_MLPBNN",        "MVA_MLPBNN",        nbin, -1.25, 1.5 );
   if (Use["CFMlpANN"])      histNnC   = new TH1F( "MVA_CFMlpANN",      "MVA_CFMlpANN",      nbin, 0, 1 );
   if (Use["TMlpANN"])       histNnT   = new TH1F( "MVA_TMlpANN",       "MVA_TMlpANN",       nbin, -1.3, 1.3 );
   if (Use["BDT"])           histBdt   = new TH1F( "MVA_BDT",           "MVA_BDT",           nbin, -0.8, 0.8 );
   if (Use["BDTG"])          histBdtG  = new TH1F( "MVA_BDTG",          "MVA_BDTG",          nbin, -1.0, 1.0 );
   if (Use["BDTB"])          histBdtB  = new TH1F( "MVA_BDTB",          "MVA_BDTB",          nbin, -1.0, 1.0 );
   if (Use["BDTD"])          histBdtD  = new TH1F( "MVA_BDTD",          "MVA_BDTD",          nbin, -0.8, 0.8 );
   if (Use["BDTF"])          histBdtF  = new TH1F( "MVA_BDTF",          "MVA_BDTF",          nbin, -1.0, 1.0 );
   if (Use["RuleFit"])       histRf    = new TH1F( "MVA_RuleFit",       "MVA_RuleFit",       nbin, -2.0, 2.0 );
   if (Use["SVM_Gauss"])     histSVMG  = new TH1F( "MVA_SVM_Gauss",     "MVA_SVM_Gauss",     nbin, 0.0, 1.0 );
   if (Use["SVM_Poly"])      histSVMP  = new TH1F( "MVA_SVM_Poly",      "MVA_SVM_Poly",      nbin, 0.0, 1.0 );
   if (Use["SVM_Lin"])       histSVML  = new TH1F( "MVA_SVM_Lin",       "MVA_SVM_Lin",       nbin, 0.0, 1.0 );
   if (Use["FDA_MT"])        histFDAMT = new TH1F( "MVA_FDA_MT",        "MVA_FDA_MT",        nbin, -2.0, 3.0 );
   if (Use["FDA_GA"])        histFDAGA = new TH1F( "MVA_FDA_GA",        "MVA_FDA_GA",        nbin, -2.0, 3.0 );
   if (Use["Category"])      histCat   = new TH1F( "MVA_Category",      "MVA_Category",      nbin, -2., 2. );
   if (Use["Plugin"])        histPBdt  = new TH1F( "MVA_PBDT",          "MVA_BDT",           nbin, -0.8, 0.8 );
if (Use["PDEFoam"]) {
}
if (Use["Fisher"]) {
}
   // Prepare the input tree: the example file holds a toy signal and a toy background tree
   TFile *input(0);
   TString fname = gROOT->GetTutorialDir() + "/machine_learning/data/tmva_class_example.root";
   if (!gSystem->AccessPathName( fname )) {
      input = TFile::Open( fname );
   }
   if (!input) {
      std::cout << "ERROR: could not open data file" << std::endl;
      exit(1);
   }
   std::cout << "--- TMVAClassificationApp : Using input file: " << input->GetName() << std::endl;
std::cout << "--- Select signal sample" << std::endl;
std::vector<Float_t>
vecVar(4);
std::cout <<
"--- Processing: " <<
theTree->GetEntries() <<
" events" << std::endl;
if (
ievt%1000 == 0) std::cout <<
"--- ... Processing event: " <<
ievt << std::endl;
if (Use["CutsGA"]) {
}
      if (Use["Likelihood"   ])   histLk     ->Fill( reader->EvaluateMVA( "Likelihood method"    ) );
      if (Use["LikelihoodD"  ])   histLkD    ->Fill( reader->EvaluateMVA( "LikelihoodD method"   ) );
      if (Use["LikelihoodPCA"])   histLkPCA  ->Fill( reader->EvaluateMVA( "LikelihoodPCA method" ) );
      if (Use["LikelihoodKDE"])   histLkKDE  ->Fill( reader->EvaluateMVA( "LikelihoodKDE method" ) );
      if (Use["LikelihoodMIX"])   histLkMIX  ->Fill( reader->EvaluateMVA( "LikelihoodMIX method" ) );
      if (Use["PDERS"        ])   histPD     ->Fill( reader->EvaluateMVA( "PDERS method"         ) );
      if (Use["PDERSD"       ])   histPDD    ->Fill( reader->EvaluateMVA( "PDERSD method"        ) );
      if (Use["PDERSPCA"     ])   histPDPCA  ->Fill( reader->EvaluateMVA( "PDERSPCA method"      ) );
      if (Use["KNN"          ])   histKNN    ->Fill( reader->EvaluateMVA( "KNN method"           ) );
      if (Use["HMatrix"      ])   histHm     ->Fill( reader->EvaluateMVA( "HMatrix method"       ) );
      if (Use["Fisher"       ])   histFi     ->Fill( reader->EvaluateMVA( "Fisher method"        ) );
      if (Use["FisherG"      ])   histFiG    ->Fill( reader->EvaluateMVA( "FisherG method"       ) );
      if (Use["BoostedFisher"])   histFiB    ->Fill( reader->EvaluateMVA( "BoostedFisher method" ) );
      if (Use["LD"           ])   histLD     ->Fill( reader->EvaluateMVA( "LD method"            ) );
      if (Use["MLP"          ])   histNn     ->Fill( reader->EvaluateMVA( "MLP method"           ) );
      if (Use["MLPBFGS"      ])   histNnbfgs ->Fill( reader->EvaluateMVA( "MLPBFGS method"       ) );
      if (Use["MLPBNN"       ])   histNnbnn  ->Fill( reader->EvaluateMVA( "MLPBNN method"        ) );
      if (Use["CFMlpANN"     ])   histNnC    ->Fill( reader->EvaluateMVA( "CFMlpANN method"      ) );
      if (Use["TMlpANN"      ])   histNnT    ->Fill( reader->EvaluateMVA( "TMlpANN method"       ) );
      if (Use["BDT"          ])   histBdt    ->Fill( reader->EvaluateMVA( "BDT method"           ) );
      if (Use["BDTG"         ])   histBdtG   ->Fill( reader->EvaluateMVA( "BDTG method"          ) );
      if (Use["BDTB"         ])   histBdtB   ->Fill( reader->EvaluateMVA( "BDTB method"          ) );
      if (Use["BDTD"         ])   histBdtD   ->Fill( reader->EvaluateMVA( "BDTD method"          ) );
      if (Use["BDTF"         ])   histBdtF   ->Fill( reader->EvaluateMVA( "BDTF method"          ) );
      if (Use["RuleFit"      ])   histRf     ->Fill( reader->EvaluateMVA( "RuleFit method"       ) );
      if (Use["SVM_Gauss"    ])   histSVMG   ->Fill( reader->EvaluateMVA( "SVM_Gauss method"     ) );
      if (Use["SVM_Poly"     ])   histSVMP   ->Fill( reader->EvaluateMVA( "SVM_Poly method"      ) );
      if (Use["SVM_Lin"      ])   histSVML   ->Fill( reader->EvaluateMVA( "SVM_Lin method"       ) );
      if (Use["FDA_MT"       ])   histFDAMT  ->Fill( reader->EvaluateMVA( "FDA_MT method"        ) );
      if (Use["FDA_GA"       ])   histFDAGA  ->Fill( reader->EvaluateMVA( "FDA_GA method"        ) );
      if (Use["Category"     ])   histCat    ->Fill( reader->EvaluateMVA( "Category method"      ) );
      if (Use["Plugin"       ])   histPBdt   ->Fill( reader->EvaluateMVA( "P_BDT method"         ) );
if (Use["PDEFoam"]) {
}
if (Use["Fisher"]) {
}
}
   sw.Stop();
   std::cout << "--- End of event loop: "; sw.Print();

   // Efficiency of the cut classifier at the chosen working point
   if (Use["CutsGA"]) std::cout << "--- Efficiency for CutsGA method: "
                                << double(nSelCutsGA)/theTree->GetEntries()
                                << " (for a required signal efficiency of " << effS << ")" << std::endl;

   if (Use["CutsGA"]) {
      // Retrieve the cut values for a particular signal efficiency from the Reader
      TMVA::MethodCuts* mcuts = reader->FindCutsMVA( "CutsGA method" );
      if (mcuts) {
         std::vector<Double_t> cutsMin;
         std::vector<Double_t> cutsMax;
         mcuts->GetCuts( 0.7, cutsMin, cutsMax );
         std::cout << "--- -------------------------------------------------------------" << std::endl;
         std::cout << "--- Retrieve cut values for signal efficiency of 0.7 from Reader" << std::endl;
         for (UInt_t ivar=0; ivar<cutsMin.size(); ivar++) {
            std::cout << "... Cut: "
                      << cutsMin[ivar]
                      << " < \""
                      << mcuts->GetInputVar(ivar)
                      << "\" <= "
                      << cutsMax[ivar] << std::endl;
         }
         std::cout << "--- -------------------------------------------------------------" << std::endl;
      }
   }
   // Write the output histograms
   TFile *target = new TFile( "TMVApp.root", "RECREATE" );
   if (Use["Likelihood"   ])   histLk     ->Write();
   if (Use["LikelihoodD"  ])   histLkD    ->Write();
   if (Use["LikelihoodPCA"])   histLkPCA  ->Write();
   if (Use["LikelihoodKDE"])   histLkKDE  ->Write();
   if (Use["LikelihoodMIX"])   histLkMIX  ->Write();
   if (Use["PDERS"        ])   histPD     ->Write();
   if (Use["PDERSD"       ])   histPDD    ->Write();
   if (Use["KNN"          ])   histKNN    ->Write();
   if (Use["HMatrix"      ])   histHm     ->Write();
   if (Use["Fisher"       ])   histFi     ->Write();
   if (Use["FisherG"      ])   histFiG    ->Write();
   if (Use["BoostedFisher"])   histFiB    ->Write();
   if (Use["LD"           ])   histLD     ->Write();
   if (Use["MLP"          ])   histNn     ->Write();
   if (Use["CFMlpANN"     ])   histNnC    ->Write();
   if (Use["TMlpANN"      ])   histNnT    ->Write();
   if (Use["BDT"          ])   histBdt    ->Write();
   if (Use["RuleFit"      ])   histRf     ->Write();
   if (Use["SVM_Gauss"    ])   histSVMG   ->Write();
   if (Use["SVM_Poly"     ])   histSVMP   ->Write();
   if (Use["SVM_Lin"      ])   histSVML   ->Write();
   if (Use["Category"     ])   histCat    ->Write();
   target->Close();

   std::cout << "--- Created root file: \"TMVApp.root\" containing the MVA output histograms" << std::endl;

   delete reader;

   std::cout << "==> TMVAClassificationApplication is done!" << std::endl << std::endl;
}

int main( int argc, char** argv )
{
   // Collect the method names given on the command line into a comma-separated list
   TString methodList;
   for (int i = 1; i < argc; i++) {
      TString regMethod(argv[i]);
      if (regMethod == "-b" || regMethod == "--batch") continue;
      if (!methodList.IsNull()) methodList += TString(",");
      methodList += regMethod;
   }
   TMVAClassificationApplication(methodList);
   return 0;
}
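Once the macro has run, TMVApp.root can be inspected like any other ROOT file. Below is a minimal sketch; the function name inspectTMVApp is illustrative, and it assumes the file was produced in the current directory and reads the MVA_BDT histogram booked above:

#include "TFile.h"
#include "TH1F.h"

// Open the application output and draw the BDT response histogram.
void inspectTMVApp()
{
   TFile *f = TFile::Open("TMVApp.root");
   if (!f || f->IsZombie()) return;
   TH1F *hBdt = (TH1F*)f->Get("MVA_BDT");
   if (hBdt) hBdt->Draw();
}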