TMVAClassificationApplication.C
/// \file
/// \ingroup tutorial_tmva
/// \notebook -nodraw
/// This macro provides a simple example on how to use the trained classifiers
/// within an analysis module
/// - Project   : TMVA - a ROOT-integrated toolkit for multivariate data analysis
/// - Package   : TMVA
/// - Executable: TMVAClassificationApplication
///
/// \macro_output
/// \macro_code
/// \author Andreas Hoecker
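///
/// Usage sketch (an assumption about the intended workflow, not part of the original
/// header): the weight files read below are written by the companion training macro
/// TMVAClassification.C into dataset/weights/, after which this application can be
/// run from the ROOT prompt, e.g.
///
///     root -l -b -q 'TMVAClassificationApplication.C("BDT,MLPBNN")'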

#include <cstdlib>
#include <vector>
#include <iostream>
#include <map>
#include <string>

#include "TFile.h"
#include "TTree.h"
#include "TString.h"
#include "TSystem.h"
#include "TROOT.h"
#include "TStopwatch.h"

#include "TMVA/Tools.h"
#include "TMVA/Reader.h"
#include "TMVA/MethodCuts.h"

using namespace TMVA;

void TMVAClassificationApplication( TString myMethodList = "" )
{

   //---------------------------------------------------------------
   // This loads the library
   TMVA::Tools::Instance();

   // Default MVA methods to be trained + tested
   std::map<std::string,int> Use;

   // Cut optimisation
   Use["Cuts"]            = 1;
   Use["CutsD"]           = 1;
   Use["CutsPCA"]         = 0;
   Use["CutsGA"]          = 0;
   Use["CutsSA"]          = 0;
   //
   // 1-dimensional likelihood ("naive Bayes estimator")
   Use["Likelihood"]      = 1;
   Use["LikelihoodD"]     = 0; // the "D" extension indicates decorrelated input variables (see option strings)
   Use["LikelihoodPCA"]   = 1; // the "PCA" extension indicates PCA-transformed input variables (see option strings)
   Use["LikelihoodKDE"]   = 0;
   Use["LikelihoodMIX"]   = 0;
   //
   // Multidimensional likelihood and nearest-neighbour methods
   Use["PDERS"]           = 1;
   Use["PDERSD"]          = 0;
   Use["PDERSPCA"]        = 0;
   Use["PDEFoam"]         = 1;
   Use["PDEFoamBoost"]    = 0; // uses generalised MVA method boosting
   Use["KNN"]             = 1; // k-nearest neighbour method
   //
   // Linear Discriminant Analysis
   Use["LD"]              = 1; // Linear Discriminant identical to Fisher
   Use["Fisher"]          = 0;
   Use["FisherG"]         = 0;
   Use["BoostedFisher"]   = 0; // uses generalised MVA method boosting
   Use["HMatrix"]         = 0;
   //
   // Function Discriminant analysis
   Use["FDA_GA"]          = 1; // minimisation of user-defined function using Genetic Algorithm
   Use["FDA_SA"]          = 0;
   Use["FDA_MC"]          = 0;
   Use["FDA_MT"]          = 0;
   Use["FDA_GAMT"]        = 0;
   Use["FDA_MCMT"]        = 0;
   //
   // Neural Networks (all are feed-forward Multilayer Perceptrons)
   Use["MLP"]             = 0; // Recommended ANN
   Use["MLPBFGS"]         = 0; // Recommended ANN with optional training method
   Use["MLPBNN"]          = 1; // Recommended ANN with BFGS training method and Bayesian regulator
   Use["CFMlpANN"]        = 0; // Deprecated ANN from ALEPH
   Use["TMlpANN"]         = 0; // ROOT's own ANN
   Use["DNN_CPU"]         = 0; // Multi-core accelerated DNN
   Use["DNN_GPU"]         = 0; // CUDA-accelerated DNN training
   //
   // Support Vector Machine
   Use["SVM"]             = 1;
   //
   // Boosted Decision Trees
   Use["BDT"]             = 1; // uses Adaptive Boost
   Use["BDTG"]            = 0; // uses Gradient Boost
   Use["BDTB"]            = 0; // uses Bagging
   Use["BDTD"]            = 0; // decorrelation + Adaptive Boost
   Use["BDTF"]            = 0; // allows usage of the Fisher discriminant for node splitting
   //
   // Friedman's RuleFit method, i.e., an optimised series of cuts ("rules")
   Use["RuleFit"]         = 1;
   // ---------------------------------------------------------------
   Use["Plugin"]          = 0;
   Use["Category"]        = 0;
   Use["SVM_Gauss"]       = 0;
   Use["SVM_Poly"]        = 0;
   Use["SVM_Lin"]         = 0;

   std::cout << std::endl;
   std::cout << "==> Start TMVAClassificationApplication" << std::endl;

   // Select methods (don't look at this code - not of interest)
   if (myMethodList != "") {
      for (std::map<std::string,int>::iterator it = Use.begin(); it != Use.end(); it++) it->second = 0;

      std::vector<TString> mlist = gTools().SplitString( myMethodList, ',' );
      for (UInt_t i=0; i<mlist.size(); i++) {
         std::string regMethod(mlist[i]);

         if (Use.find(regMethod) == Use.end()) {
            std::cout << "Method \"" << regMethod
                      << "\" not known in TMVA under this name. Choose among the following:" << std::endl;
            for (std::map<std::string,int>::iterator it = Use.begin(); it != Use.end(); it++) {
               std::cout << it->first << " ";
            }
            std::cout << std::endl;
            return;
         }
         Use[regMethod] = 1;
      }
   }

   // --------------------------------------------------------------------------------------------------

   // Create the Reader object

   TMVA::Reader *reader = new TMVA::Reader( "!Color:!Silent" );

   // Create a set of variables and declare them to the reader
   // - the variable names MUST correspond in name and type to those given in the weight file(s) used
   Float_t var1, var2;
   Float_t var3, var4;
   reader->AddVariable( "myvar1 := var1+var2", &var1 );
   reader->AddVariable( "myvar2 := var1-var2", &var2 );
   reader->AddVariable( "var3", &var3 );
   reader->AddVariable( "var4", &var4 );

   // Spectator variables declared in the training have to be added to the reader, too
   Float_t spec1, spec2;
   reader->AddSpectator( "spec1 := var1*2", &spec1 );
   reader->AddSpectator( "spec2 := var1*3", &spec2 );

   Float_t Category_cat1, Category_cat2, Category_cat3;
   if (Use["Category"]){
      // Add artificial spectators for distinguishing categories
      reader->AddSpectator( "Category_cat1 := var3<=0", &Category_cat1 );
      reader->AddSpectator( "Category_cat2 := (var3>0)&&(var4<0)", &Category_cat2 );
      reader->AddSpectator( "Category_cat3 := (var3>0)&&(var4>=0)", &Category_cat3 );
   }

   // Book the MVA methods

   TString dir    = "dataset/weights/";
   TString prefix = "TMVAClassification";

   // Book method(s)
   for (std::map<std::string,int>::iterator it = Use.begin(); it != Use.end(); it++) {
      if (it->second) {
         TString methodName = TString(it->first) + TString(" method");
         TString weightfile = dir + prefix + TString("_") + TString(it->first) + TString(".weights.xml");
         reader->BookMVA( methodName, weightfile );
      }
   }
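
   // A minimal sketch of booking a single method without the loop above; the method
   // tag and weight-file name are assumptions that must match what the training step
   // actually produced:
   //    reader->BookMVA( "BDT method", dir + prefix + "_BDT.weights.xml" );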

   // Book output histograms
   UInt_t nbin = 100;
   TH1F *histLk(0);
   TH1F *histLkD(0);
   TH1F *histLkPCA(0);
   TH1F *histLkKDE(0);
   TH1F *histLkMIX(0);
   TH1F *histPD(0);
   TH1F *histPDD(0);
   TH1F *histPDPCA(0);
   TH1F *histPDEFoam(0);
   TH1F *histPDEFoamErr(0);
   TH1F *histPDEFoamSig(0);
   TH1F *histKNN(0);
   TH1F *histHm(0);
   TH1F *histFi(0);
   TH1F *histFiG(0);
   TH1F *histFiB(0);
   TH1F *histLD(0);
   TH1F *histNn(0);
   TH1F *histNnbfgs(0);
   TH1F *histNnbnn(0);
   TH1F *histNnC(0);
   TH1F *histNnT(0);
   TH1F *histBdt(0);
   TH1F *histBdtG(0);
   TH1F *histBdtB(0);
   TH1F *histBdtD(0);
   TH1F *histBdtF(0);
   TH1F *histRf(0);
   TH1F *histSVMG(0);
   TH1F *histSVMP(0);
   TH1F *histSVML(0);
   TH1F *histFDAMT(0);
   TH1F *histFDAGA(0);
   TH1F *histCat(0);
   TH1F *histPBdt(0);
   TH1F *histDnnGpu(0);
   TH1F *histDnnCpu(0);

   if (Use["Likelihood"])    histLk     = new TH1F( "MVA_Likelihood",    "MVA_Likelihood",    nbin, -1, 1 );
   if (Use["LikelihoodD"])   histLkD    = new TH1F( "MVA_LikelihoodD",   "MVA_LikelihoodD",   nbin, -1, 0.9999 );
   if (Use["LikelihoodPCA"]) histLkPCA  = new TH1F( "MVA_LikelihoodPCA", "MVA_LikelihoodPCA", nbin, -1, 1 );
   if (Use["LikelihoodKDE"]) histLkKDE  = new TH1F( "MVA_LikelihoodKDE", "MVA_LikelihoodKDE", nbin, -0.00001, 0.99999 );
   if (Use["LikelihoodMIX"]) histLkMIX  = new TH1F( "MVA_LikelihoodMIX", "MVA_LikelihoodMIX", nbin, 0, 1 );
   if (Use["PDERS"])         histPD     = new TH1F( "MVA_PDERS",         "MVA_PDERS",         nbin, 0, 1 );
   if (Use["PDERSD"])        histPDD    = new TH1F( "MVA_PDERSD",        "MVA_PDERSD",        nbin, 0, 1 );
   if (Use["PDERSPCA"])      histPDPCA  = new TH1F( "MVA_PDERSPCA",      "MVA_PDERSPCA",      nbin, 0, 1 );
   if (Use["KNN"])           histKNN    = new TH1F( "MVA_KNN",           "MVA_KNN",           nbin, 0, 1 );
   if (Use["HMatrix"])       histHm     = new TH1F( "MVA_HMatrix",       "MVA_HMatrix",       nbin, -0.95, 1.55 );
   if (Use["Fisher"])        histFi     = new TH1F( "MVA_Fisher",        "MVA_Fisher",        nbin, -4, 4 );
   if (Use["FisherG"])       histFiG    = new TH1F( "MVA_FisherG",       "MVA_FisherG",       nbin, -1, 1 );
   if (Use["BoostedFisher"]) histFiB    = new TH1F( "MVA_BoostedFisher", "MVA_BoostedFisher", nbin, -2, 2 );
   if (Use["LD"])            histLD     = new TH1F( "MVA_LD",            "MVA_LD",            nbin, -2, 2 );
   if (Use["MLP"])           histNn     = new TH1F( "MVA_MLP",           "MVA_MLP",           nbin, -1.25, 1.5 );
   if (Use["MLPBFGS"])       histNnbfgs = new TH1F( "MVA_MLPBFGS",       "MVA_MLPBFGS",       nbin, -1.25, 1.5 );
   if (Use["MLPBNN"])        histNnbnn  = new TH1F( "MVA_MLPBNN",        "MVA_MLPBNN",        nbin, -1.25, 1.5 );
   if (Use["CFMlpANN"])      histNnC    = new TH1F( "MVA_CFMlpANN",      "MVA_CFMlpANN",      nbin, 0, 1 );
   if (Use["TMlpANN"])       histNnT    = new TH1F( "MVA_TMlpANN",       "MVA_TMlpANN",       nbin, -1.3, 1.3 );
   if (Use["DNN_GPU"])       histDnnGpu = new TH1F( "MVA_DNN_GPU",       "MVA_DNN_GPU",       nbin, -0.1, 1.1 );
   if (Use["DNN_CPU"])       histDnnCpu = new TH1F( "MVA_DNN_CPU",       "MVA_DNN_CPU",       nbin, -0.1, 1.1 );
   if (Use["BDT"])           histBdt    = new TH1F( "MVA_BDT",           "MVA_BDT",           nbin, -0.8, 0.8 );
   if (Use["BDTG"])          histBdtG   = new TH1F( "MVA_BDTG",          "MVA_BDTG",          nbin, -1.0, 1.0 );
   if (Use["BDTB"])          histBdtB   = new TH1F( "MVA_BDTB",          "MVA_BDTB",          nbin, -1.0, 1.0 );
   if (Use["BDTD"])          histBdtD   = new TH1F( "MVA_BDTD",          "MVA_BDTD",          nbin, -0.8, 0.8 );
   if (Use["BDTF"])          histBdtF   = new TH1F( "MVA_BDTF",          "MVA_BDTF",          nbin, -1.0, 1.0 );
   if (Use["RuleFit"])       histRf     = new TH1F( "MVA_RuleFit",       "MVA_RuleFit",       nbin, -2.0, 2.0 );
   if (Use["SVM_Gauss"])     histSVMG   = new TH1F( "MVA_SVM_Gauss",     "MVA_SVM_Gauss",     nbin, 0.0, 1.0 );
   if (Use["SVM_Poly"])      histSVMP   = new TH1F( "MVA_SVM_Poly",      "MVA_SVM_Poly",      nbin, 0.0, 1.0 );
   if (Use["SVM_Lin"])       histSVML   = new TH1F( "MVA_SVM_Lin",       "MVA_SVM_Lin",       nbin, 0.0, 1.0 );
   if (Use["FDA_MT"])        histFDAMT  = new TH1F( "MVA_FDA_MT",        "MVA_FDA_MT",        nbin, -2.0, 3.0 );
   if (Use["FDA_GA"])        histFDAGA  = new TH1F( "MVA_FDA_GA",        "MVA_FDA_GA",        nbin, -2.0, 3.0 );
   if (Use["Category"])      histCat    = new TH1F( "MVA_Category",      "MVA_Category",      nbin, -2., 2. );
   if (Use["Plugin"])        histPBdt   = new TH1F( "MVA_PBDT",          "MVA_BDT",           nbin, -0.8, 0.8 );

   // PDEFoam also returns a per-event error: book histograms for the error and for the significance
   if (Use["PDEFoam"]) {
      histPDEFoam    = new TH1F( "MVA_PDEFoam",    "MVA_PDEFoam",              nbin, 0, 1 );
      histPDEFoamErr = new TH1F( "MVA_PDEFoamErr", "MVA_PDEFoam error",        nbin, 0, 1 );
      histPDEFoamSig = new TH1F( "MVA_PDEFoamSig", "MVA_PDEFoam significance", nbin, 0, 10 );
   }

   // Book example histogram for probability (the other methods are done similarly)
   TH1F *probHistFi(0), *rarityHistFi(0);
   if (Use["Fisher"]) {
      probHistFi   = new TH1F( "MVA_Fisher_Proba",  "MVA_Fisher_Proba",  nbin, 0, 1 );
      rarityHistFi = new TH1F( "MVA_Fisher_Rarity", "MVA_Fisher_Rarity", nbin, 0, 1 );
   }

   // Prepare the input tree (this must be replaced by your data source)
   // - in this example, there is a toy tree with signal and one with background events
   // - later on, only the "signal" events are used for the test in this example
   //
   TFile *input(0);
   TString fname = "./tmva_class_example.root";
   if (!gSystem->AccessPathName( fname )) {
      input = TFile::Open( fname ); // check if file in local directory exists
   }
   else {
      TFile::SetCacheFileDir(".");
      input = TFile::Open("http://root.cern.ch/files/tmva_class_example.root", "CACHEREAD"); // if not: download from ROOT server
   }
   if (!input) {
      std::cout << "ERROR: could not open data file" << std::endl;
      exit(1);
   }
   std::cout << "--- TMVAClassificationApp : Using input file: " << input->GetName() << std::endl;

   // Event loop

   // Prepare the event tree
   // - Here the variable names have to correspond to your tree
   // - You can use the same variables as above which is slightly faster,
   //   but of course you can use different ones and copy the values inside the event loop
   //
   std::cout << "--- Select signal sample" << std::endl;
   TTree* theTree = (TTree*)input->Get("TreeS");
   Float_t userVar1, userVar2;
   theTree->SetBranchAddress( "var1", &userVar1 );
   theTree->SetBranchAddress( "var2", &userVar2 );
   theTree->SetBranchAddress( "var3", &var3 );
   theTree->SetBranchAddress( "var4", &var4 );

   // Efficiency calculator for cut method
   Int_t    nSelCutsGA = 0;
   Double_t effS       = 0.7;

   std::vector<Float_t> vecVar(4); // vector for EvaluateMVA tests

   std::cout << "--- Processing: " << theTree->GetEntries() << " events" << std::endl;
   TStopwatch sw;
   sw.Start();
   for (Long64_t ievt=0; ievt<theTree->GetEntries();ievt++) {

      if (ievt%1000 == 0) std::cout << "--- ... Processing event: " << ievt << std::endl;

      theTree->GetEntry(ievt);

      var1 = userVar1 + userVar2;
      var2 = userVar1 - userVar2;
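
      // Sketch of an alternative evaluation path (not used by the original macro):
      // TMVA::Reader also offers an EvaluateMVA overload taking a std::vector<Float_t>
      // ordered as the variables were declared above; the otherwise unused vecVar from
      // before the loop could be filled and evaluated like this (method tag is an assumption):
      //    vecVar[0] = var1; vecVar[1] = var2; vecVar[2] = var3; vecVar[3] = var4;
      //    Double_t mvaValue = reader->EvaluateMVA( vecVar, "BDT method" );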

      // Return the MVA outputs and fill into histograms

      if (Use["CutsGA"]) {
         // Cuts is a special case: give the desired signal efficiency
         Bool_t passed = reader->EvaluateMVA( "CutsGA method", effS );
         if (passed) nSelCutsGA++;
      }

      if (Use["Likelihood"   ])   histLk     ->Fill( reader->EvaluateMVA( "Likelihood method"    ) );
      if (Use["LikelihoodD"  ])   histLkD    ->Fill( reader->EvaluateMVA( "LikelihoodD method"   ) );
      if (Use["LikelihoodPCA"])   histLkPCA  ->Fill( reader->EvaluateMVA( "LikelihoodPCA method" ) );
      if (Use["LikelihoodKDE"])   histLkKDE  ->Fill( reader->EvaluateMVA( "LikelihoodKDE method" ) );
      if (Use["LikelihoodMIX"])   histLkMIX  ->Fill( reader->EvaluateMVA( "LikelihoodMIX method" ) );
      if (Use["PDERS"        ])   histPD     ->Fill( reader->EvaluateMVA( "PDERS method"         ) );
      if (Use["PDERSD"       ])   histPDD    ->Fill( reader->EvaluateMVA( "PDERSD method"        ) );
      if (Use["PDERSPCA"     ])   histPDPCA  ->Fill( reader->EvaluateMVA( "PDERSPCA method"      ) );
      if (Use["KNN"          ])   histKNN    ->Fill( reader->EvaluateMVA( "KNN method"           ) );
      if (Use["HMatrix"      ])   histHm     ->Fill( reader->EvaluateMVA( "HMatrix method"       ) );
      if (Use["Fisher"       ])   histFi     ->Fill( reader->EvaluateMVA( "Fisher method"        ) );
      if (Use["FisherG"      ])   histFiG    ->Fill( reader->EvaluateMVA( "FisherG method"       ) );
      if (Use["BoostedFisher"])   histFiB    ->Fill( reader->EvaluateMVA( "BoostedFisher method" ) );
      if (Use["LD"           ])   histLD     ->Fill( reader->EvaluateMVA( "LD method"            ) );
      if (Use["MLP"          ])   histNn     ->Fill( reader->EvaluateMVA( "MLP method"           ) );
      if (Use["MLPBFGS"      ])   histNnbfgs ->Fill( reader->EvaluateMVA( "MLPBFGS method"       ) );
      if (Use["MLPBNN"       ])   histNnbnn  ->Fill( reader->EvaluateMVA( "MLPBNN method"        ) );
      if (Use["CFMlpANN"     ])   histNnC    ->Fill( reader->EvaluateMVA( "CFMlpANN method"      ) );
      if (Use["TMlpANN"      ])   histNnT    ->Fill( reader->EvaluateMVA( "TMlpANN method"       ) );
      if (Use["DNN_GPU"      ])   histDnnGpu ->Fill( reader->EvaluateMVA( "DNN_GPU method"       ) );
      if (Use["DNN_CPU"      ])   histDnnCpu ->Fill( reader->EvaluateMVA( "DNN_CPU method"       ) );
      if (Use["BDT"          ])   histBdt    ->Fill( reader->EvaluateMVA( "BDT method"           ) );
      if (Use["BDTG"         ])   histBdtG   ->Fill( reader->EvaluateMVA( "BDTG method"          ) );
      if (Use["BDTB"         ])   histBdtB   ->Fill( reader->EvaluateMVA( "BDTB method"          ) );
      if (Use["BDTD"         ])   histBdtD   ->Fill( reader->EvaluateMVA( "BDTD method"          ) );
      if (Use["BDTF"         ])   histBdtF   ->Fill( reader->EvaluateMVA( "BDTF method"          ) );
      if (Use["RuleFit"      ])   histRf     ->Fill( reader->EvaluateMVA( "RuleFit method"       ) );
      if (Use["SVM_Gauss"    ])   histSVMG   ->Fill( reader->EvaluateMVA( "SVM_Gauss method"     ) );
      if (Use["SVM_Poly"     ])   histSVMP   ->Fill( reader->EvaluateMVA( "SVM_Poly method"      ) );
      if (Use["SVM_Lin"      ])   histSVML   ->Fill( reader->EvaluateMVA( "SVM_Lin method"       ) );
      if (Use["FDA_MT"       ])   histFDAMT  ->Fill( reader->EvaluateMVA( "FDA_MT method"        ) );
      if (Use["FDA_GA"       ])   histFDAGA  ->Fill( reader->EvaluateMVA( "FDA_GA method"        ) );
      if (Use["Category"     ])   histCat    ->Fill( reader->EvaluateMVA( "Category method"      ) );
      if (Use["Plugin"       ])   histPBdt   ->Fill( reader->EvaluateMVA( "P_BDT method"         ) );

      // Retrieve also per-event error
      if (Use["PDEFoam"]) {
         Double_t val = reader->EvaluateMVA( "PDEFoam method" );
         Double_t err = reader->GetMVAError();
         histPDEFoam   ->Fill( val );
         histPDEFoamErr->Fill( err );
         if (err>1.e-50) histPDEFoamSig->Fill( val/err );
      }

      // Retrieve probability instead of MVA output
      if (Use["Fisher"]) {
         probHistFi  ->Fill( reader->GetProba ( "Fisher method" ) );
         rarityHistFi->Fill( reader->GetRarity( "Fisher method" ) );
      }
   }

   // Get elapsed time
   sw.Stop();
   std::cout << "--- End of event loop: "; sw.Print();

   // Get efficiency for cuts classifier
   if (Use["CutsGA"]) std::cout << "--- Efficiency for CutsGA method: " << double(nSelCutsGA)/theTree->GetEntries()
                                << " (for a required signal efficiency of " << effS << ")" << std::endl;

   if (Use["CutsGA"]) {

      // test: retrieve cuts for particular signal efficiency
      // CINT ignores dynamic_casts so we have to use a cuts-specific Reader function to access the pointer
      TMVA::MethodCuts* mcuts = reader->FindCutsMVA( "CutsGA method" ) ;

      if (mcuts) {
         std::vector<Double_t> cutsMin;
         std::vector<Double_t> cutsMax;
         mcuts->GetCuts( 0.7, cutsMin, cutsMax );
         std::cout << "--- -------------------------------------------------------------" << std::endl;
         std::cout << "--- Retrieve cut values for signal efficiency of 0.7 from Reader" << std::endl;
         for (UInt_t ivar=0; ivar<cutsMin.size(); ivar++) {
            std::cout << "... Cut: "
                      << cutsMin[ivar]
                      << " < \""
                      << mcuts->GetInputVar(ivar)
                      << "\" <= "
                      << cutsMax[ivar] << std::endl;
         }
         std::cout << "--- -------------------------------------------------------------" << std::endl;
      }
   }

   // Write histograms

   TFile *target = new TFile( "TMVApp.root","RECREATE" );
   if (Use["Likelihood"   ])   histLk     ->Write();
   if (Use["LikelihoodD"  ])   histLkD    ->Write();
   if (Use["LikelihoodPCA"])   histLkPCA  ->Write();
   if (Use["LikelihoodKDE"])   histLkKDE  ->Write();
   if (Use["LikelihoodMIX"])   histLkMIX  ->Write();
   if (Use["PDERS"        ])   histPD     ->Write();
   if (Use["PDERSD"       ])   histPDD    ->Write();
   if (Use["PDERSPCA"     ])   histPDPCA  ->Write();
   if (Use["KNN"          ])   histKNN    ->Write();
   if (Use["HMatrix"      ])   histHm     ->Write();
   if (Use["Fisher"       ])   histFi     ->Write();
   if (Use["FisherG"      ])   histFiG    ->Write();
   if (Use["BoostedFisher"])   histFiB    ->Write();
   if (Use["LD"           ])   histLD     ->Write();
   if (Use["MLP"          ])   histNn     ->Write();
   if (Use["MLPBFGS"      ])   histNnbfgs ->Write();
   if (Use["MLPBNN"       ])   histNnbnn  ->Write();
   if (Use["CFMlpANN"     ])   histNnC    ->Write();
   if (Use["TMlpANN"      ])   histNnT    ->Write();
   if (Use["DNN_GPU"      ])   histDnnGpu ->Write();
   if (Use["DNN_CPU"      ])   histDnnCpu ->Write();
   if (Use["BDT"          ])   histBdt    ->Write();
   if (Use["BDTG"         ])   histBdtG   ->Write();
   if (Use["BDTB"         ])   histBdtB   ->Write();
   if (Use["BDTD"         ])   histBdtD   ->Write();
   if (Use["BDTF"         ])   histBdtF   ->Write();
   if (Use["RuleFit"      ])   histRf     ->Write();
   if (Use["SVM_Gauss"    ])   histSVMG   ->Write();
   if (Use["SVM_Poly"     ])   histSVMP   ->Write();
   if (Use["SVM_Lin"      ])   histSVML   ->Write();
   if (Use["FDA_MT"       ])   histFDAMT  ->Write();
   if (Use["FDA_GA"       ])   histFDAGA  ->Write();
   if (Use["Category"     ])   histCat    ->Write();
   if (Use["Plugin"       ])   histPBdt   ->Write();

   // Write also error and significance histos
   if (Use["PDEFoam"]) { histPDEFoam->Write(); histPDEFoamErr->Write(); histPDEFoamSig->Write(); }

   // Write also probability hists
   if (Use["Fisher"]) { if (probHistFi != 0) probHistFi->Write(); if (rarityHistFi != 0) rarityHistFi->Write(); }
   target->Close();

   std::cout << "--- Created root file: \"TMVApp.root\" containing the MVA output histograms" << std::endl;

   delete reader;

   std::cout << "==> TMVAClassificationApplication is done!" << std::endl << std::endl;
}

int main( int argc, char** argv )
{
   TString methodList;
   for (int i=1; i<argc; i++) {
      TString regMethod(argv[i]);
      if(regMethod=="-b" || regMethod=="--batch") continue;
      if (!methodList.IsNull()) methodList += TString(",");
      methodList += regMethod;
   }
   TMVAClassificationApplication(methodList);
   return 0;
}
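
// Usage sketch for the standalone executable path (the build command is an assumption
// about a typical setup, not part of the original tutorial): main() forwards any
// non-batch command-line arguments as the method list, so after building with
// something like
//    g++ TMVAClassificationApplication.C -o TMVAClassificationApplication `root-config --cflags --libs` -lTMVA
// it could be invoked as
//    ./TMVAClassificationApplication BDT MLPBNN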