MethodPyAdaBoost.cxx
// @(#)root/tmva/pymva $Id$
// Authors: Omar Zapata, Lorenzo Moneta, Sergei Gleyzer 2015

/**********************************************************************************
 * Project: TMVA - a Root-integrated toolkit for multivariate data analysis      *
 * Package: TMVA                                                                  *
 * Class  : MethodPyAdaBoost                                                      *
 * Web    : http://oproject.org                                                   *
 *                                                                                *
 * Description:                                                                   *
 *      AdaBoost Classifier from Scikit learn                                     *
 *                                                                                *
 *                                                                                *
 * Redistribution and use in source and binary forms, with or without            *
 * modification, are permitted according to the terms listed in LICENSE          *
 * (http://tmva.sourceforge.net/LICENSE)                                         *
 *                                                                                *
 **********************************************************************************/

#include <Python.h> // Needs to be included first to avoid redefinition of _POSIX_C_SOURCE
#include "TMVA/MethodPyAdaBoost.h"

#define NPY_NO_DEPRECATED_API NPY_1_7_API_VERSION
#include <numpy/arrayobject.h>

#include "TMVA/Config.h"
#include "TMVA/Configurable.h"
#include "TMVA/ClassifierFactory.h"
#include "TMVA/DataSet.h"
#include "TMVA/Event.h"
#include "TMVA/IMethod.h"
#include "TMVA/MsgLogger.h"
#include "TMVA/PDF.h"
#include "TMVA/Ranking.h"
#include "TMVA/Tools.h"
#include "TMVA/Types.h"
#include "TMVA/Timer.h"
#include "TMVA/VariableTransformBase.h"
#include "TMVA/Results.h"

#include "TMatrix.h"
using namespace TMVA;

namespace TMVA {
namespace Internal {
class PyGILRAII {
   PyGILState_STATE m_GILState;

public:
   PyGILRAII() : m_GILState(PyGILState_Ensure()) {}
   ~PyGILRAII() { PyGILState_Release(m_GILState); }
};
} // namespace Internal
} // namespace TMVA

REGISTER_METHOD(PyAdaBoost)

ClassImp(MethodPyAdaBoost);

//_______________________________________________________________________
MethodPyAdaBoost::MethodPyAdaBoost(const TString &jobName,
                                   const TString &methodTitle,
                                   DataSetInfo &dsi,
                                   const TString &theOption) :
   PyMethodBase(jobName, Types::kPyAdaBoost, methodTitle, dsi, theOption),
   fBaseEstimator("None"),
   fNestimators(50),
   fLearningRate(1.0),
   fAlgorithm("SAMME.R"),
   fRandomState("None")
{
}

//_______________________________________________________________________
MethodPyAdaBoost::MethodPyAdaBoost(DataSetInfo &theData,
                                   const TString &theWeightFile) :
   PyMethodBase(Types::kPyAdaBoost, theData, theWeightFile),
   fBaseEstimator("None"),
   fNestimators(50),
   fLearningRate(1.0),
   fAlgorithm("SAMME.R"),
   fRandomState("None")
{
}

//_______________________________________________________________________
MethodPyAdaBoost::~MethodPyAdaBoost(void)
{
}

//_______________________________________________________________________
Bool_t MethodPyAdaBoost::HasAnalysisType(Types::EAnalysisType type, UInt_t numberClasses, UInt_t numberTargets)
{
   if (type == Types::kClassification && numberClasses == 2) return kTRUE;
   if (type == Types::kMulticlass && numberClasses >= 2) return kTRUE;
   return kFALSE;
}

//_______________________________________________________________________
void MethodPyAdaBoost::DeclareOptions()
{
   MethodBase::DeclareCompatibilityOptions();

   DeclareOptionRef(fBaseEstimator, "BaseEstimator", "object, optional (default=DecisionTreeClassifier)\
      The base estimator from which the boosted ensemble is built.\
      Support for sample weighting is required, as well as proper `classes_`\
      and `n_classes_` attributes.");

   DeclareOptionRef(fNestimators, "NEstimators", "integer, optional (default=50)\
      The maximum number of estimators at which boosting is terminated.\
      In case of perfect fit, the learning procedure is stopped early.");

   DeclareOptionRef(fLearningRate, "LearningRate", "float, optional (default=1.)\
      Learning rate shrinks the contribution of each classifier by\
      ``learning_rate``. There is a trade-off between ``learning_rate`` and\
      ``n_estimators``.");

   DeclareOptionRef(fAlgorithm, "Algorithm", "{'SAMME', 'SAMME.R'}, optional (default='SAMME.R')\
      If 'SAMME.R' then use the SAMME.R real boosting algorithm.\
      ``base_estimator`` must support calculation of class probabilities.\
      If 'SAMME' then use the SAMME discrete boosting algorithm.\
      The SAMME.R algorithm typically converges faster than SAMME,\
      achieving a lower test error with fewer boosting iterations.");

   DeclareOptionRef(fRandomState, "RandomState", "int, RandomState instance or None, optional (default=None)\
      If int, random_state is the seed used by the random number generator;\
      If RandomState instance, random_state is the random number generator;\
      If None, the random number generator is the RandomState instance used\
      by `np.random`.");

   DeclareOptionRef(fFilenameClassifier, "FilenameClassifier",
                    "Store trained classifier in this file");
}

//_______________________________________________________________________
// Check options and load them to local python namespace
void MethodPyAdaBoost::ProcessOptions()
{
   pBaseEstimator = Eval(fBaseEstimator);
   if (!pBaseEstimator) {
      Log() << kFATAL << Form("BaseEstimator = %s ... that does not work!", fBaseEstimator.Data())
            << " The options are Object or None." << Endl;
   }
   PyDict_SetItemString(fLocalNS, "baseEstimator", pBaseEstimator);

   if (fNestimators <= 0) {
      Log() << kFATAL << "NEstimators <= 0 ... that does not work!" << Endl;
   }
   pNestimators = Eval(Form("%i", fNestimators));
   PyDict_SetItemString(fLocalNS, "nEstimators", pNestimators);

   if (fLearningRate <= 0) {
      Log() << kFATAL << "LearningRate <= 0 ... that does not work!" << Endl;
   }
   pLearningRate = Eval(Form("%f", fLearningRate));
   PyDict_SetItemString(fLocalNS, "learningRate", pLearningRate);

   if (fAlgorithm != "SAMME" && fAlgorithm != "SAMME.R") {
      Log() << kFATAL << Form("Algorithm = %s ... that does not work!", fAlgorithm.Data())
            << " The options are SAMME or SAMME.R." << Endl;
   }
   pAlgorithm = Eval(Form("'%s'", fAlgorithm.Data()));
   PyDict_SetItemString(fLocalNS, "algorithm", pAlgorithm);

   pRandomState = Eval(fRandomState);
   if (!pRandomState) {
      Log() << kFATAL << Form("RandomState = %s ... that does not work!", fRandomState.Data())
            << " If int, random_state is the seed used by the random number generator;"
            << " if RandomState instance, random_state is the random number generator;"
            << " if None, the random number generator is the RandomState instance used by `np.random`." << Endl;
   }
   PyDict_SetItemString(fLocalNS, "randomState", pRandomState);

   // If no filename is given, set default
   if (fFilenameClassifier.IsNull()) {
      fFilenameClassifier = GetWeightFileDir() + "/PyAdaBoostModel_" + GetName() + ".PyData";
   }
}

//_______________________________________________________________________
void MethodPyAdaBoost::Init()
{
   TMVA::Internal::PyGILRAII raii;
   _import_array(); // required to use numpy arrays

   // Check options and load them to local python namespace
   ProcessOptions();

   // Import module for AdaBoost classifier
   PyRunString("import sklearn.ensemble");

   // Get data properties
   fNvars = GetNVariables();
   fNoutputs = DataInfo().GetNClasses();
}

//_______________________________________________________________________
void MethodPyAdaBoost::Train()
{
   // Load training data (data, classes, weights) to python arrays
   int fNrowsTraining = Data()->GetNTrainingEvents(); // every row is an event, a class type and a weight
   npy_intp dimsData[2];
   dimsData[0] = fNrowsTraining;
   dimsData[1] = fNvars;
   PyArrayObject *fTrainData = (PyArrayObject *)PyArray_SimpleNew(2, dimsData, NPY_FLOAT);
   PyDict_SetItemString(fLocalNS, "trainData", (PyObject *)fTrainData);
   float *TrainData = (float *)(PyArray_DATA(fTrainData));

   npy_intp dimsClasses = (npy_intp) fNrowsTraining;
   PyArrayObject *fTrainDataClasses = (PyArrayObject *)PyArray_SimpleNew(1, &dimsClasses, NPY_FLOAT);
   PyDict_SetItemString(fLocalNS, "trainDataClasses", (PyObject *)fTrainDataClasses);
   float *TrainDataClasses = (float *)(PyArray_DATA(fTrainDataClasses));

   PyArrayObject *fTrainDataWeights = (PyArrayObject *)PyArray_SimpleNew(1, &dimsClasses, NPY_FLOAT);
   PyDict_SetItemString(fLocalNS, "trainDataWeights", (PyObject *)fTrainDataWeights);
   float *TrainDataWeights = (float *)(PyArray_DATA(fTrainDataWeights));

   for (int i = 0; i < fNrowsTraining; i++) {
      // Fill training data matrix
      const TMVA::Event *e = Data()->GetTrainingEvent(i);
      for (UInt_t j = 0; j < fNvars; j++) {
         TrainData[j + i * fNvars] = e->GetValue(j);
      }

      // Fill target classes
      TrainDataClasses[i] = e->GetClass();

      // Get event weight
      TrainDataWeights[i] = e->GetWeight();
   }

   // Create classifier object
   PyRunString("classifier = sklearn.ensemble.AdaBoostClassifier(base_estimator=baseEstimator, n_estimators=nEstimators, learning_rate=learningRate, algorithm=algorithm, random_state=randomState)",
               "Failed to setup classifier");

   // Fit classifier
   // NOTE: We dump the output to a variable so that the call does not pollute stdout
   PyRunString("dump = classifier.fit(trainData, trainDataClasses, trainDataWeights)", "Failed to train classifier");

   // Store classifier
   fClassifier = PyDict_GetItemString(fLocalNS, "classifier");
   if (fClassifier == 0) {
      Log() << kFATAL << "Can't create classifier object from AdaBoostClassifier" << Endl;
      Log() << Endl;
   }

   if (IsModelPersistence()) {
      Log() << Endl;
      Log() << gTools().Color("bold") << "Saving state file: " << gTools().Color("reset") << fFilenameClassifier << Endl;
      Log() << Endl;
      Serialize(fFilenameClassifier, fClassifier);
   }
}

//_______________________________________________________________________
void MethodPyAdaBoost::TestClassification()
{
   MethodBase::TestClassification();
}

//_______________________________________________________________________
std::vector<Double_t> MethodPyAdaBoost::GetMvaValues(Long64_t firstEvt, Long64_t lastEvt, Bool_t logProgress)
{
   // Load model if not already done
   if (fClassifier == 0) ReadModelFromFile();

   // Determine number of events
   Long64_t nEvents = Data()->GetNEvents();
   if (firstEvt > lastEvt || lastEvt > nEvents) lastEvt = nEvents;
   if (firstEvt < 0) firstEvt = 0;
   nEvents = lastEvt - firstEvt;

   // use timer
   Timer timer(nEvents, GetName(), kTRUE);

   if (logProgress)
      Log() << kHEADER << Form("[%s] : ", DataInfo().GetName())
            << "Evaluation of " << GetMethodName() << " on "
            << (Data()->GetCurrentType() == Types::kTraining ? "training" : "testing")
            << " sample (" << nEvents << " events)" << Endl;

   // Get data
   npy_intp dims[2];
   dims[0] = nEvents;
   dims[1] = fNvars;
   PyArrayObject *pEvent = (PyArrayObject *)PyArray_SimpleNew(2, dims, NPY_FLOAT);
   float *pValue = (float *)(PyArray_DATA(pEvent));

   for (Int_t ievt = 0; ievt < nEvents; ievt++) {
      Data()->SetCurrentEvent(ievt);
      const TMVA::Event *e = Data()->GetEvent();
      for (UInt_t i = 0; i < fNvars; i++) {
         pValue[ievt * fNvars + i] = e->GetValue(i);
      }
   }

   // Get prediction from classifier
   PyArrayObject *result = (PyArrayObject *)PyObject_CallMethod(fClassifier, const_cast<char *>("predict_proba"), const_cast<char *>("(O)"), pEvent);
   double *proba = (double *)(PyArray_DATA(result));

   // Return signal probabilities
   if (Long64_t(mvaValues.size()) != nEvents) mvaValues.resize(nEvents);
   for (int i = 0; i < nEvents; ++i) {
      mvaValues[i] = proba[fNoutputs * i + TMVA::Types::kSignal];
   }

   Py_DECREF(pEvent);
   Py_DECREF(result);

   if (logProgress) {
      Log() << kINFO
            << "Elapsed time for evaluation of " << nEvents << " events: "
            << timer.GetElapsedTime() << " " << Endl;
   }

   return mvaValues;
}

//_______________________________________________________________________
Double_t MethodPyAdaBoost::GetMvaValue(Double_t *errLower, Double_t *errUpper)
{
   // cannot determine error
   NoErrorCalc(errLower, errUpper);

   // Load model if not already done
   if (fClassifier == 0) ReadModelFromFile();

   // Get current event and load to python array
   const TMVA::Event *e = Data()->GetEvent();
   npy_intp dims[2];
   dims[0] = 1;
   dims[1] = fNvars;
   PyArrayObject *pEvent = (PyArrayObject *)PyArray_SimpleNew(2, dims, NPY_FLOAT);
   float *pValue = (float *)(PyArray_DATA(pEvent));
   for (UInt_t i = 0; i < fNvars; i++) pValue[i] = e->GetValue(i);

   // Get prediction from classifier
   PyArrayObject *result = (PyArrayObject *)PyObject_CallMethod(fClassifier, const_cast<char *>("predict_proba"), const_cast<char *>("(O)"), pEvent);
   double *proba = (double *)(PyArray_DATA(result));

   // Return MVA value
   Double_t mvaValue;
   mvaValue = proba[TMVA::Types::kSignal]; // getting signal probability

   Py_DECREF(result);
   Py_DECREF(pEvent);

   return mvaValue;
}

//_______________________________________________________________________
std::vector<Float_t> &MethodPyAdaBoost::GetMulticlassValues()
{
   // Load model if not already done
   if (fClassifier == 0) ReadModelFromFile();

   // Get current event and load to python array
   const TMVA::Event *e = Data()->GetEvent();
   npy_intp dims[2];
   dims[0] = 1;
   dims[1] = fNvars;
   PyArrayObject *pEvent = (PyArrayObject *)PyArray_SimpleNew(2, dims, NPY_FLOAT);
   float *pValue = (float *)(PyArray_DATA(pEvent));
   for (UInt_t i = 0; i < fNvars; i++) pValue[i] = e->GetValue(i);

   // Get prediction from classifier
   PyArrayObject *result = (PyArrayObject *)PyObject_CallMethod(fClassifier, const_cast<char *>("predict_proba"), const_cast<char *>("(O)"), pEvent);
   double *proba = (double *)(PyArray_DATA(result));

   // Return MVA values
   if (UInt_t(classValues.size()) != fNoutputs) classValues.resize(fNoutputs);
   for (UInt_t i = 0; i < fNoutputs; i++) classValues[i] = proba[i];

   return classValues;
}

//_______________________________________________________________________
void MethodPyAdaBoost::ReadModelFromFile()
{
   if (!PyIsInitialized()) {
      PyInitialize();
   }

   Log() << Endl;
   Log() << gTools().Color("bold") << "Loading state file: " << gTools().Color("reset") << fFilenameClassifier << Endl;
   Log() << Endl;

   // Load classifier from file
   Int_t err = UnSerialize(fFilenameClassifier, &fClassifier);
   if (err != 0) {
      Log() << kFATAL << Form("Failed to load classifier from file (error code: %i): %s", err, fFilenameClassifier.Data()) << Endl;
   }

   // Book classifier object in python dict
   PyDict_SetItemString(fLocalNS, "classifier", fClassifier);

   // Load data properties
   // NOTE: This has to be repeated here for the reader application
   fNvars = GetNVariables();
   fNoutputs = DataInfo().GetNClasses();
}

//_______________________________________________________________________
const Ranking *MethodPyAdaBoost::CreateRanking()
{
   // Get feature importance from classifier as an array with length equal
   // to the number of variables; a higher value signals a higher importance
   PyArrayObject *pRanking = (PyArrayObject *) PyObject_GetAttrString(fClassifier, "feature_importances_");
   // The python object is null if the base estimator does not support
   // variable ranking. Then, return NULL, which disables ranking.
   if (pRanking == 0) return NULL;

   // Fill ranking object and return it
   fRanking = new Ranking(GetName(), "Variable Importance");
   Double_t *rankingData = (Double_t *) PyArray_DATA(pRanking);
   for (UInt_t iVar = 0; iVar < fNvars; iVar++) {
      fRanking->AddRank(Rank(GetInputLabel(iVar), rankingData[iVar]));
   }

   Py_DECREF(pRanking);

   return fRanking;
}

//_______________________________________________________________________
void MethodPyAdaBoost::GetHelpMessage() const
{
   // typical length of text line:
   //       "|--------------------------------------------------------------|"
   Log() << "An AdaBoost classifier is a meta-estimator that begins by fitting" << Endl;
   Log() << "a classifier on the original dataset and then fits additional copies" << Endl;
   Log() << "of the classifier on the same dataset but where the weights of incorrectly" << Endl;
   Log() << "classified instances are adjusted such that subsequent classifiers focus" << Endl;
   Log() << "more on difficult cases." << Endl;
   Log() << Endl;
   Log() << "Check out the scikit-learn documentation for more information." << Endl;
}
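
For context, here is a minimal sketch (not part of this file) of how this method might be booked from user code through the TMVA Factory, with option strings mapping onto the DeclareOptionRef calls above. It assumes a ROOT build with PyMVA enabled and a Python environment providing scikit-learn; the input file, tree names, variable names, and option values are hypothetical and only illustrative.

// Sketch: booking PyAdaBoost via the TMVA Factory (hypothetical file/tree/variable names).
#include "TFile.h"
#include "TTree.h"
#include "TMVA/Factory.h"
#include "TMVA/DataLoader.h"
#include "TMVA/Types.h"

void BookPyAdaBoostExample()
{
   TFile *input  = TFile::Open("input.root");                       // hypothetical input file
   TFile *output = TFile::Open("TMVA_PyAdaBoost.root", "RECREATE");

   TMVA::Factory factory("TMVAClassification", output, "!V:!Silent:AnalysisType=Classification");

   TMVA::DataLoader loader("dataset");
   loader.AddVariable("var1", 'F');                                 // hypothetical variables
   loader.AddVariable("var2", 'F');
   loader.AddSignalTree((TTree *)input->Get("TreeS"), 1.0);         // hypothetical tree names
   loader.AddBackgroundTree((TTree *)input->Get("TreeB"), 1.0);
   loader.PrepareTrainingAndTestTree("", "SplitMode=Random:NormMode=NumEvents:!V");

   // Option string uses the options declared in MethodPyAdaBoost::DeclareOptions()
   // (NEstimators, LearningRate, Algorithm, RandomState, ...).
   factory.BookMethod(&loader, TMVA::Types::kPyAdaBoost, "PyAdaBoost",
                      "!H:!V:VarTransform=None:NEstimators=100:LearningRate=0.5:Algorithm=SAMME.R");

   factory.TrainAllMethods();
   factory.TestAllMethods();
   factory.EvaluateAllMethods();

   output->Close();
}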