MethodPyGTB.h
// @(#)root/tmva/pymva $Id$
// Authors: Omar Zapata, Lorenzo Moneta, Sergei Gleyzer 2015

/**********************************************************************************
 * Project: TMVA - a ROOT-integrated toolkit for multivariate data analysis
 * Package: TMVA
 * Class  : MethodPyGTB
 * Web    : http://oproject.org
 *
 * Description:
 *      scikit-learn GradientBoostingClassifier method, based on Python
 *
 **********************************************************************************/

#ifndef ROOT_TMVA_MethodPyGTB
#define ROOT_TMVA_MethodPyGTB

//////////////////////////////////////////////////////////////////////////
//                                                                      //
// MethodPyGTB                                                          //
//                                                                      //
//////////////////////////////////////////////////////////////////////////

#include "TMVA/PyMethodBase.h"

namespace TMVA {

   class Factory;
   class Reader;
   class DataSetManager;
   class Types;

   class MethodPyGTB : public PyMethodBase {

   public:

      // constructor for training
      MethodPyGTB(const TString &jobName,
                  const TString &methodTitle,
                  DataSetInfo &theData,
                  const TString &theOption = "");

      // constructor for reading a trained method from a weight file
      MethodPyGTB(DataSetInfo &theData,
                  const TString &theWeightFile);

      ~MethodPyGTB(void);

      void Train();
      void Init();
      void DeclareOptions();
      void ProcessOptions();

      const Ranking *CreateRanking();

      Bool_t HasAnalysisType(Types::EAnalysisType type, UInt_t numberClasses, UInt_t numberTargets);

      virtual void TestClassification();

      Double_t GetMvaValue(Double_t *errLower = 0, Double_t *errUpper = 0);
      std::vector<Double_t> GetMvaValues(Long64_t firstEvt = 0, Long64_t lastEvt = -1, Bool_t logProgress = false);
      std::vector<Float_t> &GetMulticlassValues();

      virtual void ReadModelFromFile();

      // the actual "weights"
      virtual void AddWeightsXMLTo(void * /* parent */) const {}   // = 0;
      virtual void ReadWeightsFromXML(void * /* wghtnode */) {}    // = 0;
      virtual void ReadWeightsFromStream(std::istream &) {}        // = 0; backward compatibility

   private:
      DataSetManager *fDataSetManager;
      friend class Factory;
      friend class Reader;

   protected:
      std::vector<Double_t> mvaValues;
      std::vector<Float_t> classValues;

      UInt_t fNvars;               // number of variables
      UInt_t fNoutputs;            // number of outputs
      TString fFilenameClassifier; // path to the serialized classifier (by default in the `weights` folder)

      // GTB options

      PyObject *pLoss;
      TString fLoss; // {'deviance', 'exponential'}, optional (default='deviance')
      // Loss function to be optimized. 'deviance' refers to
      // deviance (= logistic regression) for classification
      // with probabilistic outputs. For loss 'exponential', gradient
      // boosting recovers the AdaBoost algorithm.

      PyObject *pLearningRate;
      Double_t fLearningRate; // float, optional (default=0.1)
      // The learning rate shrinks the contribution of each tree by `learning_rate`.
      // There is a trade-off between learning_rate and n_estimators.

      PyObject *pNestimators;
      Int_t fNestimators; // integer, optional (default=10)
      // The number of boosting stages (trees) to perform.

      PyObject *pSubsample;
      Double_t fSubsample; // float, optional (default=1.0)
      // The fraction of samples to be used for fitting the individual base
      // learners. If smaller than 1.0, this results in Stochastic Gradient
      // Boosting. `subsample` interacts with the parameter `n_estimators`.
      // Choosing `subsample < 1.0` leads to a reduction of variance
      // and an increase in bias.

      PyObject *pMinSamplesSplit;
      Int_t fMinSamplesSplit; // integer, optional (default=2)
      // The minimum number of samples required to split an internal node.

      PyObject *pMinSamplesLeaf;
      Int_t fMinSamplesLeaf; // integer, optional (default=1)
      // The minimum number of samples required to be at a leaf node.

      PyObject *pMinWeightFractionLeaf;
      Double_t fMinWeightFractionLeaf; // float, optional (default=0.)
      // The minimum weighted fraction of the input samples required to be at a leaf node.

      PyObject *pMaxDepth;
      Int_t fMaxDepth; // integer, optional (default=3)
      // Maximum depth of the individual regression estimators. The maximum
      // depth limits the number of nodes in the tree. Tune this parameter
      // for best performance; the best value depends on the interaction
      // of the input variables.
      // Ignored if ``max_leaf_nodes`` is not None.

      PyObject *pInit;
      TString fInit; // BaseEstimator, None, optional (default=None)
      // An estimator object that is used to compute the initial
      // predictions. ``init`` has to provide ``fit`` and ``predict``.
      // If None, it uses ``loss.init_estimator``.

      PyObject *pRandomState;
      TString fRandomState; // int, RandomState instance or None, optional (default=None)
      // If int, random_state is the seed used by the random number generator;
      // if RandomState instance, random_state is the random number generator;
      // if None, the random number generator is the RandomState instance used
      // by `np.random`.

      PyObject *pMaxFeatures;
      TString fMaxFeatures; // int, float, string or None, optional (default="auto")
      // The number of features to consider when looking for the best split:
      // - If int, then consider `max_features` features at each split.
      // - If float, then `max_features` is a percentage and
      //   `int(max_features * n_features)` features are considered at each split.
      // - If "auto", then `max_features=sqrt(n_features)`.
      // - If "sqrt", then `max_features=sqrt(n_features)`.
      // - If "log2", then `max_features=log2(n_features)`.
      // - If None, then `max_features=n_features`.
      // Note: the search for a split does not stop until at least one
      // valid partition of the node samples is found, even if it requires
      // effectively inspecting more than ``max_features`` features.
      // Note: this parameter is tree-specific.

      PyObject *pVerbose;
      Int_t fVerbose; // Controls the verbosity of the tree-building process.

      PyObject *pMaxLeafNodes;
      TString fMaxLeafNodes; // int or None, optional (default=None)
      // Grow trees with ``max_leaf_nodes`` in best-first fashion.
      // Best nodes are defined by the relative reduction in impurity.
      // If None, the number of leaf nodes is unlimited.
      // If not None, ``max_depth`` will be ignored.

      PyObject *pWarmStart;
      Bool_t fWarmStart; // bool, optional (default=False)
      // When set to ``True``, reuse the solution of the previous call to fit
      // and add more estimators to the ensemble; otherwise, just fit a whole
      // new forest.

      // get help message text
      void GetHelpMessage() const;

      ClassDef(MethodPyGTB, 0)
   };

} // namespace TMVA

#endif // ROOT_TMVA_MethodPyGTB
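
As a usage sketch (not part of this header): a classifier based on this class is normally booked through the TMVA Factory with TMVA::Types::kPyGTB rather than instantiated directly. The option names in the booking string below (NEstimators, LearningRate, MaxDepth) are assumed to mirror the data members listed above; the authoritative list is in MethodPyGTB::DeclareOptions() in MethodPyGTB.cxx. File, tree, and variable names are placeholders, and PyMVA requires a ROOT build with Python support plus a working scikit-learn installation.

// Sketch: booking scikit-learn's GradientBoostingClassifier through TMVA (PyMVA).
// Assumes a file "input.root" with trees "TreeS"/"TreeB" and variables var1, var2 (placeholders).
#include "TFile.h"
#include "TTree.h"
#include "TCut.h"
#include "TMVA/Factory.h"
#include "TMVA/DataLoader.h"
#include "TMVA/PyMethodBase.h"
#include "TMVA/Types.h"

void TrainPyGTB()
{
   TMVA::PyMethodBase::PyInitialize(); // start the embedded Python interpreter used by PyMVA

   TFile *input  = TFile::Open("input.root");                  // placeholder input file
   TFile *output = TFile::Open("TMVA_PyGTB.root", "RECREATE"); // output for evaluation histograms

   TMVA::Factory factory("TMVAClassification", output,
                         "!V:!Silent:AnalysisType=Classification");
   TMVA::DataLoader loader("dataset");

   loader.AddVariable("var1", 'F'); // placeholder input variables
   loader.AddVariable("var2", 'F');
   loader.AddSignalTree((TTree *)input->Get("TreeS"), 1.0);
   loader.AddBackgroundTree((TTree *)input->Get("TreeB"), 1.0);
   loader.PrepareTrainingAndTestTree(TCut(""), "SplitMode=Random:NormMode=NumEvents:!V");

   // Option names assumed to follow the data members above (see DeclareOptions()).
   factory.BookMethod(&loader, TMVA::Types::kPyGTB, "PyGTB",
                      "!V:NEstimators=150:LearningRate=0.1:MaxDepth=3");

   factory.TrainAllMethods();   // fits the classifier and serializes it into dataset/weights/
   factory.TestAllMethods();
   factory.EvaluateAllMethods();

   output->Close();
}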