#define NPY_NO_DEPRECATED_API NPY_1_7_API_VERSION
#include <numpy/arrayobject.h>
   PyGILState_STATE m_GILState;

   PyGILRAII() : m_GILState(PyGILState_Ensure()) {}
   ~PyGILRAII() { PyGILState_Release(m_GILState); }
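// Usage sketch (illustrative, not part of the original file): any scope that
// calls into the Python C API can hold the GIL for exactly its own lifetime:
//
//    void CallIntoPython()
//    {
//       TMVA::Internal::PyGILRAII raii;   // acquires the GIL
//       PyRun_SimpleString("print('hello from python')");
//    }   // GIL released here, even if an exception unwinds the stack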
MethodPyRandomForest::MethodPyRandomForest(const TString &jobName,
   fMinWeightFractionLeaf(0),
   fMaxFeatures("'auto'"),
   fMaxLeafNodes("None"),
   fMinWeightFractionLeaf(0),
   fMaxFeatures("'auto'"),
   fMaxLeafNodes("None"),
   fRandomState("None"),
      The function to measure the quality of a split. Supported criteria are \
      'gini' for the Gini impurity and 'entropy' for the information gain. \
      Note: this parameter is tree-specific.");
      The maximum depth of the tree. If None, then nodes are expanded until \
      all leaves are pure or until all leaves contain less than \
      min_samples_split samples. \
      Ignored if ``max_leaf_nodes`` is not None.");
      The minimum number of samples required to split an internal node.");
      The minimum number of samples in newly created leaves. A split is \
      discarded if after the split, one of the leaves would contain less than \
      ``min_samples_leaf`` samples.");
      The minimum weighted fraction of the input samples required to be at a \
      leaf node.");
      Grow trees with ``max_leaf_nodes`` in best-first fashion. \
      Best nodes are defined as relative reduction in impurity. \
      If None then unlimited number of leaf nodes. \
      If not None then ``max_depth`` will be ignored.");
      Whether bootstrap samples are used when building trees.");
      the generalization error.");
      The number of jobs to run in parallel for both `fit` and `predict`. \
      If -1, then the number of jobs is set to the number of cores.");
      If int, random_state is the seed used by the random number generator; \
      If RandomState instance, random_state is the random number generator; \
      If None, the random number generator is the RandomState instance used \
      by `np.random`.");
      Controls the verbosity of the tree building process.");
      When set to ``True``, reuse the solution of the previous call to fit \
      and add more estimators to the ensemble, otherwise, just fit a whole \
      new forest.");
      Weights associated with classes in the form ``{class_label: weight}``. \
      If not given, all classes are supposed to have weight one. For \
      multi-output problems, a list of dicts can be provided in the same \
      order as the columns of y. \
      The \"auto\" mode uses the values of y to automatically adjust \
      weights inversely proportional to class frequencies in the input data. \
      The \"subsample\" mode is the same as \"auto\" except that weights are \
      computed based on the bootstrap sample for every tree grown. \
      For multi-output, the weights of each column of y will be multiplied. \
      Note that these weights will be multiplied with sample_weight (passed \
      through the fit method) if sample_weight is specified.");
199 "Store trained classifier in this file");
      Log() << kFATAL << " NEstimators <=0... that does not work !! " << Endl;
            << " The options are `gini` or `entropy`." << Endl;
            << " The options are None or integer." << Endl;
      Log() << kFATAL << " MinSamplesSplit < 0... that does not work !! " << Endl;
      Log() << kFATAL << " MinSamplesLeaf < 0... that does not work !! " << Endl;
      Log() << kERROR << " MinWeightFractionLeaf < 0... that does not work !! " << Endl;
            << "int, float, string or None, optional (default='auto')"
            << "The number of features to consider when looking for the best split:"
            << "If int, then consider `max_features` features at each split."
            << "If float, then `max_features` is a percentage and"
            << "`int(max_features * n_features)` features are considered at each split."
            << "If 'auto', then `max_features=sqrt(n_features)`."
            << "If 'sqrt', then `max_features=sqrt(n_features)`."
            << "If 'log2', then `max_features=log2(n_features)`."
            << "If None, then `max_features=n_features`." << Endl;
            << " The options are None or integer." << Endl;
            << "If int, random_state is the seed used by the random number generator;"
            << "If RandomState instance, random_state is the random number generator;"
            << "If None, the random number generator is the RandomState instance used by `np.random`." << Endl;
            << "dict, list of dicts, 'auto', 'subsample' or None, optional" << Endl;
      Log() << kFATAL << Form(" NJobs = %i... that does not work !! ", fNjobs)
            << "Value has to be greater than zero." << Endl;
   TMVA::Internal::PyGILRAII raii;
   npy_intp dimsData[2];
   dimsData[0] = fNrowsTraining;
   dimsData[1] = fNvars;
   fTrainData = (PyArrayObject *)PyArray_SimpleNew(2, dimsData, NPY_FLOAT);
   float *TrainData = (float *)(PyArray_DATA(fTrainData));
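   // Note (added for clarity): PyArray_SimpleNew allocates an uninitialized,
   // C-contiguous buffer; the loop below fills it element by element, so every
   // entry must be written before the array is handed to Python.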
   npy_intp dimsClasses = (npy_intp) fNrowsTraining;
   fTrainDataClasses = (PyArrayObject *)PyArray_SimpleNew(1, &dimsClasses, NPY_FLOAT);
   float *TrainDataClasses = (float *)(PyArray_DATA(fTrainDataClasses));

   fTrainDataWeights = (PyArrayObject *)PyArray_SimpleNew(1, &dimsClasses, NPY_FLOAT);
   float *TrainDataWeights = (float *)(PyArray_DATA(fTrainDataWeights));
   for (int i = 0; i < fNrowsTraining; i++) {
      // Fill training data matrix (row-major: one event per row)
      const TMVA::Event *e = Data()->GetTrainingEvent(i);
      for (UInt_t j = 0; j < fNvars; j++) {
         TrainData[j + i * fNvars] = e->GetValue(j);
      }

      // Fill target classes
      TrainDataClasses[i] = e->GetClass();

      // Fill event weights
      TrainDataWeights[i] = e->GetWeight();
   }
   PyRunString("classifier = sklearn.ensemble.RandomForestClassifier(bootstrap=bootstrap, class_weight=classWeight, criterion=criterion, max_depth=maxDepth, max_features=maxFeatures, max_leaf_nodes=maxLeafNodes, min_samples_leaf=minSamplesLeaf, min_samples_split=minSamplesSplit, min_weight_fraction_leaf=minWeightFractionLeaf, n_estimators=nEstimators, n_jobs=nJobs, oob_score=oobScore, random_state=randomState, verbose=verbose, warm_start=warmStart)",
               "Failed to setup classifier");
   PyRunString("dump = classifier.fit(trainData, trainDataClasses, trainDataWeights)",
               "Failed to train classifier");
      Log() << kFATAL << "Can't create classifier object from RandomForestClassifier" << Endl;
   if (firstEvt > lastEvt || lastEvt > nEvents) lastEvt = nEvents;
   if (firstEvt < 0) firstEvt = 0;
   nEvents = lastEvt - firstEvt;
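   // Clamp the requested event range to what the dataset actually holds; the
   // default lastEvt = -1 falls into the first branch and selects all events.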
            << " sample (" << nEvents << " events)" << Endl;
   PyArrayObject *pEvent = (PyArrayObject *)PyArray_SimpleNew(2, dims, NPY_FLOAT);
   float *pValue = (float *)(PyArray_DATA(pEvent));
   for (Int_t ievt = 0; ievt < nEvents; ievt++) {
      Data()->SetCurrentEvent(ievt);
      const TMVA::Event *e = Data()->GetEvent();
      for (UInt_t i = 0; i < fNvars; i++) {
         pValue[ievt * fNvars + i] = e->GetValue(i);
      }
   }
   PyArrayObject *result = (PyArrayObject *)PyObject_CallMethod(fClassifier,
      const_cast<char *>("predict_proba"), const_cast<char *>("(O)"), pEvent);
   double *proba = (double *)(PyArray_DATA(result));
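   // Layout note (added for clarity): predict_proba returns an
   // (nEvents x nClasses) C-ordered float64 array, so the probability of
   // class c for event i lives at proba[i * nClasses + c]; with two classes
   // the two columns of event i are proba[2*i] and proba[2*i + 1].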
   for (int i = 0; i < nEvents; ++i) {
            << "Elapsed time for evaluation of " << nEvents << " events: "
   PyArrayObject *pEvent = (PyArrayObject *)PyArray_SimpleNew(2, dims, NPY_FLOAT);
   float *pValue = (float *)(PyArray_DATA(pEvent));
   for (UInt_t i = 0; i < fNvars; i++) pValue[i] = e->GetValue(i);
   PyArrayObject *result = (PyArrayObject *)PyObject_CallMethod(fClassifier,
      const_cast<char *>("predict_proba"), const_cast<char *>("(O)"), pEvent);
   double *proba = (double *)(PyArray_DATA(result));
   PyArrayObject *pEvent = (PyArrayObject *)PyArray_SimpleNew(2, dims, NPY_FLOAT);
   float *pValue = (float *)(PyArray_DATA(pEvent));
   for (UInt_t i = 0; i < fNvars; i++) pValue[i] = e->GetValue(i);
   PyArrayObject *result = (PyArrayObject *)PyObject_CallMethod(fClassifier,
      const_cast<char *>("predict_proba"), const_cast<char *>("(O)"), pEvent);
   double *proba = (double *)(PyArray_DATA(result));
   PyArrayObject *pRanking = (PyArrayObject *)PyObject_GetAttrString(fClassifier, "feature_importances_");
   if (pRanking == 0)
      Log() << kFATAL << "Failed to get ranking from classifier" << Endl;
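   // Sketch of how the importances could be read out (an assumption for
   // illustration: pRanking holds the 1-D float64 array of length fNvars that
   // sklearn exposes as feature_importances_; names mirror the surrounding code):
   //
   //    double *importances = (double *)PyArray_DATA(pRanking);
   //    for (UInt_t i = 0; i < fNvars; i++)
   //       fRanking->AddRank(Rank(GetInputLabel(i), importances[i]));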
   Log() << "A random forest is a meta estimator that fits a number of decision" << Endl;
   Log() << "tree classifiers on various sub-samples of the dataset and uses" << Endl;
   Log() << "averaging to improve the predictive accuracy and control over-fitting." << Endl;
   Log() << "Check out the scikit-learn documentation for more information." << Endl;