// Option members default-initialised in the constructor initialiser list:
   , fNodePurityLimit(0)
   // ...
   , fRandomisedTrees(kFALSE)
   // ...
   , fUsePoissonNvars(0)
   , fDeltaPruneStrength(0)

// Second constructor, used when the method is read back from a weight file:
MethodDT::MethodDT( DataSetInfo& theData,
                    const TString& theWeightFile ) :
   // ... (same member defaults as above)
// In DeclareOptions():
   DeclareOptionRef(fUsePoissonNvars, "UsePoissonNvars",
                    "Interpret \"UseNvars\" not as fixed number but as mean of a Poisson distribution in each split with RandomisedTree option");
220 "Use Sig or Bkg node type or the ratio S/B as classification in the leaf node");
   DeclareOptionRef(fMinNodeSizeS, "MinNodeSize",
                    "Minimum percentage of training events required in a leaf node (default: Classification: 10%, Regression: 1%)");
252 "--> removed option .. only kept for reader backward compatibility");
// In ProcessOptions():
   else Log() << kFATAL << "<ProcessOptions> unknown Separation Index option called" << Endl;
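// The kFATAL above is the fall-through of a string-to-criterion mapping. A
// reconstruction sketch, assuming the four standard TMVA separation classes
// (the lower-cased comparison strings are assumptions):
   fSepTypeS.ToLower();
   if      (fSepTypeS == "misclassificationerror") fSepType = new MisClassificationError();
   else if (fSepTypeS == "giniindex")              fSepType = new GiniIndex();
   else if (fSepTypeS == "crossentropy")           fSepType = new CrossEntropy();
   else if (fSepTypeS == "sdivsqrtsplusb")         fSepType = new SdivSqrtSplusB();
   // else: the kFATAL shown above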
   else Log() << kFATAL << "<ProcessOptions> unknown PruneMethod option:" << fPruneMethodS << " called" << Endl;
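// Likewise for the prune method; a sketch of the mapping whose fall-through is
// the kFATAL above (the enum values are DecisionTree::EPruneMethod members;
// the comparison strings are assumptions):
   fPruneMethodS.ToLower();
   if      (fPruneMethodS == "expectederror")  fPruneMethod = DecisionTree::kExpectedErrorPruning;
   else if (fPruneMethodS == "costcomplexity") fPruneMethod = DecisionTree::kCostComplexityPruning;
   else if (fPruneMethodS == "nopruning")      fPruneMethod = DecisionTree::kNoPruning;
   // else: the kFATAL shown above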
   Log() << kFATAL
         << "Sorry, automatic pruning strength determination is not implemented yet for ExpectedErrorPruning" << Endl;
   if (this->Data()->HasNegativeEventWeights()){
      Log() << kINFO << " You are using a Monte Carlo sample that also has negative weights. "
            << "That should in principle be fine, as long as on average you end up with "
            << "something positive. For this you have to make sure that the minimal number "
            << "of (un-weighted) events demanded for a tree node (currently you use: MinNodeSize="
            << fMinNodeSizeS
            << ", or the deprecated equivalent nEventsMin; you can set this via the "
            << "MethodDT option string when booking the "
            << "classifier) is large enough to allow for reasonable averaging! "
            << "If this does not help, you may want to try the option IgnoreNegWeightsInTraining, "
            << "which ignores events with negative weight in the training. " << Endl
            << Endl << "Note: You'll get a WARNING message during the training if that should ever happen." << Endl;
   }
   Log() << kINFO << " Randomised trees should use *bagging* as the *boost* method. Did you set this in *MethodBoost*? Here I can only enforce *no pruning*." << Endl;
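// Consistent with the message above, the plausible next step (assumption; the
// enum value is a standard DecisionTree::EPruneMethod member) is to switch
// pruning off:
   fPruneMethod = DecisionTree::kNoPruning;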
   Log() << kWARNING << "You have explicitly set *nEventsMin*, the minimum absolute number\n"
         << "of events in a leaf node. This is DEPRECATED; please use the option\n"
         << "*MinNodeSize* giving the relative number as a percentage of training\n"
         << "events instead.\n" << Endl;

// ...

void TMVA::MethodDT::SetMinNodeSize(Double_t sizeInPercent){
   if (sizeInPercent > 0 && sizeInPercent < 50){
      fMinNodeSize = sizeInPercent;
   } else {
      Log() << kERROR << "you have demanded a minimal node size of "
            << sizeInPercent << "% of the training events, "
            << "which somehow does not make sense" << Endl;
   }
}
   Log() << kERROR << "I had problems reading the option MinNodeEvents, which\n"
         << "after removing a possible % sign now reads " << sizeInPercent << Endl;
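// The kERROR above sits in the string-flavoured overload of SetMinNodeSize. A
// reconstruction sketch of that overload (ReplaceAll, IsAlnum and Atof are
// standard ROOT TString methods; the exact control flow is assumed):
void TMVA::MethodDT::SetMinNodeSize( TString sizeInPercent )
{
   sizeInPercent.ReplaceAll("%", "");   // accept both "5" and "5%"
   if (sizeInPercent.IsAlnum()) SetMinNodeSize(sizeInPercent.Atof());
   else {
      // ... the kERROR message shown above
   }
}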
// In Train():
   if (fRandomisedTrees)
      Log() << kWARNING << " randomised trees do not work yet in this framework,"
            << " as I do not know how to give each tree a new random seed; they"
            << " will all be the same, and that is not good" << Endl;
   // collect the training events for the tree builder
   std::vector<const TMVA::Event*> tmp;
   for (Long64_t ievt = 0; ievt < nevents; ievt++) {
      const Event* event = GetEvent(ievt);
      tmp.push_back(event);
   }
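// The collected sample is then handed to the tree builder; a sketch of the
// plausible next steps (BuildTree and PruneTree are the interfaces this class
// uses; the exact call site is assumed):
   fTree->BuildTree(tmp);   // recursively split the (root) node on the sample
   PruneTree();             // prune afterwards, if a prune method is set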
// In PruneTree():
   for (UInt_t i = 0; i < nodes.size(); i++)
      fTree->PruneNode(nodes[i]);
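// Where the nodes vector above plausibly comes from: the cost-complexity
// helper CCPruner (Optimize, GetOptimalPruneSequence and
// GetOptimalPruneStrength are its real interface; the exact wiring is
// assumed):
   CCPruner* pruneTool = new CCPruner( fTree, this->Data(), fSepType );  // arguments assumed
   pruneTool->Optimize();   // determine the pruning sequence
   std::vector<TMVA::DecisionTreeNode*> nodes = pruneTool->GetOptimalPruneSequence();
   fPruneStrength = pruneTool->GetOptimalPruneStrength();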
   return SumCorrect / (SumCorrect + SumWrong);
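// In context, the fraction returned above is the weighted fraction of
// correctly classified validation events. A reconstruction sketch of the
// enclosing TestTreeQuality (CheckEvent, GetNodePurityLimit, IsSignal and
// GetWeight are real TMVA interfaces; the exact loop is assumed):
Double_t TMVA::MethodDT::TestTreeQuality( DecisionTree* dt )
{
   Data()->SetCurrentType(Types::kValidation);
   Double_t SumCorrect = 0, SumWrong = 0;
   for (Long64_t ievt = 0; ievt < Data()->GetNEvents(); ievt++) {
      const Event* ev = Data()->GetEvent(ievt);
      // correct if the purity-based decision matches the true class
      if ((dt->CheckEvent(ev) > dt->GetNodePurityLimit()) == DataInfo().IsSignal(ev))
         SumCorrect += ev->GetWeight();
      else
         SumWrong += ev->GetWeight();
   }
   Data()->SetCurrentType(Types::kTraining);
   return SumCorrect / (SumCorrect + SumWrong);
}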