#ifndef ROOT_TMVA_MethodBDT
#define ROOT_TMVA_MethodBDT

inline const std::vector<TMVA::DecisionTree*>& GetForest() const;
void MakeClassInstantiateNode(DecisionTreeNode* n, std::ostream& fout, const TString& className) const;
std::map< const TMVA::Event*, std::vector<double> > fResiduals;

#define ClassDef(name, id)
Class that contains all the data information.
Implementation of a Decision Tree.
Analysis of Boosted Decision Trees.
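In user code the method is normally not instantiated directly but booked through the TMVA Factory. The following is a minimal, hedged sketch of that workflow; the file name "input.root", the tree names "SigTree"/"BkgTree" and the variables "var1"/"var2" are placeholders, not part of this class.

#include "TFile.h"
#include "TTree.h"
#include "TCut.h"
#include "TMVA/Factory.h"
#include "TMVA/DataLoader.h"
#include "TMVA/Types.h"

void trainBDT() {
   TFile* input  = TFile::Open("input.root");                 // placeholder input file
   TFile* output = TFile::Open("TMVA_BDT.root", "RECREATE");

   TMVA::Factory    factory("TMVAClassification", output, "!V:!Silent:AnalysisType=Classification");
   TMVA::DataLoader loader("dataset");

   loader.AddVariable("var1", 'F');                           // placeholder variables
   loader.AddVariable("var2", 'F');
   loader.AddSignalTree((TTree*)input->Get("SigTree"), 1.0);
   loader.AddBackgroundTree((TTree*)input->Get("BkgTree"), 1.0);
   loader.PrepareTrainingAndTestTree(TCut(""), "SplitMode=Random:NormMode=NumEvents:!V");

   // Book MethodBDT under its Factory key kBDT with a typical AdaBoost option string.
   factory.BookMethod(&loader, TMVA::Types::kBDT, "BDT",
                      "NTrees=400:MaxDepth=3:BoostType=AdaBoost:AdaBoostBeta=0.5:"
                      "SeparationType=GiniIndex:nCuts=20");

   factory.TrainAllMethods();
   factory.TestAllMethods();
   factory.EvaluateAllMethods();
   output->Close();
}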
std::vector< Double_t > fHighBkgCut
void SetBaggedSampleFraction(Double_t f)
DecisionTree::EPruneMethod fPruneMethod
std::vector< const TMVA::Event * > fEventSample
void Init(void)
Common initialisation with defaults for the BDT-Method.
static const Int_t fgDebugLevel
void BoostMonitor(Int_t iTree)
Fills the ROC integral vs. iTree graph from the test sample for the monitoring plots during the training.
const std::vector< Float_t > & GetMulticlassValues()
Get the multiclass MVA response for the BDT classifier.
std::map< const TMVA::Event *, LossFunctionEventInfo > fLossFunctionEventInfo
std::vector< const TMVA::Event * > * fTrainSample
std::vector< Bool_t > fIsHighSigCut
Double_t AdaBoostR2(std::vector< const TMVA::Event * > &, DecisionTree *dt)
Adaptation of AdaBoost to regression problems (see H. Drucker 1997).
void MakeClassSpecific(std::ostream &, const TString &) const
Make ROOT-independent C++ class for classifier response (classifier-specific implementation).
Bool_t fPairNegWeightsGlobal
Bool_t fSkipNormalization
void GetHelpMessage() const
Get help message text.
LossFunctionBDT * fRegressionLossFunctionBDTG
void DeterminePreselectionCuts(const std::vector< const TMVA::Event * > &eventSample)
Find useful preselection cuts that will be applied before any Decision Tree training.
Double_t GradBoost(std::vector< const TMVA::Event * > &, DecisionTree *dt, UInt_t cls=0)
Calculate the desired response value for each region.
const Ranking * CreateRanking()
Compute ranking of input variables.
virtual void SetTuneParameters(std::map< TString, Double_t > tuneParameters)
Set the tuning parameters according to the argument.
void SetAdaBoostBeta(Double_t b)
Double_t AdaCost(std::vector< const TMVA::Event * > &, DecisionTree *dt)
The AdaCost boosting algorithm takes a simple cost matrix (currently fixed for all events ...).
void DeclareOptions()
Define the options (their key words).
Bool_t fTrainWithNegWeights
TString fRegressionLossFunctionBDTGS
std::vector< double > fBoostWeights
std::vector< Double_t > fVariableImportance
std::vector< Bool_t > fIsLowBkgCut
virtual std::map< TString, Double_t > OptimizeTuningParameters(TString fomType="ROCIntegral", TString fitType="FitGA")
Call the Optimizer with the set of parameters and ranges that are meant to be tuned.
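A hedged sketch of calling the optimizer from user code, reusing the factory and loader objects from the booking sketch above (the cast assumes the booked method really is a MethodBDT; whether the returned values need to be re-applied with SetTuneParameters depends on the TMVA version).

#include <map>
#include "TMVA/MethodBDT.h"

// ... after factory.BookMethod(...) and before factory.TrainAllMethods():
if (auto* bdt = dynamic_cast<TMVA::MethodBDT*>(factory.GetMethod(loader.GetName(), "BDT"))) {
   std::map<TString, Double_t> tuned = bdt->OptimizeTuningParameters("ROCIntegral", "FitGA");
   bdt->SetTuneParameters(tuned);   // adopt the tuned parameter values
}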
Double_t Boost(std::vector< const TMVA::Event * > &, DecisionTree *dt, UInt_t cls=0)
Apply the boosting algorithm (the algorithm is selected via the "option" given in the constructor).
Double_t TestTreeQuality(DecisionTree *dt)
Test the tree quality in terms of misclassification.
std::vector< DecisionTree * > fForest
std::vector< Bool_t > fIsLowSigCut
Double_t Bagging()
Call it bootstrapping, re-sampling or whatever you like; in the end it is nothing else but applying "random" Poisson weights to each event.
Double_t fBaggedSampleFraction
const std::vector< double > & GetBoostWeights() const
void SetMaxDepth(Int_t d)
void UpdateTargets(std::vector< const TMVA::Event * > &, UInt_t cls=0)
Calculate residuals for all events.
Double_t fFValidationEvents
std::vector< const TMVA::Event * > fSubSample
void UpdateTargetsRegression(std::vector< const TMVA::Event * > &, Bool_t first=kFALSE)
Calculate residuals for all events and update targets for the next iteration.
Double_t GradBoostRegression(std::vector< const TMVA::Event * > &, DecisionTree *dt)
Implementation of M_TreeBoost using any loss function as described by Friedman 1999.
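For illustration only (TMVA delegates this step to the configured LossFunctionBDT, Huber by default), the least-squares case reduces to fitting each new tree to the current residuals:

#include <vector>

struct RegEvent { double y; double F; double target; };   // truth, current model prediction, fit target

// Least-squares gradient-boosting target update: the negative gradient of
// 1/2*(y - F)^2 with respect to F is simply the residual y - F.
void updateTargetsLeastSquares(std::vector<RegEvent>& events) {
   for (auto& e : events) e.target = e.y - e.F;
}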
void WriteMonitoringHistosToFile(void) const
Here we could write some histograms created during the processing to the output file.
std::vector< Double_t > fLowBkgCut
void SetShrinkage(Double_t s)
virtual ~MethodBDT(void)
Destructor.
void AddWeightsXMLTo(void *parent) const
Write weights to XML.
Double_t GetGradBoostMVA(const TMVA::Event *e, UInt_t nTrees)
Returns MVA value: -1 for background, 1 for signal.
Double_t fNodePurityLimit
virtual Bool_t HasAnalysisType(Types::EAnalysisType type, UInt_t numberClasses, UInt_t numberTargets)
BDT can handle classification with multiple classes and regression with one regression-target.
void SetNodePurityLimit(Double_t l)
Double_t RegBoost(std::vector< const TMVA::Event * > &, DecisionTree *dt)
A special boosting only for Regression (not implemented).
void InitEventSample()
Initialize the event sample (i.e. reset the boost-weights... etc).
Double_t ApplyPreselectionCuts(const Event *ev)
Apply the preselection cuts before even bothering about any Decision Trees in the MVA evaluation.
void SetMinNodeSize(Double_t sizeInPercent)
void ProcessOptions()
The option string is decoded; for the available options see DeclareOptions().
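For reference, a hedged example of an option string as it would be decoded here, this time for a gradient-boosted configuration; factory and loader are the objects from the booking sketch above, the values are purely illustrative, and every keyword shown is a documented BDT option.

factory.BookMethod(&loader, TMVA::Types::kBDT, "BDTG",
                   "NTrees=1000:BoostType=Grad:Shrinkage=0.10:"
                   "UseBaggedBoost:BaggedSampleFraction=0.5:"
                   "MaxDepth=2:MinNodeSize=2.5%:nCuts=20:"
                   "NegWeightTreatment=IgnoreNegWeightsInTraining");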
void PreProcessNegativeEventWeights()
Pre-process events with negative weights before the training (cf. the PairNegWeightsGlobal treatment selected via the NegWeightTreatment option).
std::vector< const TMVA::Event * > fValidationSample
std::vector< Double_t > fLowSigCut
Bool_t fInverseBoostNegWeights
std::map< const TMVA::Event *, std::vector< double > > fResiduals
void MakeClassInstantiateNode(DecisionTreeNode *n, std::ostream &fout, const TString &className) const
Recursively descends a tree and writes the node instance to the output stream.
Double_t AdaBoost(std::vector< const TMVA::Event * > &, DecisionTree *dt)
The AdaBoost implementation.
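The following standalone sketch shows the textbook AdaBoost re-weighting step this method is built around; it is a simplification, not a copy of the TMVA code, which additionally deals with negative weights and several option-dependent variants.

#include <cmath>
#include <vector>

struct Evt { double weight; bool misclassified; };

// One AdaBoost step: compute the weighted error of the last tree, boost the
// weights of misclassified events and renormalise; returns the tree's vote weight.
double adaBoostStep(std::vector<Evt>& events, double adaBoostBeta) {
   double sumW = 0.0, sumWrong = 0.0;
   for (const auto& e : events) { sumW += e.weight; if (e.misclassified) sumWrong += e.weight; }
   const double err   = sumWrong / sumW;
   const double boost = std::pow((1.0 - err) / err, adaBoostBeta);   // > 1 while err < 0.5
   double newSumW = 0.0;
   for (auto& e : events) {
      if (e.misclassified) e.weight *= boost;
      newSumW += e.weight;
   }
   for (auto& e : events) e.weight *= sumW / newSumW;                // keep the total weight fixed
   return std::log(boost);
}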
std::vector< Double_t > GetVariableImportance()
Return the relative variable importance, normalized so that all variables together have importance 1.
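A minimal sketch of that normalisation (illustration only; TMVA derives the raw importances from the separation gains of the splits using each variable):

#include <numeric>
#include <vector>

// Scale raw per-variable importances so that they sum to 1.
std::vector<double> normalizeImportance(std::vector<double> importance) {
   const double sum = std::accumulate(importance.begin(), importance.end(), 0.0);
   if (sum > 0) for (double& v : importance) v /= sum;
   return importance;
}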
void SetUseNvars(Int_t n)
Double_t GetMvaValue(Double_t *err=0, Double_t *errUpper=0)
Bool_t fNoNegWeightsInTraining
Double_t PrivateGetMvaValue(const TMVA::Event *ev, Double_t *err=0, Double_t *errUpper=0, UInt_t useNTrees=0)
Return the MVA value (in the range [-1, 1]) that classifies the event according to the majority vote of all decision trees in the forest.
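A simplified standalone illustration of that weighted majority vote; the real implementation walks the stored DecisionTree objects, and the +/-1 votes come from the leaf node type.

#include <vector>

// treeVote[i] is +1 (signal-like leaf) or -1 (background-like leaf) for tree i,
// boostWeight[i] the boost weight assigned to that tree during training.
double weightedMajorityVote(const std::vector<int>& treeVote,
                            const std::vector<double>& boostWeight) {
   double sum = 0.0, norm = 0.0;
   for (std::size_t i = 0; i < treeVote.size(); ++i) {
      sum  += boostWeight[i] * treeVote[i];
      norm += boostWeight[i];
   }
   return norm > 0.0 ? sum / norm : 0.0;   // lies in [-1, 1]
}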
void InitGradBoost(std::vector< const TMVA::Event * > &)
Initialize targets for first tree.
void Train(void)
BDT training.
const std::vector< TMVA::DecisionTree * > & GetForest() const
void GetBaggedSubSample(std::vector< const TMVA::Event * > &)
Fills fEventSample with fBaggedSampleFraction*NEvents random training events.
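A hedged sketch of such a sub-sampling step; TMVA's actual implementation uses ROOT's random generator and its own bookkeeping, while here each event is simply kept with probability f, giving f*NEvents events on average.

#include <random>
#include <vector>

template <class Event>
std::vector<const Event*> baggedSubSample(const std::vector<const Event*>& sample,
                                          double f, std::mt19937& rng) {
   std::bernoulli_distribution keep(f);      // keep each event with probability f
   std::vector<const Event*> sub;
   for (const Event* ev : sample)
      if (keep(rng)) sub.push_back(ev);
   return sub;
}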
const std::vector< const TMVA::Event * > & GetTrainingEvents() const
const std::vector< Float_t > & GetRegressionValues()
Get the regression value generated by the BDTs.
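From user code the regression response of a trained BDT is normally obtained through the TMVA Reader; a hedged sketch follows, where the weight-file path and the variable names are placeholders.

#include "TMVA/Reader.h"

float evaluateRegressionBDT(float v1, float v2) {
   static float var1, var2;                   // the Reader stores pointers to these
   static TMVA::Reader reader("!Color:!Silent");
   static bool booked = false;
   if (!booked) {
      reader.AddVariable("var1", &var1);      // placeholder variable names
      reader.AddVariable("var2", &var2);
      reader.BookMVA("BDTG", "dataset/weights/TMVARegression_BDTG.weights.xml");
      booked = true;
   }
   var1 = v1; var2 = v2;
   return reader.EvaluateRegression("BDTG")[0];   // first regression target
}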
std::vector< Double_t > fHighSigCut
SeparationBase * fSepType
void ReadWeightsFromXML(void *parent)
Reads the BDT from the XML file.
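The same XML weight file is what the TMVA Reader parses in the application phase; analogously to the regression sketch above, a hedged classification example with placeholder paths and variable names:

#include "TMVA/Reader.h"

float evaluateClassificationBDT(float v1, float v2) {
   static float var1, var2;
   static TMVA::Reader reader("!Color:!Silent");
   static bool booked = false;
   if (!booked) {
      reader.AddVariable("var1", &var1);
      reader.AddVariable("var2", &var2);
      reader.BookMVA("BDT", "dataset/weights/TMVAClassification_BDT.weights.xml");
      booked = true;
   }
   var1 = v1; var2 = v2;
   return reader.EvaluateMVA("BDT");          // BDT response, roughly in [-1, 1]
}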
void ReadWeightsFromStream(std::istream &istr)
Read the weights (BDT coefficients).
TString fNegWeightTreatment
std::vector< Bool_t > fIsHighBkgCut
void Reset(void)
Reset the method, as if it had just been instantiated (forget all training etc.).
Double_t fSigToBkgFraction
void MakeClassSpecificHeader(std::ostream &, const TString &) const
Specific class header.
Double_t fMinLinCorrForFisher
void DeclareCompatibilityOptions()
Options that are used ONLY for the READER to ensure backward compatibility.
Virtual base class for all MVA methods.
virtual void ReadWeightsFromStream(std::istream &)=0
Ranking for variables in method (implementation)
An interface to calculate the "SeparationGain" for different separation criteria used in various training algorithms.
A TTree represents a columnar dataset.
Create variable transformations.