ROOT logo
ROOT » TMVA » TMVA::DecisionTreeNode

class TMVA::DecisionTreeNode: public TMVA::Node


 Node for the Decision Tree

 The node specifies ONE variable out of the given set of selection variables
 that is used to split the sample which "arrives" at the node, into a left
 (background-enhanced) and a right (signal-enhanced) sample.

Function Members (Methods)

public:
virtual~DecisionTreeNode()
virtual voidAddAttributesToNode(void* node) const
virtual voidAddContentToNode(stringstream& s) const
voidAddToSumTarget(Float_t t)
voidAddToSumTarget2(Float_t t2)
void*TMVA::Node::AddXMLTo(void* parent) const
static TClass*Class()
voidClearNodeAndAllDaughters()
Int_tTMVA::Node::CountMeAndAllDaughters() const
virtual TMVA::Node*CreateNode() const
TMVA::DecisionTreeNodeDecisionTreeNode()
TMVA::DecisionTreeNodeDecisionTreeNode(TMVA::Node* p, char pos)
TMVA::DecisionTreeNodeDecisionTreeNode(const TMVA::DecisionTreeNode& n, TMVA::DecisionTreeNode* parent = NULL)
Double_tGetAlpha() const
Double_tGetAlphaMinSubtree() const
Double_tGetCC() const
intTMVA::Node::GetCount()
Bool_tGetCutType() const
Float_tGetCutValue() const
UInt_tTMVA::Node::GetDepth() const
Double_tGetFisherCoeff(Int_t ivar) const
virtual TMVA::DecisionTreeNode*GetLeft() const
Float_tGetNBkgEvents() const
Float_tGetNBkgEvents_unboosted() const
Float_tGetNBkgEvents_unweighted() const
Double_tGetNBValidation() const
Float_tGetNEvents() const
Float_tGetNEvents_unboosted() const
Float_tGetNEvents_unweighted() const
UInt_tGetNFisherCoeff() const
Double_tGetNodeR() const
Int_tGetNodeType() const
Float_tGetNSigEvents() const
Float_tGetNSigEvents_unboosted() const
Float_tGetNSigEvents_unweighted() const
Double_tGetNSValidation() const
Int_tGetNTerminal() const
virtual TMVA::DecisionTreeNode*GetParent() const
virtual TMVA::BinaryTree*TMVA::Node::GetParentTree() const
charTMVA::Node::GetPos() const
Float_tGetPurity() const
Float_tGetResponse() const
virtual TMVA::DecisionTreeNode*GetRight() const
Float_tGetRMS() const
Float_tGetSampleMax(UInt_t ivar) const
Float_tGetSampleMin(UInt_t ivar) const
Short_tGetSelector() const
Float_tGetSeparationGain() const
Float_tGetSeparationIndex() const
Double_tGetSubTreeR() const
Float_tGetSumTarget() const
Float_tGetSumTarget2() const
virtual Bool_tGoesLeft(const TMVA::Event&) const
virtual Bool_tGoesRight(const TMVA::Event&) const
voidIncrementNBkgEvents(Float_t b)
voidIncrementNBkgEvents_unweighted()
voidIncrementNEvents(Float_t nev)
voidIncrementNEvents_unweighted()
voidIncrementNSigEvents(Float_t s)
voidIncrementNSigEvents_unweighted()
virtual TClass*IsA() const
Bool_tIsTerminal() const
TMVA::DecisionTreeNode&operator=(const TMVA::DecisionTreeNode&)
virtual voidPrint(ostream& os) const
voidPrintPrune(ostream& os) const
virtual voidPrintRec(ostream& os) const
voidPrintRecPrune(ostream& os) const
voidTMVA::Node::ReadXML(void* node, UInt_t tmva_Version_Code = TMVA_VERSION_CODE)
voidResetValidationData()
voidSetAlpha(Double_t alpha)
voidSetAlphaMinSubtree(Double_t g)
voidSetCC(Double_t cc)
voidSetCutType(Bool_t t)
voidSetCutValue(Float_t c)
voidTMVA::Node::SetDepth(UInt_t d)
voidSetFisherCoeff(Int_t ivar, Double_t coeff)
virtual voidSetLeft(TMVA::Node* l)
voidSetNBkgEvents(Float_t b)
voidSetNBkgEvents_unboosted(Float_t b)
voidSetNBkgEvents_unweighted(Float_t b)
voidSetNBValidation(Double_t b)
voidSetNEvents(Float_t nev)
voidSetNEvents_unboosted(Float_t nev)
voidSetNEvents_unweighted(Float_t nev)
voidSetNFisherCoeff(Int_t nvars)
voidSetNodeR(Double_t r)
voidSetNodeType(Int_t t)
voidSetNSigEvents(Float_t s)
voidSetNSigEvents_unboosted(Float_t s)
voidSetNSigEvents_unweighted(Float_t s)
voidSetNSValidation(Double_t s)
voidSetNTerminal(Int_t n)
virtual voidSetParent(TMVA::Node* p)
virtual voidTMVA::Node::SetParentTree(TMVA::BinaryTree* t)
voidTMVA::Node::SetPos(char s)
voidSetPurity()
voidSetResponse(Float_t r)
virtual voidSetRight(TMVA::Node* r)
voidSetRMS(Float_t r)
voidSetSampleMax(UInt_t ivar, Float_t xmax)
voidSetSampleMin(UInt_t ivar, Float_t xmin)
voidSetSelector(Short_t i)
voidSetSeparationGain(Float_t sep)
voidSetSeparationIndex(Float_t sep)
voidSetSubTreeR(Double_t r)
voidSetSumTarget(Float_t t)
voidSetSumTarget2(Float_t t2)
voidSetTerminal(Bool_t s = kTRUE)
virtual voidShowMembers(TMemberInspector&)
virtual voidStreamer(TBuffer&)
voidStreamerNVirtual(TBuffer& ClassDef_StreamerNVirtual_b)
private:
virtual voidReadAttributes(void* node, UInt_t tmva_Version_Code = TMVA_VERSION_CODE)
virtual voidReadContent(stringstream& s)
virtual Bool_tReadDataRecord(istream& is, UInt_t tmva_Version_Code = TMVA_VERSION_CODE)

Data Members

public:
static boolfgIsTrainingstatic variable to flag training phase in which we need fTrainInfo
static UInt_tfgTmva_Version_Codeset only when read from weightfile
protected:
Bool_tfCutTypetrue: if event variable > cutValue ==> signal , false otherwise
Float_tfCutValuecut value applied on this node to discriminate bkg against sig
UInt_tTMVA::Node::fDepthdepth of the node within the tree (seen from root node)
vector<Double_t>fFisherCoeffthe fisher coeff (offset at the last element)
Bool_tfIsTerminalNode! flag to set node as terminal (i.e., without deleting its descendants)
TMVA::Node*TMVA::Node::fLeftpointers to the two "daughter" nodes
Int_tfNodeTypeType of node: -1 == Bkg-leaf, 1 == Signal-leaf, 0 == internal
TMVA::Node*TMVA::Node::fParentthe previous (parent) node
TMVA::BinaryTree*TMVA::Node::fParentTreepointer to the parent tree to which the Node belongs
charTMVA::Node::fPosposition, i.e. it is a left (l) or right (r) daughter
Float_tfPuritythe node purity
Float_tfRMSresponse RMS of the regression node
Float_tfResponseresponse value in case of regression
TMVA::Node*TMVA::Node::fRightpointers to the two "daughter" nodes
Short_tfSelectorindex of variable used in node selection (decision tree)
TMVA::DTNodeTrainingInfo*fTrainInfo
static TMVA::MsgLogger*fgLoggerstatic because there is a huge number of nodes...

Class Charts

Inheritance Inherited Members Includes Libraries
Class Charts

Function documentation

DecisionTreeNode()
 constructor of an essentially "empty" node floating in space
DecisionTreeNode(TMVA::Node* p, char pos)
 constructor of a daughter node as a daughter of 'p'
DecisionTreeNode(const TMVA::DecisionTreeNode& n, TMVA::DecisionTreeNode* parent = NULL)
 copy constructor of a node. It will result in an explicit copy of
 the node and recursively all its daughters
~DecisionTreeNode()
 destructor
Bool_t GoesRight(const TMVA::Event& ) const
 test whether the event descends the tree at this node to the right
Bool_t GoesLeft(const TMVA::Event& ) const
 test whether the event descends the tree at this node to the left
void SetPurity( void )
 compute and store the S/(S+B) (purity) for the node
 REM: even if nodes with purity 0.01 are very PURE background nodes, they still
      get a small value of the purity.
void Print(ostream& os) const
print the node
void PrintRec(ostream& os) const
recursively print the node and its daughters (--> print the 'tree')
Bool_t ReadDataRecord(istream& is, UInt_t tmva_Version_Code = TMVA_VERSION_CODE)
 Read the data block
void ClearNodeAndAllDaughters()
 clear the nodes (their S/N, Nevents etc), just keep the structure of the tree
void ResetValidationData()
 temporary stored node values (number of events, etc.) that originate
 not from the training but from the validation data (used in pruning)
void PrintPrune(ostream& os) const
 printout of the node (can be read in with ReadDataRecord)
void PrintRecPrune(ostream& os) const
 recursive printout of the node and its daughters
void SetCC(Double_t cc)
Float_t GetSampleMin(UInt_t ivar) const
 return the minimum of variable ivar from the training sample
 that pass/end up in this node
Float_t GetSampleMax(UInt_t ivar) const
 return the maximum of variable ivar from the training sample
 that pass/end up in this node
void SetSampleMin(UInt_t ivar, Float_t xmin)
 set the minimum of variable ivar from the training sample
 that pass/end up in this node
void SetSampleMax(UInt_t ivar, Float_t xmax)
 set the maximum of variable ivar from the training sample
 that pass/end up in this node
void ReadAttributes(void* node, UInt_t tmva_Version_Code = TMVA_VERSION_CODE)
void AddAttributesToNode(void* node) const
 add attribute to xml
void SetFisherCoeff(Int_t ivar, Double_t coeff)
 set fisher coefficients
void AddContentToNode(stringstream& s) const
 adding attributes to tree node  (well, was used in BinarySearchTree,
 and somehow I guess someone programmed it such that we need this in
 this tree too, although we don't..)
void ReadContent(stringstream& s)
 reading attributes from tree node  (well, was used in BinarySearchTree,
 and somehow I guess someone programmed it such that we need this in
 this tree too, although we don't..)
Node* CreateNode() const
{ return new DecisionTreeNode(); }
void SetNFisherCoeff(Int_t nvars)
{fFisherCoeff.resize(nvars);}
UInt_t GetNFisherCoeff() const
 return the number of fisher coefficients
{ return fFisherCoeff.size();}
Double_t GetFisherCoeff(Int_t ivar) const
 get fisher coefficients
{return fFisherCoeff.at(ivar);}
void SetSelector(Short_t i)
 set index of variable used for discrimination at this node
{ fSelector = i; }
Short_t GetSelector() const
 return index of variable used for discrimination at this node
{ return fSelector; }
void SetCutValue(Float_t c)
 set the cut value applied at this node
{ fCutValue = c; }
Float_t GetCutValue( void )
 return the cut value applied at this node
{ return fCutValue; }
void SetCutType(Bool_t t)
 set true: if event variable > cutValue ==> signal , false otherwise
{ fCutType = t; }
Bool_t GetCutType( void )
 return kTRUE: Cuts select signal, kFALSE: Cuts select bkg
{ return fCutType; }
void SetNodeType(Int_t t)
 set node type: 1 signal node, -1 bkg leaf, 0 intermediate node
{ fNodeType = t;}
Int_t GetNodeType( void )
 return node type: 1 signal node, -1 bkg leaf, 0 intermediate node
{ return fNodeType; }
Float_t GetPurity( void )
return  S/(S+B) (purity) at this node (from  training)
{ return fPurity;}
void SetResponse(Float_t r)
set the response of the node (for regression)
{ fResponse = r;}
Float_t GetResponse( void )
return the response of the node (for regression)
{ return fResponse;}
void SetRMS(Float_t r)
set the RMS of the response of the node (for regression)
{ fRMS = r;}
Float_t GetRMS( void )
return the RMS of the response of the node (for regression)
{ return fRMS;}
void SetNSigEvents(Float_t s)
 set the sum of the signal weights in the node
{ fTrainInfo->fNSigEvents = s; }
void SetNBkgEvents(Float_t b)
 set the sum of the backgr weights in the node
{ fTrainInfo->fNBkgEvents = b; }
void SetNEvents(Float_t nev)
 set the number of events that entered the node (during training)
{ fTrainInfo->fNEvents =nev ; }
void SetNSigEvents_unweighted(Float_t s)
 set the sum of the unweighted signal events in the node
{ fTrainInfo->fNSigEvents_unweighted = s; }
void SetNBkgEvents_unweighted(Float_t b)
 set the sum of the unweighted backgr events in the node
{ fTrainInfo->fNBkgEvents_unweighted = b; }
void SetNEvents_unweighted(Float_t nev)
 set the number of unweighted events that entered the node (during training)
{ fTrainInfo->fNEvents_unweighted =nev ; }
void SetNSigEvents_unboosted(Float_t s)
 set the sum of the unboosted signal events in the node
{ fTrainInfo->fNSigEvents_unboosted = s; }
void SetNBkgEvents_unboosted(Float_t b)
 set the sum of the unboosted backgr events in the node
{ fTrainInfo->fNBkgEvents_unboosted = b; }
void SetNEvents_unboosted(Float_t nev)
 set the number of unboosted events that entered the node (during training)
{ fTrainInfo->fNEvents_unboosted =nev ; }
void IncrementNSigEvents(Float_t s)
 increment the sum of the signal weights in the node
{ fTrainInfo->fNSigEvents += s; }
void IncrementNBkgEvents(Float_t b)
 increment the sum of the backgr weights in the node
{ fTrainInfo->fNBkgEvents += b; }
void IncrementNEvents(Float_t nev)
 increment the number of events that entered the node (during training)
{ fTrainInfo->fNEvents +=nev ; }
void IncrementNSigEvents_unweighted()
 increment the sum of the signal weights in the node
{ fTrainInfo->fNSigEvents_unweighted += 1; }
void IncrementNBkgEvents_unweighted()
 increment the sum of the backgr weights in the node
{ fTrainInfo->fNBkgEvents_unweighted += 1; }
void IncrementNEvents_unweighted()
 increment the number of events that entered the node (during training)
{ fTrainInfo->fNEvents_unweighted +=1 ; }
Float_t GetNSigEvents( void )
 return the sum of the signal weights in the node
{ return fTrainInfo->fNSigEvents; }
Float_t GetNBkgEvents( void )
 return the sum of the backgr weights in the node
{ return fTrainInfo->fNBkgEvents; }
Float_t GetNEvents( void )
 return  the number of events that entered the node (during training)
{ return fTrainInfo->fNEvents; }
Float_t GetNSigEvents_unweighted( void )
 return the sum of unweighted signal weights in the node
{ return fTrainInfo->fNSigEvents_unweighted; }
Float_t GetNBkgEvents_unweighted( void )
 return the sum of unweighted backgr weights in the node
{ return fTrainInfo->fNBkgEvents_unweighted; }
Float_t GetNEvents_unweighted( void )
 return  the number of unweighted events that entered the node (during training)
{ return fTrainInfo->fNEvents_unweighted; }
Float_t GetNSigEvents_unboosted( void )
 return the sum of unboosted signal weights in the node
{ return fTrainInfo->fNSigEvents_unboosted; }
Float_t GetNBkgEvents_unboosted( void )
 return the sum of unboosted backgr weights in the node
{ return fTrainInfo->fNBkgEvents_unboosted; }
Float_t GetNEvents_unboosted( void )
 return  the number of unboosted events that entered the node (during training)
{ return fTrainInfo->fNEvents_unboosted; }
void SetSeparationIndex(Float_t sep)
 set the chosen index, measure of "purity" (separation between S and B) AT this node
{ fTrainInfo->fSeparationIndex =sep ; }
Float_t GetSeparationIndex( void )
 return the separation index AT this node
{ return fTrainInfo->fSeparationIndex; }
void SetSeparationGain(Float_t sep)
 set the separation, or information gained BY this nodes selection
{ fTrainInfo->fSeparationGain =sep ; }
Float_t GetSeparationGain( void )
 return the gain in separation obtained by this nodes selection
{ return fTrainInfo->fSeparationGain; }
DecisionTreeNode* GetLeft() const
 get pointers to children, mother in the tree
 return pointer to the left/right daughter or parent node
{ return dynamic_cast<DecisionTreeNode*>(fLeft); }
DecisionTreeNode* GetRight() const
{ return dynamic_cast<DecisionTreeNode*>(fRight); }
DecisionTreeNode* GetParent() const
{ return dynamic_cast<DecisionTreeNode*>(fParent); }
void SetLeft(TMVA::Node* l)
 set pointer to the left/right daughter and parent node
{ fLeft = dynamic_cast<DecisionTreeNode*>(l);}
void SetRight(TMVA::Node* r)
{ fRight = dynamic_cast<DecisionTreeNode*>(r);}
void SetParent(TMVA::Node* p)
{ fParent = dynamic_cast<DecisionTreeNode*>(p);}
void SetNodeR(Double_t r)
 the node resubstitution estimate, R(t), for Cost Complexity pruning
{ fTrainInfo->fNodeR = r; }
Double_t GetNodeR() const
{ return fTrainInfo->fNodeR; }
void SetSubTreeR(Double_t r)
 the resubstitution estimate, R(T_t), of the tree rooted at this node
{ fTrainInfo->fSubTreeR = r; }
Double_t GetSubTreeR() const
{ return fTrainInfo->fSubTreeR; }
void SetAlpha(Double_t alpha)
                             R(t) - R(T_t)
 the critical point alpha =  -------------
                              |~T_t| - 1
{ fTrainInfo->fAlpha = alpha; }
Double_t GetAlpha() const
{ return fTrainInfo->fAlpha; }
void SetAlphaMinSubtree(Double_t g)
 the minimum alpha in the tree rooted at this node
{ fTrainInfo->fG = g; }
Double_t GetAlphaMinSubtree() const
{ return fTrainInfo->fG; }
void SetNTerminal(Int_t n)
 number of terminal nodes in the subtree rooted here
{ fTrainInfo->fNTerminal = n; }
Int_t GetNTerminal() const
{ return fTrainInfo->fNTerminal; }
void SetNBValidation(Double_t b)
 number of background/signal events from the pruning validation sample
{ fTrainInfo->fNB = b; }
void SetNSValidation(Double_t s)
{ fTrainInfo->fNS = s; }
Double_t GetNBValidation() const
{ return fTrainInfo->fNB; }
Double_t GetNSValidation() const
{ return fTrainInfo->fNS; }
void SetSumTarget(Float_t t)
{fTrainInfo->fSumTarget = t; }
void SetSumTarget2(Float_t t2)
{fTrainInfo->fSumTarget2 = t2; }
void AddToSumTarget(Float_t t)
{fTrainInfo->fSumTarget += t; }
void AddToSumTarget2(Float_t t2)
{fTrainInfo->fSumTarget2 += t2; }
Float_t GetSumTarget() const
{return fTrainInfo? fTrainInfo->fSumTarget : -9999;}
Float_t GetSumTarget2() const
{return fTrainInfo? fTrainInfo->fSumTarget2: -9999;}
Bool_t IsTerminal() const
 flag indicates whether this node is terminal
{ return fIsTerminalNode; }
void SetTerminal(Bool_t s = kTRUE)
Double_t GetCC() const
{return (fTrainInfo? fTrainInfo->fCC : -1.);}