ROOT  6.06/09
Reference Guide
DecisionTreeNode.cxx
Go to the documentation of this file.
1 // @(#)root/tmva $Id$
2 // Author: Andreas Hoecker, Joerg Stelzer, Helge Voss, Kai Voss, Eckhard von Toerne
3 
4 /**********************************************************************************
5  * Project: TMVA - a Root-integrated toolkit for multivariate data analysis *
6  * Package: TMVA *
7  * Class : TMVA::DecisionTreeNode *
8  * Web : http://tmva.sourceforge.net *
9  * *
10  * Description: *
11  * Implementation of a Decision Tree Node *
12  * *
13  * Authors (alphabetical): *
14  * Andreas Hoecker <Andreas.Hocker@cern.ch> - CERN, Switzerland *
15  * Helge Voss <Helge.Voss@cern.ch> - MPI-K Heidelberg, Germany *
16  * Kai Voss <Kai.Voss@cern.ch> - U. of Victoria, Canada *
17  * Eckhard von Toerne <evt@physik.uni-bonn.de> - U. of Bonn, Germany *
18  * *
19  * CopyRight (c) 2009: *
20  * CERN, Switzerland *
21  * U. of Victoria, Canada *
22  * MPI-K Heidelberg, Germany *
23  * U. of Bonn, Germany *
24  * *
25  * Redistribution and use in source and binary forms, with or without *
26  * modification, are permitted according to the terms listed in LICENSE *
27  * (http://tmva.sourceforge.net/LICENSE) *
28  **********************************************************************************/
29 
30 //_______________________________________________________________________
31 //
32 // Node for the Decision Tree
33 //
34 // The node specifies ONE variable out of the given set of selection variables
35 // that is used to split the sample which "arrives" at the node, into a left
36 // (background-enhanced) and a right (signal-enhanced) sample.
37 //_______________________________________________________________________
38 
39 #include <algorithm>
40 #include <exception>
41 #include <iomanip>
42 #include <limits>
43 
44 #include "TMVA/MsgLogger.h"
45 #include "TMVA/DecisionTreeNode.h"
46 #include "TMVA/Tools.h"
47 #include "TMVA/Event.h"
48 
49 using std::string;
50 
52 
53 bool TMVA::DecisionTreeNode::fgIsTraining = false;
54 UInt_t TMVA::DecisionTreeNode::fgTmva_Version_Code = 0;
55 ////////////////////////////////////////////////////////////////////////////////
56 /// constructor of an essentially "empty" node floating in space
57 
59  : TMVA::Node(),
60  fCutValue(0),
61  fCutType ( kTRUE ),
62  fSelector ( -1 ),
63  fResponse(-99 ),
64  fRMS(0),
65  fNodeType (-99 ),
66  fPurity (-99),
67  fIsTerminalNode( kFALSE )
68 {
69  if (DecisionTreeNode::fgIsTraining){
70  fTrainInfo = new DTNodeTrainingInfo();
71  //std::cout << "Node constructor with TrainingINFO"<<std::endl;
72  }
73  else {
74  //std::cout << "**Node constructor WITHOUT TrainingINFO"<<std::endl;
75  fTrainInfo = 0;
76  }
77 }
78 
79 ////////////////////////////////////////////////////////////////////////////////
80 /// constructor of a daughter node as a daughter of 'p'
81 
83  : TMVA::Node(p, pos),
84  fCutValue( 0 ),
85  fCutType ( kTRUE ),
86  fSelector( -1 ),
87  fResponse(-99 ),
88  fRMS(0),
89  fNodeType( -99 ),
90  fPurity (-99),
91  fIsTerminalNode( kFALSE )
92 {
95  //std::cout << "Node constructor with TrainingINFO"<<std::endl;
96  }
97  else {
98  //std::cout << "**Node constructor WITHOUT TrainingINFO"<<std::endl;
99  fTrainInfo = 0;
100  }
101 }
102 
103 ////////////////////////////////////////////////////////////////////////////////
104 /// copy constructor of a node. It will result in an explicit copy of
105 /// the node and recursively all its daughters
106 
108  DecisionTreeNode* parent)
109  : TMVA::Node(n),
110  fCutValue( n.fCutValue ),
111  fCutType ( n.fCutType ),
112  fSelector( n.fSelector ),
113  fResponse( n.fResponse ),
114  fRMS ( n.fRMS),
115  fNodeType( n.fNodeType ),
116  fPurity ( n.fPurity),
117  fIsTerminalNode( n.fIsTerminalNode )
118 {
119  this->SetParent( parent );
120  if (n.GetLeft() == 0 ) this->SetLeft(NULL);
121  else this->SetLeft( new DecisionTreeNode( *((DecisionTreeNode*)(n.GetLeft())),this));
122 
123  if (n.GetRight() == 0 ) this->SetRight(NULL);
124  else this->SetRight( new DecisionTreeNode( *((DecisionTreeNode*)(n.GetRight())),this));
125 
128  //std::cout << "Node constructor with TrainingINFO"<<std::endl;
129  }
130  else {
131  //std::cout << "**Node constructor WITHOUT TrainingINFO"<<std::endl;
132  fTrainInfo = 0;
133  }
134 }
135 
136 ////////////////////////////////////////////////////////////////////////////////
137 /// destructor
138 
140  delete fTrainInfo;
141 }
142 
143 
144 ////////////////////////////////////////////////////////////////////////////////
145 /// test event if it descends the tree at this node to the right
146 
148 {
149  Bool_t result;
150  // first check if the fisher criterium is used or ordinary cuts:
151  if (GetNFisherCoeff() == 0){
152 
153  result = (e.GetValue(this->GetSelector()) >= this->GetCutValue() );
154 
155  }else{
156 
157  Double_t fisher = this->GetFisherCoeff(fFisherCoeff.size()-1); // the offset
158  for (UInt_t ivar=0; ivar<fFisherCoeff.size()-1; ivar++)
159  fisher += this->GetFisherCoeff(ivar)*(e.GetValue(ivar));
160 
161  result = fisher > this->GetCutValue();
162  }
163 
164  if (fCutType == kTRUE) return result; //the cuts are selecting Signal ;
165  else return !result;
166 }
167 
168 ////////////////////////////////////////////////////////////////////////////////
169 /// test event if it descends the tree at this node to the left
170 
172 {
173  if (!this->GoesRight(e)) return kTRUE;
174  else return kFALSE;
175 }
176 
177 
178 ////////////////////////////////////////////////////////////////////////////////
179 /// return the S/(S+B) (purity) for the node
180 /// REM: even if nodes with purity 0.01 are very PURE background nodes, they still
181 /// get a small value of the purity.
182 
184 {
185  if ( ( this->GetNSigEvents() + this->GetNBkgEvents() ) > 0 ) {
186  fPurity = this->GetNSigEvents() / ( this->GetNSigEvents() + this->GetNBkgEvents());
187  }
188  else {
189  Log() << kINFO << "Zero events in purity calcuation , return purity=0.5" << Endl;
190  this->Print(Log());
191  fPurity = 0.5;
192  }
193  return;
194 }
195 
196 // print a node
197 ////////////////////////////////////////////////////////////////////////////////
198 ///print the node
199 
200 void TMVA::DecisionTreeNode::Print(std::ostream& os) const
201 {
202  os << "< *** " << std::endl;
203  os << " d: " << this->GetDepth()
204  << std::setprecision(6)
205  << "NCoef: " << this->GetNFisherCoeff();
206  for (Int_t i=0; i< (Int_t) this->GetNFisherCoeff(); i++) { os << "fC"<<i<<": " << this->GetFisherCoeff(i);}
207  os << " ivar: " << this->GetSelector()
208  << " cut: " << this->GetCutValue()
209  << " cType: " << this->GetCutType()
210  << " s: " << this->GetNSigEvents()
211  << " b: " << this->GetNBkgEvents()
212  << " nEv: " << this->GetNEvents()
213  << " suw: " << this->GetNSigEvents_unweighted()
214  << " buw: " << this->GetNBkgEvents_unweighted()
215  << " nEvuw: " << this->GetNEvents_unweighted()
216  << " sepI: " << this->GetSeparationIndex()
217  << " sepG: " << this->GetSeparationGain()
218  << " nType: " << this->GetNodeType()
219  << std::endl;
220 
221  os << "My address is " << long(this) << ", ";
222  if (this->GetParent() != NULL) os << " parent at addr: " << long(this->GetParent()) ;
223  if (this->GetLeft() != NULL) os << " left daughter at addr: " << long(this->GetLeft());
224  if (this->GetRight() != NULL) os << " right daughter at addr: " << long(this->GetRight()) ;
225 
226  os << " **** > " << std::endl;
227 }
228 
229 ////////////////////////////////////////////////////////////////////////////////
230 ///recursively print the node and its daughters (--> print the 'tree')
231 
232 void TMVA::DecisionTreeNode::PrintRec(std::ostream& os) const
233 {
// One line per node: depth, position, optional Fisher coefficients, the cut
// definition, event statistics, separation measures, regression response/RMS,
// node type and cost-complexity.
// NOTE(review): the token/value layout matches what ReadDataRecord parses —
// presumably this is its writing counterpart; confirm before changing format.
234  os << this->GetDepth()
235  << std::setprecision(6)
236  << " " << this->GetPos()
237  << "NCoef: " << this->GetNFisherCoeff();
238  for (Int_t i=0; i< (Int_t) this->GetNFisherCoeff(); i++) {os << "fC"<<i<<": " << this->GetFisherCoeff(i);}
239  os << " ivar: " << this->GetSelector()
240  << " cut: " << this->GetCutValue()
241  << " cType: " << this->GetCutType()
242  << " s: " << this->GetNSigEvents()
243  << " b: " << this->GetNBkgEvents()
244  << " nEv: " << this->GetNEvents()
245  << " suw: " << this->GetNSigEvents_unweighted()
246  << " buw: " << this->GetNBkgEvents_unweighted()
247  << " nEvuw: " << this->GetNEvents_unweighted()
248  << " sepI: " << this->GetSeparationIndex()
249  << " sepG: " << this->GetSeparationGain()
250  << " res: " << this->GetResponse()
251  << " rms: " << this->GetRMS()
252  << " nType: " << this->GetNodeType();
// clamp very large cost-complexity values: huge CC is replaced by a fixed
// 100000. so the printed number stays short
253  if (this->GetCC() > 10000000000000.) os << " CC: " << 100000. << std::endl;
254  else os << " CC: " << this->GetCC() << std::endl;
255 
// depth-first recursion over the daughters, left before right
256  if (this->GetLeft() != NULL) this->GetLeft() ->PrintRec(os);
257  if (this->GetRight() != NULL) this->GetRight()->PrintRec(os);
258 }
259 
260 ////////////////////////////////////////////////////////////////////////////////
261 /// Read the data block
262 
263 Bool_t TMVA::DecisionTreeNode::ReadDataRecord( std::istream& is, UInt_t tmva_Version_Code )
264 {
// Parse one node record from a .txt weight file; returns kFALSE when the
// end-of-tree sentinel (depth == -1) is read, kTRUE otherwise.
// remember the weight-file version for other readers (static member)
265  fgTmva_Version_Code=tmva_Version_Code;
266  string tmp;
267 
268  Float_t cutVal, cutType, nsig, nbkg, nEv, nsig_unweighted, nbkg_unweighted, nEv_unweighted;
269  Float_t separationIndex, separationGain, response(-99), cc(0);
270  Int_t depth, ivar, nodeType;
271  ULong_t lseq;
272  char pos;
273 
// first token is the node depth; -1 marks the end of the tree block
274  is >> depth; // 2
275  if ( depth==-1 ) { return kFALSE; }
276  // if ( depth==-1 ) { delete this; return kFALSE; }
277  is >> pos ; // r
278  this->SetDepth(depth);
279  this->SetPos(pos);
280 
// weight files written before TMVA 4.0.0 lack the response and CC fields;
// each ">> tmp" swallows the textual label in front of the value
281  if (tmva_Version_Code < TMVA_VERSION(4,0,0)) {
282  is >> tmp >> lseq
283  >> tmp >> ivar
284  >> tmp >> cutVal
285  >> tmp >> cutType
286  >> tmp >> nsig
287  >> tmp >> nbkg
288  >> tmp >> nEv
289  >> tmp >> nsig_unweighted
290  >> tmp >> nbkg_unweighted
291  >> tmp >> nEv_unweighted
292  >> tmp >> separationIndex
293  >> tmp >> separationGain
294  >> tmp >> nodeType;
295  } else {
296  is >> tmp >> lseq
297  >> tmp >> ivar
298  >> tmp >> cutVal
299  >> tmp >> cutType
300  >> tmp >> nsig
301  >> tmp >> nbkg
302  >> tmp >> nEv
303  >> tmp >> nsig_unweighted
304  >> tmp >> nbkg_unweighted
305  >> tmp >> nEv_unweighted
306  >> tmp >> separationIndex
307  >> tmp >> separationGain
308  >> tmp >> response
309  >> tmp >> nodeType
310  >> tmp >> cc;
311  }
312 
// the cut definition and node type are always restored
313  this->SetSelector((UInt_t)ivar);
314  this->SetCutValue(cutVal);
315  this->SetCutType(cutType);
316  this->SetNodeType(nodeType);
// training statistics can only be stored when the node carries training info
317  if (fTrainInfo){
318  this->SetNSigEvents(nsig);
319  this->SetNBkgEvents(nbkg);
320  this->SetNEvents(nEv);
321  this->SetNSigEvents_unweighted(nsig_unweighted);
322  this->SetNBkgEvents_unweighted(nbkg_unweighted);
323  this->SetNEvents_unweighted(nEv_unweighted);
324  this->SetSeparationIndex(separationIndex);
325  this->SetSeparationGain(separationGain);
326  this->SetPurity();
327  // this->SetResponse(response); old .txt weightfiles don't know regression yet
328  this->SetCC(cc);
329  }
330 
331  return kTRUE;
332 }
333 
334 ////////////////////////////////////////////////////////////////////////////////
335 /// clear the nodes (their S/N, Nevents etc), just keep the structure of the tree
336 
338 {
339  SetNSigEvents(0);
340  SetNBkgEvents(0);
341  SetNEvents(0);
342  SetNSigEvents_unweighted(0);
343  SetNBkgEvents_unweighted(0);
344  SetNEvents_unweighted(0);
345  SetSeparationIndex(-1);
346  SetSeparationGain(-1);
347  SetPurity();
348 
349  if (this->GetLeft() != NULL) ((DecisionTreeNode*)(this->GetLeft()))->ClearNodeAndAllDaughters();
350  if (this->GetRight() != NULL) ((DecisionTreeNode*)(this->GetRight()))->ClearNodeAndAllDaughters();
351 }
352 
353 ////////////////////////////////////////////////////////////////////////////////
354 /// temporary stored node values (number of events, etc.) that originate
355 /// not from the training but from the validation data (used in pruning)
356 
358  SetNBValidation( 0.0 );
359  SetNSValidation( 0.0 );
360  SetSumTarget( 0 );
361  SetSumTarget2( 0 );
362 
363  if(GetLeft() != NULL && GetRight() != NULL) {
364  GetLeft()->ResetValidationData();
365  GetRight()->ResetValidationData();
366  }
367 }
368 
369 ////////////////////////////////////////////////////////////////////////////////
370 /// printout of the node (can be read in with ReadDataRecord)
371 
372 void TMVA::DecisionTreeNode::PrintPrune( std::ostream& os ) const {
373  os << "----------------------" << std::endl
374  << "|~T_t| " << GetNTerminal() << std::endl
375  << "R(t): " << GetNodeR() << std::endl
376  << "R(T_t): " << GetSubTreeR() << std::endl
377  << "g(t): " << GetAlpha() << std::endl
378  << "G(t): " << GetAlphaMinSubtree() << std::endl;
379 }
380 
381 ////////////////////////////////////////////////////////////////////////////////
382 /// recursive printout of the node and its daughters
383 
384 void TMVA::DecisionTreeNode::PrintRecPrune( std::ostream& os ) const {
385  this->PrintPrune(os);
386  if(this->GetLeft() != NULL && this->GetRight() != NULL) {
387  ((DecisionTreeNode*)this->GetLeft())->PrintRecPrune(os);
388  ((DecisionTreeNode*)this->GetRight())->PrintRecPrune(os);
389  }
390 }
391 
392 ////////////////////////////////////////////////////////////////////////////////
393 
395 {
396  if (fTrainInfo) fTrainInfo->fCC = cc;
397  else Log() << kFATAL << "call to SetCC without trainingInfo" << Endl;
398 }
399 
400 ////////////////////////////////////////////////////////////////////////////////
401 /// return the minimum of variable ivar from the training sample
402 /// that pass/end up in this node
403 
405  if (fTrainInfo && ivar < fTrainInfo->fSampleMin.size()) return fTrainInfo->fSampleMin[ivar];
406  else Log() << kFATAL << "You asked for Min of the event sample in node for variable "
407  << ivar << " that is out of range" << Endl;
408  return -9999;
409 }
410 
411 ////////////////////////////////////////////////////////////////////////////////
412 /// return the maximum of variable ivar from the training sample
413 /// that pass/end up in this node
414 
416  if (fTrainInfo && ivar < fTrainInfo->fSampleMin.size()) return fTrainInfo->fSampleMax[ivar];
417  else Log() << kFATAL << "You asked for Max of the event sample in node for variable "
418  << ivar << " that is out of range" << Endl;
419  return 9999;
420 }
421 
422 ////////////////////////////////////////////////////////////////////////////////
423 /// set the minimum of variable ivar from the training sample
424 /// that pass/end up in this node
425 
427  if ( fTrainInfo) {
428  if ( ivar >= fTrainInfo->fSampleMin.size()) fTrainInfo->fSampleMin.resize(ivar+1);
429  fTrainInfo->fSampleMin[ivar]=xmin;
430  }
431 }
432 
433 ////////////////////////////////////////////////////////////////////////////////
434 /// set the maximum of variable ivar from the training sample
435 /// that pass/end up in this node
436 
438  if( ! fTrainInfo ) return;
439  if ( ivar >= fTrainInfo->fSampleMax.size() )
440  fTrainInfo->fSampleMax.resize(ivar+1);
441  fTrainInfo->fSampleMax[ivar]=xmax;
442 }
443 
444 ////////////////////////////////////////////////////////////////////////////////
445 
446 void TMVA::DecisionTreeNode::ReadAttributes(void* node, UInt_t /* tmva_Version_Code */ )
447 {
448  Float_t tempNSigEvents,tempNBkgEvents;
449 
450  Int_t nCoef;
451  if (gTools().HasAttr(node, "NCoef")){
452  gTools().ReadAttr(node, "NCoef", nCoef );
453  this->SetNFisherCoeff(nCoef);
454  Double_t tmp;
455  for (Int_t i=0; i< (Int_t) this->GetNFisherCoeff(); i++) {
456  gTools().ReadAttr(node, Form("fC%d",i), tmp );
457  this->SetFisherCoeff(i,tmp);
458  }
459  }else{
460  this->SetNFisherCoeff(0);
461  }
462  gTools().ReadAttr(node, "IVar", fSelector );
463  gTools().ReadAttr(node, "Cut", fCutValue );
464  gTools().ReadAttr(node, "cType", fCutType );
465  if (gTools().HasAttr(node,"res")) gTools().ReadAttr(node, "res", fResponse);
466  if (gTools().HasAttr(node,"rms")) gTools().ReadAttr(node, "rms", fRMS);
467  // else {
468  if( gTools().HasAttr(node, "purity") ) {
469  gTools().ReadAttr(node, "purity",fPurity );
470  } else {
471  gTools().ReadAttr(node, "nS", tempNSigEvents );
472  gTools().ReadAttr(node, "nB", tempNBkgEvents );
473  fPurity = tempNSigEvents / (tempNSigEvents + tempNBkgEvents);
474  }
475  // }
476  gTools().ReadAttr(node, "nType", fNodeType );
477 }
478 
479 
480 ////////////////////////////////////////////////////////////////////////////////
481 /// add attribute to xml
482 
484 {
485  gTools().AddAttr(node, "NCoef", GetNFisherCoeff());
486  for (Int_t i=0; i< (Int_t) this->GetNFisherCoeff(); i++)
487  gTools().AddAttr(node, Form("fC%d",i), this->GetFisherCoeff(i));
488 
489  gTools().AddAttr(node, "IVar", GetSelector());
490  gTools().AddAttr(node, "Cut", GetCutValue());
491  gTools().AddAttr(node, "cType", GetCutType());
492 
493  //UInt_t analysisType = (dynamic_cast<const TMVA::DecisionTree*>(GetParentTree()) )->GetAnalysisType();
494  // if ( analysisType == TMVA::Types:: kRegression) {
495  gTools().AddAttr(node, "res", GetResponse());
496  gTools().AddAttr(node, "rms", GetRMS());
497  //} else if ( analysisType == TMVA::Types::kClassification) {
498  gTools().AddAttr(node, "purity",GetPurity());
499  //}
500  gTools().AddAttr(node, "nType", GetNodeType());
501 }
502 
503 ////////////////////////////////////////////////////////////////////////////////
504 /// set fisher coefficients
505 
507 {
508  if ((Int_t) fFisherCoeff.size()<ivar+1) fFisherCoeff.resize(ivar+1) ;
509  fFisherCoeff[ivar]=coeff;
510 }
511 
512 ////////////////////////////////////////////////////////////////////////////////
513 /// adding attributes to tree node (well, was used in BinarySearchTree,
514 /// and somehow I guess someone programmed it such that we need this in
515 /// this tree too, although we don't..)
516 
517 void TMVA::DecisionTreeNode::AddContentToNode( std::stringstream& /*s*/ ) const
518 {
// intentionally a no-op: this interface is not needed for decision-tree
// nodes (see the comment above); state is written via attributes instead
519 }
520 
521 ////////////////////////////////////////////////////////////////////////////////
522 /// reading attributes from tree node (well, was used in BinarySearchTree,
523 /// and somehow I guess someone programmed it such that we need this in
524 /// this tree too, although we don't..)
525 
526 void TMVA::DecisionTreeNode::ReadContent( std::stringstream& /*s*/ )
527 {
// intentionally a no-op: this interface is not needed for decision-tree
// nodes (see the comment above); state is read via attributes instead
528 }
529 ////////////////////////////////////////////////////////////////////////////////
530 
532  TTHREAD_TLS_DECL_ARG(MsgLogger,logger,"DecisionTreeNode"); // static because there is a huge number of nodes...
533  return logger;
534 }
virtual void ReadAttributes(void *node, UInt_t tmva_Version_Code=TMVA_VERSION_CODE)
DTNodeTrainingInfo * fTrainInfo
flag to set node as terminal (i.e., without deleting its descendants)
float xmin
Definition: THbookFile.cxx:93
MsgLogger & Endl(MsgLogger &ml)
Definition: MsgLogger.h:162
void SetFisherCoeff(Int_t ivar, Double_t coeff)
set fisher coefficients
virtual void AddContentToNode(std::stringstream &s) const
adding attributes to tree node (well, was used in BinarySearchTree, and somehow I guess someone progr...
static MsgLogger & Log()
float Float_t
Definition: RtypesCore.h:53
virtual void ReadContent(std::stringstream &s)
reading attributes from tree node (well, was used in BinarySearchTree, and somehow I guess someone pr...
virtual DecisionTreeNode * GetRight() const
virtual void AddAttributesToNode(void *node) const
add attribute to xml
int Int_t
Definition: RtypesCore.h:41
bool Bool_t
Definition: RtypesCore.h:59
const Bool_t kFALSE
Definition: Rtypes.h:92
DecisionTreeNode()
constructor of an essentially "empty" node floating in space
virtual void SetRight(Node *r)
virtual DecisionTreeNode * GetLeft() const
void AddAttr(void *node, const char *, const T &value, Int_t precision=16)
Definition: Tools.h:308
ClassImp(TIterator) Bool_t TIterator return false
Compare two iterator objects.
Definition: TIterator.cxx:20
Float_t GetValue(UInt_t ivar) const
return value of i'th variable
Definition: Event.cxx:231
Tools & gTools()
Definition: Tools.cxx:79
virtual Bool_t GoesLeft(const Event &) const
test event if it decends the tree at this node to the left
void PrintRecPrune(std::ostream &os) const
recursive printout of the node and its daughters
void ResetValidationData()
temporary stored node values (number of events, etc.) that originate not from the training but from t...
void PrintPrune(std::ostream &os) const
printout of the node (can be read in with ReadDataRecord)
virtual void SetLeft(Node *l)
void SetSampleMin(UInt_t ivar, Float_t xmin)
set the minimum of variable ivar from the training sample that pass/end up in this node ...
unsigned int UInt_t
Definition: RtypesCore.h:42
char * Form(const char *fmt,...)
void SetPurity(void)
return the S/(S+B) (purity) for the node REM: even if nodes with purity 0.01 are very PURE background...
void ReadAttr(void *node, const char *, T &value)
Definition: Tools.h:295
float xmax
Definition: THbookFile.cxx:93
Float_t GetSampleMin(UInt_t ivar) const
return the minimum of variable ivar from the training sample that pass/end up in this node ...
virtual void SetParent(Node *p)
void SetSampleMax(UInt_t ivar, Float_t xmax)
set the maximum of variable ivar from the training sample that pass/end up in this node ...
#define TMVA_VERSION(a, b, c)
Definition: Version.h:48
#define ClassImp(name)
Definition: Rtypes.h:279
void Print(std::ostream &os, const OptionType &opt)
double Double_t
Definition: RtypesCore.h:55
unsigned long ULong_t
Definition: RtypesCore.h:51
Abstract ClassifierFactory template that handles arbitrary types.
virtual Bool_t ReadDataRecord(std::istream &is, UInt_t tmva_Version_Code=TMVA_VERSION_CODE)
Read the data block.
#define NULL
Definition: Rtypes.h:82
virtual void Print(std::ostream &os) const
print the node
void ClearNodeAndAllDaughters()
clear the nodes (their S/N, Nevents etc), just keep the structure of the tree
double result[121]
const Bool_t kTRUE
Definition: Rtypes.h:91
virtual Bool_t GoesRight(const Event &) const
test event if it decends the tree at this node to the right
const Int_t n
Definition: legend1.C:16
virtual void PrintRec(std::ostream &os) const
recursively print the node and its daughters (–> print the 'tree')
virtual ~DecisionTreeNode()
destructor
Definition: math.cpp:60
Float_t GetSampleMax(UInt_t ivar) const
return the maximum of variable ivar from the training sample that pass/end up in this node ...