ROOT  6.07/01
Reference Guide
 All Classes Namespaces Files Functions Variables Typedefs Enumerations Enumerator Properties Friends Macros Groups Pages
DecisionTreeNode.cxx
Go to the documentation of this file.
1 // @(#)root/tmva $Id$
2 // Author: Andreas Hoecker, Joerg Stelzer, Helge Voss, Kai Voss, Eckhard von Toerne
3 
4 /**********************************************************************************
5  * Project: TMVA - a Root-integrated toolkit for multivariate data analysis *
6  * Package: TMVA *
7  * Class : TMVA::DecisionTreeNode *
8  * Web : http://tmva.sourceforge.net *
9  * *
10  * Description: *
11  * Implementation of a Decision Tree Node *
12  * *
13  * Authors (alphabetical): *
14  * Andreas Hoecker <Andreas.Hocker@cern.ch> - CERN, Switzerland *
15  * Helge Voss <Helge.Voss@cern.ch> - MPI-K Heidelberg, Germany *
16  * Kai Voss <Kai.Voss@cern.ch> - U. of Victoria, Canada *
17  * Eckhard von Toerne <evt@physik.uni-bonn.de> - U. of Bonn, Germany *
18  * *
19  * CopyRight (c) 2009: *
20  * CERN, Switzerland *
21  * U. of Victoria, Canada *
22  * MPI-K Heidelberg, Germany *
23  * U. of Bonn, Germany *
24  * *
25  * Redistribution and use in source and binary forms, with or without *
26  * modification, are permitted according to the terms listed in LICENSE *
27  * (http://tmva.sourceforge.net/LICENSE) *
28  **********************************************************************************/
29 
30 //_______________________________________________________________________
31 //
32 // Node for the Decision Tree
33 //
34 // The node specifies ONE variable out of the given set of selection variables
35 // that is used to split the sample which "arrives" at the node, into a left
36 // (background-enhanced) and a right (signal-enhanced) sample.
37 //_______________________________________________________________________
38 
39 #include <algorithm>
40 #include <exception>
41 #include <iomanip>
42 #include <limits>
43 
44 #include "TMVA/Types.h"
45 #include "TMVA/MsgLogger.h"
46 #include "TMVA/DecisionTreeNode.h"
47 #include "TMVA/Tools.h"
48 #include "TMVA/Event.h"
49 
50 using std::string;
51 
53 
54 bool TMVA::DecisionTreeNode::fgIsTraining = false;
55 UInt_t TMVA::DecisionTreeNode::fgTmva_Version_Code = 0;
56 ////////////////////////////////////////////////////////////////////////////////
57 /// constructor of an essentially "empty" node floating in space
58 
60  : TMVA::Node(),
61  fCutValue(0),
62  fCutType ( kTRUE ),
63  fSelector ( -1 ),
64  fResponse(-99 ),
65  fRMS(0),
66  fNodeType (-99 ),
67  fPurity (-99),
68  fIsTerminalNode( kFALSE )
69 {
70  if (DecisionTreeNode::fgIsTraining){
71  fTrainInfo = new DTNodeTrainingInfo();
72  //std::cout << "Node constructor with TrainingINFO"<<std::endl;
73  }
74  else {
75  //std::cout << "**Node constructor WITHOUT TrainingINFO"<<std::endl;
76  fTrainInfo = 0;
77  }
78 }
79 
80 ////////////////////////////////////////////////////////////////////////////////
81 /// constructor of a daughter node as a daughter of 'p'
82 
84  : TMVA::Node(p, pos),
85  fCutValue( 0 ),
86  fCutType ( kTRUE ),
87  fSelector( -1 ),
88  fResponse(-99 ),
89  fRMS(0),
90  fNodeType( -99 ),
91  fPurity (-99),
92  fIsTerminalNode( kFALSE )
93 {
96  //std::cout << "Node constructor with TrainingINFO"<<std::endl;
97  }
98  else {
99  //std::cout << "**Node constructor WITHOUT TrainingINFO"<<std::endl;
100  fTrainInfo = 0;
101  }
102 }
103 
104 ////////////////////////////////////////////////////////////////////////////////
105 /// copy constructor of a node. It will result in an explicit copy of
106 /// the node and recursively all it's daughters
107 
109  DecisionTreeNode* parent)
110  : TMVA::Node(n),
111  fCutValue( n.fCutValue ),
112  fCutType ( n.fCutType ),
113  fSelector( n.fSelector ),
114  fResponse( n.fResponse ),
115  fRMS ( n.fRMS),
116  fNodeType( n.fNodeType ),
117  fPurity ( n.fPurity),
118  fIsTerminalNode( n.fIsTerminalNode )
119 {
120  this->SetParent( parent );
121  if (n.GetLeft() == 0 ) this->SetLeft(NULL);
122  else this->SetLeft( new DecisionTreeNode( *((DecisionTreeNode*)(n.GetLeft())),this));
123 
124  if (n.GetRight() == 0 ) this->SetRight(NULL);
125  else this->SetRight( new DecisionTreeNode( *((DecisionTreeNode*)(n.GetRight())),this));
126 
129  //std::cout << "Node constructor with TrainingINFO"<<std::endl;
130  }
131  else {
132  //std::cout << "**Node constructor WITHOUT TrainingINFO"<<std::endl;
133  fTrainInfo = 0;
134  }
135 }
136 
137 ////////////////////////////////////////////////////////////////////////////////
138 /// destructor
139 
141  delete fTrainInfo;
142 }
143 
144 
145 ////////////////////////////////////////////////////////////////////////////////
146 /// test whether an event descends the tree at this node to the right
147 
149 {
150  Bool_t result;
151  // first check if the fisher criterium is used or ordinary cuts:
152  if (GetNFisherCoeff() == 0){
153 
154  result = (e.GetValue(this->GetSelector()) >= this->GetCutValue() );
155 
156  }else{
157 
158  Double_t fisher = this->GetFisherCoeff(fFisherCoeff.size()-1); // the offset
159  for (UInt_t ivar=0; ivar<fFisherCoeff.size()-1; ivar++)
160  fisher += this->GetFisherCoeff(ivar)*(e.GetValue(ivar));
161 
162  result = fisher > this->GetCutValue();
163  }
164 
165  if (fCutType == kTRUE) return result; //the cuts are selecting Signal ;
166  else return !result;
167 }
168 
169 ////////////////////////////////////////////////////////////////////////////////
170 /// test whether an event descends the tree at this node to the left
171 
173 {
174  if (!this->GoesRight(e)) return kTRUE;
175  else return kFALSE;
176 }
177 
178 
179 ////////////////////////////////////////////////////////////////////////////////
180 /// return the S/(S+B) (purity) for the node
181 /// REM: even if nodes with purity 0.01 are very PURE background nodes, they still
182 /// get a small value of the purity.
183 
185 {
186  if ( ( this->GetNSigEvents() + this->GetNBkgEvents() ) > 0 ) {
187  fPurity = this->GetNSigEvents() / ( this->GetNSigEvents() + this->GetNBkgEvents());
188  }
189  else {
190  Log() << kINFO << "Zero events in purity calcuation , return purity=0.5" << Endl;
191  this->Print(Log());
192  fPurity = 0.5;
193  }
194  return;
195 }
196 
197 // print a node
198 ////////////////////////////////////////////////////////////////////////////////
199 ///print the node
200 
201 void TMVA::DecisionTreeNode::Print(std::ostream& os) const
202 {
203  os << "< *** " << std::endl;
204  os << " d: " << this->GetDepth()
205  << std::setprecision(6)
206  << "NCoef: " << this->GetNFisherCoeff();
207  for (Int_t i=0; i< (Int_t) this->GetNFisherCoeff(); i++) { os << "fC"<<i<<": " << this->GetFisherCoeff(i);}
208  os << " ivar: " << this->GetSelector()
209  << " cut: " << this->GetCutValue()
210  << " cType: " << this->GetCutType()
211  << " s: " << this->GetNSigEvents()
212  << " b: " << this->GetNBkgEvents()
213  << " nEv: " << this->GetNEvents()
214  << " suw: " << this->GetNSigEvents_unweighted()
215  << " buw: " << this->GetNBkgEvents_unweighted()
216  << " nEvuw: " << this->GetNEvents_unweighted()
217  << " sepI: " << this->GetSeparationIndex()
218  << " sepG: " << this->GetSeparationGain()
219  << " nType: " << this->GetNodeType()
220  << std::endl;
221 
222  os << "My address is " << long(this) << ", ";
223  if (this->GetParent() != NULL) os << " parent at addr: " << long(this->GetParent()) ;
224  if (this->GetLeft() != NULL) os << " left daughter at addr: " << long(this->GetLeft());
225  if (this->GetRight() != NULL) os << " right daughter at addr: " << long(this->GetRight()) ;
226 
227  os << " **** > " << std::endl;
228 }
229 
230 ////////////////////////////////////////////////////////////////////////////////
231 ///recursively print the node and its daughters (--> print the 'tree')
232 
233 void TMVA::DecisionTreeNode::PrintRec(std::ostream& os) const
234 {
235  os << this->GetDepth()
236  << std::setprecision(6)
237  << " " << this->GetPos()
238  << "NCoef: " << this->GetNFisherCoeff();
239  for (Int_t i=0; i< (Int_t) this->GetNFisherCoeff(); i++) {os << "fC"<<i<<": " << this->GetFisherCoeff(i);}
240  os << " ivar: " << this->GetSelector()
241  << " cut: " << this->GetCutValue()
242  << " cType: " << this->GetCutType()
243  << " s: " << this->GetNSigEvents()
244  << " b: " << this->GetNBkgEvents()
245  << " nEv: " << this->GetNEvents()
246  << " suw: " << this->GetNSigEvents_unweighted()
247  << " buw: " << this->GetNBkgEvents_unweighted()
248  << " nEvuw: " << this->GetNEvents_unweighted()
249  << " sepI: " << this->GetSeparationIndex()
250  << " sepG: " << this->GetSeparationGain()
251  << " res: " << this->GetResponse()
252  << " rms: " << this->GetRMS()
253  << " nType: " << this->GetNodeType();
254  if (this->GetCC() > 10000000000000.) os << " CC: " << 100000. << std::endl;
255  else os << " CC: " << this->GetCC() << std::endl;
256 
257  if (this->GetLeft() != NULL) this->GetLeft() ->PrintRec(os);
258  if (this->GetRight() != NULL) this->GetRight()->PrintRec(os);
259 }
260 
261 ////////////////////////////////////////////////////////////////////////////////
262 /// Read the data block
263 
264 Bool_t TMVA::DecisionTreeNode::ReadDataRecord( std::istream& is, UInt_t tmva_Version_Code )
265 {
    // Read one node record from a plain-text weight-file stream.
    // Returns kFALSE on the end-of-tree marker (depth == -1), kTRUE otherwise.
    // NOTE(review): individual stream extractions are not checked for failure —
    // presumably the caller relies on well-formed weight files; confirm before hardening.
266  fgTmva_Version_Code=tmva_Version_Code; // remember the file's version (static: shared by all nodes)
267  string tmp;
268 
269  Float_t cutVal, cutType, nsig, nbkg, nEv, nsig_unweighted, nbkg_unweighted, nEv_unweighted;
270  Float_t separationIndex, separationGain, response(-99), cc(0);
271  Int_t depth, ivar, nodeType;
272  ULong_t lseq;
273  char pos;
274 
275  is >> depth; // 2
    // a depth of -1 marks the end of the tree description
276  if ( depth==-1 ) { return kFALSE; }
277  // if ( depth==-1 ) { delete this; return kFALSE; }
278  is >> pos ; // r
279  this->SetDepth(depth);
280  this->SetPos(pos);
281 
    // weight files written before TMVA 4.0.0 contain neither the regression
    // response nor the cost-complexity (cc) field, so two layouts are supported
282  if (tmva_Version_Code < TMVA_VERSION(4,0,0)) {
283  is >> tmp >> lseq
284  >> tmp >> ivar
285  >> tmp >> cutVal
286  >> tmp >> cutType
287  >> tmp >> nsig
288  >> tmp >> nbkg
289  >> tmp >> nEv
290  >> tmp >> nsig_unweighted
291  >> tmp >> nbkg_unweighted
292  >> tmp >> nEv_unweighted
293  >> tmp >> separationIndex
294  >> tmp >> separationGain
295  >> tmp >> nodeType;
296  } else {
297  is >> tmp >> lseq
298  >> tmp >> ivar
299  >> tmp >> cutVal
300  >> tmp >> cutType
301  >> tmp >> nsig
302  >> tmp >> nbkg
303  >> tmp >> nEv
304  >> tmp >> nsig_unweighted
305  >> tmp >> nbkg_unweighted
306  >> tmp >> nEv_unweighted
307  >> tmp >> separationIndex
308  >> tmp >> separationGain
309  >> tmp >> response
310  >> tmp >> nodeType
311  >> tmp >> cc;
312  }
313 
    // the cut definition is always restored ...
314  this->SetSelector((UInt_t)ivar);
315  this->SetCutValue(cutVal);
316  this->SetCutType(cutType);
317  this->SetNodeType(nodeType);
    // ... but training statistics only if this node carries a training-info block
318  if (fTrainInfo){
319  this->SetNSigEvents(nsig);
320  this->SetNBkgEvents(nbkg);
321  this->SetNEvents(nEv);
322  this->SetNSigEvents_unweighted(nsig_unweighted);
323  this->SetNBkgEvents_unweighted(nbkg_unweighted);
324  this->SetNEvents_unweighted(nEv_unweighted);
325  this->SetSeparationIndex(separationIndex);
326  this->SetSeparationGain(separationGain);
327  this->SetPurity();
328  // this->SetResponse(response); old .txt weightfiles don't know regression yet
329  this->SetCC(cc);
330  }
331 
332  return kTRUE;
333 }
334 
335 ////////////////////////////////////////////////////////////////////////////////
336 /// clear the nodes (their S/N, Nevents etc), just keep the structure of the tree
337 
339 {
340  SetNSigEvents(0);
341  SetNBkgEvents(0);
342  SetNEvents(0);
343  SetNSigEvents_unweighted(0);
344  SetNBkgEvents_unweighted(0);
345  SetNEvents_unweighted(0);
346  SetSeparationIndex(-1);
347  SetSeparationGain(-1);
348  SetPurity();
349 
350  if (this->GetLeft() != NULL) ((DecisionTreeNode*)(this->GetLeft()))->ClearNodeAndAllDaughters();
351  if (this->GetRight() != NULL) ((DecisionTreeNode*)(this->GetRight()))->ClearNodeAndAllDaughters();
352 }
353 
354 ////////////////////////////////////////////////////////////////////////////////
356 /// temporarily stored node values (number of events, etc.) that originate
356 /// not from the training but from the validation data (used in pruning)
357 
359  SetNBValidation( 0.0 );
360  SetNSValidation( 0.0 );
361  SetSumTarget( 0 );
362  SetSumTarget2( 0 );
363 
364  if(GetLeft() != NULL && GetRight() != NULL) {
365  GetLeft()->ResetValidationData();
366  GetRight()->ResetValidationData();
367  }
368 }
369 
370 ////////////////////////////////////////////////////////////////////////////////
371 /// printout of the node (can be read in with ReadDataRecord)
372 
373 void TMVA::DecisionTreeNode::PrintPrune( std::ostream& os ) const {
374  os << "----------------------" << std::endl
375  << "|~T_t| " << GetNTerminal() << std::endl
376  << "R(t): " << GetNodeR() << std::endl
377  << "R(T_t): " << GetSubTreeR() << std::endl
378  << "g(t): " << GetAlpha() << std::endl
379  << "G(t): " << GetAlphaMinSubtree() << std::endl;
380 }
381 
382 ////////////////////////////////////////////////////////////////////////////////
383 /// recursive printout of the node and its daughters
384 
385 void TMVA::DecisionTreeNode::PrintRecPrune( std::ostream& os ) const {
386  this->PrintPrune(os);
387  if(this->GetLeft() != NULL && this->GetRight() != NULL) {
388  ((DecisionTreeNode*)this->GetLeft())->PrintRecPrune(os);
389  ((DecisionTreeNode*)this->GetRight())->PrintRecPrune(os);
390  }
391 }
392 
393 ////////////////////////////////////////////////////////////////////////////////
394 
396 {
397  if (fTrainInfo) fTrainInfo->fCC = cc;
398  else Log() << kFATAL << "call to SetCC without trainingInfo" << Endl;
399 }
400 
401 ////////////////////////////////////////////////////////////////////////////////
402 /// return the minimum of variable ivar from the training sample
403 /// that pass/end up in this node
404 
406  if (fTrainInfo && ivar < fTrainInfo->fSampleMin.size()) return fTrainInfo->fSampleMin[ivar];
407  else Log() << kFATAL << "You asked for Min of the event sample in node for variable "
408  << ivar << " that is out of range" << Endl;
409  return -9999;
410 }
411 
412 ////////////////////////////////////////////////////////////////////////////////
413 /// return the maximum of variable ivar from the training sample
414 /// that pass/end up in this node
415 
417  if (fTrainInfo && ivar < fTrainInfo->fSampleMin.size()) return fTrainInfo->fSampleMax[ivar];
418  else Log() << kFATAL << "You asked for Max of the event sample in node for variable "
419  << ivar << " that is out of range" << Endl;
420  return 9999;
421 }
422 
423 ////////////////////////////////////////////////////////////////////////////////
424 /// set the minimum of variable ivar from the training sample
425 /// that pass/end up in this node
426 
428  if ( fTrainInfo) {
429  if ( ivar >= fTrainInfo->fSampleMin.size()) fTrainInfo->fSampleMin.resize(ivar+1);
430  fTrainInfo->fSampleMin[ivar]=xmin;
431  }
432 }
433 
434 ////////////////////////////////////////////////////////////////////////////////
435 /// set the maximum of variable ivar from the training sample
436 /// that pass/end up in this node
437 
439  if( ! fTrainInfo ) return;
440  if ( ivar >= fTrainInfo->fSampleMax.size() )
441  fTrainInfo->fSampleMax.resize(ivar+1);
442  fTrainInfo->fSampleMax[ivar]=xmax;
443 }
444 
445 ////////////////////////////////////////////////////////////////////////////////
446 
447 void TMVA::DecisionTreeNode::ReadAttributes(void* node, UInt_t /* tmva_Version_Code */ )
448 {
449  Float_t tempNSigEvents,tempNBkgEvents;
450 
451  Int_t nCoef;
452  if (gTools().HasAttr(node, "NCoef")){
453  gTools().ReadAttr(node, "NCoef", nCoef );
454  this->SetNFisherCoeff(nCoef);
455  Double_t tmp;
456  for (Int_t i=0; i< (Int_t) this->GetNFisherCoeff(); i++) {
457  gTools().ReadAttr(node, Form("fC%d",i), tmp );
458  this->SetFisherCoeff(i,tmp);
459  }
460  }else{
461  this->SetNFisherCoeff(0);
462  }
463  gTools().ReadAttr(node, "IVar", fSelector );
464  gTools().ReadAttr(node, "Cut", fCutValue );
465  gTools().ReadAttr(node, "cType", fCutType );
466  if (gTools().HasAttr(node,"res")) gTools().ReadAttr(node, "res", fResponse);
467  if (gTools().HasAttr(node,"rms")) gTools().ReadAttr(node, "rms", fRMS);
468  // else {
469  if( gTools().HasAttr(node, "purity") ) {
470  gTools().ReadAttr(node, "purity",fPurity );
471  } else {
472  gTools().ReadAttr(node, "nS", tempNSigEvents );
473  gTools().ReadAttr(node, "nB", tempNBkgEvents );
474  fPurity = tempNSigEvents / (tempNSigEvents + tempNBkgEvents);
475  }
476  // }
477  gTools().ReadAttr(node, "nType", fNodeType );
478 }
479 
480 
481 ////////////////////////////////////////////////////////////////////////////////
482 /// add attribute to xml
483 
485 {
486  gTools().AddAttr(node, "NCoef", GetNFisherCoeff());
487  for (Int_t i=0; i< (Int_t) this->GetNFisherCoeff(); i++)
488  gTools().AddAttr(node, Form("fC%d",i), this->GetFisherCoeff(i));
489 
490  gTools().AddAttr(node, "IVar", GetSelector());
491  gTools().AddAttr(node, "Cut", GetCutValue());
492  gTools().AddAttr(node, "cType", GetCutType());
493 
494  //UInt_t analysisType = (dynamic_cast<const TMVA::DecisionTree*>(GetParentTree()) )->GetAnalysisType();
495  // if ( analysisType == TMVA::Types:: kRegression) {
496  gTools().AddAttr(node, "res", GetResponse());
497  gTools().AddAttr(node, "rms", GetRMS());
498  //} else if ( analysisType == TMVA::Types::kClassification) {
499  gTools().AddAttr(node, "purity",GetPurity());
500  //}
501  gTools().AddAttr(node, "nType", GetNodeType());
502 }
503 
504 ////////////////////////////////////////////////////////////////////////////////
505 /// set fisher coefficients
506 
508 {
509  if ((Int_t) fFisherCoeff.size()<ivar+1) fFisherCoeff.resize(ivar+1) ;
510  fFisherCoeff[ivar]=coeff;
511 }
512 
513 ////////////////////////////////////////////////////////////////////////////////
514 /// adding attributes to tree node (well, was used in BinarySearchTree,
515 /// and somehow I guess someone programmed it such that we need this in
516 /// this tree too, although we don't..)
517 
518 void TMVA::DecisionTreeNode::AddContentToNode( std::stringstream& /*s*/ ) const
519 {
    // intentionally a no-op: required by the base-class interface (used by
    // BinarySearchTree) but decision-tree nodes have nothing to add here
520 }
521 
522 ////////////////////////////////////////////////////////////////////////////////
523 /// reading attributes from tree node (well, was used in BinarySearchTree,
524 /// and somehow I guess someone programmed it such that we need this in
525 /// this tree too, although we don't..)
526 
527 void TMVA::DecisionTreeNode::ReadContent( std::stringstream& /*s*/ )
528 {
    // intentionally a no-op: kept only to satisfy the base-class interface
    // (counterpart of the equally empty AddContentToNode)
529 }
530 ////////////////////////////////////////////////////////////////////////////////
531 
533  TTHREAD_TLS_DECL_ARG(MsgLogger,logger,"DecisionTreeNode"); // static because there is a huge number of nodes...
534  return logger;
535 }
virtual void ReadAttributes(void *node, UInt_t tmva_Version_Code=TMVA_VERSION_CODE)
DTNodeTrainingInfo * fTrainInfo
flag to set node as terminal (i.e., without deleting its descendants)
float xmin
Definition: THbookFile.cxx:93
MsgLogger & Endl(MsgLogger &ml)
Definition: MsgLogger.h:162
void SetFisherCoeff(Int_t ivar, Double_t coeff)
set fisher coefficients
virtual void AddContentToNode(std::stringstream &s) const
adding attributes to tree node (well, was used in BinarySearchTree, and somehow I guess someone progr...
static MsgLogger & Log()
float Float_t
Definition: RtypesCore.h:53
virtual void ReadContent(std::stringstream &s)
reading attributes from tree node (well, was used in BinarySearchTree, and somehow I guess someone pr...
virtual DecisionTreeNode * GetRight() const
virtual void AddAttributesToNode(void *node) const
add attribute to xml
int Int_t
Definition: RtypesCore.h:41
bool Bool_t
Definition: RtypesCore.h:59
const Bool_t kFALSE
Definition: Rtypes.h:92
DecisionTreeNode()
constructor of an essentially "empty" node floating in space
virtual void SetRight(Node *r)
virtual DecisionTreeNode * GetLeft() const
void AddAttr(void *node, const char *, const T &value, Int_t precision=16)
Definition: Tools.h:308
ClassImp(TIterator) Bool_t TIterator return false
Compare two iterator objects.
Definition: TIterator.cxx:21
Tools & gTools()
Definition: Tools.cxx:79
virtual Bool_t GoesLeft(const Event &) const
test event if it decends the tree at this node to the left
void PrintRecPrune(std::ostream &os) const
recursive printout of the node and its daughters
void ResetValidationData()
temporary stored node values (number of events, etc.) that originate not from the training but from t...
void PrintPrune(std::ostream &os) const
printout of the node (can be read in with ReadDataRecord)
virtual void SetLeft(Node *l)
void SetSampleMin(UInt_t ivar, Float_t xmin)
set the minimum of variable ivar from the training sample that pass/end up in this node ...
unsigned int UInt_t
Definition: RtypesCore.h:42
char * Form(const char *fmt,...)
void SetPurity(void)
return the S/(S+B) (purity) for the node REM: even if nodes with purity 0.01 are very PURE background...
void ReadAttr(void *node, const char *, T &value)
Definition: Tools.h:295
float xmax
Definition: THbookFile.cxx:93
Float_t GetSampleMin(UInt_t ivar) const
return the minimum of variable ivar from the training sample that pass/end up in this node ...
virtual void SetParent(Node *p)
void SetSampleMax(UInt_t ivar, Float_t xmax)
set the maximum of variable ivar from the training sample that pass/end up in this node ...
#define TMVA_VERSION(a, b, c)
Definition: Version.h:48
#define ClassImp(name)
Definition: Rtypes.h:279
void Print(std::ostream &os, const OptionType &opt)
double Double_t
Definition: RtypesCore.h:55
unsigned long ULong_t
Definition: RtypesCore.h:51
Float_t GetValue(UInt_t ivar) const
return value of i'th variable
Definition: Event.cxx:231
virtual Bool_t ReadDataRecord(std::istream &is, UInt_t tmva_Version_Code=TMVA_VERSION_CODE)
Read the data block.
#define NULL
Definition: Rtypes.h:82
virtual void Print(std::ostream &os) const
print the node
void ClearNodeAndAllDaughters()
clear the nodes (their S/N, Nevents etc), just keep the structure of the tree
double result[121]
const Bool_t kTRUE
Definition: Rtypes.h:91
virtual Bool_t GoesRight(const Event &) const
test event if it decends the tree at this node to the right
const Int_t n
Definition: legend1.C:16
virtual void PrintRec(std::ostream &os) const
recursively print the node and its daughters (–> print the 'tree')
virtual ~DecisionTreeNode()
destructor
Definition: math.cpp:60
Float_t GetSampleMax(UInt_t ivar) const
return the maximum of variable ivar from the training sample that pass/end up in this node ...