ROOT 6.07/09 Reference Guide
DecisionTreeNode.cxx
// @(#)root/tmva $Id$
// Author: Andreas Hoecker, Joerg Stelzer, Helge Voss, Kai Voss, Eckhard von Toerne

/**********************************************************************************
 * Project: TMVA - a Root-integrated toolkit for multivariate data analysis       *
 * Package: TMVA                                                                   *
 * Class  : TMVA::DecisionTreeNode                                                 *
 * Web    : http://tmva.sourceforge.net                                            *
 *                                                                                  *
 * Description:                                                                    *
 *      Implementation of a Decision Tree Node                                     *
 *                                                                                  *
 * Authors (alphabetical):                                                         *
 *      Andreas Hoecker <Andreas.Hocker@cern.ch>    - CERN, Switzerland            *
 *      Helge Voss      <Helge.Voss@cern.ch>        - MPI-K Heidelberg, Germany    *
 *      Kai Voss        <Kai.Voss@cern.ch>          - U. of Victoria, Canada       *
 *      Eckhard von Toerne <evt@physik.uni-bonn.de> - U. of Bonn, Germany          *
 *                                                                                  *
 * Copyright (c) 2009:                                                             *
 *      CERN, Switzerland                                                          *
 *      U. of Victoria, Canada                                                     *
 *      MPI-K Heidelberg, Germany                                                  *
 *      U. of Bonn, Germany                                                        *
 *                                                                                  *
 * Redistribution and use in source and binary forms, with or without              *
 * modification, are permitted according to the terms listed in LICENSE            *
 * (http://tmva.sourceforge.net/LICENSE)                                           *
 **********************************************************************************/

//_______________________________________________________________________
//
// Node for the Decision Tree
//
// The node specifies ONE variable out of the given set of selection variables
// that is used to split the sample that "arrives" at the node into a left
// (background-enhanced) and a right (signal-enhanced) subsample.
//_______________________________________________________________________

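// Example of the split rule implemented below: an event e descends to the right
// daughter if e.GetValue(GetSelector()) >= GetCutValue(), or, when Fisher
// coefficients are set, if the linear Fisher combination of its input variables
// exceeds the cut; fCutType flips this convention, and GoesLeft() is simply the
// negation of GoesRight(). See GoesRight()/GoesLeft() further down in this file.
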
#include "TMVA/DecisionTreeNode.h"

#include "TMVA/Types.h"
#include "TMVA/MsgLogger.h"
#include "TMVA/Tools.h"
#include "TMVA/Event.h"

#include "ThreadLocalStorage.h"
#include "TString.h"

#include <algorithm>
#include <exception>
#include <iomanip>
#include <limits>
#include <sstream>

using std::string;

ClassImp(TMVA::DecisionTreeNode);

bool TMVA::DecisionTreeNode::fgIsTraining = false;
UInt_t TMVA::DecisionTreeNode::fgTmva_Version_Code = 0;
////////////////////////////////////////////////////////////////////////////////
/// constructor of an essentially "empty" node floating in space

TMVA::DecisionTreeNode::DecisionTreeNode()
   : TMVA::Node(),
     fCutValue(0),
     fCutType ( kTRUE ),
     fSelector ( -1 ),
     fResponse(-99 ),
     fRMS(0),
     fNodeType (-99 ),
     fPurity (-99),
     fIsTerminalNode( kFALSE )
{
   if (DecisionTreeNode::fgIsTraining){
      fTrainInfo = new DTNodeTrainingInfo();
      //std::cout << "Node constructor with TrainingINFO"<<std::endl;
   }
   else {
      //std::cout << "**Node constructor WITHOUT TrainingINFO"<<std::endl;
      fTrainInfo = 0;
   }
}

////////////////////////////////////////////////////////////////////////////////
/// constructor of a daughter node as a daughter of 'p'

TMVA::DecisionTreeNode::DecisionTreeNode(TMVA::Node* p, char pos)
   : TMVA::Node(p, pos),
     fCutValue( 0 ),
     fCutType ( kTRUE ),
     fSelector( -1 ),
     fResponse(-99 ),
     fRMS(0),
     fNodeType( -99 ),
     fPurity (-99),
     fIsTerminalNode( kFALSE )
{
   if (DecisionTreeNode::fgIsTraining){
      fTrainInfo = new DTNodeTrainingInfo();
      //std::cout << "Node constructor with TrainingINFO"<<std::endl;
   }
   else {
      //std::cout << "**Node constructor WITHOUT TrainingINFO"<<std::endl;
      fTrainInfo = 0;
   }
}

////////////////////////////////////////////////////////////////////////////////
/// copy constructor of a node. It will result in an explicit copy of
/// the node and recursively of all its daughters

TMVA::DecisionTreeNode::DecisionTreeNode(const TMVA::DecisionTreeNode &n,
                                         DecisionTreeNode* parent)
   : TMVA::Node(n),
     fCutValue( n.fCutValue ),
     fCutType ( n.fCutType ),
     fSelector( n.fSelector ),
     fResponse( n.fResponse ),
     fRMS ( n.fRMS),
     fNodeType( n.fNodeType ),
     fPurity ( n.fPurity),
     fIsTerminalNode( n.fIsTerminalNode )
{
   this->SetParent( parent );
   if (n.GetLeft() == 0 ) this->SetLeft(NULL);
   else this->SetLeft( new DecisionTreeNode( *((DecisionTreeNode*)(n.GetLeft())),this));

   if (n.GetRight() == 0 ) this->SetRight(NULL);
   else this->SetRight( new DecisionTreeNode( *((DecisionTreeNode*)(n.GetRight())),this));

   if (DecisionTreeNode::fgIsTraining){
      fTrainInfo = new DTNodeTrainingInfo(*(n.fTrainInfo));
      //std::cout << "Node constructor with TrainingINFO"<<std::endl;
   }
   else {
      //std::cout << "**Node constructor WITHOUT TrainingINFO"<<std::endl;
      fTrainInfo = 0;
   }
}

////////////////////////////////////////////////////////////////////////////////
/// destructor

TMVA::DecisionTreeNode::~DecisionTreeNode(){
   delete fTrainInfo;
}


////////////////////////////////////////////////////////////////////////////////
/// test event if it descends the tree at this node to the right

Bool_t TMVA::DecisionTreeNode::GoesRight(const TMVA::Event & e) const
{
   Bool_t result;
   // first check if the Fisher criterion is used or ordinary cuts:
   if (GetNFisherCoeff() == 0){

      result = (e.GetValue(this->GetSelector()) >= this->GetCutValue() );

   }else{

      Double_t fisher = this->GetFisherCoeff(fFisherCoeff.size()-1); // the offset
      for (UInt_t ivar=0; ivar<fFisherCoeff.size()-1; ivar++)
         fisher += this->GetFisherCoeff(ivar)*(e.GetValue(ivar));

      result = fisher > this->GetCutValue();
   }

   if (fCutType == kTRUE) return result; //the cuts are selecting Signal ;
   else return !result;
}
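
// Note on the Fisher branch above: the decision value is the linear combination
//    fisher = c_N + sum_{i<N} c_i * x_i
// where c_i = GetFisherCoeff(i), the last stored coefficient c_N serves as the
// offset, and x_i are the event's input variables; the event tends to the right
// daughter when this value exceeds the cut (modulo the fCutType convention).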

////////////////////////////////////////////////////////////////////////////////
/// test event if it descends the tree at this node to the left

Bool_t TMVA::DecisionTreeNode::GoesLeft(const TMVA::Event & e) const
{
   if (!this->GoesRight(e)) return kTRUE;
   else return kFALSE;
}


////////////////////////////////////////////////////////////////////////////////
/// return the S/(S+B) (purity) for the node
/// REM: even if nodes with purity 0.01 are very PURE background nodes, they still
/// get a small value of the purity.

void TMVA::DecisionTreeNode::SetPurity( void )
{
   if ( ( this->GetNSigEvents() + this->GetNBkgEvents() ) > 0 ) {
      fPurity = this->GetNSigEvents() / ( this->GetNSigEvents() + this->GetNBkgEvents());
   }
   else {
      Log() << kINFO << "Zero events in purity calculation, return purity=0.5" << Endl;
      std::ostringstream oss;
      this->Print(oss);
      Log() << oss.str();
      fPurity = 0.5;
   }
   return;
}

// print a node
////////////////////////////////////////////////////////////////////////////////
/// print the node

void TMVA::DecisionTreeNode::Print(std::ostream& os) const
{
   os << "< *** " << std::endl;
   os << " d: " << this->GetDepth()
      << std::setprecision(6)
      << "NCoef: " << this->GetNFisherCoeff();
   for (Int_t i=0; i< (Int_t) this->GetNFisherCoeff(); i++) { os << "fC"<<i<<": " << this->GetFisherCoeff(i);}
   os << " ivar: " << this->GetSelector()
      << " cut: " << this->GetCutValue()
      << " cType: " << this->GetCutType()
      << " s: " << this->GetNSigEvents()
      << " b: " << this->GetNBkgEvents()
      << " nEv: " << this->GetNEvents()
      << " suw: " << this->GetNSigEvents_unweighted()
      << " buw: " << this->GetNBkgEvents_unweighted()
      << " nEvuw: " << this->GetNEvents_unweighted()
      << " sepI: " << this->GetSeparationIndex()
      << " sepG: " << this->GetSeparationGain()
      << " nType: " << this->GetNodeType()
      << std::endl;

   os << "My address is " << long(this) << ", ";
   if (this->GetParent() != NULL) os << " parent at addr: " << long(this->GetParent()) ;
   if (this->GetLeft() != NULL) os << " left daughter at addr: " << long(this->GetLeft());
   if (this->GetRight() != NULL) os << " right daughter at addr: " << long(this->GetRight()) ;

   os << " **** > " << std::endl;
}

////////////////////////////////////////////////////////////////////////////////
/// recursively print the node and its daughters (--> print the 'tree')

void TMVA::DecisionTreeNode::PrintRec(std::ostream& os) const
{
   os << this->GetDepth()
      << std::setprecision(6)
      << " " << this->GetPos()
      << "NCoef: " << this->GetNFisherCoeff();
   for (Int_t i=0; i< (Int_t) this->GetNFisherCoeff(); i++) {os << "fC"<<i<<": " << this->GetFisherCoeff(i);}
   os << " ivar: " << this->GetSelector()
      << " cut: " << this->GetCutValue()
      << " cType: " << this->GetCutType()
      << " s: " << this->GetNSigEvents()
      << " b: " << this->GetNBkgEvents()
      << " nEv: " << this->GetNEvents()
      << " suw: " << this->GetNSigEvents_unweighted()
      << " buw: " << this->GetNBkgEvents_unweighted()
      << " nEvuw: " << this->GetNEvents_unweighted()
      << " sepI: " << this->GetSeparationIndex()
      << " sepG: " << this->GetSeparationGain()
      << " res: " << this->GetResponse()
      << " rms: " << this->GetRMS()
      << " nType: " << this->GetNodeType();
   if (this->GetCC() > 10000000000000.) os << " CC: " << 100000. << std::endl;
   else os << " CC: " << this->GetCC() << std::endl;

   if (this->GetLeft() != NULL) this->GetLeft() ->PrintRec(os);
   if (this->GetRight() != NULL) this->GetRight()->PrintRec(os);
}

////////////////////////////////////////////////////////////////////////////////
/// Read the data block

Bool_t TMVA::DecisionTreeNode::ReadDataRecord( std::istream& is, UInt_t tmva_Version_Code )
{
   fgTmva_Version_Code=tmva_Version_Code;
   string tmp;

   Float_t cutVal, cutType, nsig, nbkg, nEv, nsig_unweighted, nbkg_unweighted, nEv_unweighted;
   Float_t separationIndex, separationGain, response(-99), cc(0);
   Int_t depth, ivar, nodeType;
   ULong_t lseq;
   char pos;

   is >> depth;                  // 2
   if ( depth==-1 ) { return kFALSE; }
   // if ( depth==-1 ) { delete this; return kFALSE; }
   is >> pos ;                   // r
   this->SetDepth(depth);
   this->SetPos(pos);

   if (tmva_Version_Code < TMVA_VERSION(4,0,0)) {
      is >> tmp >> lseq
         >> tmp >> ivar
         >> tmp >> cutVal
         >> tmp >> cutType
         >> tmp >> nsig
         >> tmp >> nbkg
         >> tmp >> nEv
         >> tmp >> nsig_unweighted
         >> tmp >> nbkg_unweighted
         >> tmp >> nEv_unweighted
         >> tmp >> separationIndex
         >> tmp >> separationGain
         >> tmp >> nodeType;
   } else {
      is >> tmp >> lseq
         >> tmp >> ivar
         >> tmp >> cutVal
         >> tmp >> cutType
         >> tmp >> nsig
         >> tmp >> nbkg
         >> tmp >> nEv
         >> tmp >> nsig_unweighted
         >> tmp >> nbkg_unweighted
         >> tmp >> nEv_unweighted
         >> tmp >> separationIndex
         >> tmp >> separationGain
         >> tmp >> response
         >> tmp >> nodeType
         >> tmp >> cc;
   }

   this->SetSelector((UInt_t)ivar);
   this->SetCutValue(cutVal);
   this->SetCutType(cutType);
   this->SetNodeType(nodeType);
   if (fTrainInfo){
      this->SetNSigEvents(nsig);
      this->SetNBkgEvents(nbkg);
      this->SetNEvents(nEv);
      this->SetNSigEvents_unweighted(nsig_unweighted);
      this->SetNBkgEvents_unweighted(nbkg_unweighted);
      this->SetNEvents_unweighted(nEv_unweighted);
      this->SetSeparationIndex(separationIndex);
      this->SetSeparationGain(separationGain);
      this->SetPurity();
      // this->SetResponse(response); old .txt weightfiles don't know regression yet
      this->SetCC(cc);
   }

   return kTRUE;
}
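
// Note: weight files written before TMVA 4.0.0 store neither the regression
// response nor the CC value for a node, which is why the two branches above
// read a different set of fields.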

////////////////////////////////////////////////////////////////////////////////
/// clear the nodes (their S/N, Nevents etc), just keep the structure of the tree

void TMVA::DecisionTreeNode::ClearNodeAndAllDaughters()
{
   SetNSigEvents(0);
   SetNBkgEvents(0);
   SetNEvents(0);
   SetNSigEvents_unweighted(0);
   SetNBkgEvents_unweighted(0);
   SetNEvents_unweighted(0);
   SetSeparationIndex(-1);
   SetSeparationGain(-1);
   SetPurity();

   if (this->GetLeft() != NULL) ((DecisionTreeNode*)(this->GetLeft()))->ClearNodeAndAllDaughters();
   if (this->GetRight() != NULL) ((DecisionTreeNode*)(this->GetRight()))->ClearNodeAndAllDaughters();
}

////////////////////////////////////////////////////////////////////////////////
/// reset the temporarily stored node values (number of events, etc.) that
/// originate not from the training but from the validation data (used in pruning)

void TMVA::DecisionTreeNode::ResetValidationData( ) {
   SetNBValidation( 0.0 );
   SetNSValidation( 0.0 );
   SetSumTarget( 0 );
   SetSumTarget2( 0 );

   if(GetLeft() != NULL && GetRight() != NULL) {
      GetLeft()->ResetValidationData();
      GetRight()->ResetValidationData();
   }
}

////////////////////////////////////////////////////////////////////////////////
/// printout of the node (can be read in with ReadDataRecord)

void TMVA::DecisionTreeNode::PrintPrune( std::ostream& os ) const {
   os << "----------------------" << std::endl
      << "|~T_t| " << GetNTerminal() << std::endl
      << "R(t): " << GetNodeR() << std::endl
      << "R(T_t): " << GetSubTreeR() << std::endl
      << "g(t): " << GetAlpha() << std::endl
      << "G(t): " << GetAlphaMinSubtree() << std::endl;
}

////////////////////////////////////////////////////////////////////////////////
/// recursive printout of the node and its daughters

void TMVA::DecisionTreeNode::PrintRecPrune( std::ostream& os ) const {
   this->PrintPrune(os);
   if(this->GetLeft() != NULL && this->GetRight() != NULL) {
      ((DecisionTreeNode*)this->GetLeft())->PrintRecPrune(os);
      ((DecisionTreeNode*)this->GetRight())->PrintRecPrune(os);
   }
}

////////////////////////////////////////////////////////////////////////////////

void TMVA::DecisionTreeNode::SetCC(Double_t cc)
{
   if (fTrainInfo) fTrainInfo->fCC = cc;
   else Log() << kFATAL << "call to SetCC without trainingInfo" << Endl;
}

////////////////////////////////////////////////////////////////////////////////
/// return the minimum of variable ivar from the training sample
/// that pass/end up in this node

Float_t TMVA::DecisionTreeNode::GetSampleMin(UInt_t ivar) const {
   if (fTrainInfo && ivar < fTrainInfo->fSampleMin.size()) return fTrainInfo->fSampleMin[ivar];
   else Log() << kFATAL << "You asked for Min of the event sample in node for variable "
              << ivar << " that is out of range" << Endl;
   return -9999;
}

////////////////////////////////////////////////////////////////////////////////
/// return the maximum of variable ivar from the training sample
/// that pass/end up in this node

Float_t TMVA::DecisionTreeNode::GetSampleMax(UInt_t ivar) const {
   if (fTrainInfo && ivar < fTrainInfo->fSampleMax.size()) return fTrainInfo->fSampleMax[ivar];
   else Log() << kFATAL << "You asked for Max of the event sample in node for variable "
              << ivar << " that is out of range" << Endl;
   return 9999;
}

////////////////////////////////////////////////////////////////////////////////
/// set the minimum of variable ivar from the training sample
/// that pass/end up in this node

void TMVA::DecisionTreeNode::SetSampleMin(UInt_t ivar, Float_t xmin){
   if ( fTrainInfo) {
      if ( ivar >= fTrainInfo->fSampleMin.size()) fTrainInfo->fSampleMin.resize(ivar+1);
      fTrainInfo->fSampleMin[ivar]=xmin;
   }
}

////////////////////////////////////////////////////////////////////////////////
/// set the maximum of variable ivar from the training sample
/// that pass/end up in this node

void TMVA::DecisionTreeNode::SetSampleMax(UInt_t ivar, Float_t xmax){
   if( ! fTrainInfo ) return;
   if ( ivar >= fTrainInfo->fSampleMax.size() )
      fTrainInfo->fSampleMax.resize(ivar+1);
   fTrainInfo->fSampleMax[ivar]=xmax;
}

////////////////////////////////////////////////////////////////////////////////

void TMVA::DecisionTreeNode::ReadAttributes(void* node, UInt_t /* tmva_Version_Code */ )
{
   Float_t tempNSigEvents,tempNBkgEvents;

   Int_t nCoef;
   if (gTools().HasAttr(node, "NCoef")){
      gTools().ReadAttr(node, "NCoef", nCoef );
      this->SetNFisherCoeff(nCoef);
      Double_t tmp;
      for (Int_t i=0; i< (Int_t) this->GetNFisherCoeff(); i++) {
         gTools().ReadAttr(node, Form("fC%d",i), tmp );
         this->SetFisherCoeff(i,tmp);
      }
   }else{
      this->SetNFisherCoeff(0);
   }
   gTools().ReadAttr(node, "IVar", fSelector );
   gTools().ReadAttr(node, "Cut", fCutValue );
   gTools().ReadAttr(node, "cType", fCutType );
   if (gTools().HasAttr(node,"res")) gTools().ReadAttr(node, "res", fResponse);
   if (gTools().HasAttr(node,"rms")) gTools().ReadAttr(node, "rms", fRMS);
   // else {
   if( gTools().HasAttr(node, "purity") ) {
      gTools().ReadAttr(node, "purity",fPurity );
   } else {
      gTools().ReadAttr(node, "nS", tempNSigEvents );
      gTools().ReadAttr(node, "nB", tempNBkgEvents );
      fPurity = tempNSigEvents / (tempNSigEvents + tempNBkgEvents);
   }
   // }
   gTools().ReadAttr(node, "nType", fNodeType );
}


////////////////////////////////////////////////////////////////////////////////
/// add attributes to xml

void TMVA::DecisionTreeNode::AddAttributesToNode(void* node) const
{
   gTools().AddAttr(node, "NCoef", GetNFisherCoeff());
   for (Int_t i=0; i< (Int_t) this->GetNFisherCoeff(); i++)
      gTools().AddAttr(node, Form("fC%d",i), this->GetFisherCoeff(i));

   gTools().AddAttr(node, "IVar", GetSelector());
   gTools().AddAttr(node, "Cut", GetCutValue());
   gTools().AddAttr(node, "cType", GetCutType());

   //UInt_t analysisType = (dynamic_cast<const TMVA::DecisionTree*>(GetParentTree()) )->GetAnalysisType();
   // if ( analysisType == TMVA::Types:: kRegression) {
   gTools().AddAttr(node, "res", GetResponse());
   gTools().AddAttr(node, "rms", GetRMS());
   //} else if ( analysisType == TMVA::Types::kClassification) {
   gTools().AddAttr(node, "purity",GetPurity());
   //}
   gTools().AddAttr(node, "nType", GetNodeType());
}

////////////////////////////////////////////////////////////////////////////////
/// set fisher coefficients

void TMVA::DecisionTreeNode::SetFisherCoeff(Int_t ivar, Double_t coeff)
{
   if ((Int_t) fFisherCoeff.size()<ivar+1) fFisherCoeff.resize(ivar+1) ;
   fFisherCoeff[ivar]=coeff;
}
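
// Note: GoesRight() treats the last entry of fFisherCoeff as the constant offset
// of the Fisher discriminant, so the offset is expected to be stored at the
// highest index, after all per-variable coefficients have been set.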

////////////////////////////////////////////////////////////////////////////////
/// adding attributes to tree node (was used in BinarySearchTree; the common
/// node interface requires it here as well, although this tree does not use it)

void TMVA::DecisionTreeNode::AddContentToNode( std::stringstream& /*s*/ ) const
{
}

////////////////////////////////////////////////////////////////////////////////
/// reading attributes from tree node (was used in BinarySearchTree; the common
/// node interface requires it here as well, although this tree does not use it)

void TMVA::DecisionTreeNode::ReadContent( std::stringstream& /*s*/ )
{
}
////////////////////////////////////////////////////////////////////////////////

TMVA::MsgLogger& TMVA::DecisionTreeNode::Log() {
   TTHREAD_TLS_DECL_ARG(MsgLogger,logger,"DecisionTreeNode"); // static because there is a huge number of nodes...
   return logger;
}