Logo ROOT  
Reference Guide
 
Loading...
Searching...
No Matches
DecisionTreeNode.cxx
Go to the documentation of this file.
1// @(#)root/tmva $Id$
2// Author: Andreas Hoecker, Joerg Stelzer, Helge Voss, Kai Voss, Eckhard von Toerne
3
4/**********************************************************************************
5 * Project: TMVA - a Root-integrated toolkit for multivariate data analysis *
6 * Package: TMVA *
7 * Class : TMVA::DecisionTreeNode *
8 * *
9 * *
10 * Description: *
11 * Implementation of a Decision Tree Node *
12 * *
13 * Authors (alphabetical): *
14 * Andreas Hoecker <Andreas.Hocker@cern.ch> - CERN, Switzerland *
15 * Helge Voss <Helge.Voss@cern.ch> - MPI-K Heidelberg, Germany *
16 * Kai Voss <Kai.Voss@cern.ch> - U. of Victoria, Canada *
17 * Eckhard von Toerne <evt@physik.uni-bonn.de> - U. of Bonn, Germany *
18 * *
19 * Copyright (c) 2009: *
20 * CERN, Switzerland *
21 * U. of Victoria, Canada *
22 * MPI-K Heidelberg, Germany *
23 * U. of Bonn, Germany *
24 * *
25 * Redistribution and use in source and binary forms, with or without *
26 * modification, are permitted according to the terms listed in LICENSE *
27 * (see tmva/doc/LICENSE) *
28 **********************************************************************************/
29
30/*! \class TMVA::DecisionTreeNode
31\ingroup TMVA
32
33Node for the Decision Tree.
34
35The node specifies ONE variable out of the given set of selection variables
36that is used to split the sample which "arrives" at the node, into a left
37(background-enhanced) and a right (signal-enhanced) sample.
38
39*/
40
42
43#include "TMVA/Types.h"
44#include "TMVA/MsgLogger.h"
45#include "TMVA/Tools.h"
46#include "TMVA/Event.h"
47
48#include "ThreadLocalStorage.h"
49#include "TString.h"
50
51#include <algorithm>
52#include <exception>
53#include <iomanip>
54#include <limits>
55#include <sstream>
56
57using std::string;
58
59
62
63////////////////////////////////////////////////////////////////////////////////
64/// constructor of an essentially "empty" node floating in space
65
   : TMVA::Node(),
     fCutValue(0),
     fCutType ( kTRUE ),        // kTRUE: the cut selects signal (see GoesRight)
     fSelector ( -1 ),          // index of the cut variable; -1 = none chosen yet
     fResponse(-99 ),           // sentinel: no regression response yet
     fRMS(0),
     fNodeType (-99 ),          // sentinel: node not yet typed as signal/background leaf
     fPurity (-99),             // sentinel: purity not yet computed (see SetPurity)
     fIsTerminalNode( kFALSE )
{
   // NOTE(review): the branch that allocates fTrainInfo when the static
   // fgIsTraining flag is set is on source lines not visible in this extract.
      //std::cout << "Node constructor with TrainingINFO"<<std::endl;
   }
   else {
      //std::cout << "**Node constructor WITHOUT TrainingINFO"<<std::endl;
      fTrainInfo = nullptr;   // outside the training phase no per-node statistics are kept
   }
}
86
87////////////////////////////////////////////////////////////////////////////////
88/// constructor of a daughter node as a daughter of 'p'
89
   : TMVA::Node(p, pos),       // attach to parent p at position pos
     fCutValue( 0 ),
     fCutType ( kTRUE ),        // kTRUE: the cut selects signal (see GoesRight)
     fSelector( -1 ),           // index of the cut variable; -1 = none chosen yet
     fResponse(-99 ),           // sentinel: no regression response yet
     fRMS(0),
     fNodeType( -99 ),          // sentinel: node not yet typed as signal/background leaf
     fPurity (-99),             // sentinel: purity not yet computed (see SetPurity)
     fIsTerminalNode( kFALSE )
{
   // NOTE(review): the branch that allocates fTrainInfo when the static
   // fgIsTraining flag is set is on source lines not visible in this extract.
      //std::cout << "Node constructor with TrainingINFO"<<std::endl;
   }
   else {
      //std::cout << "**Node constructor WITHOUT TrainingINFO"<<std::endl;
      fTrainInfo = nullptr;   // outside the training phase no per-node statistics are kept
   }
}
110
111////////////////////////////////////////////////////////////////////////////////
112/// copy constructor of a node. It will result in an explicit copy of
113/// the node and recursively all it's daughters
114
                                        DecisionTreeNode* parent)
   : TMVA::Node(n),
     fCutValue( n.fCutValue ),
     fCutType ( n.fCutType ),
     fSelector( n.fSelector ),
     fResponse( n.fResponse ),
     fRMS ( n.fRMS),
     fNodeType( n.fNodeType ),
     fPurity ( n.fPurity),
     fIsTerminalNode( n.fIsTerminalNode )
{
   // The copy is attached to `parent` rather than to n's parent, which makes
   // it possible to copy a subtree into a different place/tree.
   this->SetParent( parent );
   // Deep-copy both daughters recursively; each copy is wired back to `this`.
   if (n.GetLeft() == 0 ) this->SetLeft(NULL);
   else this->SetLeft( new DecisionTreeNode( *((DecisionTreeNode*)(n.GetLeft())),this));

   if (n.GetRight() == 0 ) this->SetRight(NULL);
   else this->SetRight( new DecisionTreeNode( *((DecisionTreeNode*)(n.GetRight())),this));

   // NOTE(review): the enclosing `if (n.fTrainInfo)` guard is on a source line
   // not visible in this extract.
      fTrainInfo = new DTNodeTrainingInfo(*(n.fTrainInfo));   // deep copy of the training statistics
      //std::cout << "Node constructor with TrainingINFO"<<std::endl;
   }
   else {
      //std::cout << "**Node constructor WITHOUT TrainingINFO"<<std::endl;
      fTrainInfo = nullptr;
   }
}
143
144////////////////////////////////////////////////////////////////////////////////
145/// destructor
146
   // The node owns its training info; `delete` on nullptr is a no-op.
   delete fTrainInfo;
}
150
151////////////////////////////////////////////////////////////////////////////////
152/// test event if it descends the tree at this node to the right
153
{
   // Decide whether event `e` descends to the right daughter.
   // NOTE(review): the declaration of `result` is on a source line not
   // visible in this extract.
   // first check if the fisher criterium is used or ordinary cuts:
   if (GetNFisherCoeff() == 0){
      // Plain axis-parallel cut on the single selected variable.
      result = (e.GetValueFast(this->GetSelector()) >= this->GetCutValue() );

   }else{
      // Linear (Fisher) combination of the inputs; the last stored
      // coefficient is the constant offset term.
      Double_t fisher = this->GetFisherCoeff(fFisherCoeff.size()-1); // the offset
      for (UInt_t ivar=0; ivar<fFisherCoeff.size()-1; ivar++)
         fisher += this->GetFisherCoeff(ivar)*(e.GetValueFast(ivar));

      result = fisher > this->GetCutValue();
   }

   // fCutType flips the interpretation of the cut result.
   if (fCutType == kTRUE) return result; //the cuts are selecting Signal ;
   else return !result;
}
174
175////////////////////////////////////////////////////////////////////////////////
176/// test event if it descends the tree at this node to the left
177
{
   // Binary tree: "left" is simply the complement of GoesRight.
   if (!this->GoesRight(e)) return kTRUE;
   else return kFALSE;
}
183
184
185////////////////////////////////////////////////////////////////////////////////
186/// return the S/(S+B) (purity) for the node
187/// REM: even if nodes with purity 0.01 are very PURE background nodes, they still
188/// get a small value of the purity.
189
{
   // Purity = weighted signal fraction S/(S+B) of the events in this node.
   if ( ( this->GetNSigEvents() + this->GetNBkgEvents() ) > 0 ) {
      fPurity = this->GetNSigEvents() / ( this->GetNSigEvents() + this->GetNBkgEvents());
   }
   else {
      // No events at all: report the offending node and fall back to the
      // non-committal value 0.5.
      Log() << kINFO << "Zero events in purity calculation , return purity=0.5" << Endl;
      std::ostringstream oss;
      this->Print(oss);
      Log() <<oss.str();
      fPurity = 0.5;
   }
   return;
}
204
205////////////////////////////////////////////////////////////////////////////////
206///print the node
207
208void TMVA::DecisionTreeNode::Print(std::ostream& os) const
209{
210 os << "< *** " << std::endl;
211 os << " d: " << this->GetDepth()
212 << std::setprecision(6)
213 << "NCoef: " << this->GetNFisherCoeff();
214 for (Int_t i=0; i< (Int_t) this->GetNFisherCoeff(); i++) { os << "fC"<<i<<": " << this->GetFisherCoeff(i);}
215 os << " ivar: " << this->GetSelector()
216 << " cut: " << this->GetCutValue()
217 << " cType: " << this->GetCutType()
218 << " s: " << this->GetNSigEvents()
219 << " b: " << this->GetNBkgEvents()
220 << " nEv: " << this->GetNEvents()
221 << " suw: " << this->GetNSigEvents_unweighted()
222 << " buw: " << this->GetNBkgEvents_unweighted()
223 << " nEvuw: " << this->GetNEvents_unweighted()
224 << " sepI: " << this->GetSeparationIndex()
225 << " sepG: " << this->GetSeparationGain()
226 << " nType: " << this->GetNodeType()
227 << std::endl;
228
229 os << "My address is " << (Longptr_t)this << ", ";
230 if (this->GetParent() != NULL) os << " parent at addr: " << (Longptr_t)this->GetParent();
231 if (this->GetLeft() != NULL) os << " left daughter at addr: " << (Longptr_t)this->GetLeft();
232 if (this->GetRight() != NULL) os << " right daughter at addr: " << (Longptr_t)this->GetRight();
233
234 os << " **** > " << std::endl;
235}
236
237////////////////////////////////////////////////////////////////////////////////
238/// recursively print the node and its daughters (--> print the 'tree')
239
240void TMVA::DecisionTreeNode::PrintRec(std::ostream& os) const
241{
242 os << this->GetDepth()
243 << std::setprecision(6)
244 << " " << this->GetPos()
245 << "NCoef: " << this->GetNFisherCoeff();
246 for (Int_t i=0; i< (Int_t) this->GetNFisherCoeff(); i++) {os << "fC"<<i<<": " << this->GetFisherCoeff(i);}
247 os << " ivar: " << this->GetSelector()
248 << " cut: " << this->GetCutValue()
249 << " cType: " << this->GetCutType()
250 << " s: " << this->GetNSigEvents()
251 << " b: " << this->GetNBkgEvents()
252 << " nEv: " << this->GetNEvents()
253 << " suw: " << this->GetNSigEvents_unweighted()
254 << " buw: " << this->GetNBkgEvents_unweighted()
255 << " nEvuw: " << this->GetNEvents_unweighted()
256 << " sepI: " << this->GetSeparationIndex()
257 << " sepG: " << this->GetSeparationGain()
258 << " res: " << this->GetResponse()
259 << " rms: " << this->GetRMS()
260 << " nType: " << this->GetNodeType();
261 if (this->GetCC() > 10000000000000.) os << " CC: " << 100000. << std::endl;
262 else os << " CC: " << this->GetCC() << std::endl;
263
264 if (this->GetLeft() != NULL) this->GetLeft() ->PrintRec(os);
265 if (this->GetRight() != NULL) this->GetRight()->PrintRec(os);
266}
267
268////////////////////////////////////////////////////////////////////////////////
269/// Read the data block
270
{
   // Restore this node from a .txt weight-file stream. Remember the file's
   // version: pre-4.0.0 layouts lack the regression response and the
   // cost-complexity (CC) fields.
   fgTmva_Version_Code=tmva_Version_Code;
   string tmp;   // receives (and discards) the "label:" token preceding each value

   Float_t separationIndex, separationGain, response(-99), cc(0);
   // NOTE(review): the declarations of depth, lseq, ivar, cutVal, cutType and
   // the event-count variables are on source lines not visible in this extract.
   char pos;

   is >> depth; // 2
   // A depth of -1 marks the end of the node records.
   if ( depth==-1 ) { return kFALSE; }
   // if ( depth==-1 ) { delete this; return kFALSE; }
   is >> pos ; // r
   this->SetDepth(depth);
   this->SetPos(pos);

   // Field order depends on the weight-file generation.
   if (tmva_Version_Code < TMVA_VERSION(4,0,0)) {
      // Old layout: no response, no cost-complexity field.
      is >> tmp >> lseq
         >> tmp >> ivar
         >> tmp >> cutVal
         >> tmp >> cutType
         >> tmp >> nsig
         >> tmp >> nbkg
         >> tmp >> nEv
         >> tmp >> nsig_unweighted
         >> tmp >> nbkg_unweighted
         >> tmp >> nEv_unweighted
         >> tmp >> separationIndex
         >> tmp >> separationGain
         >> tmp >> nodeType;
   } else {
      is >> tmp >> lseq
         >> tmp >> ivar
         >> tmp >> cutVal
         >> tmp >> cutType
         >> tmp >> nsig
         >> tmp >> nbkg
         >> tmp >> nEv
         >> tmp >> nsig_unweighted
         >> tmp >> nbkg_unweighted
         >> tmp >> nEv_unweighted
         >> tmp >> separationIndex
         >> tmp >> separationGain
         >> tmp >> response
         >> tmp >> nodeType
         >> tmp >> cc;
   }

   // The cut definition is always restored ...
   this->SetSelector((UInt_t)ivar);
   this->SetCutValue(cutVal);
   this->SetCutType(cutType);
   this->SetNodeType(nodeType);
   // ... the training statistics only when training info is allocated.
   if (fTrainInfo){
      this->SetNSigEvents(nsig);
      this->SetNBkgEvents(nbkg);
      this->SetNEvents(nEv);
      this->SetNSigEvents_unweighted(nsig_unweighted);
      this->SetNBkgEvents_unweighted(nbkg_unweighted);
      this->SetNEvents_unweighted(nEv_unweighted);
      this->SetSeparationIndex(separationIndex);
      this->SetSeparationGain(separationGain);
      this->SetPurity();   // derive the purity from the counts just set
      // this->SetResponse(response); old .txt weightfiles don't know regression yet
      this->SetCC(cc);
   }

   return kTRUE;
}
341
342////////////////////////////////////////////////////////////////////////////////
343/// clear the nodes (their S/N, Nevents etc), just keep the structure of the tree
344
{
   // Reset this node's training statistics ...
   SetNSigEvents(0);
   SetNBkgEvents(0);
   SetNEvents(0);
   SetNSigEvents_unweighted(0);
   SetNBkgEvents_unweighted(0);
   SetNEvents_unweighted(0);
   SetSeparationIndex(-1);   // sentinel: no separation computed
   SetSeparationGain(-1);
   SetPurity();              // with zero events this falls back to purity 0.5

   // ... and recurse through the whole subtree, leaving its structure intact.
   if (this->GetLeft() != NULL) ((DecisionTreeNode*)(this->GetLeft()))->ClearNodeAndAllDaughters();
   if (this->GetRight() != NULL) ((DecisionTreeNode*)(this->GetRight()))->ClearNodeAndAllDaughters();
}
360
361////////////////////////////////////////////////////////////////////////////////
362/// temporary stored node values (number of events, etc.) that originate
363/// not from the training but from the validation data (used in pruning)
364
   // Zero the per-node accumulators filled from the validation sample.
   SetNBValidation( 0.0 );
   SetNSValidation( 0.0 );
   SetSumTarget( 0 );
   SetSumTarget2( 0 );

   // Recurse only when both daughters are present.
   if(GetLeft() != NULL && GetRight() != NULL) {
      GetLeft()->ResetValidationData();
      GetRight()->ResetValidationData();
   }
}
376
377////////////////////////////////////////////////////////////////////////////////
378/// printout of the node (can be read in with ReadDataRecord)
379
380void TMVA::DecisionTreeNode::PrintPrune( std::ostream& os ) const {
381 os << "----------------------" << std::endl
382 << "|~T_t| " << GetNTerminal() << std::endl
383 << "R(t): " << GetNodeR() << std::endl
384 << "R(T_t): " << GetSubTreeR() << std::endl
385 << "g(t): " << GetAlpha() << std::endl
386 << "G(t): " << GetAlphaMinSubtree() << std::endl;
387}
388
389////////////////////////////////////////////////////////////////////////////////
390/// recursive printout of the node and its daughters
391
392void TMVA::DecisionTreeNode::PrintRecPrune( std::ostream& os ) const {
393 this->PrintPrune(os);
394 if(this->GetLeft() != NULL && this->GetRight() != NULL) {
395 ((DecisionTreeNode*)this->GetLeft())->PrintRecPrune(os);
396 ((DecisionTreeNode*)this->GetRight())->PrintRecPrune(os);
397 }
398}
399
400////////////////////////////////////////////////////////////////////////////////
401/// Set CC, if traininfo defined, otherwise Log Fatal
402
{
   // The cost-complexity value lives in the training info; calling this
   // without training info is treated as a fatal misuse.
   if (fTrainInfo) fTrainInfo->fCC = cc;
   else Log() << kFATAL << "call to SetCC without trainingInfo" << Endl;
}
408
409////////////////////////////////////////////////////////////////////////////////
410/// return the minimum of variable ivar from the training sample
411/// that pass/end up in this node, if traininfo defined, otherwise Log Fatal
412/// and return -9999
413
   // Guard against missing training info or an index beyond the stored range.
   if (fTrainInfo && ivar < fTrainInfo->fSampleMin.size()) return fTrainInfo->fSampleMin[ivar];
   else Log() << kFATAL << "You asked for Min of the event sample in node for variable "
              << ivar << " that is out of range" << Endl;
   return -9999;   // only reached if kFATAL does not abort
}
420
421////////////////////////////////////////////////////////////////////////////////
422/// return the maximum of variable ivar from the training sample
423/// that pass/end up in this node, if traininfo defined, otherwise Log Fatal and
424/// return 9999
425
   // NOTE(review): the guard checks fSampleMin.size() although fSampleMax is
   // indexed below — verify the two vectors are always kept the same length.
   if (fTrainInfo && ivar < fTrainInfo->fSampleMin.size()) return fTrainInfo->fSampleMax[ivar];
   else Log() << kFATAL << "You asked for Max of the event sample in node for variable "
              << ivar << " that is out of range" << Endl;
   return 9999;   // only reached if kFATAL does not abort
}
432
433////////////////////////////////////////////////////////////////////////////////
434/// set the minimum of variable ivar from the training sample
435/// that pass/end up in this node, if traininfo defined
436
   if ( fTrainInfo) {
      // Grow the vector on demand so variables can be set in any order.
      if ( ivar >= fTrainInfo->fSampleMin.size()) fTrainInfo->fSampleMin.resize(ivar+1);
      fTrainInfo->fSampleMin[ivar]=xmin;
   }
}
443
444////////////////////////////////////////////////////////////////////////////////
445/// set the maximum of variable ivar from the training sample
446/// that pass/end up in this node, if traininfo defined
447
   // Silently a no-op when no training info is present.
   if( ! fTrainInfo ) return;
   if ( ivar >= fTrainInfo->fSampleMax.size() )
      fTrainInfo->fSampleMax.resize(ivar+1);   // grow on demand
   fTrainInfo->fSampleMax[ivar]=xmax;
}
454
455////////////////////////////////////////////////////////////////////////////////
456
void TMVA::DecisionTreeNode::ReadAttributes(void* node, UInt_t /* tmva_Version_Code */ )
{
   // Restore this node's cut definition from an XML weight-file node
   // (counterpart of AddAttributesToNode).
   // NOTE(review): the declarations of tempNSigEvents/tempNBkgEvents used in
   // the purity fallback below are on a source line not visible in this extract.
   Int_t nCoef;
   // Fisher cuts store their coefficients as fC0..fC(n-1); absence of the
   // "NCoef" attribute means a plain single-variable cut.
   if (gTools().HasAttr(node, "NCoef")){
      gTools().ReadAttr(node, "NCoef", nCoef );
      this->SetNFisherCoeff(nCoef);
      Double_t tmp;
      for (Int_t i=0; i< (Int_t) this->GetNFisherCoeff(); i++) {
         gTools().ReadAttr(node, TString::Format("fC%d",i).Data(), tmp);
         this->SetFisherCoeff(i,tmp);
      }
   }else{
      this->SetNFisherCoeff(0);
   }
   gTools().ReadAttr(node, "IVar", fSelector );
   gTools().ReadAttr(node, "Cut", fCutValue );
   gTools().ReadAttr(node, "cType", fCutType );
   // Regression quantities are optional; older weight files don't carry them.
   if (gTools().HasAttr(node,"res")) gTools().ReadAttr(node, "res", fResponse);
   if (gTools().HasAttr(node,"rms")) gTools().ReadAttr(node, "rms", fRMS);
   // else {
   // Newer files store the purity directly; older ones store raw
   // signal/background counts from which it is derived.
   if( gTools().HasAttr(node, "purity") ) {
      gTools().ReadAttr(node, "purity",fPurity );
   } else {
      gTools().ReadAttr(node, "nS", tempNSigEvents );
      gTools().ReadAttr(node, "nB", tempNBkgEvents );
      // (the purity computation from these counts is on a source line not visible here)
   }
   // }
   gTools().ReadAttr(node, "nType", fNodeType );
}
489
490
491////////////////////////////////////////////////////////////////////////////////
492/// add attribute to xml
493
{
   // Persist this node's cut definition as XML attributes
   // (counterpart of ReadAttributes).
   gTools().AddAttr(node, "NCoef", GetNFisherCoeff());
   for (Int_t i=0; i< (Int_t) this->GetNFisherCoeff(); i++)
      gTools().AddAttr(node, TString::Format("fC%d",i).Data(), this->GetFisherCoeff(i));

   gTools().AddAttr(node, "IVar", GetSelector());
   gTools().AddAttr(node, "Cut", GetCutValue());
   gTools().AddAttr(node, "cType", GetCutType());

   //UInt_t analysisType = (dynamic_cast<const TMVA::DecisionTree*>(GetParentTree()) )->GetAnalysisType();
   // if ( analysisType == TMVA::Types:: kRegression) {
   // Regression (res/rms) and classification (purity) summaries are both
   // written unconditionally; the type dispatch above was abandoned.
   gTools().AddAttr(node, "res", GetResponse());
   gTools().AddAttr(node, "rms", GetRMS());
   //} else if ( analysisType == TMVA::Types::kClassification) {
   gTools().AddAttr(node, "purity",GetPurity());
   //}
   gTools().AddAttr(node, "nType", GetNodeType());
}
513
514////////////////////////////////////////////////////////////////////////////////
515/// set fisher coefficients
516
{
   // Grow the coefficient vector on demand so slots can be filled in any
   // order; the last slot is used as the offset term by GoesRight().
   if ((Int_t) fFisherCoeff.size()<ivar+1) fFisherCoeff.resize(ivar+1) ;
   fFisherCoeff[ivar]=coeff;
}
522
523////////////////////////////////////////////////////////////////////////////////
524/// adding attributes to tree node (well, was used in BinarySearchTree,
525/// and somehow I guess someone programmed it such that we need this in
526/// this tree too, although we don't..)
527
void TMVA::DecisionTreeNode::AddContentToNode( std::stringstream& /*s*/ ) const
{
   // Intentionally empty: required by the Node interface but unused here.
}
531
532////////////////////////////////////////////////////////////////////////////////
533/// reading attributes from tree node (well, was used in BinarySearchTree,
534/// and somehow I guess someone programmed it such that we need this in
535/// this tree too, although we don't..)
536
void TMVA::DecisionTreeNode::ReadContent( std::stringstream& /*s*/ )
{
   // Intentionally empty: required by the Node interface but unused here.
}
540////////////////////////////////////////////////////////////////////////////////
541
   // One thread-local logger shared by all nodes, instead of a member.
   TTHREAD_TLS_DECL_ARG(MsgLogger,logger,"DecisionTreeNode"); // static because there is a huge number of nodes...
   return logger;
}
546
547////////////////////////////////////////////////////////////////////////////////
   // Set the static training-phase flag (controls fTrainInfo allocation).
   fgIsTraining = on;
}
551////////////////////////////////////////////////////////////////////////////////
   // Record the version code of the weight file currently being read.
   fgTmva_Version_Code = code;
}
555////////////////////////////////////////////////////////////////////////////////
   // Accessor for the static training-phase flag.
   return fgIsTraining;
}
559////////////////////////////////////////////////////////////////////////////////
   // Accessor for the version code of the weight file last read.
   return fgTmva_Version_Code;
}
#define e(i)
Definition RSha256.hxx:103
bool Bool_t
Boolean (0=false, 1=true) (bool)
Definition RtypesCore.h:77
int Int_t
Signed integer 4 bytes (int)
Definition RtypesCore.h:59
long Longptr_t
Integer large enough to hold a pointer (platform-dependent)
Definition RtypesCore.h:89
unsigned long ULong_t
Unsigned long integer 4 bytes (unsigned long). Size depends on architecture.
Definition RtypesCore.h:69
unsigned int UInt_t
Unsigned integer 4 bytes (unsigned int)
Definition RtypesCore.h:60
float Float_t
Float 4 bytes (float)
Definition RtypesCore.h:71
constexpr Bool_t kFALSE
Definition RtypesCore.h:108
constexpr Bool_t kTRUE
Definition RtypesCore.h:107
ROOT::Detail::TRangeCast< T, true > TRangeDynCast
TRangeDynCast is an adapter class that allows the typed iteration through a TCollection.
winID h TVirtualViewer3D TVirtualGLPainter p
Option_t Option_t TPoint TPoint const char GetTextMagnitude GetFillStyle GetLineColor GetLineWidth GetMarkerStyle GetTextAlign GetTextColor GetTextSize void char Point_t Rectangle_t WindowAttributes_t Float_t Float_t Float_t Int_t Int_t UInt_t UInt_t Rectangle_t result
Option_t Option_t TPoint TPoint const char GetTextMagnitude GetFillStyle GetLineColor GetLineWidth GetMarkerStyle GetTextAlign GetTextColor GetTextSize void on
float xmin
float xmax
void Print(GNN_Data &d, std::string txt="")
#define TMVA_VERSION(a, b, c)
Definition Version.h:48
void SetCC(Double_t cc)
Set CC, if traininfo defined, otherwise Log Fatal.
DTNodeTrainingInfo * fTrainInfo
virtual ~DecisionTreeNode()
destructor
void SetParent(Node *p) override
static void SetIsTraining(bool on)
void PrintPrune(std::ostream &os) const
printout of the node (can be read in with ReadDataRecord)
void PrintRecPrune(std::ostream &os) const
recursive printout of the node and its daughters
void SetFisherCoeff(Int_t ivar, Double_t coeff)
set fisher coefficients
Bool_t GoesLeft(const Event &) const override
test event if it descends the tree at this node to the left
static UInt_t fgTmva_Version_Code
set only when read from weightfile
void SetLeft(Node *l) override
void SetSampleMax(UInt_t ivar, Float_t xmax)
set the maximum of variable ivar from the training sample that pass/end up in this node,...
void ClearNodeAndAllDaughters()
clear the nodes (their S/N, Nevents etc), just keep the structure of the tree
static void SetTmvaVersionCode(UInt_t code)
void SetPurity(void)
return the S/(S+B) (purity) for the node REM: even if nodes with purity 0.01 are very PURE background...
void ReadAttributes(void *node, UInt_t tmva_Version_Code=262657) override
void AddContentToNode(std::stringstream &s) const override
adding attributes to tree node (well, was used in BinarySearchTree, and somehow I guess someone progr...
void AddAttributesToNode(void *node) const override
add attribute to xml
DecisionTreeNode()
constructor of an essentially "empty" node floating in space
void Print(std::ostream &os) const override
print the node
static UInt_t GetTmvaVersionCode()
void ReadContent(std::stringstream &s) override
reading attributes from tree node (well, was used in BinarySearchTree, and somehow I guess someone pr...
Bool_t GoesRight(const Event &) const override
test event if it descends the tree at this node to the right
static MsgLogger & Log()
void ResetValidationData()
temporary stored node values (number of events, etc.) that originate not from the training but from t...
void SetRight(Node *r) override
Bool_t ReadDataRecord(std::istream &is, UInt_t tmva_Version_Code=262657) override
Read the data block.
static bool fgIsTraining
static variable to flag training phase in which we need fTrainInfo
void PrintRec(std::ostream &os) const override
recursively print the node and its daughters (--> print the 'tree')
Float_t GetSampleMax(UInt_t ivar) const
return the maximum of variable ivar from the training sample that pass/end up in this node,...
Float_t GetSampleMin(UInt_t ivar) const
return the minimum of variable ivar from the training sample that pass/end up in this node,...
void SetSampleMin(UInt_t ivar, Float_t xmin)
set the minimum of variable ivar from the training sample that pass/end up in this node,...
ostringstream derivative to redirect and format output
Definition MsgLogger.h:57
Node for the BinarySearch or Decision Trees.
Definition Node.h:58
void ReadAttr(void *node, const char *, T &value)
read attribute from xml
Definition Tools.h:329
void AddAttr(void *node, const char *, const T &value, Int_t precision=16)
add attribute to xml
Definition Tools.h:347
static TString Format(const char *fmt,...)
Static method which formats a string using a printf style format descriptor and return a TString.
Definition TString.cxx:2384
const Int_t n
Definition legend1.C:16
create variable transformations
Tools & gTools()
MsgLogger & Endl(MsgLogger &ml)
Definition MsgLogger.h:148