   , fDetailedMonitoring(kFALSE)
   , fBaggedSampleFraction(0)
   , fBoostedMethodTitle(methodTitle)
   , fMonitorBoostedMethod(kFALSE)
   , fOverlap_integral(0.0)
   // ...
   fMVAvalues = new std::vector<Float_t>;
   fDataSetManager = NULL;
   fHistoricBoolOption = kFALSE;
   , fDetailedMonitoring(kFALSE)
   , fBaggedSampleFraction(0)
   , fBoostedMethodTitle("")
   , fBoostedMethodOptions("")
   , fMonitorBoostedMethod(kFALSE)
   , fOverlap_integral(0.0)
   fMethodWeight.clear();
   // ...
   fTrainSigMVAHist.clear();
   fTrainBgdMVAHist.clear();
   fBTrainSigMVAHist.clear();
   fBTrainBgdMVAHist.clear();
   fTestSigMVAHist.clear();
   fTestBgdMVAHist.clear();
   DeclareOptionRef( fBoostNum = 1, "Boost_Num",
                     "Number of times the classifier is boosted" );

   DeclareOptionRef( fMonitorBoostedMethod = kTRUE, "Boost_MonitorMethod",
                     "Write monitoring histograms for each boosted classifier" );

   DeclareOptionRef( fDetailedMonitoring = kFALSE, "Boost_DetailedMonitoring",
                     "Produce histograms for detailed boost monitoring" );

   DeclareOptionRef( fBoostType = "AdaBoost", "Boost_Type",
                     "Boosting type for the classifiers" );
   AddPreDefVal(TString("RealAdaBoost"));
   AddPreDefVal(TString("AdaBoost"));
   AddPreDefVal(TString("Bagging"));

   DeclareOptionRef( fBaggedSampleFraction = 0.6, "Boost_BaggedSampleFraction",
                     "Relative size of bagged event sample to original size of the data sample (used whenever bagging is used)" );

   DeclareOptionRef( fAdaBoostBeta = 1.0, "Boost_AdaBoostBeta",
                     "The AdaBoost parameter that sets the effect of every boost step on the events' weights" );

   DeclareOptionRef( fTransformString = "step", "Boost_Transform",
                     "Type of transform applied to every boosted method: linear, log, step" );
   AddPreDefVal(TString("linear"));
   // ...
   AddPreDefVal(TString("gauss"));

   DeclareOptionRef( fRandomSeed = 0, "Boost_RandomSeed",
                     "Seed for random number generator used for bagging" );
   DeclareOptionRef( fHistoricOption = "ByError", "Boost_MethodWeightType",
                     "How to set the final weight of the boosted classifiers" );
   AddPreDefVal(TString("ByError"));
   AddPreDefVal(TString("Average"));
   AddPreDefVal(TString("ByROC"));
   AddPreDefVal(TString("ByOverlap"));
   AddPreDefVal(TString("LastMethod"));

   DeclareOptionRef( fHistoricOption = "step", "Boost_Transform",
                     "Type of transform applied to every boosted method: linear, log, step" );
   AddPreDefVal(TString("linear"));
   // ...
   AddPreDefVal(TString("gauss"));
   // ...
   AddPreDefVal(TString("HighEdgeGauss"));
   AddPreDefVal(TString("HighEdgeCoPara"));

   DeclareOptionRef( fHistoricBoolOption, "Boost_RecalculateMVACut",
                     "Recalculate the classifier MVA signal-like cut at every boost iteration" );
   fBoostedMethodTitle = methodTitle;
   // ...
   results->Store(new TH1F("MethodWeight","Normalized Classifier Weight",fBoostNum,0,fBoostNum),"ClassifierWeight");
   results->Store(new TH1F("BoostWeight","Boost Weight",fBoostNum,0,fBoostNum),"BoostWeight");
   results->Store(new TH1F("ErrFraction","Error Fraction (by boosted event weights)",fBoostNum,0,fBoostNum),"ErrorFraction");
   if (fDetailedMonitoring){
      results->Store(new TH1F("ROCIntegral_test","ROC integral of single classifier (testing sample)",fBoostNum,0,fBoostNum),"ROCIntegral_test");
      results->Store(new TH1F("ROCIntegralBoosted_test","ROC integral of boosted method (testing sample)",fBoostNum,0,fBoostNum),"ROCIntegralBoosted_test");
      results->Store(new TH1F("ROCIntegral_train","ROC integral of single classifier (training sample)",fBoostNum,0,fBoostNum),"ROCIntegral_train");
      results->Store(new TH1F("ROCIntegralBoosted_train","ROC integral of boosted method (training sample)",fBoostNum,0,fBoostNum),"ROCIntegralBoosted_train");
      results->Store(new TH1F("OverlapIntegral_train","Overlap integral (training sample)",fBoostNum,0,fBoostNum),"Overlap");
   }
   results->GetHist("ClassifierWeight")->GetXaxis()->SetTitle("Index of boosted classifier");
   results->GetHist("ClassifierWeight")->GetYaxis()->SetTitle("Classifier Weight");
   results->GetHist("BoostWeight")->GetXaxis()->SetTitle("Index of boosted classifier");
   results->GetHist("BoostWeight")->GetYaxis()->SetTitle("Boost Weight");
   results->GetHist("ErrorFraction")->GetXaxis()->SetTitle("Index of boosted classifier");
   results->GetHist("ErrorFraction")->GetYaxis()->SetTitle("Error Fraction");
   if (fDetailedMonitoring){
      results->GetHist("ROCIntegral_test")->GetXaxis()->SetTitle("Index of boosted classifier");
      results->GetHist("ROCIntegral_test")->GetYaxis()->SetTitle("ROC integral of single classifier");
      results->GetHist("ROCIntegralBoosted_test")->GetXaxis()->SetTitle("Number of boosts");
      results->GetHist("ROCIntegralBoosted_test")->GetYaxis()->SetTitle("ROC integral boosted");
      results->GetHist("ROCIntegral_train")->GetXaxis()->SetTitle("Index of boosted classifier");
      results->GetHist("ROCIntegral_train")->GetYaxis()->SetTitle("ROC integral of single classifier");
      results->GetHist("ROCIntegralBoosted_train")->GetXaxis()->SetTitle("Number of boosts");
      results->GetHist("ROCIntegralBoosted_train")->GetYaxis()->SetTitle("ROC integral boosted");
      results->GetHist("Overlap")->GetXaxis()->SetTitle("Index of boosted classifier");
      results->GetHist("Overlap")->GetYaxis()->SetTitle("Overlap integral");
   }

   results->Store(new TH1F("SoverBtotal","S/B in reweighted training sample",fBoostNum,0,fBoostNum),"SoverBtotal");
   results->GetHist("SoverBtotal")->GetYaxis()->SetTitle("S/B (boosted sample)");
   results->GetHist("SoverBtotal")->GetXaxis()->SetTitle("Index of boosted classifier");

   results->Store(new TH1F("SeparationGain","SeparationGain",fBoostNum,0,fBoostNum),"SeparationGain");
   results->GetHist("SeparationGain")->GetYaxis()->SetTitle("SeparationGain");
   results->GetHist("SeparationGain")->GetXaxis()->SetTitle("Index of boosted classifier");
   // ...
   fMonitorTree = new TTree("MonitorBoost","Boost variables");
   fMonitorTree->Branch("iMethod",&fCurrentMethodIdx,"iMethod/I");
   fMonitorTree->Branch("boostWeight",&fBoostWeight,"boostWeight/D");
   fMonitorTree->Branch("errorFraction",&fMethodError,"errorFraction/D");
   fMonitorBoostedMethod = kTRUE;
   Log() << kDEBUG << "CheckSetup: fBoostType=" << fBoostType << Endl;
   Log() << kDEBUG << "CheckSetup: fAdaBoostBeta=" << fAdaBoostBeta << Endl;
   Log() << kDEBUG << "CheckSetup: fBoostWeight=" << fBoostWeight << Endl;
   Log() << kDEBUG << "CheckSetup: fMethodError=" << fMethodError << Endl;
   Log() << kDEBUG << "CheckSetup: fBoostNum=" << fBoostNum << Endl;
   Log() << kDEBUG << "CheckSetup: fRandomSeed=" << fRandomSeed << Endl;
   Log() << kDEBUG << "CheckSetup: fTrainSigMVAHist.size()=" << fTrainSigMVAHist.size() << Endl;
   Log() << kDEBUG << "CheckSetup: fTestSigMVAHist.size()=" << fTestSigMVAHist.size() << Endl;
   Log() << kDEBUG << "CheckSetup: fMonitorBoostedMethod=" << (fMonitorBoostedMethod ? "true" : "false") << Endl;
   Log() << kDEBUG << "CheckSetup: MName=" << fBoostedMethodName << " Title=" << fBoostedMethodTitle << Endl;
   Log() << kDEBUG << "CheckSetup: MOptions=" << fBoostedMethodOptions << Endl;
   Log() << kDEBUG << "CheckSetup: fMonitorTree=" << fMonitorTree << Endl;
   Log() << kDEBUG << "CheckSetup: fCurrentMethodIdx=" << fCurrentMethodIdx << Endl;
   if (fMethods.size()>0) Log() << kDEBUG << "CheckSetup: fMethods[0]=" << fMethods[0] << Endl;
   Log() << kDEBUG << "CheckSetup: fMethodWeight.size()=" << fMethodWeight.size() << Endl;
   if (fMethodWeight.size()>0) Log() << kDEBUG << "CheckSetup: fMethodWeight[0]=" << fMethodWeight[0] << Endl;
   Log() << kDEBUG << "CheckSetup: trying to repair things" << Endl;
   // ...
   if (Data()->GetNTrainingEvents()==0) Log() << kFATAL << "<Train> Data() has zero events" << Endl;
   // ...
   if (fMethods.size() > 0) fMethods.clear();
   fMVAvalues->resize(Data()->GetNTrainingEvents(), 0.0);

   Log() << kINFO << "Training " << fBoostNum << " " << fBoostedMethodName
         << " classifiers (title: " << fBoostedMethodTitle << ") ... patience please" << Endl;
   for (fCurrentMethodIdx=0; fCurrentMethodIdx<fBoostNum; fCurrentMethodIdx++) {
      // ...
         fBoostedMethodName.Data(), GetJobName(),
         TString::Format("%s_B%04i", fBoostedMethodTitle.Data(), fCurrentMethodIdx),
         DataInfo(), fBoostedMethodOptions);
      // ...
      if (fCurrentMethod==0) {
         Log() << kFATAL << "Oops... booking of the " << fCurrentMethodIdx
               << "-th classifier somehow failed" << Endl;
      // ...
         Log() << kFATAL << "Method with type kCategory cannot be cast to MethodCategory. /MethodBoost" << Endl;
      methCat->fDataSetManager = fDataSetManager;
      // ...
      fCurrentMethod->SetMsgType(kWARNING);
      fCurrentMethod->SetupMethod();
      fCurrentMethod->ParseOptions();
      // ...
      fCurrentMethod->SetAnalysisType( GetAnalysisType() );
      fCurrentMethod->ProcessSetup();
      fCurrentMethod->CheckSetup();
      // ...
      fCurrentMethod->RerouteTransformationHandler( &(this->GetTransformationHandler()) );
      if (fMonitorBoostedMethod) {
         // ...
         fCurrentMethod->BaseDir()->cd();
         // ...
      }

      timer.DrawProgressBar( fCurrentMethodIdx );
      // ...
      if (fBoostType=="Bagging") Bagging();
      // ...
      if (!IsSilentFile()) fCurrentMethod->WriteMonitoringHistosToFile();
      // ...
      if (!IsSilentFile())
         if (fCurrentMethodIdx==0 && fMonitorBoostedMethod) CreateMVAHistorgrams();
      // ...
      SingleBoost(fCurrentMethod);
      // ...
      results->GetHist("BoostWeight")->SetBinContent(fCurrentMethodIdx+1, fBoostWeight);
      results->GetHist("ErrorFraction")->SetBinContent(fCurrentMethodIdx+1, fMethodError);
      // ...
      if (fDetailedMonitoring) {
         // ...
         results->GetHist("ROCIntegral_train")->SetBinContent(fCurrentMethodIdx+1, fROC_training);
         // ...
         results->GetHist("Overlap")->SetBinContent(fCurrentMethodIdx+1, fOverlap_integral);
      }
      // ...
      fMonitorTree->Fill();
      // ...
      Log() << kDEBUG << "AdaBoost (methodErr) err = " << fMethodError << Endl;
      // ...
         timer.DrawProgressBar( fBoostNum );
         fBoostNum = fCurrentMethodIdx+1;
         Log() << kINFO << "Error rate has reached 0.5 (" << fMethodError
               << "), boosting process stopped at classifier #" << fBoostNum << Endl;
         // ...
         Log() << kINFO << "The classifier might be too strong to boost with Beta = "
               << fAdaBoostBeta << ", try reducing it." << Endl;
   for (fCurrentMethodIdx=0; fCurrentMethodIdx<fBoostNum; fCurrentMethodIdx++) {
      // ...
      timer1->DrawProgressBar( fCurrentMethodIdx );
      // ...
      if (fCurrentMethodIdx==fBoostNum) {
         Log() << kINFO << "Elapsed time: " << timer1->GetElapsedTime()
      // ...
      TH1F* tmp = dynamic_cast<TH1F*>( results->GetHist("ClassifierWeight") );
      if (tmp) tmp->SetBinContent(fCurrentMethodIdx+1, fMethodWeight[fCurrentMethodIdx]);
      // ...
   if (fMethods.size()==1) fMethodWeight[0] = 1.0;
   // ...
   fBoostedMethodOptions = GetOptions();
   if (fBoostNum <= 0) Log() << kFATAL << "CreateHistograms called before fBoostNum is initialized" << Endl;
   // ...
   if (DataInfo().GetClassInfo("Signal") != 0) {
      signalClass = DataInfo().GetClassInfo("Signal")->GetNumber();
   // ...
   ev->SetBoostWeight( 1.0 );
   if (fMonitorBoostedMethod) {
      // ...
      fTrainSigMVAHist[imtd]->SetDirectory(dir);
      fTrainSigMVAHist[imtd]->Write();
      fTrainBgdMVAHist[imtd]->SetDirectory(dir);
      fTrainBgdMVAHist[imtd]->Write();
      fBTrainSigMVAHist[imtd]->SetDirectory(dir);
      fBTrainSigMVAHist[imtd]->Write();
      fBTrainBgdMVAHist[imtd]->SetDirectory(dir);
      fBTrainBgdMVAHist[imtd]->Write();
   // ...
   fMonitorTree->Write();
   if (fMonitorBoostedMethod) {
      // ...
      if (fMethods.size() < nloop) nloop = fMethods.size();
      // ...
      if (DataInfo().IsSignal(ev)) {
         // ...
         fTestSigMVAHist[imtd]->Fill(fMethods[imtd]->GetMvaValue(), w);
         // ...
         fTestBgdMVAHist[imtd]->Fill(fMethods[imtd]->GetMvaValue(), w);
   if (fMethods.size() < nloop) nloop = fMethods.size();
   if (fMonitorBoostedMethod) {
      // ...
      dir = mva->BaseDir();
      if (dir==0) continue;
      // ...
      fTestSigMVAHist[imtd]->SetDirectory(dir);
      fTestSigMVAHist[imtd]->Write();
      fTestBgdMVAHist[imtd]->SetDirectory(dir);
      fTestBgdMVAHist[imtd]->Write();
   meth->SetSilentFile(IsSilentFile());
   // ...
   if (IsModelPersistence()){
   // ...
   meth->SetModelPersistence(IsModelPersistence());
   // ...
   const Int_t nBins = 10001;
   if (fDetailedMonitoring){
   // ...
   if (DataInfo().IsSignal(GetEvent(ievt))){
   // ...
   mvaSC->SetBinContent(1, mvaS->GetBinContent(1));
   mvaBC->SetBinContent(1, mvaB->GetBinContent(1));
   // ...
         << " idx=" << fCurrentMethodIdx
   // ...
   Log() << kDEBUG << "(old step) Setting method cut to " << method->GetSignalReferenceCut() << Endl;
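   // ------------------------------------------------------------------------
   // Illustrative sketch of the idea behind FindMVACut (not the file's exact
   // algorithm): scan the binned signal/background MVA distributions and keep
   // the boundary that maximizes the correctly classified weight. "hS"/"hB"
   // are hypothetical unit-normalized histograms with identical binning:
   //
   //    Double_t bestSep = -1., bestCut = 0.;
   //    for (Int_t ibin = 1; ibin <= hS->GetNbinsX(); ++ibin) {
   //       Double_t sep = hS->Integral(ibin, hS->GetNbinsX())          // signal kept above the cut
   //                    + (ibin > 1 ? hB->Integral(1, ibin - 1) : 0.); // background kept below it
   //       if (sep > bestSep) { bestSep = sep; bestCut = hS->GetBinLowEdge(ibin); }
   //    }
   //    method->SetSignalReferenceCut(bestCut);
   // ------------------------------------------------------------------------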
   else if (fBoostType == "RealAdaBoost") returnVal = this->AdaBoost(method, 0);
   else if (fBoostType == "Bagging")      returnVal = this->Bagging();
   // ...
      Log() << kFATAL << "<Boost> unknown boost option " << fBoostType << " called" << Endl;
   Log() << kWARNING << "AdaBoost called without classifier reference - needed for calculating AdaBoost" << Endl;
   // ...
   Log() << kDEBUG << "individual mva cut value = " << method->GetSignalReferenceCut() << Endl;
   // ...
   MVAProb->AddEvent(fMVAvalues->at(evt), ev->GetWeight(), ev->GetClass());
   // ...
   sig = DataInfo().IsSignal(ev);
   v   = fMVAvalues->at(ievt);
   // ...
   if (fMonitorBoostedMethod) {
      // ...
      fBTrainSigMVAHist[fCurrentMethodIdx]->Fill(v, w);
      fTrainSigMVAHist[fCurrentMethodIdx]->Fill(v, ev->GetOriginalWeight());
      // ...
      fBTrainBgdMVAHist[fCurrentMethodIdx]->Fill(v, w);
      fTrainBgdMVAHist[fCurrentMethodIdx]->Fill(v, ev->GetOriginalWeight());
   // ...
   if (sig == method->IsSignalLike(fMVAvalues->at(ievt))){
   // ...
   if (fMethodError == 0) {
      Log() << kWARNING << "Your classifier worked perfectly on the training sample --> serious overtraining expected and no boosting done" << Endl;
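   // ------------------------------------------------------------------------
   // The reweighting idea behind the code above, as a minimal sketch of the
   // textbook discrete AdaBoost update (hedged -- not a literal copy of the
   // formulae in this file): with err the weighted misclassification fraction
   // (0 < err < 0.5) and beta = Boost_AdaBoostBeta,
   //
   //    Double_t alpha = fAdaBoostBeta * TMath::Log((1. - err) / err); // > 0
   //    for (Long64_t ievt = 0; ievt < GetNEvents(); ievt++) {
   //       const Event* ev = Data()->GetEvent(ievt);
   //       Bool_t misclassified = /* sig != method->IsSignalLike(mva) */ kFALSE;
   //       if (misclassified)
   //          ev->SetBoostWeight( ev->GetBoostWeight() * TMath::Exp(alpha) );
   //    }
   //    // followed by a global renormalization so the sum of weights is unchanged
   // ------------------------------------------------------------------------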
   ev->SetBoostWeight( trandom->PoissonD(fBaggedSampleFraction) );
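   // ------------------------------------------------------------------------
   // Note on the Poisson draw above: giving every event a boost weight drawn
   // from Poisson(fBaggedSampleFraction) emulates sampling with replacement
   // into a bagged sample whose expected size is fBaggedSampleFraction times
   // the original sample. A minimal standalone sketch of the same idea
   // (hypothetical, outside this class):
   //
   //    TRandom3 rng(fRandomSeed);
   //    for (Long64_t ievt = 0; ievt < GetNEvents(); ievt++)
   //       Data()->GetEvent(ievt)->SetBoostWeight( rng.PoissonD(fBaggedSampleFraction) );
   // ------------------------------------------------------------------------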
   Log() << "This method combines several classifiers of one species into a " << Endl;
   Log() << "single multivariate quantity via the boost algorithm." << Endl;
   Log() << "The output is a weighted sum over all individual classifiers." << Endl;
   Log() << "By default, the AdaBoost method is employed, which gives " << Endl;
   Log() << "events that were misclassified in the previous tree a larger " << Endl;
   Log() << "weight in the training of the following classifier." << Endl;
   Log() << "Optionally, Bagged boosting can also be applied." << Endl;
   // ...
   Log() << "The most important parameters in the configuration are the " << Endl;
   Log() << "number of boosts applied (Boost_Num) and the choice of boosting " << Endl;
   Log() << "(Boost_Type), which can be set to either AdaBoost or Bagging." << Endl;
   Log() << "AdaBoosting: The most important parameter in this configuration " << Endl;
   Log() << "is the beta parameter (Boost_AdaBoostBeta). " << Endl;
   Log() << "When boosting a linear classifier, it is sometimes advantageous " << Endl;
   Log() << "to transform the MVA output non-linearly. The following options " << Endl;
   Log() << "are available: step, log, and minmax; the default is no transform." << Endl;
   // ...
   Log() << "Some classifiers are hard to boost and do not improve much by " << Endl;
   Log() << "boosting them; some even slightly deteriorate " << Endl;
   Log() << "due to the boosting." << Endl;
   Log() << "The booking of the boost method is special since it requires " << Endl;
   Log() << "the booking of the method to be boosted and the boost itself. " << Endl;
   Log() << "This is solved by booking the method to be boosted and adding " << Endl;
   Log() << "all boost parameters, which all begin with \"Boost_\", to the " << Endl;
   Log() << "options string. The factory separates the options and initiates " << Endl;
   Log() << "the boost process. The TMVA macro directory contains the example " << Endl;
   Log() << "macro \"Boost.C\"." << Endl;
   for (UInt_t i=0; i<fMethods.size(); i++){
      // ...
      Double_t val = fTmpEvent ? m->GetMvaValue(fTmpEvent) : m->GetMvaValue();
      // ...
      if (fTransformString == "linear"){
      // ...
      else if (fTransformString == "log"){
      // ...
      else if (fTransformString == "step"){
         if (m->IsSignalLike(val)) val = 1.;
      // ...
      else if (fTransformString == "gauss"){
      // ...
         Log() << kFATAL << "error unknown transformation " << fTransformString << Endl;
      // ...
      norm += fMethodWeight[i];
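   // ------------------------------------------------------------------------
   // The boosted response assembled above is, schematically, the normalized
   // weighted committee vote (a sketch of the formula, with T the configured
   // Boost_Transform and f_i the i-th boosted classifier):
   //
   //    mvaValue = ( sum_i fMethodWeight[i] * T( f_i(x) ) ) / sum_i fMethodWeight[i]
   // ------------------------------------------------------------------------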
   Data()->SetCurrentType(eTT);
   // ...
   Log() << kFATAL << "Your method " << fMethods.back()->GetName()
         << " does not seem to be a proper TMVA method"
   // ...
   for (UInt_t i=0; i<=fCurrentMethodIdx; i++)
   // ...
   for (UInt_t i=0; i<=fCurrentMethodIdx; i++)
   // ...
   std::vector<Float_t>* mvaRes;
   // ...
   mvaRes = new std::vector<Float_t>(GetNEvents());
   // ...
   if (DataInfo().GetClassInfo("Signal") != 0) {
      signalClass = DataInfo().GetClassInfo("Signal")->GetNumber();
   // ...
   if (DataInfo().IsSignal(ev))
   // ...
   fOverlap_integral = 0.0;
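   // ------------------------------------------------------------------------
   // The overlap integral accumulated into fOverlap_integral measures how much
   // the signal and background MVA distributions coincide: for unit-normalized
   // histograms it is the sum over bins of min(s_i, b_i). A minimal sketch
   // (hypothetical helper, assuming identical binning):
   //
   //    Double_t OverlapIntegral(const TH1* hS, const TH1* hB) {
   //       Double_t overlap = 0.;
   //       for (Int_t i = 1; i <= hS->GetNbinsX(); ++i)
   //          overlap += TMath::Min(hS->GetBinContent(i), hB->GetBinContent(i));
   //       return overlap; // 0 = fully separated, 1 = identical shapes
   //    }
   // ------------------------------------------------------------------------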
   Log() << kFATAL << "dynamic cast to MethodBase* failed" << Endl;
   // ...
   fMVAvalues->at(ievt) = method->GetMvaValue();
   results->Store(new TH1I("NodesBeforePruning","nodes before pruning",this->GetBoostNum(),0,this->GetBoostNum()),"NodesBeforePruning");
   results->Store(new TH1I("NodesAfterPruning","nodes after pruning",this->GetBoostNum(),0,this->GetBoostNum()),"NodesAfterPruning");
   // ...
   Log() << kINFO << "<Train> average number of nodes before/after pruning : "
         << results->GetHist("NodesBeforePruning")->GetMean() << " / "
         << results->GetHist("NodesAfterPruning")->GetMean()
   // ...
   Log() << kDEBUG << "No detailed boost monitoring for "
   // ...
         << " yet available " << Endl;
   if (fDetailedMonitoring){
      // ...
      if (DataInfo().GetNVariables() == 2) {